diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index ef9a9a324d..32fe7261de 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -27,6 +27,8 @@ ENV TABLEGEN_170_PREFIX=/usr/lib/llvm-17
 # To allow independent workflow of the container, the rust-toolchain is explicitely given.
 RUN echo "1.80.0" > rust_toolchain_version
+# Make sure to sync the nightly version with the scripts in ./scripts
+RUN echo "nightly-2024-08-28" > nightly_rust_toolchain_version
 
 # Install cargo-binstall
 RUN curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash
@@ -36,8 +38,8 @@ RUN rustup toolchain install $(cat rust_toolchain_version) && \
     rustup component add clippy && \
     rustup component add rustfmt
 
-RUN rustup toolchain install nightly && \
-    rustup component add rustfmt clippy --toolchain nightly
+RUN rustup toolchain install $(cat nightly_rust_toolchain_version) && \
+    rustup component add rustfmt clippy --toolchain $(cat nightly_rust_toolchain_version)
 
 RUN rustup target add x86_64-pc-windows-msvc && \
     rustup target add wasm32-unknown-unknown
@@ -60,7 +62,7 @@ RUN if [ "$TARGETPLATFORM" = "linux/arm64" ] ; then \
     rm -r hurl-4.1.0-x86_64-unknown-linux-gnu && \
     rm hurl.tar.gz && \
     rustup component add llvm-tools-preview --toolchain $(cat rust_toolchain_version)-x86_64-unknown-linux-gnu && \
-    rustup target add x86_64-fortanix-unknown-sgx --toolchain nightly; \
+    rustup target add x86_64-fortanix-unknown-sgx --toolchain $(cat nightly_rust_toolchain_version); \
     fi
 
 ARG DOJO_VERSION=stable
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index aaa3174fcd..c8299d57a8 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -2,7 +2,7 @@
 // https://github.com/microsoft/vscode-dev-containers/tree/v0.245.2/containers/rust
 {
     "name": "Rust",
-    "image": "ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.7",
+    "image": "ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.11",
     "runArgs": [
         "--cap-add=SYS_PTRACE",
         "--security-opt",
diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml
index bdc97e5c67..f68928b7e2 100644
--- a/.github/workflows/bench.yml
+++ b/.github/workflows/bench.yml
@@ -14,7 +14,7 @@ jobs:
   bench-katana:
     runs-on: ubuntu-latest
     container:
-      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.7
+      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.11
     steps:
       - uses: actions/checkout@v3
       - run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 8c97ca011e..de62d93612 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -31,7 +31,7 @@ jobs:
     needs: ensure-docker
     runs-on: ubuntu-latest-32-cores
     container:
-      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.7
+      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.11
     steps:
       - uses: actions/checkout@v3
       - uses: Swatinem/rust-cache@v2
@@ -55,7 +55,7 @@ jobs:
   ensure-wasm:
     runs-on: ubuntu-latest
     container:
-      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.7
+      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.11
     steps:
       - uses: actions/checkout@v3
       - uses: Swatinem/rust-cache@v2
@@ -120,7 +120,7 @@ jobs:
     needs: build
     runs-on: ubuntu-latest
     container:
-      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.7
+      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.11
     steps:
       - uses: actions/download-artifact@v4
         with:
@@ -135,7 +135,7 @@ jobs:
     needs: build
     runs-on: ubuntu-latest
     container:
-      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.7
+      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.11
     steps:
       - uses: actions/download-artifact@v4
         with:
@@ -149,7 +149,7 @@ jobs:
   dojo-world-bindings-check:
     runs-on: ubuntu-latest
     container:
-      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.7
+      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.11
     steps:
       - uses: actions/checkout@v3
       - uses: Swatinem/rust-cache@v2
@@ -158,7 +158,7 @@ jobs:
   clippy:
     runs-on: ubuntu-latest-4-cores
     container:
-      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.7
+      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.11
     steps:
       - uses: actions/checkout@v3
       - uses: Swatinem/rust-cache@v2
@@ -167,7 +167,7 @@ jobs:
   fmt:
     runs-on: ubuntu-latest
     container:
-      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.7
+      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.11
     steps:
       - uses: actions/checkout@v3
       - uses: Swatinem/rust-cache@v2
@@ -176,7 +176,7 @@ jobs:
   docs:
     runs-on: ubuntu-latest
     container:
-      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.7
+      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.11
     steps:
       - uses: actions/checkout@v3
       - uses: Swatinem/rust-cache@v2
diff --git a/.github/workflows/release-dispatch.yml b/.github/workflows/release-dispatch.yml
index 9fca4fac7f..85f9e1c27e 100644
--- a/.github/workflows/release-dispatch.yml
+++ b/.github/workflows/release-dispatch.yml
@@ -14,7 +14,7 @@ jobs:
     contents: write
     runs-on: ubuntu-latest
     container:
-      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.7
+      image: ghcr.io/dojoengine/dojo-dev:v1.0.0-alpha.11
     env:
       VERSION: ""
     steps:
diff --git a/.gitignore b/.gitignore
index a789cfea6e..cf16ca3022 100644
--- a/.gitignore
+++ b/.gitignore
@@ -20,3 +20,4 @@ bindings
 justfile
 spawn-and-move-db
 types-test-db
+examples/spawn-and-move/manifests/saya/**
diff --git a/.tool-versions b/.tool-versions
index 045dc3e7cc..aabe197960 100644
--- a/.tool-versions
+++ b/.tool-versions
@@ -1 +1,2 @@
 scarb 2.7.0
+starknet-foundry 0.30.0
diff --git a/Cargo.lock b/Cargo.lock
index 3beb3d732a..e95ed003d2 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1879,7 +1879,7 @@ dependencies = [
 [[package]]
 name = "blockifier"
 version = "0.8.0-dev.2"
-source = "git+https://github.com/dojoengine/blockifier?branch=cairo-2.7#5e6e47ea47eeca12316d4a0e3394f38aa4870c71"
+source = "git+https://github.com/dojoengine/blockifier?branch=cairo-2.7#9fa0ab0aab6fb1038a76432f7099fd198da94ed1"
 dependencies = [
  "anyhow",
  "ark-ec",
@@ -4269,7 +4269,7 @@ checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
 
 [[package]]
 name = "dojo-bindgen"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "assert_matches",
  "async-trait",
@@ -4289,15 +4289,15 @@ dependencies = [
 
 [[package]]
 name = "dojo-core"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 
 [[package]]
 name = "dojo-examples-spawn-and-move"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 
 [[package]]
 name = "dojo-lang"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "anyhow",
  "assert_fs",
@@ -4353,7 +4353,7 @@ dependencies = [
 
 [[package]]
 name = "dojo-language-server"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "cairo-lang-language-server",
  "clap",
@@ -4362,7 +4362,7 @@ dependencies = [
 
 [[package]]
 name = "dojo-metrics"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "anyhow",
  "hyper 0.14.30",
@@ -4380,7 +4380,7 @@ dependencies = [
 
 [[package]]
 name = "dojo-test-utils"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "anyhow",
  "assert_fs",
@@ -4416,7 +4416,7 @@ dependencies = [
 
 [[package]]
 name = "dojo-types"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "cainome",
  "crypto-bigint",
@@ -4433,13 +4433,14 @@ dependencies = [
 
 [[package]]
 name = "dojo-utils"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "anyhow",
  "assert_matches",
  "dojo-test-utils",
  "futures",
  "reqwest 0.12.5",
+ "rpassword",
  "starknet 0.11.0",
  "thiserror",
  "tokio",
@@ -4447,7 +4448,7 @@ dependencies = [
 
 [[package]]
 name = "dojo-world"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "anyhow",
  "assert_fs",
@@ -4489,7 +4490,7 @@ dependencies = [
 
 [[package]]
 name = "dojo-world-abigen"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "cairo-lang-starknet",
  "cairo-lang-starknet-classes",
@@ -5136,12 +5137,13 @@ dependencies = [
 
 [[package]]
 name = "futures-rustls"
-version = "0.24.0"
+version = "0.26.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "35bd3cf68c183738046838e300353e4716c674dc5e56890de4826801a6622a28"
+checksum = "a8f2f12607f92c69b12ed746fabf9ca4f5c482cba46679c1a75b874ed7c26adb"
 dependencies = [
  "futures-io",
- "rustls 0.21.12",
+ "rustls 0.23.11",
+ "rustls-pki-types",
 ]
@@ -6374,6 +6376,15 @@ dependencies = [
  "hashbrown 0.14.5",
 ]
 
+[[package]]
+name = "hashlink"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af"
+dependencies = [
+ "hashbrown 0.14.5",
+]
+
 [[package]]
 name = "hdrhistogram"
 version = "7.5.4"
@@ -6652,7 +6663,7 @@ dependencies = [
  "httpdate",
  "itoa",
  "pin-project-lite",
- "socket2 0.5.7",
+ "socket2 0.4.10",
  "tokio",
  "tower-service",
  "tracing",
@@ -7127,9 +7138,6 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222"
 dependencies = [
  "cfg-if",
- "js-sys",
- "wasm-bindgen",
- "web-sys",
 ]
@@ -7148,7 +7156,7 @@ dependencies = [
  "tokio",
  "waitgroup",
  "webrtc-srtp",
- "webrtc-util",
+ "webrtc-util 0.8.1",
 ]
@@ -7418,7 +7426,7 @@ dependencies = [
  "jsonrpsee-types 0.16.3",
  "pin-project",
  "rustls-native-certs 0.6.3",
- "soketto",
+ "soketto 0.7.1",
  "thiserror",
  "tokio",
  "tokio-rustls 0.24.1",
@@ -7438,7 +7446,7 @@ dependencies = [
  "jsonrpsee-core 0.20.3",
  "pin-project",
  "rustls-native-certs 0.6.3",
- "soketto",
+ "soketto 0.7.1",
  "thiserror",
  "tokio",
  "tokio-rustls 0.24.1",
@@ -7469,7 +7477,7 @@ dependencies = [
  "rustc-hash",
  "serde",
  "serde_json",
- "soketto",
+ "soketto 0.7.1",
  "thiserror",
  "tokio",
  "tracing",
@@ -7577,7 +7585,7 @@ dependencies = [
  "jsonrpsee-types 0.16.3",
  "serde",
  "serde_json",
- "soketto",
+ "soketto 0.7.1",
  "tokio",
  "tokio-stream",
  "tokio-util",
@@ -7674,7 +7682,7 @@ dependencies = [
 
 [[package]]
 name = "katana"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "alloy-primitives",
  "anyhow",
@@ -7702,7 +7710,7 @@ dependencies = [
 
 [[package]]
 name = "katana-cairo"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "cairo-lang-casm",
  "cairo-lang-runner",
@@ -7717,7 +7725,7 @@ dependencies = [
 
 [[package]]
 name = "katana-codecs"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "bytes",
  "katana-primitives",
@@ -7725,7 +7733,7 @@ dependencies = [
 
 [[package]]
 name = "katana-codecs-derive"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -7735,7 +7743,7 @@ dependencies = [
 
 [[package]]
 name = "katana-core"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "alloy-contract 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "alloy-network 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -7774,7 +7782,7 @@ dependencies = [
 
 [[package]]
 name = "katana-db"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "anyhow",
  "criterion",
@@ -7796,7 +7804,7 @@ dependencies = [
 
 [[package]]
 name = "katana-executor"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "alloy-primitives",
  "anyhow",
@@ -7822,7 +7830,7 @@ dependencies = [
 
 [[package]]
 name = "katana-node"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "anyhow",
  "dojo-metrics",
@@ -7836,10 +7844,10 @@ dependencies = [
  "katana-provider",
  "katana-rpc",
  "katana-rpc-api",
+ "katana-tasks",
  "num-traits 0.2.19",
  "serde_json",
  "starknet 0.11.0",
- "tokio",
  "tower",
  "tower-http",
  "tracing",
@@ -7847,7 +7855,7 @@ dependencies = [
 
 [[package]]
 name = "katana-node-bindings"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "serde",
  "serde_json",
@@ -7861,11 +7869,12 @@ dependencies = [
 
 [[package]]
 name = "katana-pool"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "futures",
  "katana-executor",
  "katana-primitives",
+ "katana-provider",
  "parking_lot 0.12.3",
  "rand",
  "thiserror",
@@ -7875,10 +7884,11 @@ dependencies = [
 
 [[package]]
 name = "katana-primitives"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "alloy-primitives",
  "anyhow",
+ "assert_matches",
  "base64 0.21.7",
  "derive_more",
  "flate2",
@@ -7898,7 +7908,7 @@ dependencies = [
 
 [[package]]
 name = "katana-provider"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "alloy-primitives",
  "anyhow",
@@ -7924,7 +7934,7 @@ dependencies = [
 
 [[package]]
 name = "katana-rpc"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "alloy",
  "alloy-primitives",
@@ -7936,6 +7946,7 @@ dependencies = [
  "dojo-utils",
  "dojo-world",
  "futures",
+ "indexmap 2.2.6",
  "jsonrpsee 0.16.3",
  "katana-cairo",
  "katana-core",
@@ -7952,10 +7963,12 @@ dependencies = [
  "metrics",
  "num-traits 0.2.19",
  "rand",
+ "rstest 0.18.2",
  "serde",
  "serde_json",
  "starknet 0.11.0",
  "tempfile",
+ "thiserror",
  "tokio",
  "tracing",
  "url",
@@ -7963,7 +7976,7 @@ dependencies = [
 
 [[package]]
 name = "katana-rpc-api"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "jsonrpsee 0.16.3",
  "katana-core",
@@ -7974,7 +7987,7 @@ dependencies = [
 
 [[package]]
 name = "katana-rpc-types"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "alloy-primitives",
  "anyhow",
@@ -7984,6 +7997,7 @@ dependencies = [
  "katana-cairo",
  "katana-core",
  "katana-executor",
+ "katana-pool",
  "katana-primitives",
  "katana-provider",
  "num-traits 0.2.19",
@@ -7997,7 +8011,7 @@ dependencies = [
 
 [[package]]
 name = "katana-rpc-types-builder"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "anyhow",
  "katana-executor",
@@ -8009,7 +8023,7 @@ dependencies = [
 
 [[package]]
 name = "katana-runner"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "anyhow",
  "assert_fs",
@@ -8025,7 +8039,7 @@ dependencies = [
 
 [[package]]
 name = "katana-slot-controller"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"
 dependencies = [
  "account_sdk",
  "alloy-primitives",
@@ -8041,7 +8055,7 @@ dependencies = [
 
 [[package]]
 name = "katana-tasks"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11" dependencies = [ "futures", "rayon", @@ -8212,20 +8226,19 @@ checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "libp2p" -version = "0.54.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.54.1" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "bytes", "either", "futures", "futures-timer", "getrandom", - "instant", "libp2p-allow-block-list", "libp2p-connection-limits", - "libp2p-core 0.41.2", + "libp2p-core 0.42.0", "libp2p-dns", - "libp2p-gossipsub 0.46.1 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)", + "libp2p-gossipsub 0.47.0", "libp2p-identify", "libp2p-identity", "libp2p-mdns", @@ -8234,70 +8247,71 @@ dependencies = [ "libp2p-ping", "libp2p-quic", "libp2p-relay", - "libp2p-swarm 0.44.2 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)", + "libp2p-swarm 0.45.1", "libp2p-tcp", "libp2p-upnp", + "libp2p-websocket", "libp2p-yamux", "multiaddr 0.18.1", "pin-project", - "rw-stream-sink 0.4.0 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)", + "rw-stream-sink 0.4.0 (git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3)", "thiserror", ] [[package]] name = "libp2p-allow-block-list" -version = "0.3.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.4.0" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ - "libp2p-core 0.41.2", + "libp2p-core 0.42.0", "libp2p-identity", - "libp2p-swarm 0.44.2 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)", + "libp2p-swarm 0.45.1", "void", ] [[package]] name = "libp2p-connection-limits" -version = "0.3.1" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.4.0" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ - "libp2p-core 0.41.2", + "libp2p-core 0.42.0", "libp2p-identity", - "libp2p-swarm 0.44.2 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)", + "libp2p-swarm 0.45.1", "void", ] [[package]] name = "libp2p-core" -version = "0.41.2" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.41.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5a8920cbd8540059a01950c1e5c96ea8d89eb50c51cd366fc18bdf540a6e48f" dependencies = [ "either", "fnv", "futures", "futures-timer", - "instant", "libp2p-identity", "multiaddr 0.18.1", "multihash 0.19.1", - "multistream-select 0.13.0 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)", + "multistream-select 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", "once_cell", "parking_lot 0.12.3", "pin-project", "quick-protobuf", "rand", - "rw-stream-sink 0.4.0 
+ "rw-stream-sink 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "smallvec",
  "thiserror",
  "tracing",
  "unsigned-varint 0.8.0",
  "void",
+ "web-time",
 ]
 
 [[package]]
 name = "libp2p-core"
-version = "0.41.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a5a8920cbd8540059a01950c1e5c96ea8d89eb50c51cd366fc18bdf540a6e48f"
+version = "0.42.0"
+source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3"
 dependencies = [
  "either",
  "fnv",
@@ -8306,13 +8320,13 @@ dependencies = [
  "libp2p-identity",
  "multiaddr 0.18.1",
  "multihash 0.19.1",
- "multistream-select 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "multistream-select 0.13.0 (git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3)",
  "once_cell",
  "parking_lot 0.12.3",
  "pin-project",
  "quick-protobuf",
  "rand",
- "rw-stream-sink 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rw-stream-sink 0.4.0 (git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3)",
  "smallvec",
  "thiserror",
  "tracing",
@@ -8323,13 +8337,13 @@ dependencies = [
 
 [[package]]
 name = "libp2p-dns"
-version = "0.41.1"
-source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54"
+version = "0.42.0"
+source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3"
 dependencies = [
  "async-trait",
  "futures",
  "hickory-resolver",
- "libp2p-core 0.41.2",
+ "libp2p-core 0.42.0",
  "libp2p-identity",
  "parking_lot 0.12.3",
  "smallvec",
@@ -8355,7 +8369,7 @@ dependencies = [
  "instant",
  "libp2p-core 0.41.3",
  "libp2p-identity",
- "libp2p-swarm 0.44.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libp2p-swarm 0.44.2",
  "prometheus-client",
  "quick-protobuf",
  "quick-protobuf-codec 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -8369,8 +8383,8 @@ dependencies = [
 
 [[package]]
 name = "libp2p-gossipsub"
-version = "0.46.1"
-source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54"
+version = "0.47.0"
+source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3"
 dependencies = [
  "asynchronous-codec",
  "base64 0.22.1",
@@ -8382,37 +8396,37 @@ dependencies = [
  "futures-ticker",
  "getrandom",
  "hex_fmt",
- "instant",
- "libp2p-core 0.41.2",
+ "libp2p-core 0.42.0",
  "libp2p-identity",
- "libp2p-swarm 0.44.2 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)",
+ "libp2p-swarm 0.45.1",
  "prometheus-client",
  "quick-protobuf",
- "quick-protobuf-codec 0.3.1 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)",
+ "quick-protobuf-codec 0.3.1 (git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3)",
  "rand",
  "regex",
  "sha2 0.10.8",
  "smallvec",
  "tracing",
  "void",
+ "web-time",
 ]
 
 [[package]]
 name = "libp2p-identify"
-version = "0.44.2"
-source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54"
+version = "0.45.0"
"git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "asynchronous-codec", "either", "futures", "futures-bounded", "futures-timer", - "libp2p-core 0.41.2", + "libp2p-core 0.42.0", "libp2p-identity", - "libp2p-swarm 0.44.2 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)", + "libp2p-swarm 0.45.1", "lru", "quick-protobuf", - "quick-protobuf-codec 0.3.1 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)", + "quick-protobuf-codec 0.3.1 (git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3)", "smallvec", "thiserror", "tracing", @@ -8439,16 +8453,16 @@ dependencies = [ [[package]] name = "libp2p-mdns" -version = "0.45.1" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.46.0" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "data-encoding", "futures", "hickory-proto", "if-watch", - "libp2p-core 0.41.2", + "libp2p-core 0.42.0", "libp2p-identity", - "libp2p-swarm 0.44.2 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)", + "libp2p-swarm 0.45.1", "rand", "smallvec", "socket2 0.5.7", @@ -8459,32 +8473,32 @@ dependencies = [ [[package]] name = "libp2p-metrics" -version = "0.14.1" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.15.0" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "futures", - "instant", - "libp2p-core 0.41.2", - "libp2p-gossipsub 0.46.1 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)", + "libp2p-core 0.42.0", + "libp2p-gossipsub 0.47.0", "libp2p-identify", "libp2p-identity", "libp2p-ping", "libp2p-relay", - "libp2p-swarm 0.44.2 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)", + "libp2p-swarm 0.45.1", "pin-project", "prometheus-client", + "web-time", ] [[package]] name = "libp2p-noise" -version = "0.44.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.45.0" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "asynchronous-codec", "bytes", "curve25519-dalek", "futures", - "libp2p-core 0.41.2", + "libp2p-core 0.42.0", "libp2p-identity", "multiaddr 0.18.1", "multihash 0.19.1", @@ -8502,38 +8516,38 @@ dependencies = [ [[package]] name = "libp2p-ping" -version = "0.44.1" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.45.0" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "either", "futures", "futures-timer", - "instant", - "libp2p-core 0.41.2", + "libp2p-core 0.42.0", "libp2p-identity", - "libp2p-swarm 0.44.2 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)", + "libp2p-swarm 0.45.1", "rand", 
"tracing", "void", + "web-time", ] [[package]] name = "libp2p-quic" -version = "0.10.3" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.11.1" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "bytes", "futures", "futures-timer", "if-watch", - "libp2p-core 0.41.2", + "libp2p-core 0.42.0", "libp2p-identity", "libp2p-tls", "parking_lot 0.12.3", - "quinn 0.10.2", + "quinn", "rand", - "ring 0.16.20", - "rustls 0.21.12", + "ring 0.17.8", + "rustls 0.23.11", "socket2 0.5.7", "thiserror", "tokio", @@ -8542,8 +8556,8 @@ dependencies = [ [[package]] name = "libp2p-relay" -version = "0.17.2" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.18.0" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "asynchronous-codec", "bytes", @@ -8551,11 +8565,11 @@ dependencies = [ "futures", "futures-bounded", "futures-timer", - "libp2p-core 0.41.2", + "libp2p-core 0.42.0", "libp2p-identity", - "libp2p-swarm 0.44.2 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)", + "libp2p-swarm 0.45.1", "quick-protobuf", - "quick-protobuf-codec 0.3.1 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)", + "quick-protobuf-codec 0.3.1 (git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3)", "rand", "static_assertions", "thiserror", @@ -8588,20 +8602,19 @@ dependencies = [ [[package]] name = "libp2p-swarm" -version = "0.44.2" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.45.1" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "either", "fnv", "futures", "futures-timer", "getrandom", - "instant", - "libp2p-core 0.41.2", + "libp2p-core 0.42.0", "libp2p-identity", "libp2p-swarm-derive", "lru", - "multistream-select 0.13.0 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)", + "multistream-select 0.13.0 (git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3)", "once_cell", "rand", "smallvec", @@ -8609,12 +8622,13 @@ dependencies = [ "tracing", "void", "wasm-bindgen-futures", + "web-time", ] [[package]] name = "libp2p-swarm-derive" -version = "0.34.2" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.35.0" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "heck 0.5.0", "proc-macro2", @@ -8624,14 +8638,14 @@ dependencies = [ [[package]] name = "libp2p-tcp" -version = "0.41.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.42.0" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "futures", "futures-timer", "if-watch", 
"libc", - "libp2p-core 0.41.2", + "libp2p-core 0.42.0", "libp2p-identity", "socket2 0.5.7", "tokio", @@ -8640,16 +8654,16 @@ dependencies = [ [[package]] name = "libp2p-tls" -version = "0.3.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.5.0" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "futures", "futures-rustls", - "libp2p-core 0.41.2", + "libp2p-core 0.42.0", "libp2p-identity", - "rcgen", + "rcgen 0.11.3", "ring 0.17.8", - "rustls 0.21.12", + "rustls 0.23.11", "rustls-webpki 0.101.7", "thiserror", "x509-parser 0.16.0", @@ -8658,14 +8672,14 @@ dependencies = [ [[package]] name = "libp2p-upnp" -version = "0.2.2" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.3.0" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "futures", "futures-timer", "igd-next", - "libp2p-core 0.41.2", - "libp2p-swarm 0.44.2 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)", + "libp2p-core 0.42.0", + "libp2p-swarm 0.45.1", "tokio", "tracing", "void", @@ -8673,8 +8687,8 @@ dependencies = [ [[package]] name = "libp2p-webrtc" -version = "0.7.1-alpha" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.8.0-alpha" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "async-trait", "bytes", @@ -8682,15 +8696,15 @@ dependencies = [ "futures-timer", "hex", "if-watch", - "libp2p-core 0.41.2", + "libp2p-core 0.42.0", "libp2p-identity", "libp2p-noise", "libp2p-webrtc-utils", "multihash 0.19.1", "rand", - "rcgen", + "rcgen 0.11.3", "serde", - "stun", + "stun 0.6.0", "thiserror", "tinytemplate", "tokio", @@ -8701,18 +8715,18 @@ dependencies = [ [[package]] name = "libp2p-webrtc-utils" -version = "0.2.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.3.0" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "asynchronous-codec", "bytes", "futures", "hex", - "libp2p-core 0.41.2", + "libp2p-core 0.42.0", "libp2p-identity", "libp2p-noise", "quick-protobuf", - "quick-protobuf-codec 0.3.1 (git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54)", + "quick-protobuf-codec 0.3.1 (git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3)", "rand", "serde", "sha2 0.10.8", @@ -8723,15 +8737,15 @@ dependencies = [ [[package]] name = "libp2p-webrtc-websys" -version = "0.3.0-alpha" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.4.0-alpha" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "bytes", "futures", "getrandom", "hex", "js-sys", - "libp2p-core 0.41.2", + "libp2p-core 0.42.0", 
"libp2p-identity", "libp2p-webrtc-utils", "send_wrapper 0.6.0", @@ -8742,14 +8756,51 @@ dependencies = [ "web-sys", ] +[[package]] +name = "libp2p-websocket" +version = "0.44.0" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" +dependencies = [ + "either", + "futures", + "futures-rustls", + "libp2p-core 0.42.0", + "libp2p-identity", + "parking_lot 0.12.3", + "pin-project-lite", + "rw-stream-sink 0.4.0 (git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3)", + "soketto 0.8.0", + "thiserror", + "tracing", + "url", + "webpki-roots 0.25.4", +] + +[[package]] +name = "libp2p-websocket-websys" +version = "0.4.0" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" +dependencies = [ + "bytes", + "futures", + "js-sys", + "libp2p-core 0.42.0", + "parking_lot 0.12.3", + "send_wrapper 0.6.0", + "thiserror", + "tracing", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "libp2p-yamux" -version = "0.45.1" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +version = "0.46.0" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "either", "futures", - "libp2p-core 0.41.2", + "libp2p-core 0.42.0", "thiserror", "tracing", "yamux 0.12.1", @@ -9231,7 +9282,7 @@ dependencies = [ [[package]] name = "multistream-select" version = "0.13.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "bytes", "futures", @@ -10749,7 +10800,7 @@ dependencies = [ [[package]] name = "quick-protobuf-codec" version = "0.3.1" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "asynchronous-codec", "bytes", @@ -10767,24 +10818,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "quinn" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cc2c5017e4b43d5995dcea317bc46c1e09404c0a9664d2908f7f02dfe943d75" -dependencies = [ - "bytes", - "futures-io", - "pin-project-lite", - "quinn-proto 0.10.6", - "quinn-udp 0.4.1", - "rustc-hash", - "rustls 0.21.12", - "thiserror", - "tokio", - "tracing", -] - [[package]] name = "quinn" version = "0.11.2" @@ -10792,9 +10825,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e4ceeeeabace7857413798eb1ffa1e9c905a9946a57d81fb69b4b71c4d8eb3ad" dependencies = [ "bytes", + "futures-io", "pin-project-lite", - "quinn-proto 0.11.3", - "quinn-udp 0.5.2", + "quinn-proto", + "quinn-udp", "rustc-hash", "rustls 0.23.11", "thiserror", @@ -10802,23 +10836,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "quinn-proto" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "141bf7dfde2fbc246bfd3fe12f2455aa24b0fbd9af535d8c86c7bd1381ff2b1a" -dependencies = [ - "bytes", - "rand", - "ring 0.16.20", - 
"rustc-hash", - "rustls 0.21.12", - "slab", - "thiserror", - "tinyvec", - "tracing", -] - [[package]] name = "quinn-proto" version = "0.11.3" @@ -10836,19 +10853,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "quinn-udp" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "055b4e778e8feb9f93c4e439f71dc2156ef13360b432b799e179a8c4cdf0b1d7" -dependencies = [ - "bytes", - "libc", - "socket2 0.5.7", - "tracing", - "windows-sys 0.48.0", -] - [[package]] name = "quinn-udp" version = "0.5.2" @@ -10990,6 +10994,19 @@ dependencies = [ "yasna", ] +[[package]] +name = "rcgen" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54077e1872c46788540de1ea3d7f4ccb1983d12f9aa909b234468676c1a36779" +dependencies = [ + "pem", + "ring 0.17.8", + "rustls-pki-types", + "time", + "yasna", +] + [[package]] name = "redb" version = "2.1.1" @@ -11172,7 +11189,7 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "quinn 0.11.2", + "quinn", "rustls 0.23.11", "rustls-pemfile 2.1.2", "rustls-pki-types", @@ -11465,7 +11482,7 @@ checksum = "33648a781874466a62d89e265fee9f17e32bc7d05a256e6cca41bf97eadcd8aa" dependencies = [ "bytes", "thiserror", - "webrtc-util", + "webrtc-util 0.8.1", ] [[package]] @@ -11503,7 +11520,7 @@ dependencies = [ "rand", "serde", "thiserror", - "webrtc-util", + "webrtc-util 0.8.1", ] [[package]] @@ -11516,7 +11533,7 @@ dependencies = [ "rand", "serde", "thiserror", - "webrtc-util", + "webrtc-util 0.8.1", ] [[package]] @@ -11551,7 +11568,7 @@ checksum = "48fd7bd8a6377e15ad9d42a8ec25371b94ddc67abe7c8b9127bec79bebaaae18" [[package]] name = "runner-macro" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" dependencies = [ "quote", "syn 2.0.71", @@ -11803,7 +11820,7 @@ dependencies = [ [[package]] name = "rw-stream-sink" version = "0.4.0" -source = "git+https://github.com/libp2p/rust-libp2p?rev=451bcb60bb472262f96071006b19e5d236b1dd54#451bcb60bb472262f96071006b19e5d236b1dd54" +source = "git+https://github.com/libp2p/rust-libp2p?rev=f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3#f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" dependencies = [ "futures", "pin-project", @@ -11865,12 +11882,13 @@ dependencies = [ [[package]] name = "saya" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" dependencies = [ "anyhow", "cairo-proof-parser", "clap", "console", + "dojo-utils", "katana-primitives", "katana-rpc-api", "saya-core", @@ -11885,7 +11903,7 @@ dependencies = [ [[package]] name = "saya-core" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" dependencies = [ "anyhow", "async-trait", @@ -11918,7 +11936,7 @@ dependencies = [ [[package]] name = "saya-provider" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" dependencies = [ "alloy-primitives", "anyhow", @@ -12107,7 +12125,7 @@ dependencies = [ [[package]] name = "scheduler" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" dependencies = [ "clap", "katana-primitives", @@ -12798,9 +12816,24 @@ dependencies = [ "sha-1", ] +[[package]] +name = "soketto" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37468c595637c10857701c990f93a40ce0e357cedb0953d1c26c8d8027f9bb53" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures", + "httparse", + "log", + "rand", + "sha1", +] + [[package]] name = "sozo" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" dependencies = [ "account_sdk", "anyhow", @@ -12852,6 +12885,7 @@ dependencies = [ "smol_str", "snapbox", "sozo-ops", + 
"sozo-walnut", "starknet 0.11.0", "starknet-crypto 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "thiserror", @@ -12864,7 +12898,7 @@ dependencies = [ [[package]] name = "sozo-ops" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" dependencies = [ "anyhow", "assert_fs", @@ -12907,6 +12941,7 @@ dependencies = [ "serde_json", "serde_with 3.9.0", "smol_str", + "sozo-walnut", "starknet 0.11.0", "starknet-crypto 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "tee", @@ -12919,12 +12954,32 @@ dependencies = [ [[package]] name = "sozo-signers" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" dependencies = [ "anyhow", "starknet 0.11.0", ] +[[package]] +name = "sozo-walnut" +version = "1.0.0-alpha.11" +dependencies = [ + "anyhow", + "console", + "dojo-world", + "futures", + "reqwest 0.12.5", + "scarb", + "scarb-ui", + "serde", + "serde_json", + "starknet 0.11.0", + "thiserror", + "url", + "urlencoding", + "walkdir", +] + [[package]] name = "spin" version = "0.5.2" @@ -13006,7 +13061,7 @@ dependencies = [ "futures-intrusive", "futures-io", "futures-util", - "hashlink", + "hashlink 0.8.4", "hex", "indexmap 2.2.6", "log", @@ -13712,7 +13767,26 @@ dependencies = [ "thiserror", "tokio", "url", - "webrtc-util", + "webrtc-util 0.8.1", +] + +[[package]] +name = "stun" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28fad383a1cc63ae141e84e48eaef44a1063e9d9e55bcb8f51a99b886486e01b" +dependencies = [ + "base64 0.21.7", + "crc", + "lazy_static", + "md-5", + "rand", + "ring 0.17.8", + "subtle", + "thiserror", + "tokio", + "url", + "webrtc-util 0.9.0", ] [[package]] @@ -14422,7 +14496,7 @@ checksum = "ea68304e134ecd095ac6c3574494fc62b909f416c4fca77e440530221e549d3d" [[package]] name = "torii" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" dependencies = [ "anyhow", "async-trait", @@ -14467,7 +14541,7 @@ dependencies = [ [[package]] name = "torii-client" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" dependencies = [ "async-trait", "camino", @@ -14476,7 +14550,7 @@ dependencies = [ "dojo-world", "futures", "futures-util", - "libp2p-gossipsub 0.46.1 (registry+https://github.com/rust-lang/crates.io-index)", + "libp2p-gossipsub 0.46.1", "num-traits 0.2.19", "parking_lot 0.12.3", "prost 0.11.9", @@ -14496,7 +14570,7 @@ dependencies = [ [[package]] name = "torii-core" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" dependencies = [ "anyhow", "async-trait", @@ -14511,6 +14585,7 @@ dependencies = [ "dojo-world", "futures-channel", "futures-util", + "hashlink 0.9.1", "hex", "katana-runner", "lazy_static", @@ -14536,7 +14611,7 @@ dependencies = [ [[package]] name = "torii-graphql" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" dependencies = [ "anyhow", "async-graphql", @@ -14579,7 +14654,7 @@ dependencies = [ [[package]] name = "torii-grpc" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" dependencies = [ "bytes", "cainome", @@ -14627,7 +14702,7 @@ dependencies = [ [[package]] name = "torii-relay" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" dependencies = [ "anyhow", "async-trait", @@ -14643,7 +14718,9 @@ dependencies = [ "libp2p", "libp2p-webrtc", "libp2p-webrtc-websys", + "libp2p-websocket-websys", "rand", + "rcgen 0.13.1", "regex", "serde", "serde_json", @@ -14664,7 +14741,7 @@ dependencies = [ [[package]] name = "torii-server" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" dependencies = [ "base64 0.21.7", "http 0.2.12", @@ -14925,11 +15002,11 @@ dependencies = [ 
"md-5", "rand", "ring 0.17.8", - "stun", + "stun 0.5.1", "thiserror", "tokio", "tokio-util", - "webrtc-util", + "webrtc-util 0.8.1", ] [[package]] @@ -14966,7 +15043,7 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "types-test" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" [[package]] name = "u256-literal" @@ -15520,7 +15597,7 @@ dependencies = [ "log", "pem", "rand", - "rcgen", + "rcgen 0.11.3", "regex", "ring 0.16.20", "rtcp", @@ -15531,7 +15608,7 @@ dependencies = [ "serde_json", "sha2 0.10.8", "smol_str", - "stun", + "stun 0.5.1", "thiserror", "time", "tokio", @@ -15545,7 +15622,7 @@ dependencies = [ "webrtc-media", "webrtc-sctp", "webrtc-srtp", - "webrtc-util", + "webrtc-util 0.8.1", ] [[package]] @@ -15559,7 +15636,7 @@ dependencies = [ "thiserror", "tokio", "webrtc-sctp", - "webrtc-util", + "webrtc-util 0.8.1", ] [[package]] @@ -15584,7 +15661,7 @@ dependencies = [ "pem", "rand", "rand_core", - "rcgen", + "rcgen 0.11.3", "ring 0.16.20", "rustls 0.21.12", "sec1", @@ -15594,7 +15671,7 @@ dependencies = [ "subtle", "thiserror", "tokio", - "webrtc-util", + "webrtc-util 0.8.1", "x25519-dalek", "x509-parser 0.15.1", ] @@ -15612,7 +15689,7 @@ dependencies = [ "rand", "serde", "serde_json", - "stun", + "stun 0.5.1", "thiserror", "tokio", "turn", @@ -15620,7 +15697,7 @@ dependencies = [ "uuid 1.10.0", "waitgroup", "webrtc-mdns", - "webrtc-util", + "webrtc-util 0.8.1", ] [[package]] @@ -15633,7 +15710,7 @@ dependencies = [ "socket2 0.5.7", "thiserror", "tokio", - "webrtc-util", + "webrtc-util 0.8.1", ] [[package]] @@ -15663,7 +15740,7 @@ dependencies = [ "rand", "thiserror", "tokio", - "webrtc-util", + "webrtc-util 0.8.1", ] [[package]] @@ -15686,7 +15763,7 @@ dependencies = [ "subtle", "thiserror", "tokio", - "webrtc-util", + "webrtc-util 0.8.1", ] [[package]] @@ -15709,6 +15786,27 @@ dependencies = [ "winapi", ] +[[package]] +name = "webrtc-util" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc8d9bc631768958ed97b8d68b5d301e63054ae90b09083d43e2fefb939fd77e" +dependencies = [ + "async-trait", + "bitflags 1.3.2", + "bytes", + "ipnet", + "lazy_static", + "libc", + "log", + "nix 0.26.4", + "portable-atomic", + "rand", + "thiserror", + "tokio", + "winapi", +] + [[package]] name = "which" version = "4.4.2" @@ -16204,7 +16302,7 @@ checksum = "9d422e8e38ec76e2f06ee439ccc765e9c6a9638b9e7c9f2e8255e4d41e8bd852" [[package]] name = "xtask-generate-test-db" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" dependencies = [ "dojo-test-utils", "dojo-utils", diff --git a/Cargo.toml b/Cargo.toml index d6992003a5..bb9dc771c3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -39,6 +39,7 @@ members = [ "crates/saya/core", "crates/saya/provider", "crates/sozo/signers", + "crates/sozo/walnut", "crates/torii/client", "crates/torii/server", "crates/torii/types-test", @@ -51,7 +52,7 @@ edition = "2021" license = "Apache-2.0" license-file = "LICENSE" repository = "https://github.com/dojoengine/dojo/" -version = "1.0.0-alpha.7" +version = "1.0.0-alpha.11" [profile.performance] codegen-units = 1 @@ -83,7 +84,7 @@ katana-codecs = { path = "crates/katana/storage/codecs" } katana-codecs-derive = { path = "crates/katana/storage/codecs/derive" } katana-core = { path = "crates/katana/core", default-features = false } katana-db = { path = "crates/katana/storage/db" } -katana-executor = { path = "crates/katana/executor", default-features = false } +katana-executor = { path = "crates/katana/executor" } katana-node = { 
path = "crates/katana/node", default-features = false } katana-pool = { path = "crates/katana/pool" } katana-primitives = { path = "crates/katana/primitives" } @@ -111,6 +112,7 @@ saya-provider = { path = "crates/saya/provider" } # sozo sozo-ops = { path = "crates/sozo/ops" } sozo-signers = { path = "crates/sozo/signers" } +sozo-walnut = { path = "crates/sozo/walnut" } anyhow = "1.0.80" assert_fs = "1.1" @@ -156,6 +158,7 @@ derive_more = "0.99.17" flate2 = "1.0.24" futures = "0.3.30" futures-util = "0.3.30" +hashlink = "0.9.1" hex = "0.4.3" http = "0.2.9" indexmap = "2.2.5" @@ -199,6 +202,7 @@ tower-http = "0.4.4" tracing = "0.1.34" tracing-subscriber = { version = "0.3.16", features = [ "env-filter", "json" ] } url = { version = "2.4.0", features = [ "serde" ] } +walkdir = "2.5.0" # server hyper = "0.14.27" diff --git a/Dockerfile b/Dockerfile index 18c996cead..261dac7ad2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,3 +1,19 @@ +FROM debian:bookworm-slim as builder + +RUN apt-get update && apt install -y git libtool automake autoconf make + +RUN git clone https://github.com/Comcast/Infinite-File-Curtailer.git curtailer \ + && cd curtailer \ + && libtoolize \ + && aclocal \ + && autoheader \ + && autoconf \ + && automake --add-missing \ + && ./configure \ + && make \ + && make install \ + && curtail --version + FROM debian:bookworm-slim as base ARG TARGETPLATFORM @@ -11,6 +27,8 @@ COPY --from=artifacts $TARGETPLATFORM/katana /usr/local/bin/katana COPY --from=artifacts $TARGETPLATFORM/sozo /usr/local/bin/sozo COPY --from=artifacts $TARGETPLATFORM/torii /usr/local/bin/torii +COPY --from=builder /usr/local/bin/curtail /usr/local/bin/curtail + RUN chmod +x /usr/local/bin/katana \ && chmod +x /usr/local/bin/sozo \ - && chmod +x /usr/local/bin/torii \ No newline at end of file + && chmod +x /usr/local/bin/torii diff --git a/bin/katana/src/cli/node.rs b/bin/katana/src/cli/node.rs index 07bad6f844..2d46261dbf 100644 --- a/bin/katana/src/cli/node.rs +++ b/bin/katana/src/cli/node.rs @@ -226,18 +226,22 @@ impl NodeArgs { let starknet_config = self.starknet_config()?; // build the node and start it - let (rpc_handle, backend) = - katana_node::start(server_config, sequencer_config, starknet_config).await?; + let node = katana_node::start(server_config, sequencer_config, starknet_config).await?; if !self.silent { #[allow(deprecated)] - let genesis = &backend.config.genesis; - print_intro(&self, genesis, rpc_handle.addr); + let genesis = &node.backend.config.genesis; + print_intro(&self, genesis, node.rpc.addr); } - // Wait until Ctrl + C is pressed, then shutdown - ctrl_c().await?; - rpc_handle.handle.stop()?; + // Wait until ctrl-c signal is received or TaskManager signals shutdown + tokio::select! 
diff --git a/bin/saya/Cargo.toml b/bin/saya/Cargo.toml
index c9bcec697a..b289b8b211 100644
--- a/bin/saya/Cargo.toml
+++ b/bin/saya/Cargo.toml
@@ -10,6 +10,7 @@ version.workspace = true
 anyhow.workspace = true
 clap.workspace = true
 console.workspace = true
+dojo-utils.workspace = true
 katana-primitives.workspace = true
 katana-rpc-api.workspace = true
 saya-core.workspace = true
diff --git a/bin/saya/src/args/mod.rs b/bin/saya/src/args/mod.rs
index 2e14cdb297..3c22e0e881 100644
--- a/bin/saya/src/args/mod.rs
+++ b/bin/saya/src/args/mod.rs
@@ -4,10 +4,12 @@ use std::io::BufReader;
 use std::path::PathBuf;
 
 use clap::Parser;
+use dojo_utils::keystore::prompt_password_if_needed;
 use saya_core::data_availability::celestia::CelestiaConfig;
 use saya_core::data_availability::DataAvailabilityConfig;
 use saya_core::{ProverAccessKey, SayaConfig, StarknetAccountData};
 use starknet::core::utils::cairo_short_string_to_felt;
+use starknet::signers::SigningKey;
 use starknet_account::StarknetAccountOptions;
 use tracing::Subscriber;
 use tracing_subscriber::{fmt, EnvFilter};
@@ -130,11 +132,30 @@ impl TryFrom<SayaArgs> for SayaConfig {
             None => None,
         };
 
+        // Check if the private key is from keystore or provided directly to follow `sozo`
+        // conventions.
+        let private_key = if let Some(pk) = args.starknet_account.signer_key {
+            pk
+        } else if let Some(path) = args.starknet_account.signer_keystore_path {
+            let password = prompt_password_if_needed(
+                args.starknet_account.signer_keystore_password.as_deref(),
+                false,
+            )?;
+
+            SigningKey::from_keystore(path, &password)?.secret_scalar()
+        } else {
+            return Err(Box::new(std::io::Error::new(
+                std::io::ErrorKind::InvalidInput,
+                "Could not find private key. Please specify the private key or path to the \
+                 keystore file.",
+            )));
+        };
+
         let starknet_account = StarknetAccountData {
             starknet_url: args.starknet_account.starknet_url,
             chain_id: cairo_short_string_to_felt(&args.starknet_account.chain_id)?,
             signer_address: args.starknet_account.signer_address,
-            signer_key: args.starknet_account.signer_key,
+            signer_key: private_key,
         };
 
         let prover_key =
@@ -200,7 +221,9 @@ mod tests {
                 starknet_url: Url::parse("http://localhost:5030").unwrap(),
                 chain_id: "SN_SEPOLIA".to_string(),
                 signer_address: Default::default(),
-                signer_key: Default::default(),
+                signer_key: None,
+                signer_keystore_path: None,
+                signer_keystore_password: None,
             },
         };
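The `TryFrom` change above resolves the signer the same way `sozo` does: an explicit private key wins, otherwise the keystore file is decrypted, prompting for the password only when it wasn't supplied. A sketch of that resolution order, with `rpassword` standing in for dojo-utils' `prompt_password_if_needed` helper:

```rust
use starknet::signers::SigningKey;

// Sketch of the key-resolution order; names and error handling are illustrative.
fn resolve_private_key(
    raw_key: Option<starknet::core::types::Felt>,
    keystore_path: Option<String>,
    password: Option<String>,
) -> anyhow::Result<starknet::core::types::Felt> {
    // 1. A directly provided key takes precedence.
    if let Some(pk) = raw_key {
        return Ok(pk);
    }
    // 2. Otherwise decrypt the keystore, prompting for the password if needed.
    if let Some(path) = keystore_path {
        let password = match password {
            Some(p) => p,
            None => rpassword::prompt_password("Enter keystore password: ")?,
        };
        return Ok(SigningKey::from_keystore(path, &password)?.secret_scalar());
    }
    // 3. Neither source is available: fail loudly.
    anyhow::bail!("no private key or keystore path provided")
}
```

This matches the `rpassword` dependency the lockfile adds to `dojo-utils` in this same PR.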
diff --git a/bin/saya/src/args/proof.rs b/bin/saya/src/args/proof.rs
index 41afd9d6ee..0549bda680 100644
--- a/bin/saya/src/args/proof.rs
+++ b/bin/saya/src/args/proof.rs
@@ -1,11 +1,12 @@
 use clap::Args;
+use dojo_utils::env::DOJO_WORLD_ADDRESS_ENV_VAR;
 use katana_primitives::felt::FieldElement;
 use url::Url;
 
 #[derive(Debug, Args, Clone)]
 pub struct ProofOptions {
     #[arg(help = "The address of the World contract.")]
-    #[arg(long = "world")]
+    #[arg(long = "world", env = DOJO_WORLD_ADDRESS_ENV_VAR)]
     pub world_address: FieldElement,
 
     #[arg(help = "The address of the Fact Registry contract.")]
diff --git a/bin/saya/src/args/starknet_account.rs b/bin/saya/src/args/starknet_account.rs
index ce03441e7f..cf8eb1b5aa 100644
--- a/bin/saya/src/args/starknet_account.rs
+++ b/bin/saya/src/args/starknet_account.rs
@@ -1,13 +1,16 @@
 //! Data availability options.
 
 use clap::Args;
+use dojo_utils::env::{
+    DOJO_ACCOUNT_ADDRESS_ENV_VAR, DOJO_KEYSTORE_PASSWORD_ENV_VAR, DOJO_KEYSTORE_PATH_ENV_VAR,
+    DOJO_PRIVATE_KEY_ENV_VAR, STARKNET_RPC_URL_ENV_VAR,
+};
 use katana_primitives::felt::FieldElement;
 use url::Url;
 
 #[derive(Debug, Args, Clone)]
 pub struct StarknetAccountOptions {
-    #[arg(long)]
-    #[arg(env)]
+    #[arg(long, env = STARKNET_RPC_URL_ENV_VAR)]
     #[arg(help = "The url of the starknet node.")]
     pub starknet_url: Url,
 
@@ -16,13 +19,21 @@ pub struct StarknetAccountOptions {
     #[arg(help = "The chain id of the starknet node.")]
     pub chain_id: String,
 
-    #[arg(long)]
-    #[arg(env)]
+    #[arg(long, env = DOJO_ACCOUNT_ADDRESS_ENV_VAR)]
     #[arg(help = "The address of the starknet account.")]
     pub signer_address: FieldElement,
 
-    #[arg(long)]
-    #[arg(env)]
+    #[arg(long, env = DOJO_PRIVATE_KEY_ENV_VAR)]
     #[arg(help = "The private key of the starknet account.")]
-    pub signer_key: FieldElement,
+    pub signer_key: Option<FieldElement>,
+
+    #[arg(long = "keystore", env = DOJO_KEYSTORE_PATH_ENV_VAR)]
+    #[arg(value_name = "PATH")]
+    #[arg(help = "The path to the keystore file.")]
+    pub signer_keystore_path: Option<String>,
+
+    #[arg(long = "password", env = DOJO_KEYSTORE_PASSWORD_ENV_VAR)]
+    #[arg(value_name = "PASSWORD")]
+    #[arg(help = "The password to the keystore file.")]
+    pub signer_keystore_password: Option<String>,
 }
diff --git a/bin/sozo/Cargo.toml b/bin/sozo/Cargo.toml
index fc0958680c..53fe96d40e 100644
--- a/bin/sozo/Cargo.toml
+++ b/bin/sozo/Cargo.toml
@@ -51,6 +51,7 @@ serde.workspace = true
 serde_json.workspace = true
 smol_str.workspace = true
 sozo-ops.workspace = true
+sozo-walnut = { workspace = true, optional = true }
 starknet.workspace = true
 starknet-crypto.workspace = true
 thiserror.workspace = true
@@ -70,5 +71,7 @@ katana-runner.workspace = true
 snapbox = "0.4.6"
 
 [features]
+default = [ "controller", "walnut" ]
+
 controller = [ "dep:account_sdk", "dep:reqwest", "dep:slot" ]
-default = [ "controller" ]
+walnut = [ "dep:sozo-walnut", "sozo-ops/walnut" ]
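The `starknet_account.rs` hunk above replaces bare `#[arg(env)]` attributes with named environment variables, so each flag has a stable, documented fallback. A minimal sketch of the pattern (requires clap's `derive` and `env` features; the env names here are illustrative stand-ins for the `DOJO_*` constants in dojo-utils):

```rust
use clap::Parser;

// Each flag falls back to a named environment variable when not passed on the CLI.
#[derive(Debug, Parser)]
struct AccountOpts {
    #[arg(long, env = "STARKNET_RPC_URL", help = "The url of the starknet node.")]
    starknet_url: String,

    #[arg(long = "keystore", env = "DOJO_KEYSTORE_PATH", value_name = "PATH")]
    signer_keystore_path: Option<String>,

    #[arg(long = "password", env = "DOJO_KEYSTORE_PASSWORD", value_name = "PASSWORD")]
    signer_keystore_password: Option<String>,
}

fn main() {
    // `--starknet-url` on the command line wins; otherwise clap reads STARKNET_RPC_URL.
    let opts = AccountOpts::parse();
    println!("{opts:?}");
}
```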
diff --git a/bin/sozo/src/commands/auth.rs b/bin/sozo/src/commands/auth.rs
index e1a07f08f0..0c98c0de4a 100644
--- a/bin/sozo/src/commands/auth.rs
+++ b/bin/sozo/src/commands/auth.rs
@@ -5,6 +5,8 @@ use dojo_world::metadata::get_default_namespace_from_ws;
 use scarb::core::Config;
 use scarb_ui::Ui;
 use sozo_ops::auth;
+#[cfg(feature = "walnut")]
+use sozo_walnut::WalnutDebugger;
 use tracing::trace;
 
 use super::options::account::AccountOptions;
@@ -149,7 +151,11 @@ pub async fn grant(
 ) -> Result<()> {
     trace!(?kind, ?world, ?starknet, ?account, ?transaction, "Executing Grant command.");
     let world =
-        utils::world_from_env_metadata(world, account, starknet, &env_metadata, config).await?;
+        utils::world_from_env_metadata(world, account, &starknet, &env_metadata, config).await?;
+
+    #[cfg(feature = "walnut")]
+    let walnut_debugger =
+        WalnutDebugger::new_from_flag(transaction.walnut, starknet.url(env_metadata.as_ref())?);
 
     match kind {
         AuthKind::Writer { models_contracts } => {
@@ -157,16 +163,32 @@ pub async fn grant(
                 contracts=?models_contracts,
                 "Granting Writer permissions."
             );
-            auth::grant_writer(ui, &world, &models_contracts, transaction.into(), default_namespace)
-                .await
+            auth::grant_writer(
+                ui,
+                &world,
+                &models_contracts,
+                &transaction.into(),
+                default_namespace,
+                #[cfg(feature = "walnut")]
+                &walnut_debugger,
+            )
+            .await
         }
         AuthKind::Owner { owners_resources } => {
             trace!(
                 resources=?owners_resources,
                 "Granting Owner permissions."
             );
-            auth::grant_owner(ui, &world, &owners_resources, transaction.into(), default_namespace)
-                .await
+            auth::grant_owner(
+                ui,
+                &world,
+                &owners_resources,
+                &transaction.into(),
+                default_namespace,
+                #[cfg(feature = "walnut")]
+                &walnut_debugger,
+            )
+            .await
         }
     }
 }
@@ -185,7 +207,11 @@ pub async fn revoke(
 ) -> Result<()> {
     trace!(?kind, ?world, ?starknet, ?account, ?transaction, "Executing Revoke command.");
     let world =
-        utils::world_from_env_metadata(world, account, starknet, &env_metadata, config).await?;
+        utils::world_from_env_metadata(world, account, &starknet, &env_metadata, config).await?;
+
+    #[cfg(feature = "walnut")]
+    let walnut_debugger =
+        WalnutDebugger::new_from_flag(transaction.walnut, starknet.url(env_metadata.as_ref())?);
 
     match kind {
         AuthKind::Writer { models_contracts } => {
@@ -197,8 +223,10 @@ pub async fn revoke(
                 ui,
                 &world,
                 &models_contracts,
-                transaction.into(),
+                &transaction.into(),
                 default_namespace,
+                #[cfg(feature = "walnut")]
+                &walnut_debugger,
             )
             .await
         }
@@ -207,8 +235,16 @@ pub async fn revoke(
                 resources=?owners_resources,
                 "Revoking Owner permissions."
             );
-            auth::revoke_owner(ui, &world, &owners_resources, transaction.into(), default_namespace)
-                .await
+            auth::revoke_owner(
+                ui,
+                &world,
+                &owners_resources,
+                &transaction.into(),
+                default_namespace,
+                #[cfg(feature = "walnut")]
+                &walnut_debugger,
+            )
+            .await
         }
     }
 }
diff --git a/bin/sozo/src/commands/build.rs b/bin/sozo/src/commands/build.rs
index 49be74a057..46cb25e8d9 100644
--- a/bin/sozo/src/commands/build.rs
+++ b/bin/sozo/src/commands/build.rs
@@ -11,6 +11,7 @@ use scarb_ui::args::{FeaturesSpec, PackagesFilter};
 use sozo_ops::statistics::{get_contract_statistics_for_dir, ContractStatistics};
 use tracing::trace;
 
+use crate::commands::check_package_dojo_version;
 use crate::commands::clean::CleanArgs;
 
 const BYTECODE_SIZE_LABEL: &str = "Bytecode size [in felts]\n(Sierra, Casm)";
@@ -46,6 +47,10 @@ pub struct BuildArgs {
     /// Specify packages to build.
     #[command(flatten)]
     pub packages: Option<PackagesFilter>,
+
+    #[arg(long)]
+    #[arg(help = "Output the Sierra debug information for the compiled contracts.")]
+    pub output_debug_info: bool,
 }
 
 impl BuildArgs {
@@ -58,6 +63,10 @@ impl BuildArgs {
             ws.members().collect()
         };
 
+        for p in &packages {
+            check_package_dojo_version(&ws, p)?;
+        }
+
         let profile_name =
             ws.current_profile().expect("Scarb profile is expected at this point.").to_string();
@@ -162,6 +171,7 @@ impl Default for BuildArgs {
             bindings_output: "bindings".to_string(),
             stats: false,
             packages: None,
+            output_debug_info: false,
         }
     }
 }
diff --git a/bin/sozo/src/commands/call.rs b/bin/sozo/src/commands/call.rs
index beb2497809..d24494ce20 100644
--- a/bin/sozo/src/commands/call.rs
+++ b/bin/sozo/src/commands/call.rs
@@ -3,11 +3,11 @@ use clap::Args;
 use dojo_world::contracts::naming::ensure_namespace;
 use dojo_world::metadata::get_default_namespace_from_ws;
 use scarb::core::Config;
-use starknet::core::types::Felt;
 use tracing::trace;
 
 use super::options::starknet::StarknetOptions;
 use super::options::world::WorldOptions;
+use crate::commands::calldata_decoder;
 use crate::utils;
 
 #[derive(Debug, Args)]
@@ -22,8 +22,14 @@ pub struct CallArgs {
     #[arg(short, long)]
     #[arg(value_delimiter = ',')]
     #[arg(help = "The calldata to be passed to the entrypoint. Comma separated values e.g., \
-                  0x12345,0x69420.")]
-    pub calldata: Vec<Felt>,
+                  0x12345,128,u256:9999999999. Sozo supports some prefixes that you can use to \
+                  automatically parse some types. The supported prefixes are:
+                  - u256: A 256-bit unsigned integer.
+                  - sstr: A cairo short string.
+                  - str: A cairo string (ByteArray).
+                  - int: A signed integer.
+                  - no prefix: A cairo felt or any type that fits into one felt.")]
+    pub calldata: Option<String>,
 
     #[arg(short, long)]
     #[arg(help = "The block ID (could be a hash, a number, 'pending' or 'latest')")]
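A toy illustration of the prefix scheme described in the help text above. Sozo's real `calldata_decoder` module is the authority here; this sketch assumes a `u256` expands to two felts with the low half first, restricts it to values below 2^128, and omits the `str:` ByteArray case entirely.

```rust
use anyhow::Result;
use starknet::core::types::Felt;
use starknet::core::utils::cairo_short_string_to_felt;
use std::str::FromStr;

/// Decodes one comma-separated item following the prefixes described above.
fn decode_item(item: &str) -> Result<Vec<Felt>> {
    if let Some(v) = item.strip_prefix("u256:") {
        // Assumed convention: a u256 expands to two felts, low 128 bits
        // first. Toy restriction: only values that fit into a u128.
        Ok(vec![Felt::from(v.parse::<u128>()?), Felt::ZERO])
    } else if let Some(v) = item.strip_prefix("sstr:") {
        // A Cairo short string fits into a single felt.
        Ok(vec![cairo_short_string_to_felt(v)?])
    } else if let Some(v) = item.strip_prefix("int:") {
        // Negative integers wrap around the field modulus.
        let v: i128 = v.parse()?;
        let felt = Felt::from(v.unsigned_abs());
        Ok(vec![if v < 0 { Felt::ZERO - felt } else { felt }])
    } else {
        // Plain felt: decimal or 0x-prefixed hex.
        Ok(vec![Felt::from_str(item)?])
    }
}

fn main() -> Result<()> {
    // Matches the help text example "0x12345,128,u256:9999999999".
    for item in ["0x12345", "128", "u256:9999999999", "sstr:hello"] {
        println!("{item} -> {:?}", decode_item(item)?);
    }
    Ok(())
}
```

@@ -57,11 +63,18 @@ impl CallArgs {
             .await
             .unwrap();
 
+        let calldata = if let Some(cd) = self.calldata {
+            calldata_decoder::decode_calldata(&cd)?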
+ } else { + vec![] + }; + sozo_ops::call::call( + &config.ui(), world_reader, tag_or_address, self.entrypoint, - self.calldata, + calldata, self.block_id, ) .await diff --git a/bin/sozo/src/commands/execute.rs b/bin/sozo/src/commands/execute.rs index 145b0b81e0..a0ee15de56 100644 --- a/bin/sozo/src/commands/execute.rs +++ b/bin/sozo/src/commands/execute.rs @@ -4,6 +4,8 @@ use dojo_world::contracts::naming::ensure_namespace; use dojo_world::metadata::get_default_namespace_from_ws; use scarb::core::Config; use sozo_ops::execute; +#[cfg(feature = "walnut")] +use sozo_walnut::WalnutDebugger; use tracing::trace; use super::calldata_decoder; @@ -61,11 +63,17 @@ impl ExecuteArgs { ensure_namespace(&self.tag_or_address, &default_namespace) }; + #[cfg(feature = "walnut")] + let walnut_debugger = WalnutDebugger::new_from_flag( + self.transaction.walnut, + self.starknet.url(env_metadata.as_ref())?, + ); + config.tokio_handle().block_on(async { let world = utils::world_from_env_metadata( self.world, self.account, - self.starknet, + &self.starknet, &env_metadata, config, ) @@ -93,6 +101,8 @@ impl ExecuteArgs { calldata, &world, &tx_config, + #[cfg(feature = "walnut")] + &walnut_debugger, ) .await }) diff --git a/bin/sozo/src/commands/mod.rs b/bin/sozo/src/commands/mod.rs index 3933e9f70f..815265bdfd 100644 --- a/bin/sozo/src/commands/mod.rs +++ b/bin/sozo/src/commands/mod.rs @@ -2,7 +2,7 @@ use core::fmt; use anyhow::Result; use clap::Subcommand; -use scarb::core::Config; +use scarb::core::{Config, Package, Workspace}; pub(crate) mod account; pub(crate) mod auth; @@ -133,3 +133,45 @@ pub fn run(command: Commands, config: &Config) -> Result<()> { Commands::Completions(args) => args.run(), } } + +/// Checks if the package has a compatible version of dojo-core. +/// In case of a workspace with multiple packages, each package is individually checked +/// and the workspace manifest path is returned in case of virtual workspace. +pub fn check_package_dojo_version(ws: &Workspace<'_>, package: &Package) -> anyhow::Result<()> { + if let Some(dojo_dep) = + package.manifest.summary.dependencies.iter().find(|dep| dep.name.as_str() == "dojo") + { + let dojo_version = env!("CARGO_PKG_VERSION"); + + let dojo_dep_str = dojo_dep.to_string(); + + // Only in case of git dependency with an explicit tag, we check if the tag is the same as + // the current version. + if dojo_dep_str.contains("git+") + && dojo_dep_str.contains("tag=v") + && !dojo_dep_str.contains(dojo_version) + { + if let Ok(cp) = ws.current_package() { + let path = + if cp.id == package.id { package.manifest_path() } else { ws.manifest_path() }; + + anyhow::bail!( + "Found dojo-core version mismatch: expected {}. Please verify your dojo \ + dependency in {}", + dojo_version, + path + ) + } else { + // Virtual workspace. + anyhow::bail!( + "Found dojo-core version mismatch: expected {}. 
Please verify your dojo \
+                     dependency in {}",
+                    dojo_version,
+                    ws.manifest_path()
+                )
+            }
+        }
+    }
+
+    Ok(())
+}
diff --git a/bin/sozo/src/commands/options/account/mod.rs b/bin/sozo/src/commands/options/account/mod.rs
index 08f8a641cf..9f73d74d8a 100644
--- a/bin/sozo/src/commands/options/account/mod.rs
+++ b/bin/sozo/src/commands/options/account/mod.rs
@@ -2,6 +2,7 @@ use std::str::FromStr;
 
 use anyhow::{anyhow, Context, Result};
 use clap::Args;
+use dojo_utils::env::DOJO_ACCOUNT_ADDRESS_ENV_VAR;
 use dojo_world::config::Environment;
 use scarb::core::Config;
 use starknet::accounts::{ExecutionEncoding, SingleOwnerAccount};
@@ -13,7 +14,6 @@ use url::Url;
 
 use super::signer::SignerOptions;
 use super::starknet::StarknetOptions;
-use super::DOJO_ACCOUNT_ADDRESS_ENV_VAR;
 
 #[cfg(feature = "controller")]
 pub mod controller;
diff --git a/bin/sozo/src/commands/options/mod.rs b/bin/sozo/src/commands/options/mod.rs
index dc5e608b60..a92c824a2d 100644
--- a/bin/sozo/src/commands/options/mod.rs
+++ b/bin/sozo/src/commands/options/mod.rs
@@ -3,10 +3,3 @@ pub mod signer;
 pub mod starknet;
 pub mod transaction;
 pub mod world;
-
-const STARKNET_RPC_URL_ENV_VAR: &str = "STARKNET_RPC_URL";
-const DOJO_PRIVATE_KEY_ENV_VAR: &str = "DOJO_PRIVATE_KEY";
-const DOJO_KEYSTORE_PATH_ENV_VAR: &str = "DOJO_KEYSTORE_PATH";
-const DOJO_KEYSTORE_PASSWORD_ENV_VAR: &str = "DOJO_KEYSTORE_PASSWORD";
-const DOJO_ACCOUNT_ADDRESS_ENV_VAR: &str = "DOJO_ACCOUNT_ADDRESS";
-const DOJO_WORLD_ADDRESS_ENV_VAR: &str = "DOJO_WORLD_ADDRESS";
diff --git a/bin/sozo/src/commands/options/signer.rs b/bin/sozo/src/commands/options/signer.rs
index ef39333f5f..eb4cc5fe32 100644
--- a/bin/sozo/src/commands/options/signer.rs
+++ b/bin/sozo/src/commands/options/signer.rs
@@ -2,13 +2,15 @@ use std::str::FromStr;
 
 use anyhow::{anyhow, Result};
 use clap::Args;
+use dojo_utils::env::{
+    DOJO_KEYSTORE_PASSWORD_ENV_VAR, DOJO_KEYSTORE_PATH_ENV_VAR, DOJO_PRIVATE_KEY_ENV_VAR,
+};
+use dojo_utils::keystore::prompt_password_if_needed;
 use dojo_world::config::Environment;
 use starknet::core::types::Felt;
 use starknet::signers::{LocalWallet, SigningKey};
 use tracing::trace;
 
-use super::{DOJO_KEYSTORE_PASSWORD_ENV_VAR, DOJO_KEYSTORE_PATH_ENV_VAR, DOJO_PRIVATE_KEY_ENV_VAR};
-
 #[derive(Debug, Args, Clone)]
 #[command(next_help_heading = "Signer options")]
 // INVARIANT:
@@ -42,35 +44,86 @@ pub struct SignerOptions {
 }
 
 impl SignerOptions {
+    /// Retrieves the signer from the CLI or environment metadata.
+    /// First, attempt to locate the signer from CLI arguments or environment variables via CLAP.
+    /// If unsuccessful, then search for the signer within the Dojo environment metadata.
+    /// If the signer is not found in any of the above locations, return an error.
     pub fn signer(&self, env_metadata: Option<&Environment>, no_wait: bool) -> Result<LocalWallet> {
-        if let Some(private_key) = self.private_key(env_metadata) {
-            trace!(private_key, "Signing using private key.");
-            return Ok(LocalWallet::from_signing_key(SigningKey::from_secret_scalar(
-                Felt::from_str(&private_key)?,
-            )));
+        let pk_cli = self.private_key.clone();
+        let pk_env = env_metadata.and_then(|env| env.private_key().map(|s| s.to_string()));
+
+        let pk_keystore_cli = self.private_key_from_keystore_cli(env_metadata, no_wait)?;
+        let pk_keystore_env = self.private_key_from_keystore_env(env_metadata, no_wait)?;
+
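The four candidate sources gathered above are consulted in strict order: CLI private key, then CLI keystore, then env private key, then env keystore. A minimal sketch of that fallback rule (not the sozo code itself), followed by the actual chain:

```rust
/// First non-empty source wins, in the order the arguments are listed.
fn first_available<T>(
    pk_cli: Option<T>,
    pk_keystore_cli: Option<T>,
    pk_env: Option<T>,
    pk_keystore_env: Option<T>,
) -> Option<T> {
    pk_cli.or(pk_keystore_cli).or(pk_env).or(pk_keystore_env)
}

fn main() {
    // A key on the CLI wins even when the environment also provides one.
    assert_eq!(first_available(Some("cli"), None, Some("env"), None), Some("cli"));
    // With no CLI input, env metadata is consulted, keystore last.
    assert_eq!(first_available(None::<&str>, None, None, Some("env_ks")), Some("env_ks"));
}
```

+        let private_key = if let Some(private_key) = pk_cli {
+            trace!("Signing using private key from CLI.");
+            SigningKey::from_secret_scalar(Felt::from_str(&private_key)?)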
+        } else if let Some(private_key) = pk_keystore_cli {
+            trace!("Signing using private key from CLI keystore.");
+            private_key
+        } else if let Some(private_key) = pk_env {
+            trace!("Signing using private key from env metadata.");
+            SigningKey::from_secret_scalar(Felt::from_str(&private_key)?)
+        } else if let Some(private_key) = pk_keystore_env {
+            trace!("Signing using private key from env metadata keystore.");
+            private_key
+        } else {
+            return Err(anyhow!(
+                "Could not find private key. Please specify the private key or path to the \
+                 keystore file."
+            ));
+        };
+
+        Ok(LocalWallet::from_signing_key(private_key))
+    }
+
+    /// Retrieves the private key from the CLI keystore.
+    /// If the keystore path is not set, it returns `None`.
+    pub fn private_key_from_keystore_cli(
+        &self,
+        env_metadata: Option<&Environment>,
+        no_wait: bool,
+    ) -> Result<Option<SigningKey>> {
+        if let Some(path) = &self.keystore_path {
+            let maybe_password = if self.keystore_password.is_some() {
+                self.keystore_password.as_deref()
+            } else {
+                env_metadata.and_then(|env| env.keystore_password())
+            };
+
+            let password = prompt_password_if_needed(maybe_password, no_wait)?;
+
+            let private_key = SigningKey::from_keystore(path, &password)?;
+            return Ok(Some(private_key));
         }
 
-        if let Some(path) = self.keystore_path(env_metadata) {
-            let password = {
-                if let Some(password) = self.keystore_password(env_metadata) {
-                    password.to_owned()
-                } else if no_wait {
-                    return Err(anyhow!("Could not find password. Please specify the password."));
-                } else {
-                    trace!("Prompting user for keystore password.");
-                    rpassword::prompt_password("Enter password: ")?
-                }
+        Ok(None)
+    }
+
+    /// Retrieves the private key from the keystore in the environment metadata.
+    /// If the keystore path is not set, it returns `None`.
+    pub fn private_key_from_keystore_env(
+        &self,
+        env_metadata: Option<&Environment>,
+        no_wait: bool,
+    ) -> Result<Option<SigningKey>> {
+        if let Some(path) = env_metadata.and_then(|env| env.keystore_path()) {
+            let maybe_password = if self.keystore_password.is_some() {
+                self.keystore_password.as_deref()
+            } else {
+                env_metadata.and_then(|env| env.keystore_password())
             };
+
+            let password = prompt_password_if_needed(maybe_password, no_wait)?;
+
             let private_key = SigningKey::from_keystore(path, &password)?;
-            return Ok(LocalWallet::from_signing_key(private_key));
+            return Ok(Some(private_key));
         }
 
-        Err(anyhow!(
-            "Could not find private key. Please specify the private key or path to the keystore \
-             file."
-        ))
+        Ok(None)
     }
 
+    /// Retrieves the private key from the CLI or environment metadata.
     pub fn private_key(&self, env_metadata: Option<&Environment>) -> Option<String> {
         if let Some(s) = &self.private_key {
             Some(s.to_owned())
@@ -79,6 +132,7 @@ impl SignerOptions {
         }
     }
 
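Both keystore paths funnel through `prompt_password_if_needed`, which replaces the inline prompting logic removed above. A sketch of the behavior it factors out, reconstructed from that removed code (the real helper lives in `dojo_utils::keystore`):

```rust
use anyhow::{anyhow, Result};

/// Reconstruction of the prompting rules: use the given password when there
/// is one, refuse to block in non-interactive mode (`no_wait`), otherwise
/// ask on stdin.
fn prompt_password_if_needed_sketch(maybe_password: Option<&str>, no_wait: bool) -> Result<String> {
    if let Some(password) = maybe_password {
        Ok(password.to_owned())
    } else if no_wait {
        Err(anyhow!("Could not find password. Please specify the password."))
    } else {
        Ok(rpassword::prompt_password("Enter password: ")?)
    }
}
```

+    /// Retrieves the keystore path from the CLI or environment metadata.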
     pub fn keystore_path(&self, env_metadata: Option<&Environment>) -> Option<String> {
         if let Some(s) = &self.keystore_path {
             Some(s.to_owned())
@@ -86,14 +140,6 @@ impl SignerOptions {
             env_metadata.and_then(|env| env.keystore_path().map(|s| s.to_string()))
         }
     }
-
-    pub fn keystore_password(&self, env_metadata: Option<&Environment>) -> Option<String> {
-        if let Some(s) = &self.keystore_password {
-            Some(s.to_owned())
-        } else {
-            env_metadata.and_then(|env| env.keystore_password().map(|s| s.to_string()))
-        }
-    }
 }
 
 #[cfg(test)]
diff --git a/bin/sozo/src/commands/options/starknet.rs b/bin/sozo/src/commands/options/starknet.rs
index dee2a0cc7f..78e32734ff 100644
--- a/bin/sozo/src/commands/options/starknet.rs
+++ b/bin/sozo/src/commands/options/starknet.rs
@@ -1,13 +1,12 @@
 use anyhow::Result;
 use clap::Args;
+use dojo_utils::env::STARKNET_RPC_URL_ENV_VAR;
 use dojo_world::config::Environment;
 use starknet::providers::jsonrpc::HttpTransport;
 use starknet::providers::JsonRpcClient;
 use tracing::trace;
 use url::Url;
 
-use super::STARKNET_RPC_URL_ENV_VAR;
-
 #[derive(Debug, Args, Clone)]
 #[command(next_help_heading = "Starknet options")]
 pub struct StarknetOptions {
@@ -49,9 +48,9 @@ impl StarknetOptions {
 #[cfg(test)]
 mod tests {
     use clap::Parser;
+    use dojo_utils::env::STARKNET_RPC_URL_ENV_VAR;
 
     use super::StarknetOptions;
-    use crate::commands::options::STARKNET_RPC_URL_ENV_VAR;
 
     const ENV_RPC: &str = "http://localhost:7474/";
     const METADATA_RPC: &str = "http://localhost:6060/";
diff --git a/bin/sozo/src/commands/options/transaction.rs b/bin/sozo/src/commands/options/transaction.rs
index 22ae591087..998ea89ce2 100644
--- a/bin/sozo/src/commands/options/transaction.rs
+++ b/bin/sozo/src/commands/options/transaction.rs
@@ -37,6 +37,11 @@ pub struct TransactionOptions {
     )]
     #[arg(global = true)]
     pub receipt: bool,
+
+    #[arg(long)]
+    #[arg(help = "Display the link to debug the transaction with Walnut.")]
+    #[arg(global = true)]
+    pub walnut: bool,
 }
 
 impl TransactionOptions {
@@ -52,10 +57,11 @@ impl TransactionOptions {
             (true, false) => Ok(TxnAction::Estimate),
             (false, true) => Ok(TxnAction::Simulate),
             (false, false) => Ok(TxnAction::Send {
-                wait: self.wait,
+                wait: self.wait || self.walnut,
                 receipt: self.receipt,
                 max_fee_raw: self.max_fee_raw,
                 fee_estimate_multiplier: self.fee_estimate_multiplier,
+                walnut: self.walnut,
             }),
         }
     }
@@ -71,9 +77,10 @@ impl From<TransactionOptions> for TxnConfig {
         );
         Self {
             fee_estimate_multiplier: value.fee_estimate_multiplier,
-            wait: value.wait,
+            wait: value.wait || value.walnut,
             receipt: value.receipt,
             max_fee_raw: value.max_fee_raw,
+            walnut: value.walnut,
         }
     }
 }
diff --git a/bin/sozo/src/commands/options/world.rs b/bin/sozo/src/commands/options/world.rs
index 0ca680c07a..86d8aab560 100644
--- a/bin/sozo/src/commands/options/world.rs
+++ b/bin/sozo/src/commands/options/world.rs
@@ -2,12 +2,11 @@ use std::str::FromStr;
 
 use anyhow::{anyhow, Result};
 use clap::Args;
+use dojo_utils::env::DOJO_WORLD_ADDRESS_ENV_VAR;
 use dojo_world::config::Environment;
 use starknet::core::types::Felt;
 use tracing::trace;
 
-use super::DOJO_WORLD_ADDRESS_ENV_VAR;
-
 #[derive(Debug, Args, Clone)]
 #[command(next_help_heading = "World options")]
 pub struct WorldOptions {
diff --git a/bin/sozo/src/commands/register.rs b/bin/sozo/src/commands/register.rs
index 0f019643bb..d320ed65d8 100644
--- a/bin/sozo/src/commands/register.rs
+++ b/bin/sozo/src/commands/register.rs
@@ -3,6 +3,8 @@ use clap::{Args, Subcommand};
 use dojo_world::contracts::WorldContractReader;
 use scarb::core::Config;
 use sozo_ops::register;
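Note in the `transaction.rs` hunk above that `--walnut` forces `wait: true`: a transaction can only be inspected in Walnut once it has actually landed, so the link is only useful for a watched transaction. A minimal stand-in (not the real `TxnConfig`) showing the interplay:

```rust
#[derive(Debug, Default, Clone, Copy)]
struct TxnConfigSketch {
    wait: bool,
    receipt: bool,
    walnut: bool,
}

/// Hypothetical mirror of the conversion above: --walnut implies --wait.
fn txn_config(wait: bool, receipt: bool, walnut: bool) -> TxnConfigSketch {
    TxnConfigSketch { wait: wait || walnut, receipt, walnut }
}

fn main() {
    // Requesting a Walnut link alone is enough to wait on the transaction.
    assert!(txn_config(false, false, true).wait);
}
```

+#[cfg(feature = "walnut")]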
+use sozo_walnut::WalnutDebugger; use starknet::accounts::ConnectedAccount; use starknet::core::types::{BlockId, BlockTag, Felt}; use tracing::trace; @@ -58,9 +60,13 @@ impl RegisterArgs { let world_address = world.world_address.unwrap_or_default(); trace!(?world_address, "Using world address."); + #[cfg(feature = "walnut")] + let walnut_debugger = + WalnutDebugger::new_from_flag(transaction.walnut, starknet.url(env_metadata.as_ref())?); + config.tokio_handle().block_on(async { let world = - utils::world_from_env_metadata(world, account, starknet, &env_metadata, config) + utils::world_from_env_metadata(world, account, &starknet, &env_metadata, config) .await?; let provider = world.account.provider(); let mut world_reader = WorldContractReader::new(world_address, &provider); @@ -69,10 +75,12 @@ impl RegisterArgs { register::model_register( models, &world, - transaction.into(), + &transaction.into(), world_reader, world_address, config, + #[cfg(feature = "walnut")] + &walnut_debugger, ) .await }) diff --git a/bin/sozo/src/commands/test.rs b/bin/sozo/src/commands/test.rs index 040565b4d3..05a3dc4daa 100644 --- a/bin/sozo/src/commands/test.rs +++ b/bin/sozo/src/commands/test.rs @@ -20,6 +20,8 @@ use scarb::ops::{self, CompileOpts}; use scarb_ui::args::{FeaturesSpec, PackagesFilter}; use tracing::trace; +use super::check_package_dojo_version; + pub(crate) const LOG_TARGET: &str = "sozo::cli::commands::test"; #[derive(Debug, Clone, PartialEq, clap::ValueEnum)] @@ -81,6 +83,10 @@ impl TestArgs { ws.members().collect() }; + for p in &packages { + check_package_dojo_version(&ws, p)?; + } + let resolve = ops::resolve_workspace(&ws)?; let opts = CompileOpts { diff --git a/bin/sozo/src/main.rs b/bin/sozo/src/main.rs index eea47706c5..9fcddf4c7f 100644 --- a/bin/sozo/src/main.rs +++ b/bin/sozo/src/main.rs @@ -33,10 +33,16 @@ fn cli_main(args: SozoArgs) -> Result<()> { let cairo_plugins = CairoPluginRepository::default(); match &args.command { - Commands::Build(_) | Commands::Dev(_) | Commands::Migrate(_) => { + Commands::Build(args) => { trace!("Adding DojoCompiler to compiler repository."); - compilers.add(Box::new(DojoCompiler)).unwrap() + compilers.add(Box::new(DojoCompiler::new(args.output_debug_info))).unwrap() } + + Commands::Dev(_) | Commands::Migrate(_) => { + trace!("Adding DojoCompiler to compiler repository."); + compilers.add(Box::new(DojoCompiler::default())).unwrap() + } + _ => {} } diff --git a/bin/sozo/src/utils.rs b/bin/sozo/src/utils.rs index e658364049..662076e4df 100644 --- a/bin/sozo/src/utils.rs +++ b/bin/sozo/src/utils.rs @@ -52,7 +52,7 @@ pub fn load_metadata_from_config(config: &Config) -> Result, pub async fn world_from_env_metadata( world: WorldOptions, account: AccountOptions, - starknet: StarknetOptions, + starknet: &StarknetOptions, env_metadata: &Option, config: &Config, ) -> Result>>, Error> { @@ -64,7 +64,7 @@ pub async fn world_from_env_metadata( .account( provider, WorldAddressOrName::Address(world_address), - &starknet, + starknet, env_metadata, config, ) diff --git a/bin/sozo/tests/test_data/policies.json b/bin/sozo/tests/test_data/policies.json index 725f381d4e..60b4ca826e 100644 --- a/bin/sozo/tests/test_data/policies.json +++ b/bin/sozo/tests/test_data/policies.json @@ -27,6 +27,10 @@ "target": "0x2d24481107b55ecd73c4d1b62f6bfe8c42a224447b71db7dcec2eab484d53cd", "method": "set_player_server_profile" }, + { + "target": "0x2d24481107b55ecd73c4d1b62f6bfe8c42a224447b71db7dcec2eab484d53cd", + "method": "set_models" + }, { "target": 
"0x2d24481107b55ecd73c4d1b62f6bfe8c42a224447b71db7dcec2eab484d53cd", "method": "enter_dungeon" diff --git a/bin/torii/Cargo.toml b/bin/torii/Cargo.toml index bd90036636..977764b26d 100644 --- a/bin/torii/Cargo.toml +++ b/bin/torii/Cargo.toml @@ -19,21 +19,21 @@ dojo-utils.workspace = true dojo-world.workspace = true either = "1.9.0" futures.workspace = true -http.workspace = true http-body = "0.4.5" -hyper.workspace = true +http.workspace = true hyper-reverse-proxy = { git = "https://github.com/tarrencev/hyper-reverse-proxy" } +hyper.workspace = true indexmap.workspace = true lazy_static.workspace = true scarb.workspace = true serde.workspace = true serde_json.workspace = true sqlx.workspace = true -starknet.workspace = true starknet-crypto.workspace = true -tokio.workspace = true +starknet.workspace = true tokio-stream = "0.1.11" tokio-util = "0.7.7" +tokio.workspace = true torii-core.workspace = true torii-graphql.workspace = true torii-grpc = { workspace = true, features = [ "server" ] } @@ -42,8 +42,8 @@ torii-server.workspace = true tower.workspace = true tower-http.workspace = true -tracing.workspace = true tracing-subscriber.workspace = true +tracing.workspace = true url.workspace = true webbrowser = "0.8" diff --git a/bin/torii/src/main.rs b/bin/torii/src/main.rs index d9aa561b74..40a3514fd9 100644 --- a/bin/torii/src/main.rs +++ b/bin/torii/src/main.rs @@ -29,6 +29,7 @@ use tokio::sync::broadcast::Sender; use tokio_stream::StreamExt; use torii_core::engine::{Engine, EngineConfig, Processors}; use torii_core::processors::event_message::EventMessageProcessor; +use torii_core::processors::generate_event_processors_map; use torii_core::processors::metadata_update::MetadataUpdateProcessor; use torii_core::processors::register_model::RegisterModelProcessor; use torii_core::processors::store_del_record::StoreDelRecordProcessor; @@ -79,6 +80,10 @@ struct Args { #[arg(long, value_name = "PORT", default_value = "9091")] relay_webrtc_port: u16, + /// Port to serve Libp2p WebRTC transport + #[arg(long, value_name = "PORT", default_value = "9092")] + relay_websocket_port: u16, + /// Path to a local identity key file. If not specified, a new identity will be generated #[arg(long, value_name = "PATH")] relay_local_key_path: Option, @@ -168,8 +173,9 @@ async fn main() -> anyhow::Result<()> { let world = WorldContractReader::new(args.world_address, &provider); let db = Sql::new(pool.clone(), args.world_address).await?; + let processors = Processors { - event: vec![ + event: generate_event_processors_map(vec![ Box::new(RegisterModelProcessor), Box::new(StoreSetRecordProcessor), Box::new(MetadataUpdateProcessor), @@ -177,7 +183,7 @@ async fn main() -> anyhow::Result<()> { Box::new(EventMessageProcessor), Box::new(StoreUpdateRecordProcessor), Box::new(StoreUpdateMemberProcessor), - ], + ])?, transaction: vec![Box::new(StoreTransactionProcessor)], ..Processors::default() }; @@ -214,6 +220,7 @@ async fn main() -> anyhow::Result<()> { provider.clone(), args.relay_port, args.relay_webrtc_port, + args.relay_websocket_port, args.relay_local_key_path, args.relay_cert_path, ) @@ -258,7 +265,7 @@ async fn main() -> anyhow::Result<()> { } tokio::select! 
{ - _ = engine.start() => {}, + res = engine.start() => res?, _ = proxy_server.start(shutdown_tx.subscribe()) => {}, _ = graphql_server => {}, _ = grpc_server => {}, diff --git a/crates/dojo-bindgen/src/lib.rs b/crates/dojo-bindgen/src/lib.rs index 4f88942a4b..4f8f3f7f15 100644 --- a/crates/dojo-bindgen/src/lib.rs +++ b/crates/dojo-bindgen/src/lib.rs @@ -1,3 +1,4 @@ +use std::cmp::Ordering; use std::collections::HashMap; use std::fs; use std::path::PathBuf; @@ -119,7 +120,7 @@ fn gather_dojo_data( let mut base_manifest = BaseManifest::load_from_path(&base_manifest_dir)?; if let Some(skip_manifests) = skip_migration { - base_manifest.remove_tags(skip_manifests); + base_manifest.remove_tags(&skip_manifests); } let mut models = HashMap::new(); @@ -211,6 +212,13 @@ fn filter_model_tokens(tokens: &TokenizedAbi) -> TokenizedAbi { TokenizedAbi { structs, enums, ..Default::default() } } +/// Compares two tokens by their type name. +pub fn compare_tokens_by_type_name(a: &Token, b: &Token) -> Ordering { + let a_name = a.to_composite().expect("composite expected").type_name_or_alias(); + let b_name = b.to_composite().expect("composite expected").type_name_or_alias(); + a_name.cmp(&b_name) +} + #[cfg(test)] mod tests { use dojo_test_utils::compiler::CompilerTestSetup; diff --git a/crates/dojo-bindgen/src/plugins/typescript/mod.rs b/crates/dojo-bindgen/src/plugins/typescript/mod.rs index 49151f8546..4d4645840d 100644 --- a/crates/dojo-bindgen/src/plugins/typescript/mod.rs +++ b/crates/dojo-bindgen/src/plugins/typescript/mod.rs @@ -7,7 +7,7 @@ use dojo_world::contracts::naming; use crate::error::BindgenResult; use crate::plugins::BuiltinPlugin; -use crate::{DojoContract, DojoData, DojoModel}; +use crate::{compare_tokens_by_type_name, DojoContract, DojoData, DojoModel}; #[cfg(test)] mod tests; @@ -277,28 +277,20 @@ export const {name}Definition = {{ for model in models { let tokens = &model.tokens; - for token in &tokens.enums { + let mut sorted_structs = tokens.structs.clone(); + sorted_structs.sort_by(compare_tokens_by_type_name); + + let mut sorted_enums = tokens.enums.clone(); + sorted_enums.sort_by(compare_tokens_by_type_name); + + for token in &sorted_enums { handled_tokens.push(token.to_composite().unwrap().to_owned()); } - for token in &tokens.structs { + for token in &sorted_structs { handled_tokens.push(token.to_composite().unwrap().to_owned()); } - let mut structs = tokens.structs.to_owned(); - structs.sort_by(|a, b| { - if a.to_composite() - .unwrap() - .inners - .iter() - .any(|field| field.token.type_name() == b.type_name()) - { - std::cmp::Ordering::Greater - } else { - std::cmp::Ordering::Less - } - }); - - for token in &tokens.enums { + for token in &sorted_enums { if handled_tokens.iter().filter(|t| t.type_name() == token.type_name()).count() > 1 { continue; @@ -306,7 +298,7 @@ export const {name}Definition = {{ out += TypescriptPlugin::format_enum(token.to_composite().unwrap()).as_str(); } - for token in &structs { + for token in &sorted_structs { if handled_tokens.iter().filter(|t| t.type_name() == token.type_name()).count() > 1 { continue; @@ -454,7 +446,7 @@ export function defineContractComponents(world: World) { \"{namespace}\" ); }} catch (error) {{ - console.error(\"Error executing spawn:\", error); + console.error(\"Error executing {system_name}:\", error); throw error; }} }}; @@ -601,14 +593,22 @@ impl BuiltinPlugin for TypescriptPlugin { // Handle codegen for models let models_path = Path::new("models.gen.ts").to_owned(); - let models = data.models.values().collect::>(); + 
let mut models = data.models.values().collect::>(); + + // Sort models based on their tag to ensure deterministic output. + models.sort_by(|a, b| a.tag.cmp(&b.tag)); + let code = self.handle_model(models.as_slice(), &mut handled_tokens); out.insert(models_path, code.as_bytes().to_vec()); // Handle codegen for contracts & systems let contracts_path = Path::new("contracts.gen.ts").to_owned(); - let contracts = data.contracts.values().collect::>(); + let mut contracts = data.contracts.values().collect::>(); + + // Sort contracts based on their tag to ensure deterministic output. + contracts.sort_by(|a, b| a.tag.cmp(&b.tag)); + let code = self.handle_contracts(contracts.as_slice(), &handled_tokens); out.insert(contracts_path, code.as_bytes().to_vec()); diff --git a/crates/dojo-bindgen/src/plugins/unity/mod.rs b/crates/dojo-bindgen/src/plugins/unity/mod.rs index bd5ed274a4..34ba0cde65 100644 --- a/crates/dojo-bindgen/src/plugins/unity/mod.rs +++ b/crates/dojo-bindgen/src/plugins/unity/mod.rs @@ -7,7 +7,7 @@ use dojo_world::contracts::naming::{self, get_namespace_from_tag}; use crate::error::BindgenResult; use crate::plugins::BuiltinPlugin; -use crate::{DojoContract, DojoData, DojoModel}; +use crate::{compare_tokens_by_type_name, DojoContract, DojoData, DojoModel}; #[derive(Debug)] pub struct UnityPlugin {} @@ -237,7 +237,14 @@ namespace {namespace} {{ let mut model_struct: Option<&Composite> = None; let tokens = &model.tokens; - for token in &tokens.structs { + + let mut sorted_structs = tokens.structs.clone(); + sorted_structs.sort_by(compare_tokens_by_type_name); + + let mut sorted_enums = tokens.enums.clone(); + sorted_enums.sort_by(compare_tokens_by_type_name); + + for token in &sorted_structs { if handled_tokens.contains_key(&token.type_path()) { continue; } @@ -253,7 +260,7 @@ namespace {namespace} {{ out += UnityPlugin::format_struct(token.to_composite().unwrap()).as_str(); } - for token in &tokens.enums { + for token in &sorted_enums { if handled_tokens.contains_key(&token.type_path()) { continue; } @@ -542,8 +549,12 @@ impl BuiltinPlugin for UnityPlugin { let mut out: HashMap> = HashMap::new(); let mut handled_tokens = HashMap::::new(); + let mut models = data.models.iter().collect::>(); + // Sort models based on their tag to ensure deterministic output. + models.sort_by(|(_, a), (_, b)| a.tag.cmp(&b.tag)); + // Handle codegen for models - for (name, model) in &data.models { + for (name, model) in &models { let models_path = Path::new(&format!("Models/{}.gen.cs", name)).to_owned(); println!("Generating model: {}", name); @@ -552,8 +563,12 @@ impl BuiltinPlugin for UnityPlugin { out.insert(models_path, code.as_bytes().to_vec()); } + let mut contracts = data.contracts.iter().collect::>(); + // Sort contracts based on their tag to ensure deterministic output. 
+ contracts.sort_by(|(_, a), (_, b)| a.tag.cmp(&b.tag)); + // Handle codegen for systems - for (name, contract) in &data.contracts { + for (name, contract) in &contracts { let contracts_path = Path::new(&format!("Contracts/{}.gen.cs", name)).to_owned(); println!("Generating contract: {}", name); diff --git a/crates/dojo-core/src/model/metadata.cairo b/crates/dojo-core/src/model/metadata.cairo index affc619aa5..7a8e18080c 100644 --- a/crates/dojo-core/src/model/metadata.cairo +++ b/crates/dojo-core/src/model/metadata.cairo @@ -52,13 +52,13 @@ pub impl ResourceMetadataModel of Model { ResourceMetadataTrait::from_values(*keys.at(0), ref values) } - fn set(self: @ResourceMetadata, world: IWorldDispatcher,) { + fn set_model(self: @ResourceMetadata, world: IWorldDispatcher,) { IWorldDispatcherTrait::set_entity( world, Self::selector(), ModelIndex::Keys(self.keys()), self.values(), Self::layout() ); } - fn delete(self: @ResourceMetadata, world: IWorldDispatcher,) { + fn delete_model(self: @ResourceMetadata, world: IWorldDispatcher,) { world.delete_entity(Self::selector(), ModelIndex::Keys(self.keys()), Self::layout()); } diff --git a/crates/dojo-core/src/model/model.cairo b/crates/dojo-core/src/model/model.cairo index 63aa9ec645..6b11143ca0 100644 --- a/crates/dojo-core/src/model/model.cairo +++ b/crates/dojo-core/src/model/model.cairo @@ -17,9 +17,12 @@ pub trait ModelEntity { fn id(self: @T) -> felt252; fn values(self: @T) -> Span; fn from_values(entity_id: felt252, ref values: Span) -> T; + // Get is always used with the trait path, which results in no ambiguity for the compiler. fn get(world: IWorldDispatcher, entity_id: felt252) -> T; - fn update(self: @T, world: IWorldDispatcher); - fn delete(self: @T, world: IWorldDispatcher); + // Update and delete can be used directly on the entity, which results in ambiguity. + // Therefore, they are implemented with the `update_entity` and `delete_entity` names. + fn update_entity(self: @T, world: IWorldDispatcher); + fn delete_entity(self: @T, world: IWorldDispatcher); fn get_member( world: IWorldDispatcher, entity_id: felt252, member_id: felt252, ) -> Span; @@ -27,11 +30,15 @@ pub trait ModelEntity { } pub trait Model { + // Get is always used with the trait path, which results in no ambiguity for the compiler. fn get(world: IWorldDispatcher, keys: Span) -> T; // Note: `get` is implemented with a generated trait because it takes // the list of model keys as separated parameters. - fn set(self: @T, world: IWorldDispatcher); - fn delete(self: @T, world: IWorldDispatcher); + + // Set and delete can be used directly on the entity, which results in ambiguity. + // Therefore, they are implemented with the `set_model` and `delete_model` names. 
+ fn set_model(self: @T, world: IWorldDispatcher); + fn delete_model(self: @T, world: IWorldDispatcher); fn get_member( world: IWorldDispatcher, keys: Span, member_id: felt252, diff --git a/crates/dojo-core/src/tests/benchmarks.cairo b/crates/dojo-core/src/tests/benchmarks.cairo index 96a43a4df7..298057be2a 100644 --- a/crates/dojo-core/src/tests/benchmarks.cairo +++ b/crates/dojo-core/src/tests/benchmarks.cairo @@ -17,7 +17,7 @@ use dojo::model::introspect::Introspect; use dojo::storage::{database, storage}; use dojo::world::{IWorldDispatcher, IWorldDispatcherTrait}; -use dojo::tests::helpers::{Foo, Sword, Case, case, Character, Abilities, Stats, Weapon}; +use dojo::tests::helpers::{Foo, Sword, Case, CaseStore, case, Character, Abilities, Stats, Weapon}; use dojo::utils::test::{spawn_test_world, GasCounterTrait}; #[derive(Drop, Serde)] diff --git a/crates/dojo-core/src/tests/helpers.cairo b/crates/dojo-core/src/tests/helpers.cairo index c955fa1d5b..54d22cff28 100644 --- a/crates/dojo-core/src/tests/helpers.cairo +++ b/crates/dojo-core/src/tests/helpers.cairo @@ -44,6 +44,13 @@ pub mod foo_setter { #[dojo::contract] pub mod test_contract {} +#[dojo::contract] +pub mod test_contract_with_dojo_init_args { + fn dojo_init(world: @IWorldDispatcher, _arg1: felt252) { + let _u = world.uuid(); + } +} + #[dojo::contract(namespace: "buzz_namespace", nomapping: true)] pub mod buzz_contract {} diff --git a/crates/dojo-core/src/tests/world.cairo b/crates/dojo-core/src/tests/world.cairo deleted file mode 100644 index 491c4bd345..0000000000 --- a/crates/dojo-core/src/tests/world.cairo +++ /dev/null @@ -1,1705 +0,0 @@ -use core::array::{ArrayTrait, SpanTrait}; -use core::clone::Clone; -use core::option::OptionTrait; -use core::result::ResultTrait; -use core::traits::{Into, TryInto}; - -use starknet::{contract_address_const, ContractAddress, ClassHash, get_caller_address}; -use starknet::syscalls::deploy_syscall; - -use dojo::world::config::Config::{ - DifferProgramHashUpdate, MergerProgramHashUpdate, FactsRegistryUpdate -}; -use dojo::world::config::{IConfigDispatcher, IConfigDispatcherTrait}; -use dojo::model::{ModelIndex, Layout, FieldLayout, Model, ResourceMetadata}; -use dojo::model::introspect::{Introspect}; -use dojo::utils::bytearray_hash; -use dojo::storage::database::MAX_ARRAY_LENGTH; -use dojo::utils::test::{spawn_test_world, deploy_with_world_address, assert_array, GasCounterTrait}; -use dojo::utils::entity_id_from_keys; -use dojo::world::{ - IWorldDispatcher, IWorldDispatcherTrait, world, IUpgradeableWorld, IUpgradeableWorldDispatcher, - IUpgradeableWorldDispatcherTrait, Resource -}; -use dojo::world::world::NamespaceRegistered; - -use super::benchmarks; -use super::benchmarks::Character; - -#[derive(Introspect, Copy, Drop, Serde)] -enum OneEnum { - FirstArm: (u8, felt252), - SecondArm, -} - -#[derive(Introspect, Drop, Serde)] -enum AnotherEnum { - FirstArm: (u8, OneEnum, ByteArray), - SecondArm: (u8, OneEnum, ByteArray) -} - -#[derive(Copy, Drop, Serde)] -#[dojo::model] -pub struct Foo { - #[key] - pub caller: ContractAddress, - pub a: felt252, - pub b: u128, -} - -#[derive(Copy, Drop, Serde)] -#[dojo::model(namespace: "another_namespace", nomapping: true)] -pub struct Buzz { - #[key] - pub caller: ContractAddress, - pub a: felt252, - pub b: u128, -} - - -fn create_foo() -> Span { - [1, 2].span() -} - -#[derive(Copy, Drop, Serde)] -#[dojo::model] -pub struct Fizz { - #[key] - pub caller: ContractAddress, - pub a: felt252 -} - -#[derive(Copy, Drop, Serde)] -#[dojo::model] -pub struct 
StructSimpleModel { - #[key] - pub caller: ContractAddress, - pub a: felt252, - pub b: u128, -} - -fn create_struct_simple_model() -> Span { - [1, 2].span() -} - -#[derive(Copy, Drop, Serde)] -#[dojo::model] -pub struct StructWithTuple { - #[key] - pub caller: ContractAddress, - pub a: (u8, u64) -} - -fn create_struct_with_tuple() -> Span { - [12, 58].span() -} - -#[derive(Copy, Drop, Serde)] -#[dojo::model] -pub struct StructWithEnum { - #[key] - pub caller: ContractAddress, - pub a: OneEnum, -} - -fn create_struct_with_enum_first_variant() -> Span { - [0, 1, 2].span() -} - -fn create_struct_with_enum_second_variant() -> Span { - [1].span() -} - -#[derive(Copy, Drop, Serde)] -#[dojo::model] -pub struct StructSimpleArrayModel { - #[key] - pub caller: ContractAddress, - pub a: felt252, - pub b: Array, - pub c: u128, -} - -impl ArrayU64Copy of core::traits::Copy>; - -fn create_struct_simple_array_model() -> Span { - [1, 4, 10, 20, 30, 40, 2].span() -} - -#[derive(Drop, Serde)] -#[dojo::model] -pub struct StructByteArrayModel { - #[key] - pub caller: ContractAddress, - pub a: felt252, - pub b: ByteArray, -} - -fn create_struct_byte_array_model() -> Span { - [1, 3, 'first', 'second', 'third', 'pending', 7].span() -} - -#[derive(Introspect, Copy, Drop, Serde)] -pub struct ModelData { - pub x: u256, - pub y: u32, - pub z: felt252 -} - -#[derive(Drop, Serde)] -#[dojo::model] -pub struct StructComplexArrayModel { - #[key] - pub caller: ContractAddress, - pub a: felt252, - pub b: Array, - pub c: AnotherEnum, -} - -fn create_struct_complex_array_model() -> Span { - [ - 1, // a - 2, // b (array length) - 1, - 2, - 3, - 4, // item 1 - 5, - 6, - 7, - 8, // item 2 - 1, // c (AnotherEnum variant) - 1, // u8 - 0, // OneEnum variant - 0, // u8 - 123, // felt252 - 1, - 'first', - 'pending', - 7 // ByteArray - ].span() -} - -#[derive(Drop, Serde)] -#[dojo::model] -pub struct StructNestedModel { - #[key] - pub caller: ContractAddress, - pub x: (u8, u16, (u32, ByteArray, u8), Array<(u8, u16)>), - pub y: Array> -} - -fn create_struct_nested_model() -> Span { - [ - // -- x - 1, // u8 - 2, // u16 - 3, - 1, - 'first', - 'pending', - 7, - 9, // (u32, ByteArray, u8) - 3, - 1, - 2, - 3, - 4, - 5, - 6, // Array<(u8, u16)> with 3 items - // -- y - 2, // Array> with 2 items - 3, // first array item - Array<(u8, (u16, u256))> of 3 items - 1, - 2, - 0, - 3, // first array item - (u8, (u16, u256)) - 4, - 5, - 0, - 6, // second array item - (u8, (u16, u256)) - 8, - 7, - 9, - 10, // third array item - (u8, (u16, u256)) - 1, // second array item - Array<(u8, (u16, u256))> of 1 item - 5, - 4, - 6, - 7 // first array item - (u8, (u16, u256)) - ].span() -} - -#[derive(Introspect, Copy, Drop, Serde)] -pub enum EnumGeneric { - One: T, - Two: U -} - -#[derive(Drop, Serde)] -#[dojo::model] -pub struct StructWithGeneric { - #[key] - pub caller: ContractAddress, - pub x: EnumGeneric, -} - -fn create_struct_generic_first_variant() -> Span { - [0, 1].span() -} - -fn create_struct_generic_second_variant() -> Span { - [1, 1, 2].span() -} - -fn get_key_test() -> Span { - [0x01234].span() -} - -#[starknet::interface] -trait IMetadataOnly { - fn selector(self: @T) -> felt252; - fn name(self: @T) -> ByteArray; - fn namespace(self: @T) -> ByteArray; - fn namespace_hash(self: @T) -> felt252; -} - -#[starknet::contract] -mod resource_metadata_malicious { - use dojo::model::{Model, ResourceMetadata}; - use dojo::utils::bytearray_hash; - - #[storage] - struct Storage {} - - #[abi(embed_v0)] - impl InvalidModelName of super::IMetadataOnly { - fn 
selector(self: @ContractState) -> felt252 { - Model::::selector() - } - - fn namespace(self: @ContractState) -> ByteArray { - "dojo" - } - - fn namespace_hash(self: @ContractState) -> felt252 { - bytearray_hash(@Self::namespace(self)) - } - - fn name(self: @ContractState) -> ByteArray { - "invalid_model_name" - } - } -} - -#[starknet::interface] -trait Ibar { - fn set_foo(self: @TContractState, a: felt252, b: u128); - fn delete_foo(self: @TContractState); - fn delete_foo_macro(self: @TContractState, foo: Foo); - fn set_char(self: @TContractState, a: felt252, b: u32); -} - -#[starknet::contract] -mod bar { - use core::traits::Into; - use starknet::{get_caller_address, ContractAddress}; - use starknet::storage::{StoragePointerReadAccess, StoragePointerWriteAccess}; - use dojo::model::{Model, ModelIndex}; - - use super::{Foo, IWorldDispatcher, IWorldDispatcherTrait, Introspect}; - use super::benchmarks::{Character, Abilities, Stats, Weapon, Sword}; - - #[storage] - struct Storage { - world: IWorldDispatcher, - } - #[constructor] - fn constructor(ref self: ContractState, world: ContractAddress) { - self.world.write(IWorldDispatcher { contract_address: world }) - } - - #[abi(embed_v0)] - impl IbarImpl of super::Ibar { - fn set_foo(self: @ContractState, a: felt252, b: u128) { - set!(self.world.read(), Foo { caller: get_caller_address(), a, b }); - } - - fn delete_foo(self: @ContractState) { - self - .world - .read() - .delete_entity( - Model::::selector(), - ModelIndex::Keys([get_caller_address().into()].span()), - Model::::layout() - ); - } - - fn delete_foo_macro(self: @ContractState, foo: Foo) { - delete!(self.world.read(), Foo { caller: foo.caller, a: foo.a, b: foo.b }); - } - - fn set_char(self: @ContractState, a: felt252, b: u32) { - set!( - self.world.read(), - Character { - caller: get_caller_address(), - heigth: a, - abilities: Abilities { - strength: 0x12, - dexterity: 0x34, - constitution: 0x56, - intelligence: 0x78, - wisdom: 0x9a, - charisma: 0xbc, - }, - stats: Stats { - kills: 0x123456789abcdef, - deaths: 0x1234, - rests: 0x12345678, - hits: 0x123456789abcdef, - blocks: 0x12345678, - walked: 0x123456789abcdef, - runned: 0x123456789abcdef, - finished: true, - romances: 0x1234, - }, - weapon: Weapon::DualWield( - ( - Sword { swordsmith: get_caller_address(), damage: 0x12345678, }, - Sword { swordsmith: get_caller_address(), damage: 0x12345678, } - ) - ), - gold: b, - } - ); - } - } -} - -// Tests - -fn deploy_world_and_bar() -> (IWorldDispatcher, IbarDispatcher) { - // Spawn empty world - let world = deploy_world(); - world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap()); - - // System contract - let bar_contract = IbarDispatcher { - contract_address: deploy_with_world_address(bar::TEST_CLASS_HASH, world) - }; - - (world, bar_contract) -} - -#[test] -#[available_gas(2000000)] -fn test_model() { - let world = deploy_world(); - world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap()); -} - -#[test] -fn test_system() { - let (world, bar_contract) = deploy_world_and_bar(); - - bar_contract.set_foo(1337, 1337); - - let stored: Foo = get!(world, get_caller_address(), Foo); - assert(stored.a == 1337, 'data not stored'); - assert(stored.b == 1337, 'data not stored'); -} - -#[test] -fn test_delete() { - let (world, bar_contract) = deploy_world_and_bar(); - - // set model - bar_contract.set_foo(1337, 1337); - let stored: Foo = get!(world, get_caller_address(), Foo); - assert(stored.a == 1337, 'data not stored'); - assert(stored.b == 1337, 'data not stored'); - - // delete model 
- bar_contract.delete_foo_macro(stored); - - let deleted: Foo = get!(world, get_caller_address(), Foo); - assert(deleted.a == 0, 'data not deleted'); - assert(deleted.b == 0, 'data not deleted'); -} - -#[test] -#[available_gas(6000000)] -fn test_contract_getter() { - let world = deploy_world(); - - let _ = world - .deploy_contract('salt1', test_contract::TEST_CLASS_HASH.try_into().unwrap(), [].span()); - - if let Resource::Contract((class_hash, _)) = world - .resource(selector_from_tag!("dojo-test_contract")) { - assert( - class_hash == test_contract::TEST_CLASS_HASH.try_into().unwrap(), - 'invalid contract class hash' - ); - } else { - core::panic_with_felt252('invalid resource type'); - } -} - -#[test] -#[available_gas(6000000)] -fn test_model_class_hash_getter() { - let world = deploy_world(); - world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap()); - - if let Resource::Model((foo_class_hash, _)) = world.resource(Model::::selector()) { - assert(foo_class_hash == foo::TEST_CLASS_HASH.try_into().unwrap(), 'foo wrong class hash'); - } else { - core::panic_with_felt252('invalid resource type'); - } -} - -#[test] -#[ignore] -#[available_gas(6000000)] -fn test_legacy_model_class_hash_getter() { - let world = deploy_world(); - world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap()); - - if let Resource::Model((foo_class_hash, _)) = world.resource(Model::::selector()) { - assert(foo_class_hash == foo::TEST_CLASS_HASH.try_into().unwrap(), 'foo wrong class hash'); - } else { - core::panic_with_felt252('invalid resource type'); - } -} - -#[test] -fn test_register_namespace() { - let world = deploy_world(); - - let caller = starknet::contract_address_const::<0xb0b>(); - starknet::testing::set_account_contract_address(caller); - - drop_all_events(world.contract_address); - - let namespace = "namespace"; - let hash = bytearray_hash(@namespace); - - world.register_namespace(namespace); - - assert(world.is_owner(hash, caller), 'namespace not registered'); - - assert_eq!( - starknet::testing::pop_log(world.contract_address), - Option::Some(NamespaceRegistered { namespace: "namespace", hash }) - ); -} - -#[test] -fn test_register_namespace_already_registered_same_caller() { - let world = deploy_world(); - - let caller = starknet::contract_address_const::<0xb0b>(); - starknet::testing::set_account_contract_address(caller); - - let namespace = "namespace"; - let hash = bytearray_hash(@namespace); - - world.register_namespace(namespace); - - drop_all_events(world.contract_address); - - world.register_namespace("namespace"); - - assert(world.is_owner(hash, caller), 'namespace not registered'); - - let event = starknet::testing::pop_log_raw(world.contract_address); - assert(event.is_none(), 'unexpected event'); -} - -#[test] -#[should_panic(expected: ('namespace already registered', 'ENTRYPOINT_FAILED',))] -fn test_register_namespace_already_registered_other_caller() { - let world = deploy_world(); - - let account = starknet::contract_address_const::<0xb0b>(); - starknet::testing::set_account_contract_address(account); - - world.register_namespace("namespace"); - - let another_account = starknet::contract_address_const::<0xa11ce>(); - starknet::testing::set_account_contract_address(another_account); - - world.register_namespace("namespace"); -} - -#[test] -#[available_gas(6000000)] -fn test_emit() { - let world = deploy_world(); - - let mut keys = ArrayTrait::new(); - keys.append('MyEvent'); - let mut values = ArrayTrait::new(); - values.append(1); - values.append(2); - world.emit(keys, 
values.span()); -} - -#[test] -fn test_set_entity_admin() { - let (world, bar_contract) = deploy_world_and_bar(); - - let alice = starknet::contract_address_const::<0x1337>(); - starknet::testing::set_contract_address(alice); - - bar_contract.set_foo(420, 1337); - - let foo: Foo = get!(world, alice, Foo); - assert(foo.a == 420, 'data not stored'); - assert(foo.b == 1337, 'data not stored'); -} - -#[test] -#[available_gas(8000000)] -#[should_panic] -fn test_set_entity_unauthorized() { - // Spawn empty world - let world = deploy_world(); - - let bar_contract = IbarDispatcher { - contract_address: deploy_with_world_address(bar::TEST_CLASS_HASH, world) - }; - - world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap()); - - let caller = starknet::contract_address_const::<0x1337>(); - starknet::testing::set_account_contract_address(caller); - - // Call bar system, should panic as it's not authorized - bar_contract.set_foo(420, 1337); -} - -// Utils -fn deploy_world() -> IWorldDispatcher { - spawn_test_world("dojo", array![]) -} - -#[test] -#[available_gas(60000000)] -fn test_set_metadata_world() { - let world = deploy_world(); - - let metadata = ResourceMetadata { - resource_id: 0, metadata_uri: format!("ipfs:world_with_a_long_uri_that") - }; - - world.set_metadata(metadata.clone()); - - assert(world.metadata(0) == metadata, 'invalid metadata'); -} - -#[test] -#[available_gas(60000000)] -fn test_set_metadata_model_writer() { - let world = spawn_test_world("dojo", array![foo::TEST_CLASS_HASH],); - - let bar_contract = IbarDispatcher { - contract_address: deploy_with_world_address(bar::TEST_CLASS_HASH, world) - }; - - world.grant_writer(Model::::selector(), bar_contract.contract_address); - - let bob = starknet::contract_address_const::<0xb0b>(); - starknet::testing::set_account_contract_address(bob); - starknet::testing::set_contract_address(bar_contract.contract_address); - - bar_contract.set_foo(1337, 1337); - - let metadata = ResourceMetadata { - resource_id: Model::::selector(), metadata_uri: format!("ipfs:bob") - }; - - // A system that has write access on a model should be able to update the metadata. - // This follows conventional ACL model. - world.set_metadata(metadata.clone()); - assert(world.metadata(Model::::selector()) == metadata, 'bad metadata'); -} - -#[test] -#[available_gas(60000000)] -#[should_panic(expected: ('no write access', 'ENTRYPOINT_FAILED',))] -fn test_set_metadata_same_model_rules() { - let world = deploy_world(); - - let metadata = ResourceMetadata { // World metadata. - resource_id: 0, metadata_uri: format!("ipfs:bob"), - }; - - let bob = starknet::contract_address_const::<0xb0b>(); - starknet::testing::set_contract_address(bob); - starknet::testing::set_account_contract_address(bob); - - // Bob access follows the conventional ACL, he can't write the world - // metadata if he does not have access to it. 
- world.set_metadata(metadata); -} - -#[test] -#[available_gas(60000000)] -#[should_panic(expected: ('only owner can update', 'ENTRYPOINT_FAILED',))] -fn test_metadata_update_owner_only() { - let world = deploy_world(); - - let bob = starknet::contract_address_const::<0xb0b>(); - starknet::testing::set_contract_address(bob); - - world.grant_owner(bytearray_hash(@"dojo"), bob); - - starknet::testing::set_account_contract_address(bob); - - world.register_model(resource_metadata_malicious::TEST_CLASS_HASH.try_into().unwrap()); -} - -#[test] -#[available_gas(6000000)] -fn test_owner() { - let world = deploy_world(); - world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap()); - let foo_selector = Model::::selector(); - - let alice = starknet::contract_address_const::<0x1337>(); - let bob = starknet::contract_address_const::<0x1338>(); - - assert(!world.is_owner(0, alice), 'should not be owner'); - assert(!world.is_owner(foo_selector, bob), 'should not be owner'); - - world.grant_owner(0, alice); - assert(world.is_owner(0, alice), 'should be owner'); - - world.grant_owner(foo_selector, bob); - assert(world.is_owner(foo_selector, bob), 'should be owner'); - - world.revoke_owner(0, alice); - assert(!world.is_owner(0, alice), 'should not be owner'); - - world.revoke_owner(foo_selector, bob); - assert(!world.is_owner(foo_selector, bob), 'should not be owner'); -} - -#[test] -#[available_gas(6000000)] -#[should_panic] -fn test_set_owner_fails_for_non_owner() { - let world = deploy_world(); - - let alice = starknet::contract_address_const::<0x1337>(); - starknet::testing::set_account_contract_address(alice); - - world.revoke_owner(0, alice); - assert(!world.is_owner(0, alice), 'should not be owner'); - - world.grant_owner(0, alice); -} - -#[test] -#[available_gas(6000000)] -fn test_writer() { - let world = deploy_world(); - world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap()); - let foo_selector = Model::::selector(); - - assert(!world.is_writer(foo_selector, 69.try_into().unwrap()), 'should not be writer'); - - world.grant_writer(foo_selector, 69.try_into().unwrap()); - assert(world.is_writer(foo_selector, 69.try_into().unwrap()), 'should be writer'); - - world.revoke_writer(foo_selector, 69.try_into().unwrap()); - assert(!world.is_writer(foo_selector, 69.try_into().unwrap()), 'should not be writer'); -} - -#[test] -#[should_panic(expected: ('resource not registered', 'ENTRYPOINT_FAILED'))] -fn test_writer_not_registered_resource() { - let world = deploy_world(); - - // 42 is not a registered resource ID - world.grant_writer(42, 69.try_into().unwrap()); -} - -#[test] -#[available_gas(6000000)] -#[should_panic] -fn test_system_not_writer_fail() { - let world = spawn_test_world("dojo", array![foo::TEST_CLASS_HASH],); - - let bar_address = deploy_with_world_address(bar::TEST_CLASS_HASH, world); - let bar_contract = IbarDispatcher { contract_address: bar_address }; - - // Caller is not owner now - let account = starknet::contract_address_const::<0xb0b>(); - starknet::testing::set_account_contract_address(account); - - // Should panic, system not writer - bar_contract.set_foo(25, 16); -} - -#[test] -fn test_system_writer_access() { - let world = spawn_test_world("dojo", array![foo::TEST_CLASS_HASH],); - - let bar_address = deploy_with_world_address(bar::TEST_CLASS_HASH, world); - let bar_contract = IbarDispatcher { contract_address: bar_address }; - - world.grant_writer(Model::::selector(), bar_address); - assert(world.is_writer(Model::::selector(), bar_address), 'should be writer'); - - 
// Caller is not owner now - let caller = starknet::contract_address_const::<0x1337>(); - starknet::testing::set_account_contract_address(caller); - - // Should not panic, system is writer - bar_contract.set_foo(25, 16); -} - -#[test] -#[available_gas(6000000)] -#[should_panic] -fn test_set_writer_fails_for_non_owner() { - let world = deploy_world(); - - let alice = starknet::contract_address_const::<0x1337>(); - starknet::testing::set_contract_address(alice); - - assert(!world.is_owner(0, alice), 'should not be owner'); - - world.grant_writer(42, 69.try_into().unwrap()); -} - -#[test] -fn test_execute_multiple_worlds() { - // Deploy world contract - let world1 = spawn_test_world("dojo", array![foo::TEST_CLASS_HASH],); - - let bar1_contract = IbarDispatcher { - contract_address: deploy_with_world_address(bar::TEST_CLASS_HASH, world1) - }; - - // Deploy another world contract - let world2 = spawn_test_world("dojo", array![foo::TEST_CLASS_HASH],); - - let bar2_contract = IbarDispatcher { - contract_address: deploy_with_world_address(bar::TEST_CLASS_HASH, world2) - }; - - let alice = starknet::contract_address_const::<0x1337>(); - starknet::testing::set_contract_address(alice); - - bar1_contract.set_foo(1337, 1337); - bar2_contract.set_foo(7331, 7331); - - let data1 = get!(world1, alice, Foo); - let data2 = get!(world2, alice, Foo); - - assert(data1.a == 1337, 'data1 not stored'); - assert(data2.a == 7331, 'data2 not stored'); -} - -#[test] -#[available_gas(60000000)] -fn bench_execute() { - let world = spawn_test_world("dojo", array![foo::TEST_CLASS_HASH],); - let bar_contract = IbarDispatcher { - contract_address: deploy_with_world_address(bar::TEST_CLASS_HASH, world) - }; - - let alice = starknet::contract_address_const::<0x1337>(); - starknet::testing::set_contract_address(alice); - - let gas = GasCounterTrait::start(); - - bar_contract.set_foo(1337, 1337); - gas.end("foo set call"); - - let gas = GasCounterTrait::start(); - let data = get!(world, alice, Foo); - gas.end("foo get macro"); - - assert(data.a == 1337, 'data not stored'); -} - -#[test] -fn bench_execute_complex() { - let world = spawn_test_world("dojo", array![benchmarks::character::TEST_CLASS_HASH],); - let bar_contract = IbarDispatcher { - contract_address: deploy_with_world_address(bar::TEST_CLASS_HASH, world) - }; - - let alice = starknet::contract_address_const::<0x1337>(); - starknet::testing::set_contract_address(alice); - - let gas = GasCounterTrait::start(); - - bar_contract.set_char(1337, 1337); - gas.end("char set call"); - - let gas = GasCounterTrait::start(); - - let data = get!(world, alice, Character); - gas.end("char get macro"); - - assert(data.heigth == 1337, 'data not stored'); -} - -#[starknet::interface] -trait IWorldUpgrade { - fn hello(self: @TContractState) -> felt252; -} - -#[starknet::contract] -mod worldupgrade { - use super::{IWorldUpgrade, IWorldDispatcher, ContractAddress}; - use starknet::storage::{StoragePointerReadAccess, StoragePointerWriteAccess}; - - #[storage] - struct Storage { - world: IWorldDispatcher, - } - - #[abi(embed_v0)] - impl IWorldUpgradeImpl of super::IWorldUpgrade { - fn hello(self: @ContractState) -> felt252 { - 'dojo' - } - } -} - - -#[test] -#[available_gas(60000000)] -fn test_upgradeable_world() { - // Deploy world contract - let world = deploy_world(); - - let mut upgradeable_world_dispatcher = IUpgradeableWorldDispatcher { - contract_address: world.contract_address - }; - upgradeable_world_dispatcher.upgrade(worldupgrade::TEST_CLASS_HASH.try_into().unwrap()); - - let res 
= (IWorldUpgradeDispatcher { contract_address: world.contract_address }).hello(); - - assert(res == 'dojo', 'should return dojo'); -} - -#[test] -#[available_gas(60000000)] -#[should_panic(expected: ('invalid class_hash', 'ENTRYPOINT_FAILED'))] -fn test_upgradeable_world_with_class_hash_zero() { - // Deploy world contract - let world = deploy_world(); - - starknet::testing::set_contract_address(starknet::contract_address_const::<0x1337>()); - - let mut upgradeable_world_dispatcher = IUpgradeableWorldDispatcher { - contract_address: world.contract_address - }; - upgradeable_world_dispatcher.upgrade(0.try_into().unwrap()); -} - -#[test] -#[available_gas(60000000)] -#[should_panic(expected: ('only owner can upgrade', 'ENTRYPOINT_FAILED'))] -fn test_upgradeable_world_from_non_owner() { - // Deploy world contract - let world = deploy_world(); - - let not_owner = starknet::contract_address_const::<0x1337>(); - starknet::testing::set_contract_address(not_owner); - starknet::testing::set_account_contract_address(not_owner); - - let mut upgradeable_world_dispatcher = IUpgradeableWorldDispatcher { - contract_address: world.contract_address - }; - upgradeable_world_dispatcher.upgrade(worldupgrade::TEST_CLASS_HASH.try_into().unwrap()); -} - -fn drop_all_events(address: ContractAddress) { - loop { - match starknet::testing::pop_log_raw(address) { - core::option::Option::Some(_) => {}, - core::option::Option::None => { break; }, - }; - } -} - -#[test] -#[available_gas(6000000)] -fn test_differ_program_hash_event_emit() { - let world = deploy_world(); - drop_all_events(world.contract_address); - let config = IConfigDispatcher { contract_address: world.contract_address }; - - config.set_differ_program_hash(program_hash: 98758347158781475198374598718743); - - assert_eq!( - starknet::testing::pop_log(world.contract_address), - Option::Some(DifferProgramHashUpdate { program_hash: 98758347158781475198374598718743 }) - ); -} - -#[test] -#[available_gas(6000000)] -fn test_facts_registry_event_emit() { - let world = deploy_world(); - drop_all_events(world.contract_address); - let config = IConfigDispatcher { contract_address: world.contract_address }; - - config.set_facts_registry(contract_address_const::<0x12>()); - - assert_eq!( - starknet::testing::pop_log(world.contract_address), - Option::Some(FactsRegistryUpdate { address: contract_address_const::<0x12>() }) - ); -} - -#[starknet::interface] -trait IDojoInit { - fn dojo_init(self: @ContractState) -> felt252; -} - -#[dojo::contract] -mod test_contract {} - -#[dojo::contract(namespace: "buzz_namespace", nomapping: true)] -mod buzz_contract {} - -#[test] -#[available_gas(6000000)] -#[should_panic(expected: ('Only world can init', 'ENTRYPOINT_FAILED'))] -fn test_can_call_init() { - let world = deploy_world(); - let address = world - .deploy_contract('salt1', test_contract::TEST_CLASS_HASH.try_into().unwrap(), [].span()); - - let dojo_init = IDojoInitDispatcher { contract_address: address }; - dojo_init.dojo_init(); -} - -#[test] -fn test_set_entity_by_id() { - let world = deploy_world(); - world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap()); - let selector = Model::::selector(); - let entity_id = entity_id_from_keys([0x01234].span()); - let values = create_foo(); - let layout = Model::::layout(); - - world.set_entity(selector, ModelIndex::Id(entity_id), values, layout); - let read_values = world.entity(selector, ModelIndex::Id(entity_id), layout); - assert_array(read_values, values); -} - -#[test] -fn test_set_entity_with_fixed_layout() { - let 
-    let world = deploy_world();
-    world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap());
-    let selector = Model::<Foo>::selector();
-    let keys = get_key_test();
-    let values = create_foo();
-    let layout = Model::<Foo>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(get_key_test()), values, layout);
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-    assert_array(read_values, values);
-}
-
-#[test]
-fn test_set_entity_with_struct_layout() {
-    let world = deploy_world();
-    world.register_model(struct_simple_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructSimpleModel>::selector();
-    let keys = get_key_test();
-    let values = create_struct_simple_model();
-    let layout = Model::<StructSimpleModel>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-    assert_array(read_values, values);
-}
-
-#[test]
-fn test_set_entity_with_struct_tuple_layout() {
-    let world = deploy_world();
-    world.register_model(struct_with_tuple::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructWithTuple>::selector();
-    let keys = get_key_test();
-    let values = create_struct_with_tuple();
-    let layout = Model::<StructWithTuple>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-    assert_array(read_values, values);
-}
-
-#[test]
-fn test_set_entity_with_struct_enum_layout() {
-    let world = deploy_world();
-    world.register_model(struct_with_enum::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructWithEnum>::selector();
-    let keys = get_key_test();
-    let values = create_struct_with_enum_first_variant();
-    let layout = Model::<StructWithEnum>::layout();
-
-    // test with the first variant
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-    assert_array(read_values, values);
-
-    // then override with the second variant
-    let values = create_struct_with_enum_second_variant();
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-    assert_array(read_values, values);
-}
-
-#[test]
-fn test_set_entity_with_struct_simple_array_layout() {
-    let world = deploy_world();
-    world.register_model(struct_simple_array_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructSimpleArrayModel>::selector();
-    let keys = get_key_test();
-    let values = create_struct_simple_array_model();
-    let layout = Model::<StructSimpleArrayModel>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-    assert_array(read_values, values);
-}
-
-#[test]
-fn test_set_entity_with_struct_complex_array_layout() {
-    let world = deploy_world();
-    world.register_model(struct_complex_array_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructComplexArrayModel>::selector();
-    let keys = get_key_test();
-    let values = create_struct_complex_array_model();
-    let layout = Model::<StructComplexArrayModel>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-    assert_array(read_values, values);
-}
-
-#[test]
-fn test_set_entity_with_struct_layout_and_byte_array() {
-    let world = deploy_world();
-    world.register_model(struct_byte_array_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructByteArrayModel>::selector();
-    let keys = get_key_test();
-    let values = create_struct_byte_array_model();
-    let layout = Model::<StructByteArrayModel>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-    assert_array(read_values, values);
-}
-
-#[test]
-fn test_set_entity_with_nested_elements() {
-    let world = deploy_world();
-    world.register_model(struct_nested_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructNestedModel>::selector();
-    let keys = get_key_test();
-    let values = create_struct_nested_model();
-    let layout = Model::<StructNestedModel>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-    assert_array(read_values, values);
-}
-
-fn assert_empty_array(values: Span<felt252>) {
-    let mut i = 0;
-    loop {
-        if i >= values.len() {
-            break;
-        }
-        assert!(*values.at(i) == 0);
-        i += 1;
-    };
-}
-
-#[test]
-fn test_set_entity_with_struct_generics_enum_layout() {
-    let world = deploy_world();
-    world.register_model(struct_with_generic::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructWithGeneric>::selector();
-    let keys = get_key_test();
-    let values = create_struct_generic_first_variant();
-    let layout = Model::<StructWithGeneric>::layout();
-
-    // test with the first variant
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-    assert_array(read_values, values);
-
-    // then override with the second variant
-    let values = create_struct_generic_second_variant();
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-    assert_array(read_values, values);
-}
-
-#[test]
-fn test_delete_entity_by_id() {
-    let world = deploy_world();
-    world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap());
-    let selector = Model::<Foo>::selector();
-    let entity_id = entity_id_from_keys(get_key_test());
-    let values = create_foo();
-    let layout = Model::<Foo>::layout();
-
-    world.set_entity(selector, ModelIndex::Id(entity_id), values, layout);
-
-    world.delete_entity(selector, ModelIndex::Id(entity_id), layout);
-
-    let read_values = world.entity(selector, ModelIndex::Id(entity_id), layout);
-
-    assert!(read_values.len() == values.len());
-    assert_empty_array(read_values);
-}
-
-#[test]
-fn test_delete_entity_with_fixed_layout() {
-    let world = deploy_world();
-    world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap());
-    let selector = Model::<Foo>::selector();
-    let keys = get_key_test();
-    let values = create_foo();
-    let layout = Model::<Foo>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(get_key_test()), values, layout);
-
-    world.delete_entity(selector, ModelIndex::Keys(keys), layout);
-
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-
-    assert!(read_values.len() == values.len());
-    assert_empty_array(read_values);
-}
-
-#[test]
-fn test_delete_entity_with_simple_struct_layout() {
-    let world = deploy_world();
-    world.register_model(struct_simple_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructSimpleModel>::selector();
-    let keys = get_key_test();
-    let values = create_struct_simple_model();
-    let layout = Model::<StructSimpleModel>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    world.delete_entity(selector, ModelIndex::Keys(keys), layout);
-
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-
-    assert!(read_values.len() == values.len());
-    assert_empty_array(read_values);
-}
-
-#[test]
-fn test_delete_entity_with_struct_simple_array_layout() {
-    let world = deploy_world();
-    world.register_model(struct_simple_array_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructSimpleArrayModel>::selector();
-    let keys = get_key_test();
-    let values = create_struct_simple_array_model();
-    let layout = Model::<StructSimpleArrayModel>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    world.delete_entity(selector, ModelIndex::Keys(keys), layout);
-
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-
-    // array length set to 0, so the expected value span is shorter than the initial values
-    let expected_values = [0, 0, 0].span();
-
-    assert!(read_values.len() == expected_values.len());
-    assert_empty_array(read_values);
-}
-
-#[test]
-fn test_delete_entity_with_complex_array_struct_layout() {
-    let world = deploy_world();
-    world.register_model(struct_complex_array_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructComplexArrayModel>::selector();
-    let keys = get_key_test();
-    let values = create_struct_complex_array_model();
-
-    let layout = Model::<StructComplexArrayModel>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    world.delete_entity(selector, ModelIndex::Keys(keys), layout);
-
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-
-    // array length set to 0, so the expected value span is shorter than the initial values
-    let expected_values = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0].span();
-
-    assert!(read_values.len() == expected_values.len());
-    assert_empty_array(read_values);
-}
-
-#[test]
-fn test_delete_entity_with_struct_tuple_layout() {
-    let world = deploy_world();
-    world.register_model(struct_with_tuple::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructWithTuple>::selector();
-    let keys = get_key_test();
-    let values = create_struct_with_tuple();
-    let layout = Model::<StructWithTuple>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    world.delete_entity(selector, ModelIndex::Keys(keys), layout);
-
-    let expected_values = [0, 0].span();
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-
-    assert!(read_values.len() == expected_values.len());
-    assert_empty_array(read_values);
-}
-
-#[test]
-fn test_delete_entity_with_struct_enum_layout() {
-    let world = deploy_world();
-    world.register_model(struct_with_enum::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructWithEnum>::selector();
-    let keys = get_key_test();
-    let values = create_struct_with_enum_first_variant();
-    let layout = Model::<StructWithEnum>::layout();
-
-    // test with the first variant
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    world.delete_entity(selector, ModelIndex::Keys(keys), layout);
-
-    let expected_values = [0, 0, 0].span();
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-
-    assert!(read_values.len() == expected_values.len());
-    assert_empty_array(read_values);
-}
-
-#[test]
-fn test_delete_entity_with_struct_layout_and_byte_array() {
-    let world = deploy_world();
-    world.register_model(struct_byte_array_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructByteArrayModel>::selector();
-    let keys = get_key_test();
-    let values = create_struct_byte_array_model();
-    let layout = Model::<StructByteArrayModel>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    world.delete_entity(selector, ModelIndex::Keys(keys), layout);
-
-    let expected_values = [0, 0, 0, 0].span();
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-
-    assert!(read_values.len() == expected_values.len());
-    assert_empty_array(read_values);
-}
-
-#[test]
-fn test_delete_entity_with_nested_elements() {
-    let world = deploy_world();
-    world.register_model(struct_nested_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructNestedModel>::selector();
-    let keys = get_key_test();
-    let values = create_struct_nested_model();
-    let layout = Model::<StructNestedModel>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    world.delete_entity(selector, ModelIndex::Keys(keys), layout);
-
-    let expected_values = [0, 0, 0, 0, 0, 0, 0, 0, 0].span();
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-
-    assert!(read_values.len() == expected_values.len());
-    assert_empty_array(read_values);
-}
-
-#[test]
-fn test_delete_entity_with_struct_generics_enum_layout() {
-    let world = deploy_world();
-    world.register_model(struct_with_generic::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructWithGeneric>::selector();
-    let keys = get_key_test();
-    let values = create_struct_generic_first_variant();
-    let layout = Model::<StructWithGeneric>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-
-    world.delete_entity(selector, ModelIndex::Keys(keys), layout);
-
-    let expected_values = [0, 0].span();
-    let read_values = world.entity(selector, ModelIndex::Keys(keys), layout);
-
-    assert!(read_values.len() == expected_values.len());
-    assert_empty_array(read_values);
-}
-
-#[test]
-#[should_panic(expected: ("Unexpected layout type for a model.", 'ENTRYPOINT_FAILED'))]
-fn test_set_entity_with_unexpected_array_model_layout() {
-    let world = deploy_world();
-    world.register_model(struct_simple_array_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let layout = Layout::Array([Introspect::<felt252>::layout()].span());
-
-    world
-        .set_entity(
-            Model::<StructSimpleArrayModel>::selector(),
-            ModelIndex::Keys([].span()),
-            [].span(),
-            layout
-        );
-}
-
-#[test]
-#[should_panic(expected: ("Unexpected layout type for a model.", 'ENTRYPOINT_FAILED'))]
-fn test_set_entity_with_unexpected_tuple_model_layout() {
-    let world = deploy_world();
-    world.register_model(struct_simple_array_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let layout = Layout::Tuple([Introspect::<felt252>::layout()].span());
-
-    world
-        .set_entity(
-            Model::<StructSimpleArrayModel>::selector(),
-            ModelIndex::Keys([].span()),
-            [].span(),
-            layout
-        );
-}
-
-#[test]
-#[should_panic(expected: ("Unexpected layout type for a model.", 'ENTRYPOINT_FAILED'))]
-fn test_delete_entity_with_unexpected_array_model_layout() {
-    let world = deploy_world();
-    world.register_model(struct_simple_array_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let layout = Layout::Array([Introspect::<felt252>::layout()].span());
-
-    world
-        .delete_entity(
-            Model::<StructSimpleArrayModel>::selector(), ModelIndex::Keys([].span()), layout
-        );
-}
-
-#[test]
-#[should_panic(expected: ("Unexpected layout type for a model.", 'ENTRYPOINT_FAILED'))]
-fn test_delete_entity_with_unexpected_tuple_model_layout() {
-    let world = deploy_world();
-    world.register_model(struct_simple_array_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let layout = Layout::Tuple([Introspect::<felt252>::layout()].span());
-
-    world
-        .delete_entity(
-            Model::<StructSimpleArrayModel>::selector(), ModelIndex::Keys([].span()), layout
-        );
-}
-
-#[test]
-#[should_panic(expected: ("Unexpected layout type for a model.", 'ENTRYPOINT_FAILED'))]
-fn test_get_entity_with_unexpected_array_model_layout() {
-    let world = deploy_world();
-    world.register_model(struct_simple_array_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let layout = Layout::Array([Introspect::<felt252>::layout()].span());
-
-    world.entity(Model::<StructSimpleArrayModel>::selector(), ModelIndex::Keys([].span()), layout);
-}
-
-#[test]
-#[should_panic(expected: ("Unexpected layout type for a model.", 'ENTRYPOINT_FAILED'))]
-fn test_get_entity_with_unexpected_tuple_model_layout() {
-    let world = deploy_world();
-    world.register_model(struct_simple_array_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let layout = Layout::Tuple([Introspect::<felt252>::layout()].span());
-
-    world.entity(Model::<StructSimpleArrayModel>::selector(), ModelIndex::Keys([].span()), layout);
-}
-
-
-#[test]
-#[should_panic(expected: ('Invalid values length', 'ENTRYPOINT_FAILED',))]
-fn test_set_entity_with_bad_values_length_error_for_array_layout() {
-    let world = deploy_world();
-    world.register_model(struct_simple_array_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructSimpleArrayModel>::selector();
-    let keys = get_key_test();
-    let layout = Model::<StructSimpleArrayModel>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(keys), [1].span(), layout);
-}
-
-#[test]
-#[should_panic(expected: ('invalid array length', 'ENTRYPOINT_FAILED',))]
-fn test_set_entity_with_too_big_array_length() {
-    let world = deploy_world();
-    world.register_model(struct_simple_array_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructSimpleArrayModel>::selector();
-    let keys = get_key_test();
-    let values: Span<felt252> = [
-        1, MAX_ARRAY_LENGTH.try_into().unwrap() + 1, 10, 20, 30, 40, 2
-    ].span();
-    let layout = Model::<StructSimpleArrayModel>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-}
-
-#[test]
-#[should_panic(expected: ('invalid array length', 'ENTRYPOINT_FAILED',))]
-fn test_set_entity_with_struct_layout_and_bad_byte_array_length() {
-    let world = deploy_world();
-    world.register_model(struct_byte_array_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructByteArrayModel>::selector();
-    let keys = get_key_test();
-    let values: Span<felt252> = [
-        1, MAX_ARRAY_LENGTH.try_into().unwrap(), 'first', 'second', 'third', 'pending', 7
-    ].span();
-    let layout = Model::<StructByteArrayModel>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-}
-
-#[test]
-#[should_panic(expected: ('Invalid values length', 'ENTRYPOINT_FAILED',))]
-fn test_set_entity_with_struct_layout_and_bad_value_length_for_byte_array() {
-    let world = deploy_world();
-    world.register_model(struct_byte_array_model::TEST_CLASS_HASH.try_into().unwrap());
-
-    let selector = Model::<StructByteArrayModel>::selector();
-    let keys = get_key_test();
-    let values: Span<felt252> = [1, 3, 'first', 'second', 'third', 'pending'].span();
-    let layout = Model::<StructByteArrayModel>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(keys), values, layout);
-}
-
-fn write_foo_record(world: IWorldDispatcher) {
-    let selector = Model::<Foo>::selector();
-    let values = create_foo();
-    let layout = Model::<Foo>::layout();
-
-    world.set_entity(selector, ModelIndex::Keys(get_key_test()), values, layout);
-}
-
-#[test]
-fn test_write_model_for_namespace_owner() {
-    let world = deploy_world();
-    world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap());
-
-    let account = starknet::contract_address_const::<0xb0b>();
-    let contract = starknet::contract_address_const::<0xdeadbeef>();
-
-    // the caller account is a model namespace owner
-    world.grant_owner(Model::<Foo>::namespace_hash(), account);
-    starknet::testing::set_account_contract_address(account);
-    starknet::testing::set_contract_address(contract);
-
-    write_foo_record(world);
-}
-
-#[test]
-fn test_write_model_for_model_owner() {
-    let world = deploy_world();
-    world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap());
-
-    // the caller account is a model owner
-    let account = starknet::contract_address_const::<0xb0b>();
-    let contract = starknet::contract_address_const::<0xdeadbeef>();
-
-    world.grant_owner(Model::<Foo>::selector(), account);
-    starknet::testing::set_account_contract_address(account);
-    starknet::testing::set_contract_address(contract);
-
-    write_foo_record(world);
-}
-
-#[test]
-fn test_write_model_for_namespace_writer() {
-    let world = deploy_world();
-    world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap());
-
-    let account = starknet::contract_address_const::<0xb0b>();
-    let contract = starknet::contract_address_const::<0xdeadbeef>();
-
-    world.grant_writer(Model::<Foo>::namespace_hash(), contract);
-
-    // the account does not own anything
-    starknet::testing::set_account_contract_address(account);
-    starknet::testing::set_contract_address(contract);
-
-    write_foo_record(world);
-}
-
-#[test]
-fn test_write_model_for_model_writer() {
-    let world = deploy_world();
-    world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap());
-
-    let account = starknet::contract_address_const::<0xb0b>();
-    let contract = starknet::contract_address_const::<0xdeadbeef>();
-
-    world.grant_writer(Model::<Foo>::selector(), contract);
-
-    // the account does not own anything
-    starknet::testing::set_account_contract_address(account);
-    starknet::testing::set_contract_address(contract);
-
-    write_foo_record(world);
-}
-
-#[test]
-fn test_write_namespace_for_namespace_owner() {
-    let world = deploy_world();
-
-    let account = starknet::contract_address_const::<0xb0b>();
-    let contract = starknet::contract_address_const::<0xdeadbeef>();
-
-    world.grant_owner(Model::<Foo>::namespace_hash(), account);
-
-    // the account owns the Foo model namespace so it should be able to deploy
-    // and register the model.
-    starknet::testing::set_account_contract_address(account);
-    starknet::testing::set_contract_address(contract);
-
-    world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap());
-}
-
-#[test]
-fn test_write_namespace_for_namespace_writer() {
-    let world = deploy_world();
-
-    let account = starknet::contract_address_const::<0xb0b>();
-    let contract = starknet::contract_address_const::<0xdeadbeef>();
-
-    world.grant_writer(Model::<Foo>::namespace_hash(), account);
-
-    // the account has write access to the Foo model namespace so it should be able
-    // to deploy and register the model.
-    starknet::testing::set_account_contract_address(account);
-    starknet::testing::set_contract_address(contract);
-
-    world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap());
-}
-
-#[test]
-#[should_panic(expected: ('no model write access', 'ENTRYPOINT_FAILED',))]
-fn test_write_model_no_write_access() {
-    let world = deploy_world();
-    world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap());
-
-    // the caller account does not own the model nor the model namespace nor the world
-    let account = starknet::contract_address_const::<0xb0b>();
-    starknet::testing::set_account_contract_address(account);
-
-    // the contract is not a writer for the model nor for the model namespace
-    let contract = starknet::contract_address_const::<0xdeadbeef>();
-    starknet::testing::set_contract_address(contract);
-
-    write_foo_record(world);
-}
-
-#[test]
-#[should_panic(expected: ('namespace not registered', 'ENTRYPOINT_FAILED',))]
-fn test_register_model_with_unregistered_namespace() {
-    let world = deploy_world();
-    world.register_model(buzz::TEST_CLASS_HASH.try_into().unwrap());
-}
-
-#[test]
-fn test_deploy_contract_for_namespace_owner() {
-    let world = deploy_world();
-
-    let account = starknet::contract_address_const::<0xb0b>();
-    world.grant_owner(bytearray_hash(@"dojo"), account);
-
-    // the account owns the 'test_contract' namespace so it should be able to deploy
-    // and register the model.
-    starknet::testing::set_account_contract_address(account);
-
-    world.deploy_contract('salt1', test_contract::TEST_CLASS_HASH.try_into().unwrap(), [].span());
-}
-
-#[test]
-fn test_deploy_contract_for_namespace_writer() {
-    let world = deploy_world();
-
-    let account = starknet::contract_address_const::<0xb0b>();
-    world.grant_writer(bytearray_hash(@"dojo"), account);
-
-    // the account has write access to the 'test_contract' namespace so it should be able
-    // to deploy and register the model.
-    starknet::testing::set_account_contract_address(account);
-
-    world.deploy_contract('salt1', test_contract::TEST_CLASS_HASH.try_into().unwrap(), [].span());
-}
-
-#[test]
-#[should_panic(expected: ('namespace not registered', 'ENTRYPOINT_FAILED',))]
-fn test_deploy_contract_with_unregistered_namespace() {
-    let world = deploy_world();
-    world.deploy_contract('salt1', buzz_contract::TEST_CLASS_HASH.try_into().unwrap(), [].span());
-}
-
-#[test]
-#[should_panic(expected: ('no namespace write access', 'ENTRYPOINT_FAILED',))]
-fn test_deploy_contract_no_namespace_write_access() {
-    let world = deploy_world();
-
-    let account = starknet::contract_address_const::<0xb0b>();
-    starknet::testing::set_account_contract_address(account);
-
-    world.deploy_contract('salt1', test_contract::TEST_CLASS_HASH.try_into().unwrap(), [].span());
-}
-
diff --git a/crates/dojo-core/src/tests/world/acl.cairo b/crates/dojo-core/src/tests/world/acl.cairo
index a6422f087c..9679f1e58b 100644
--- a/crates/dojo-core/src/tests/world/acl.cairo
+++ b/crates/dojo-core/src/tests/world/acl.cairo
@@ -44,7 +44,12 @@ fn test_grant_owner_not_registered_resource() {
 }
 
 #[test]
-#[should_panic(expected: ("Caller `57005` is not an account", 'ENTRYPOINT_FAILED'))]
+#[should_panic(
+    expected: (
+        "Caller `57005` is not the owner of the resource `3123252206139358744730647958636922105676576163624049771737508399526017186883`",
+        'ENTRYPOINT_FAILED'
+    )
+)]
 fn test_grant_owner_through_malicious_contract() {
     let world = deploy_world();
     world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap());
@@ -84,7 +89,12 @@ fn test_grant_owner_fails_for_non_owner() {
 }
 
 #[test]
-#[should_panic(expected: ("Caller `57005` is not an account", 'ENTRYPOINT_FAILED'))]
+#[should_panic(
+    expected: (
+        "Caller `57005` is not the owner of the resource `3123252206139358744730647958636922105676576163624049771737508399526017186883`",
+        'ENTRYPOINT_FAILED'
+    )
+)]
 fn test_revoke_owner_through_malicious_contract() {
     let world = deploy_world();
     world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap());
@@ -151,7 +161,12 @@ fn test_writer_not_registered_resource() {
 }
 
 #[test]
-#[should_panic(expected: ("Caller `57005` is not an account", 'ENTRYPOINT_FAILED'))]
+#[should_panic(
+    expected: (
+        "Caller `57005` is not the owner of the resource `3123252206139358744730647958636922105676576163624049771737508399526017186883`",
+        'ENTRYPOINT_FAILED'
+    )
+)]
 fn test_grant_writer_through_malicious_contract() {
     let world = deploy_world();
     world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap());
@@ -191,7 +206,12 @@ fn test_grant_writer_fails_for_non_owner() {
 }
 
 #[test]
-#[should_panic(expected: ("Caller `57005` is not an account", 'ENTRYPOINT_FAILED'))]
+#[should_panic(
+    expected: (
+        "Caller `57005` is not the owner of the resource `3123252206139358744730647958636922105676576163624049771737508399526017186883`",
+        'ENTRYPOINT_FAILED'
+    )
+)]
 fn test_revoke_writer_through_malicious_contract() {
     let world = deploy_world();
     world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap());
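Note: the ACL tests above all exercise the same scenario: a transaction signed by one account routes a call through an untrusted contract, and the world now rejects it because the direct caller is not an owner of the resource (instead of the old blanket "is not an account" check). A minimal sketch of that scenario, assuming the `deploy_world` and `foo`/`Foo` helpers used throughout these tests:

    #[test]
    #[should_panic]
    fn sketch_malicious_contract_cannot_grant_owner() {
        let world = deploy_world();
        world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap());

        let bob = starknet::contract_address_const::<0xb0b>();
        let malicious_contract = starknet::contract_address_const::<0xdead>();

        // The tx is signed by bob's account, but the world sees the malicious
        // contract as the direct caller, and that contract owns no resource.
        starknet::testing::set_account_contract_address(bob);
        starknet::testing::set_contract_address(malicious_contract);

        world.grant_owner(Model::<Foo>::selector(), bob);
    }
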
diff --git a/crates/dojo-core/src/tests/world/resources.cairo b/crates/dojo-core/src/tests/world/resources.cairo
index 39c4fb5488..94702f02a7 100644
--- a/crates/dojo-core/src/tests/world/resources.cairo
+++ b/crates/dojo-core/src/tests/world/resources.cairo
@@ -100,7 +100,12 @@ fn test_set_metadata_not_possible_for_random_account() {
 }
 
 #[test]
-#[should_panic(expected: ("Caller `57005` is not an account", 'ENTRYPOINT_FAILED',))]
+#[should_panic(
+    expected: (
+        "Caller `57005` is not the owner of the resource `3123252206139358744730647958636922105676576163624049771737508399526017186883`",
+        'ENTRYPOINT_FAILED',
+    )
+)]
 fn test_set_metadata_through_malicious_contract() {
     let world = spawn_test_world(["dojo"].span(), [foo::TEST_CLASS_HASH].span(),);
 
@@ -217,7 +222,7 @@ fn test_upgrade_model_from_model_writer() {
     let alice = starknet::contract_address_const::<0xa11ce>();
 
     let world = deploy_world();
-    world.register_namespace(Model::<Foo>::namespace());
+    // dojo namespace is registered by the deploy_world function.
     world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap());
     world.grant_owner(Model::<Foo>::namespace_hash(), bob);
     world.grant_writer(Model::<Foo>::namespace_hash(), alice);
@@ -257,8 +262,10 @@ fn test_register_model_with_unregistered_namespace() {
     world.register_model(buzz::TEST_CLASS_HASH.try_into().unwrap());
 }
 
+// It's CONTRACT_NOT_DEPLOYED for now as in this example the contract is not a dojo contract
+// and it's not the account that is calling the register_model function.
 #[test]
-#[should_panic(expected: ("Caller `57005` is not an account", 'ENTRYPOINT_FAILED',))]
+#[should_panic(expected: ('CONTRACT_NOT_DEPLOYED', 'ENTRYPOINT_FAILED',))]
 fn test_register_model_through_malicious_contract() {
     let bob = starknet::contract_address_const::<0xb0b>();
     let malicious_contract = starknet::contract_address_const::<0xdead>();
@@ -295,6 +302,7 @@ fn test_register_namespace() {
 }
 
 #[test]
+#[should_panic(expected: ("Namespace `namespace` is already registered", 'ENTRYPOINT_FAILED',))]
 fn test_register_namespace_already_registered_same_caller() {
     let world = deploy_world();
 
@@ -302,19 +310,8 @@ fn test_register_namespace_already_registered_same_caller() {
     starknet::testing::set_account_contract_address(bob);
     starknet::testing::set_contract_address(bob);
 
-    let namespace = "namespace";
-    let hash = bytearray_hash(@namespace);
-
-    world.register_namespace(namespace.clone());
-
-    drop_all_events(world.contract_address);
-
-    world.register_namespace(namespace);
-
-    assert(world.is_owner(hash, bob), 'namespace not registered');
-
-    let event = starknet::testing::pop_log_raw(world.contract_address);
-    assert(event.is_none(), 'unexpected event');
+    world.register_namespace("namespace");
+    world.register_namespace("namespace");
 }
 
 #[test]
@@ -335,20 +332,6 @@ fn test_register_namespace_already_registered_other_caller() {
     world.register_namespace("namespace");
 }
 
-#[test]
-#[should_panic(expected: ("Caller `48879` is not an account", 'ENTRYPOINT_FAILED',))]
-fn test_register_namespace_is_not_a_direct_call_from_account() {
-    let world = deploy_world();
-
-    let bob = starknet::contract_address_const::<0xb0b>();
-    let malicious_contract = starknet::contract_address_const::<0xbeef>();
-
-    starknet::testing::set_account_contract_address(bob);
-    starknet::testing::set_contract_address(malicious_contract);
-
-    world.register_namespace("namespace");
-}
-
 #[test]
 fn test_deploy_contract_for_namespace_owner() {
     let world = deploy_world();
@@ -415,8 +398,10 @@ fn test_deploy_contract_with_unregistered_namespace() {
     world.deploy_contract('salt1', buzz_contract::TEST_CLASS_HASH.try_into().unwrap(),);
 }
 
+// It's CONTRACT_NOT_DEPLOYED for now as in this example the contract is not a dojo contract
+// and it's not the account that is calling the deploy_contract function.
 #[test]
-#[should_panic(expected: ("Caller `57005` is not an account", 'ENTRYPOINT_FAILED',))]
+#[should_panic(expected: ('CONTRACT_NOT_DEPLOYED', 'ENTRYPOINT_FAILED',))]
 fn test_deploy_contract_through_malicious_contract() {
     let world = deploy_world();
 
@@ -515,7 +500,12 @@ fn test_upgrade_contract_from_random_account() {
 }
 
 #[test]
-#[should_panic(expected: ("Caller `57005` is not an account", 'ENTRYPOINT_FAILED',))]
+#[should_panic(
+    expected: (
+        "Caller `57005` is not the owner of the resource `2368393732245529956313345237151518608283468650081902115301417183793437311044`",
+        'ENTRYPOINT_FAILED',
+    )
+)]
 fn test_upgrade_contract_through_malicious_contract() {
     let world = deploy_world();
     let class_hash = test_contract::TEST_CLASS_HASH.try_into().unwrap();
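Note: the behavioral change captured by these resource tests is that a namespace can now be registered exactly once, whoever the caller is; re-registration panics instead of being silently tolerated for the owner. A minimal sketch, assuming `deploy_world`:

    #[test]
    #[should_panic(expected: ("Namespace `ns` is already registered", 'ENTRYPOINT_FAILED',))]
    fn sketch_namespace_registers_only_once() {
        let world = deploy_world();

        world.register_namespace("ns");
        // The second registration now panics, even for the original owner.
        world.register_namespace("ns");
    }
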
diff --git a/crates/dojo-core/src/tests/world/world.cairo b/crates/dojo-core/src/tests/world/world.cairo
index 6dcc8f3a8a..5ee498982b 100644
--- a/crates/dojo-core/src/tests/world/world.cairo
+++ b/crates/dojo-core/src/tests/world/world.cairo
@@ -11,7 +11,7 @@ use dojo::world::{
 };
 use dojo::tests::helpers::{
     IbarDispatcher, IbarDispatcherTrait, drop_all_events, deploy_world_and_bar, Foo, foo, bar,
-    Character, character, test_contract
+    Character, character, test_contract, test_contract_with_dojo_init_args
 };
 use dojo::utils::test::{spawn_test_world, deploy_with_world_address, GasCounterTrait};
 
@@ -331,19 +331,114 @@ fn test_facts_registry_event_emit() {
     );
 }
 
-#[starknet::interface]
-trait IDojoInit<ContractState> {
-    fn dojo_init(self: @ContractState) -> felt252;
-}
+use test_contract::IDojoInitDispatcherTrait;
 
 #[test]
 #[available_gas(6000000)]
-#[should_panic(expected: ('Only world can init', 'ENTRYPOINT_FAILED'))]
-fn test_can_call_init() {
+#[should_panic(
+    expected: (
+        "Only the world can init contract `dojo-test_contract`, but caller is `0`",
+        'ENTRYPOINT_FAILED'
+    )
+)]
+fn test_can_call_init_only_world() {
     let world = deploy_world();
     let address = world
         .deploy_contract('salt1', test_contract::TEST_CLASS_HASH.try_into().unwrap());
 
-    let dojo_init = IDojoInitDispatcher { contract_address: address };
-    dojo_init.dojo_init();
+    let d = test_contract::IDojoInitDispatcher { contract_address: address };
+    d.dojo_init();
+}
+
+#[test]
+#[available_gas(6000000)]
+#[should_panic(
+    expected: (
+        "Caller `4919` cannot initialize contract `dojo-test_contract` (not owner)",
+        'ENTRYPOINT_FAILED'
+    )
+)]
+fn test_can_call_init_only_owner() {
+    let world = deploy_world();
+    let _address = world
+        .deploy_contract('salt1', test_contract::TEST_CLASS_HASH.try_into().unwrap());
+
+    let bob = starknet::contract_address_const::<0x1337>();
+    starknet::testing::set_contract_address(bob);
+
+    world.init_contract(selector_from_tag!("dojo-test_contract"), [].span());
+}
+
+#[test]
+#[available_gas(6000000)]
+fn test_can_call_init_default() {
+    let world = deploy_world();
+    let _address = world
+        .deploy_contract('salt1', test_contract::TEST_CLASS_HASH.try_into().unwrap());
+
+    world.init_contract(selector_from_tag!("dojo-test_contract"), [].span());
+}
+
+#[test]
+#[available_gas(6000000)]
+fn test_can_call_init_args() {
+    let world = deploy_world();
+    let _address = world
+        .deploy_contract(
+            'salt1', test_contract_with_dojo_init_args::TEST_CLASS_HASH.try_into().unwrap()
+        );
+
+    world.init_contract(selector_from_tag!("dojo-test_contract_with_dojo_init_args"), [1].span());
+}
+
+use test_contract_with_dojo_init_args::IDojoInitDispatcherTrait as IDojoInitArgs;
+
+#[test]
+#[available_gas(6000000)]
+#[should_panic(
+    expected: (
+        "Only the world can init contract `dojo-test_contract_with_dojo_init_args`, but caller is `0`",
+        'ENTRYPOINT_FAILED'
+    )
+)]
+fn test_can_call_init_only_world_args() {
+    let world = deploy_world();
+    let address = world
+        .deploy_contract(
+            'salt1', test_contract_with_dojo_init_args::TEST_CLASS_HASH.try_into().unwrap()
+        );
+
+    let d = test_contract_with_dojo_init_args::IDojoInitDispatcher { contract_address: address };
+    d.dojo_init(123);
+}
+
+use dojo::world::update::IUpgradeableStateDispatcherTrait;
+
+#[test]
+#[available_gas(6000000)]
+#[should_panic(
+    expected: ("Caller `4919` can't upgrade state (not world owner)", 'ENTRYPOINT_FAILED')
+)]
+fn test_upgrade_state_not_owner() {
+    let world = deploy_world();
+
+    let not_owner = starknet::contract_address_const::<0x1337>();
+    starknet::testing::set_contract_address(not_owner);
+    starknet::testing::set_account_contract_address(not_owner);
+
+    let output = dojo::world::update::ProgramOutput {
+        prev_state_root: 0,
+        new_state_root: 0,
+        block_number: 0,
+        block_hash: 0,
+        config_hash: 0,
+        world_da_hash: 0,
+        message_to_starknet_segment: [].span(),
+        message_to_appchain_segment: [].span(),
+    };
+
+    let d = dojo::world::update::IUpgradeableStateDispatcher {
+        contract_address: world.contract_address
+    };
+    d.upgrade_state([].span(), output, 0);
 }
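Note: the initialization flow these tests pin down is: deploying a contract makes the deployer an owner of that contract resource, and only an owner may then ask the world to run `dojo_init` (the contract itself only accepts the world as caller). A minimal happy-path sketch, assuming `deploy_world`, the `test_contract` helper, and the `selector_from_tag!` macro:

    fn sketch_deploy_and_init() {
        let world = deploy_world();

        // Deploying grants ownership of the contract resource to the caller...
        world.deploy_contract('salt1', test_contract::TEST_CLASS_HASH.try_into().unwrap());

        // ...so the same caller can ask the world to run `dojo_init` exactly once.
        world.init_contract(selector_from_tag!("dojo-test_contract"), [].span());
    }
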
diff --git a/crates/dojo-core/src/world/errors.cairo b/crates/dojo-core/src/world/errors.cairo
index e4b4492f97..ca1cadfb5c 100644
--- a/crates/dojo-core/src/world/errors.cairo
+++ b/crates/dojo-core/src/world/errors.cairo
@@ -8,6 +8,14 @@ pub fn no_write_access_with_tags(
     format!("Caller `{}` has no write access on {} `{}`", contract_tag, on_type, on_tag)
 }
 
+pub fn not_owner_init(contract_tag: @ByteArray, caller: ContractAddress) -> ByteArray {
+    format!("Caller `{:?}` cannot initialize contract `{}` (not owner)", caller, contract_tag)
+}
+
+pub fn contract_already_initialized(contract_tag: @ByteArray) -> ByteArray {
+    format!("Contract `{}` has already been initialized", contract_tag)
+}
+
 pub fn namespace_already_registered(namespace: @ByteArray) -> ByteArray {
     format!("Namespace `{}` is already registered", namespace)
 }
@@ -55,3 +63,7 @@ pub fn resource_conflict(name: @ByteArray, expected_type: @ByteArray) -> ByteArr
 pub fn no_model_write_access(tag: @ByteArray, caller: ContractAddress) -> ByteArray {
     format!("Caller `{:?}` has no write access on model `{}`", caller, tag)
 }
+
+pub fn no_world_owner(caller: ContractAddress, target: @ByteArray) -> ByteArray {
+    format!("Caller `{:?}` can't {} (not world owner)", caller, target)
+}
diff --git a/crates/dojo-core/src/world/world_contract.cairo b/crates/dojo-core/src/world/world_contract.cairo
index df8207b09c..7623bf607b 100644
--- a/crates/dojo-core/src/world/world_contract.cairo
+++ b/crates/dojo-core/src/world/world_contract.cairo
@@ -231,6 +231,7 @@ pub mod world {
     #[derive(Drop, starknet::Event)]
     pub struct StoreSetRecord {
         pub table: felt252,
+        pub entity_id: felt252,
         pub keys: Span<felt252>,
         pub values: Span<felt252>,
     }
@@ -361,8 +362,7 @@ pub mod world {
         ///
         /// `metadata` - The metadata content for the resource.
         fn set_metadata(ref self: ContractState, metadata: ResourceMetadata) {
-            self.assert_caller_is_account();
-            self.assert_resource_owner(metadata.resource_id);
+            self.assert_caller_is_resource_owner(metadata.resource_id);
 
             self
                 .write_model_entity(
@@ -402,13 +402,11 @@ pub mod world {
         ///
        /// * `resource` - The resource.
         /// * `address` - The contract address.
         fn grant_owner(ref self: ContractState, resource: felt252, address: ContractAddress) {
-            self.assert_caller_is_account();
-
             if self.resources.read(resource).is_unregistered() {
                 panic_with_byte_array(@errors::resource_not_registered(resource));
             }
 
-            self.assert_resource_owner(resource);
+            self.assert_caller_is_resource_owner(resource);
 
             self.owners.write((resource, address), true);
 
@@ -425,13 +423,11 @@ pub mod world {
         /// * `resource` - The resource.
         /// * `address` - The contract address.
         fn revoke_owner(ref self: ContractState, resource: felt252, address: ContractAddress) {
-            self.assert_caller_is_account();
-
             if self.resources.read(resource).is_unregistered() {
                 panic_with_byte_array(@errors::resource_not_registered(resource));
             }
 
-            self.assert_resource_owner(resource);
+            self.assert_caller_is_resource_owner(resource);
 
             self.owners.write((resource, address), false);
 
@@ -462,13 +458,11 @@ pub mod world {
         /// * `resource` - The hash of the resource name.
         /// * `contract` - The name of the contract.
         fn grant_writer(ref self: ContractState, resource: felt252, contract: ContractAddress) {
-            self.assert_caller_is_account();
-
             if self.resources.read(resource).is_unregistered() {
                 panic_with_byte_array(@errors::resource_not_registered(resource));
             }
 
-            self.assert_resource_owner(resource);
+            self.assert_caller_is_resource_owner(resource);
 
             self.writers.write((resource, contract), true);
 
@@ -485,13 +479,11 @@ pub mod world {
         /// * `model` - The name of the model.
         /// * `contract` - The name of the contract.
         fn revoke_writer(ref self: ContractState, resource: felt252, contract: ContractAddress) {
-            self.assert_caller_is_account();
-
             if self.resources.read(resource).is_unregistered() {
                 panic_with_byte_array(@errors::resource_not_registered(resource));
             }
 
-            self.assert_resource_owner(resource);
+            self.assert_caller_is_resource_owner(resource);
 
             self.writers.write((resource, contract), false);
 
@@ -505,8 +497,6 @@ pub mod world {
         ///
         /// * `class_hash` - The class hash of the model to be registered.
         fn register_model(ref self: ContractState, class_hash: ClassHash) {
-            self.assert_caller_is_account();
-
             let caller = get_caller_address();
             let salt = self.models_salt.read();
 
@@ -525,7 +515,7 @@ pub mod world {
                 panic_with_byte_array(@errors::namespace_not_registered(@namespace));
             }
 
-            self.assert_namespace_write_access(@namespace, namespace_hash);
+            self.assert_caller_namespace_write_access(@namespace, namespace_hash);
 
             let model = self.resources.read(selector);
             if !model.is_unregistered() {
@@ -539,8 +529,6 @@ pub mod world {
         }
 
         fn upgrade_model(ref self: ContractState, class_hash: ClassHash) {
-            self.assert_caller_is_account();
-
             let caller = get_caller_address();
             let salt = self.models_salt.read();
 
@@ -555,7 +543,7 @@ pub mod world {
                 panic_with_byte_array(@errors::namespace_not_registered(@namespace));
             }
 
-            self.assert_namespace_write_access(@namespace, namespace_hash);
+            self.assert_caller_namespace_write_access(@namespace, namespace_hash);
 
             if selector.is_zero() {
                 panic_with_byte_array(@errors::invalid_resource_selector(selector));
@@ -600,18 +588,14 @@ pub mod world {
         ///
        /// * `namespace` - The name of the namespace to be registered.
         fn register_namespace(ref self: ContractState, namespace: ByteArray) {
-            self.assert_caller_is_account();
-
             let caller = get_caller_address();
 
             let hash = bytearray_hash(@namespace);
 
             match self.resources.read(hash) {
-                Resource::Namespace => {
-                    if !self.is_owner(hash, caller) {
-                        panic_with_byte_array(@errors::namespace_already_registered(@namespace));
-                    }
-                },
+                Resource::Namespace => panic_with_byte_array(
+                    @errors::namespace_already_registered(@namespace)
+                ),
                 Resource::Unregistered => {
                     self.resources.write(hash, Resource::Namespace);
                     self.owners.write((hash, caller), true);
@@ -638,8 +622,6 @@ pub mod world {
         fn deploy_contract(
             ref self: ContractState, salt: felt252, class_hash: ClassHash,
         ) -> ContractAddress {
-            self.assert_caller_is_account();
-
             let caller = get_caller_address();
 
             let (contract_address, _) = deploy_syscall(
@@ -658,7 +640,7 @@ pub mod world {
                 panic_with_byte_array(@errors::namespace_not_registered(@namespace));
             }
 
-            self.assert_namespace_write_access(@namespace, namespace_hash);
+            self.assert_caller_namespace_write_access(@namespace, namespace_hash);
 
             let selector = dispatcher.selector();
             self.owners.write((selector, caller), true);
@@ -685,8 +667,7 @@ pub mod world {
         fn upgrade_contract(
             ref self: ContractState, selector: felt252, class_hash: ClassHash
         ) -> ClassHash {
-            self.assert_caller_is_account();
-            self.assert_resource_owner(selector);
+            self.assert_caller_is_resource_owner(selector);
 
             if let Resource::Contract((_, contract_address)) = self.resources.read(selector) {
                 IUpgradeableDispatcher { contract_address }.upgrade(class_hash);
@@ -707,15 +688,28 @@ pub mod world {
        /// * `init_calldata` - Calldata used to initialize the contract.
         fn init_contract(ref self: ContractState, selector: felt252, init_calldata: Span<felt252>) {
             if let Resource::Contract((_, contract_address)) = self.resources.read(selector) {
+                let caller = get_caller_address();
+
+                let dispatcher = IContractDispatcher { contract_address };
+                let tag = dispatcher.tag();
+
                 if self.initialized_contract.read(selector) {
-                    let dispatcher = IContractDispatcher { contract_address };
-                    let tag = dispatcher.tag();
-                    panic!("Contract {} has already been initialized", tag);
+                    panic_with_byte_array(@errors::contract_already_initialized(@tag));
                 } else {
+                    if !self.is_owner(selector, caller) {
+                        panic_with_byte_array(@errors::not_owner_init(@tag, caller));
+                    }
+
+                    // For the init, to ensure only the world can call the init function,
+                    // the verification is done in the init function of the contract:
+                    // `crates/dojo-lang/src/contract.rs#L140`
+                    // `crates/dojo-lang/src/contract.rs#L331`
+
                     starknet::syscalls::call_contract_syscall(
                         contract_address, DOJO_INIT_SELECTOR, init_calldata
                     )
                         .unwrap_syscall();
+
                     self.initialized_contract.write(selector, true);
 
                     EventEmitter::emit(ref self, ContractInitialized { selector, init_calldata });
@@ -795,7 +789,7 @@ pub mod world {
             values: Span<felt252>,
             layout: Layout
         ) {
-            self.assert_model_write_access(model_selector);
+            self.assert_caller_model_write_access(model_selector);
             self.set_entity_internal(model_selector, index, values, layout);
         }
 
@@ -810,7 +804,7 @@ pub mod world {
         fn delete_entity(
             ref self: ContractState, model_selector: felt252, index: ModelIndex, layout: Layout
         ) {
-            self.assert_model_write_access(model_selector);
+            self.assert_caller_model_write_access(model_selector);
             self.delete_entity_internal(model_selector, index, layout);
         }
 
@@ -844,8 +838,6 @@ pub mod world {
         ///
        /// * `new_class_hash` - The new world class hash.
         fn upgrade(ref self: ContractState, new_class_hash: ClassHash) {
-            self.assert_caller_is_account();
-
             assert(new_class_hash.is_non_zero(), 'invalid class_hash');
 
             if !self.is_caller_world_owner() {
@@ -868,6 +860,12 @@ pub mod world {
             program_output: ProgramOutput,
             program_hash: felt252
         ) {
+            if !self.is_caller_world_owner() {
+                panic_with_byte_array(
+                    @errors::no_world_owner(get_caller_address(), @"upgrade state")
+                );
+            }
+
             let mut da_hasher = PedersenTrait::new(0);
 
             let mut i = 0;
             loop {
@@ -930,27 +928,12 @@ pub mod world {
             self.is_owner(WORLD, get_caller_address())
         }
 
-        /// Asserts that the caller is an account.
-        ///
-        /// This check is done to be sure that a sensible world function has been directly called
-        /// from an account (with sozo for example), to avoid any malicious contract between the
-        /// account and the world to be able to call some functions with account privileges.
-        #[inline(always)]
-        fn assert_caller_is_account(self: @ContractState) {
-            let caller = get_caller_address();
-            let account = get_tx_info().unbox().account_contract_address;
-
-            if caller != account {
-                panic_with_byte_array(@errors::caller_not_account(caller));
-            }
-        }
-
         /// Panics if the caller is NOT an owner of the resource.
         ///
         /// # Arguments
         ///   * `resource_selector` - the selector of the resource.
         #[inline(always)]
-        fn assert_resource_owner(self: @ContractState, resource_selector: felt252) {
+        fn assert_caller_is_resource_owner(self: @ContractState, resource_selector: felt252) {
             let caller = get_caller_address();
 
             if self.is_owner(resource_selector, caller) {
@@ -969,7 +952,7 @@ pub mod world {
         /// # Arguments
         ///   * `model_selector` - the selector of the model.
         #[inline(always)]
-        fn assert_model_write_access(self: @ContractState, model_selector: felt252) {
+        fn assert_caller_model_write_access(self: @ContractState, model_selector: felt252) {
             let caller = get_caller_address();
 
             // Must have owner or writer role on the namespace or on the model.
@@ -1036,7 +1019,7 @@ pub mod world {
         ///   * `namespace` - the namespace name.
         ///   * `namespace_hash` - the hash of the namespace.
         #[inline(always)]
-        fn assert_namespace_write_access(
+        fn assert_caller_namespace_write_access(
             self: @ContractState, namespace: @ByteArray, namespace_hash: felt252
         ) {
             let caller = get_caller_address();
@@ -1051,6 +1034,8 @@ pub mod world {
                 return;
             }
 
+            // We know it's an account and return the explicit error message as no tag will match
+            // the account.
             if caller == get_tx_info().account_contract_address {
                 panic_with_byte_array(@errors::no_namespace_write_access(caller, namespace));
             }
@@ -1068,6 +1053,9 @@ pub mod world {
                     @errors::no_write_access_with_tags(@d.tag(), @"namespace", namespace)
                 );
             } else {
+                // This is in theory unreachable code, as the contract call syscall made by the
+                // dispatcher will panic, which may lead to a bad user experience in testing as the
+                // error will be something like "CONTRACT_NOT_DEPLOYED".
                 panic_with_byte_array(@errors::no_namespace_write_access(caller, namespace));
             }
         }
@@ -1104,7 +1092,7 @@ pub mod world {
                     let entity_id = entity_id_from_keys(keys);
                     self.write_model_entity(model_selector, entity_id, values, layout);
                     EventEmitter::emit(
-                        ref self, StoreSetRecord { table: model_selector, keys, values }
+                        ref self, StoreSetRecord { table: model_selector, keys, values, entity_id }
                    );
                 },
                 ModelIndex::Id(entity_id) => {
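Note: taken together, the renamed assertions define the write path: `set_entity`/`delete_entity` require the direct caller to be an owner or writer of the model or its namespace, with no intermediate account check. A minimal sketch of granting write access, assuming `deploy_world` and the `foo`/`Foo` test helpers (the `system` address here is hypothetical):

    fn sketch_grant_writer_then_write(world: IWorldDispatcher) {
        world.register_model(foo::TEST_CLASS_HASH.try_into().unwrap());

        // Let a (hypothetical) system contract write the whole namespace.
        let system = starknet::contract_address_const::<0x5151>();
        world.grant_writer(Model::<Foo>::namespace_hash(), system);

        // From now on, calls where `system` is the direct caller pass
        // assert_caller_model_write_access for any model in that namespace.
    }
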
diff --git a/crates/dojo-lang/src/compiler.rs b/crates/dojo-lang/src/compiler.rs
index 8a5af0c33f..b4b9ab312f 100644
--- a/crates/dojo-lang/src/compiler.rs
+++ b/crates/dojo-lang/src/compiler.rs
@@ -1,13 +1,15 @@
 use std::collections::{BTreeMap, HashMap};
 use std::fs;
 use std::io::Write;
-use std::iter::zip;
 use std::ops::DerefMut;
+use std::rc::Rc;
 
 use anyhow::{anyhow, Context, Result};
 use cairo_lang_compiler::db::RootDatabase;
 use cairo_lang_defs::db::DefsGroup;
-use cairo_lang_defs::ids::{ModuleId, ModuleItemId, TopLevelLanguageElementId};
+use cairo_lang_defs::ids::{
+    ModuleId, ModuleItemId, NamedLanguageElementId, TopLevelLanguageElementId,
+};
 use cairo_lang_filesystem::db::FilesGroup;
 use cairo_lang_filesystem::ids::{CrateId, CrateLongId};
 use cairo_lang_formatter::format_string;
@@ -15,6 +17,7 @@ use cairo_lang_semantic::db::SemanticGroup;
 use cairo_lang_starknet::compile::compile_prepared_db;
 use cairo_lang_starknet::contract::{find_contracts, ContractDeclaration};
 use cairo_lang_starknet_classes::abi;
+use cairo_lang_starknet_classes::allowed_libfuncs::{AllowedLibfuncsError, ListSelector};
 use cairo_lang_starknet_classes::contract_class::ContractClass;
 use cairo_lang_utils::UpcastMut;
 use camino::Utf8PathBuf;
@@ -25,7 +28,7 @@ use dojo_world::manifest::{
     BASE_CONTRACT_TAG, BASE_DIR, BASE_QUALIFIED_PATH, CONTRACTS_DIR, MANIFESTS_DIR, MODELS_DIR,
     WORLD_CONTRACT_TAG, WORLD_QUALIFIED_PATH,
 };
-use itertools::Itertools;
+use itertools::{izip, Itertools};
 use scarb::compiler::helpers::{build_compiler_config, collect_main_crate_ids};
 use scarb::compiler::{CairoCompilationUnit, CompilationUnitAttributes, Compiler};
 use scarb::core::{PackageName, TargetKind, Workspace};
@@ -38,6 +41,19 @@ use starknet::core::types::Felt;
 use tracing::{debug, trace, trace_span};
 
 use crate::plugin::{DojoAuxData, Model};
+use crate::scarb_internal::debug::SierraToCairoDebugInfo;
+
+#[derive(Debug, Clone)]
+pub struct CompiledArtifact {
+    /// The class hash of the Sierra contract.
+    class_hash: Felt,
+    /// The actual compiled Sierra contract class.
+    contract_class: Rc<ContractClass>,
+    debug_info: Option<Rc<SierraToCairoDebugInfo>>,
+}
+
+/// A type alias for a map of compiled artifacts by their path.
+type CompiledArtifactByPath = HashMap<String, CompiledArtifact>;
 
 const CAIRO_PATH_SEPARATOR: &str = "::";
 
@@ -47,8 +63,21 @@ pub(crate) const LOG_TARGET: &str = "dojo_lang::compiler";
 #[path = "compiler_test.rs"]
 mod test;
 
-#[derive(Debug)]
-pub struct DojoCompiler;
+#[derive(Debug, Default)]
+pub struct DojoCompiler {
+    /// Output the debug information of the compiled Sierra contracts.
+    ///
+    /// Mainly used for the Walnut debugger integration. It is used
+    /// internally by Walnut to build the Dojo project with the Sierra
+    /// debug information. This flag has no use outside of that.
+    output_debug_info: bool,
+}
+
+impl DojoCompiler {
+    pub fn new(output_debug_info: bool) -> Self {
+        Self { output_debug_info }
+    }
+}
 
 #[derive(Debug, Default, Serialize, Deserialize)]
 #[serde(rename_all = "kebab-case")]
@@ -85,6 +114,7 @@ impl Compiler for DojoCompiler {
         TargetKind::new("dojo")
     }
 
+    // TODO: refactor the main loop here, it could be much simpler and more efficient.
     fn compile(
         &self,
         unit: CairoCompilationUnit,
@@ -126,19 +156,64 @@ impl Compiler for DojoCompiler {
             compile_prepared_db(db, &contracts, compiler_config)?
         };
 
-        let mut compiled_classes: HashMap<String, (Felt, ContractClass)> = HashMap::new();
+        let debug_info_classes: Vec<Option<SierraToCairoDebugInfo>> = if self.output_debug_info {
+            let debug_classes =
+                crate::scarb_internal::debug::compile_prepared_db_to_debug_info(db, &contracts)?;
+
+            debug_classes
+                .into_iter()
+                .map(|d| Some(crate::scarb_internal::debug::get_sierra_to_cairo_debug_info(&d, db)))
+                .collect()
+        } else {
+            vec![None; contracts.len()]
+        };
+
+        let mut compiled_classes: CompiledArtifactByPath = HashMap::new();
+        let list_selector = ListSelector::default();
 
-        for (decl, class) in zip(contracts, classes) {
+        for (decl, contract_class, debug_info) in izip!(contracts, classes, debug_info_classes) {
+            let contract_name = decl.submodule_id.name(db.upcast_mut());
             // note that the qualified path is in snake case while
             // the `full_path()` method of StructId uses the original struct name case.
             // (see in `get_dojo_model_artifacts`)
             let qualified_path = decl.module_id().full_path(db.upcast_mut());
 
-            let class_hash = compute_class_hash_of_contract_class(&class).with_context(|| {
-                format!("problem computing class hash for contract `{}`", qualified_path.clone())
-            })?;
+            match contract_class.validate_version_compatible(list_selector.clone()) {
+                Ok(()) => {}
+                Err(AllowedLibfuncsError::UnsupportedLibfunc {
+                    invalid_libfunc,
+                    allowed_libfuncs_list_name: _,
+                }) => {
+                    let diagnostic = format! {r#"
+Contract `{contract_name}` ({qualified_path}) includes `{invalid_libfunc}` function that is not allowed in the default libfuncs for public Starknet networks (mainnet, sepolia).
+It will work on Katana, but don't forget to remove it before deploying on a public Starknet network.
+"#};
+
+                    ws.config().ui().warn(diagnostic);
+                }
+                Err(e) => {
+                    return Err(e).with_context(|| {
+                        format!("Failed to check allowed libfuncs for contract: {}", contract_name)
+                    });
+                }
+            }
 
-            compiled_classes.insert(qualified_path, (class_hash, class));
+            let class_hash =
+                compute_class_hash_of_contract_class(&contract_class).with_context(|| {
+                    format!(
+                        "problem computing class hash for contract `{}`",
+                        qualified_path.clone()
+                    )
+                })?;
+
+            compiled_classes.insert(
+                qualified_path,
+                CompiledArtifact {
+                    class_hash,
+                    contract_class: Rc::new(contract_class),
+                    debug_info: debug_info.map(Rc::new),
+                },
+            );
         }
 
         update_files(
@@ -230,7 +305,7 @@ fn update_files(
     ws: &Workspace<'_>,
     target_dir: &Filesystem,
     crate_ids: &[CrateId],
-    compiled_artifacts: HashMap<String, (Felt, ContractClass)>,
+    compiled_artifacts: CompiledArtifactByPath,
     external_contracts: Option<Vec<ContractSelector>>,
 ) -> anyhow::Result<()> {
     let profile_name =
@@ -244,9 +319,9 @@ fn update_files(
     let manifest_dir = ws.manifest_path().parent().unwrap().to_path_buf();
 
     fn get_compiled_artifact_from_map<'a>(
-        artifacts: &'a HashMap<String, (Felt, ContractClass)>,
+        artifacts: &'a CompiledArtifactByPath,
         qualified_artifact_path: &str,
-    ) -> anyhow::Result<&'a (Felt, ContractClass)> {
+    ) -> anyhow::Result<&'a CompiledArtifact> {
         artifacts.get(qualified_artifact_path).context(format!(
            "Contract `{qualified_artifact_path}` not found. Did you include `dojo` as a \
             dependency?",
        ))
    }
 
@@ -259,7 +334,7 @@ fn update_files(
     for (qualified_path, tag) in
         [(WORLD_QUALIFIED_PATH, WORLD_CONTRACT_TAG), (BASE_QUALIFIED_PATH, BASE_CONTRACT_TAG)]
     {
-        let (hash, class) = get_compiled_artifact_from_map(&compiled_artifacts, qualified_path)?;
+        let artifact = get_compiled_artifact_from_map(&compiled_artifacts, qualified_path)?;
 
         let filename = naming::get_filename_from_tag(tag);
         write_manifest_and_abi(
             &base_manifests_dir,
@@ -268,16 +343,20 @@ fn update_files(
             &mut Manifest::new(
                 // abi path will be written by `write_manifest`
                 Class {
-                    class_hash: *hash,
+                    class_hash: artifact.class_hash,
                     abi: None,
-                    original_class_hash: *hash,
+                    original_class_hash: artifact.class_hash,
                     tag: tag.to_string(),
                 },
                 filename.clone(),
             ),
-            &class.abi,
+            &artifact.contract_class.abi,
         )?;
-        save_json_artifact_file(ws, target_dir, class, &filename, tag)?;
+        save_json_artifact_file(ws, target_dir, &artifact.contract_class, &filename, tag)?;
+
+        if let Some(debug_info) = &artifact.debug_info {
+            save_json_artifact_debug_file(ws, target_dir, debug_info, &filename, tag)?;
+        }
     }
 
     let mut models = BTreeMap::new();
@@ -354,13 +433,13 @@ fn update_files(
         std::fs::create_dir_all(&base_contracts_abis_dir)?;
     }
 
-    for (_, (manifest, class, module_id)) in contracts.iter_mut() {
+    for (_, (manifest, module_id, artifact)) in contracts.iter_mut() {
         write_manifest_and_abi(
             &base_contracts_dir,
             &base_contracts_abis_dir,
             &manifest_dir,
             manifest,
-            &class.abi,
+            &artifact.contract_class.abi,
         )?;
 
         let filename = naming::get_filename_from_tag(&manifest.inner.tag);
@@ -372,7 +451,23 @@ fn update_files(
             &filename,
             &manifest.inner.tag,
         )?;
-        save_json_artifact_file(ws, &contracts_dir, class, &filename, &manifest.inner.tag)?;
+        save_json_artifact_file(
+            ws,
+            &contracts_dir,
+            &artifact.contract_class,
+            &filename,
+            &manifest.inner.tag,
+        )?;
+
+        if let Some(debug_info) = &artifact.debug_info {
+            save_json_artifact_debug_file(
+                ws,
+                &contracts_dir,
+                debug_info,
+                &filename,
+                &manifest.inner.tag,
+            )?;
+        }
     }
 
     let models_dir = target_dir.child(MODELS_DIR);
@@ -391,18 +486,34 @@ fn update_files(
         std::fs::create_dir_all(&base_models_abis_dir)?;
     }
 
-    for (_, (manifest, class, module_id)) in models.iter_mut() {
+    for (_, (manifest, module_id, artifact)) in models.iter_mut() {
         write_manifest_and_abi(
             &base_models_dir,
             &base_models_abis_dir,
             &manifest_dir,
             manifest,
-            &class.abi,
+            &artifact.contract_class.abi,
         )?;
 
         let filename = naming::get_filename_from_tag(&manifest.inner.tag);
         save_expanded_source_file(ws, *module_id, db, &models_dir, &filename, &manifest.inner.tag)?;
-        save_json_artifact_file(ws, &models_dir, class, &filename, &manifest.inner.tag)?;
+        save_json_artifact_file(
+            ws,
+            &models_dir,
+            &artifact.contract_class,
+            &filename,
+            &manifest.inner.tag,
+        )?;
+
+        if let Some(debug_info) = &artifact.debug_info {
+            save_json_artifact_debug_file(
+                ws,
+                &models_dir,
+                debug_info,
+                &filename,
+                &manifest.inner.tag,
+            )?;
+        }
     }
 
     Ok(())
@@ -415,8 +526,8 @@ fn get_dojo_model_artifacts(
     db: &RootDatabase,
     aux_data: &Vec<Model>,
     module_id: ModuleId,
-    compiled_classes: &HashMap<String, (Felt, ContractClass)>,
-) -> anyhow::Result<HashMap<String, (Manifest<DojoModel>, ContractClass, ModuleId)>> {
+    compiled_classes: &CompiledArtifactByPath,
+) -> anyhow::Result<HashMap<String, (Manifest<DojoModel>, ModuleId, CompiledArtifact)>> {
     let mut models = HashMap::with_capacity(aux_data.len());
 
     for model in aux_data {
@@ -430,25 +541,18 @@ fn get_dojo_model_artifacts(
         let compiled_class = compiled_classes.get(&qualified_path).cloned();
         let tag = naming::get_tag(&model.namespace, &model.name);
 
-        if let Some((class_hash, class)) = compiled_class {
-            models.insert(
-                qualified_path.clone(),
-                (
-                    Manifest::new(
-                        DojoModel {
-                            tag: tag.clone(),
-                            class_hash,
-                            abi: None,
-                            members: model.members.clone(),
-                            original_class_hash: class_hash,
-                            qualified_path,
-                        },
-                        naming::get_filename_from_tag(&tag),
-                    ),
-                    class,
-                    module_id,
-                ),
-            );
+        if let Some(artifact) = compiled_class {
+            let dojo_model = DojoModel {
+                abi: None,
+                tag: tag.clone(),
+                members: model.members.clone(),
+                class_hash: artifact.class_hash,
+                qualified_path: qualified_path.clone(),
+                original_class_hash: artifact.class_hash,
+            };
+
+            let manifest = Manifest::new(dojo_model, naming::get_filename_from_tag(&tag));
+
+            models.insert(qualified_path, (manifest, module_id, artifact.clone()));
         } else {
             println!("Model {} not found in target.", tag.clone());
         }
@@ -463,9 +567,9 @@ fn get_dojo_contract_artifacts(
     db: &RootDatabase,
     module_id: &ModuleId,
     tag: &str,
-    compiled_classes: &HashMap<String, (Felt, ContractClass)>,
+    compiled_classes: &CompiledArtifactByPath,
     systems: &[String],
-) -> anyhow::Result<HashMap<String, (Manifest<DojoContract>, ContractClass, ModuleId)>> {
+) -> Result<HashMap<String, (Manifest<DojoContract>, ModuleId, CompiledArtifact)>> {
     let mut result = HashMap::new();
 
     if !matches!(naming::get_name_from_tag(tag).as_str(), "world" | "resource_metadata" | "base") {
@@ -475,24 +579,24 @@ fn get_dojo_contract_artifacts(
         let contract_qualified_path =
             format!("{}{}{}", module_id.full_path(db), CAIRO_PATH_SEPARATOR, contract_name);
 
-        if let Some((class_hash, class)) =
-            compiled_classes.get(&contract_qualified_path.to_string())
-        {
+        if let Some(artifact) = compiled_classes.get(&contract_qualified_path.to_string()) {
             let manifest = Manifest::new(
                 DojoContract {
                     tag: tag.to_string(),
                     writes: vec![],
                     reads: vec![],
-                    class_hash: *class_hash,
-                    original_class_hash: *class_hash,
+                    class_hash: artifact.class_hash,
+                    original_class_hash: artifact.class_hash,
                     systems: systems.to_vec(),
                     ..Default::default()
                 },
                 naming::get_filename_from_tag(tag),
             );
 
-            result
-                .insert(contract_qualified_path.to_string(), (manifest, class.clone(), *module_id));
+            result.insert(
+                contract_qualified_path.to_string(),
+                (manifest, *module_id, artifact.clone()),
+            );
         }
     }
 
@@ -605,3 +709,22 @@ fn save_json_artifact_file(
         .with_context(|| format!("failed to serialize contract artifact: {contract_tag}"))?;
 
     Ok(())
 }
+
+fn save_json_artifact_debug_file(
+    ws: &Workspace<'_>,
+    contract_dir: &Filesystem,
+    debug_info: &SierraToCairoDebugInfo,
+    contract_basename: &str,
+    contract_tag: &str,
+) -> anyhow::Result<()> {
+    let mut file = contract_dir.open_rw(
+        format!("{contract_basename}.debug.json"),
+        "class file",
+        ws.config(),
+    )?;
+
+    serde_json::to_writer_pretty(file.deref_mut(), debug_info)
+        .with_context(|| format!("failed to serialize contract debug artifact: {contract_tag}"))?;
+
+    Ok(())
+}
diff --git a/crates/dojo-lang/src/contract.rs b/crates/dojo-lang/src/contract.rs
index 337c01a935..23810b113b 100644
--- a/crates/dojo-lang/src/contract.rs
+++ b/crates/dojo-lang/src/contract.rs
@@ -130,17 +130,21 @@ impl DojoContract {
         let node = RewriteNode::interpolate_patched(
             "
             #[starknet::interface]
-            trait IDojoInit<ContractState> {
+            pub trait IDojoInit<ContractState> {
                 fn $init_name$(self: @ContractState);
             }
 
             #[abi(embed_v0)]
-            impl IDojoInitImpl of IDojoInit<ContractState> {
+            pub impl IDojoInitImpl of IDojoInit<ContractState> {
                 fn $init_name$(self: @ContractState) {
-                    assert(starknet::get_caller_address() == \
                     self.world().contract_address, 'Only world can init');
-                    assert(self.world().is_owner(self.selector(), \
                     starknet::get_tx_info().account_contract_address), 'Only owner can init');
starknet::get_caller_address() != self.world().contract_address { + core::panics::panic_with_byte_array( + @format!(\"Only the world can init contract `{}`, but caller \ + is `{:?}`\", + self.tag(), + starknet::get_caller_address(), + )); + } } } ", @@ -294,26 +298,26 @@ impl DojoContract { let trait_node = RewriteNode::interpolate_patched( "#[starknet::interface] - trait IDojoInit { - fn dojo_init($params_str$); + pub trait IDojoInit { + fn $init_name$($params_str$); } ", - &UnorderedHashMap::from([( - "params_str".to_string(), - RewriteNode::Text(params_str.clone()), - )]), + &UnorderedHashMap::from([ + ("init_name".to_string(), RewriteNode::Text(DOJO_INIT_FN.to_string())), + ("params_str".to_string(), RewriteNode::Text(params_str.clone())), + ]), ); let impl_node = RewriteNode::Text( " #[abi(embed_v0)] - impl IDojoInitImpl of IDojoInit { + pub impl IDojoInitImpl of IDojoInit { " .to_string(), ); let declaration_node = RewriteNode::Mapped { - node: Box::new(RewriteNode::Text(format!("fn dojo_init({}) {{", params_str))), + node: Box::new(RewriteNode::Text(format!("fn {}({}) {{", DOJO_INIT_FN, params_str))), origin: fn_ast.declaration(db).as_syntax_node().span_without_trivia(db), }; @@ -324,8 +328,9 @@ impl DojoContract { }; let assert_world_caller_node = RewriteNode::Text( - "assert(starknet::get_caller_address() == self.world().contract_address, 'Only world \ - can init');" + "if starknet::get_caller_address() != self.world().contract_address { \ + core::panics::panic_with_byte_array(@format!(\"Only the world can init contract \ + `{}`, but caller is `{:?}`\", self.tag(), starknet::get_caller_address())); }" .to_string(), ); diff --git a/crates/dojo-lang/src/inline_macros/delete.rs b/crates/dojo-lang/src/inline_macros/delete.rs index 36e0f70b59..fc51eb28fc 100644 --- a/crates/dojo-lang/src/inline_macros/delete.rs +++ b/crates/dojo-lang/src/inline_macros/delete.rs @@ -92,7 +92,7 @@ impl InlineMacroExprPlugin for DeleteMacro { builder.add_str(&format!( " let __delete_model_instance__ = {}; - dojo::model::Model::delete(@__delete_model_instance__, {}); + dojo::model::Model::delete_model(@__delete_model_instance__, {}); ", entity, world.as_syntax_node().get_text(db), diff --git a/crates/dojo-lang/src/inline_macros/get_models_test_class_hashes.rs b/crates/dojo-lang/src/inline_macros/get_models_test_class_hashes.rs new file mode 100644 index 0000000000..1cfcfe0ea4 --- /dev/null +++ b/crates/dojo-lang/src/inline_macros/get_models_test_class_hashes.rs @@ -0,0 +1,104 @@ +use cairo_lang_defs::patcher::PatchBuilder; +use cairo_lang_defs::plugin::{ + InlineMacroExprPlugin, InlinePluginResult, MacroPluginMetadata, NamedPlugin, PluginDiagnostic, + PluginGeneratedFile, +}; +use cairo_lang_defs::plugin_utils::unsupported_bracket_diagnostic; +use cairo_lang_diagnostics::Severity; +use cairo_lang_syntax::node::{ast, TypedStablePtr, TypedSyntaxNode}; + +use super::unsupported_arg_diagnostic; +use super::utils::{extract_namespaces, load_manifest_models_and_namespaces}; + +#[derive(Debug, Default)] +pub struct GetModelsTestClassHashes; + +impl NamedPlugin for GetModelsTestClassHashes { + const NAME: &'static str = "get_models_test_class_hashes"; +} + +impl InlineMacroExprPlugin for GetModelsTestClassHashes { + fn generate_code( + &self, + db: &dyn cairo_lang_syntax::node::db::SyntaxGroup, + syntax: &ast::ExprInlineMacro, + metadata: &MacroPluginMetadata<'_>, + ) -> InlinePluginResult { + let ast::WrappedArgList::ParenthesizedArgList(arg_list) = syntax.arguments(db) else { + return 
unsupported_bracket_diagnostic(db, syntax); + }; + + let args = arg_list.arguments(db).elements(db); + + if args.len() > 1 { + return InlinePluginResult { + code: None, + diagnostics: vec![PluginDiagnostic { + stable_ptr: syntax.stable_ptr().untyped(), + message: "Invalid arguments. Expected \ \"get_models_test_class_hashes!([\"ns1\", \"ns2\"])\" or \ \"get_models_test_class_hashes!()\"." + .to_string(), + severity: Severity::Error, + }], + }; + } + + let whitelisted_namespaces = if args.len() == 1 { + let ast::ArgClause::Unnamed(expected_array) = args[0].arg_clause(db) else { + return unsupported_arg_diagnostic(db, syntax); + }; + + match extract_namespaces(db, &expected_array.value(db)) { + Ok(namespaces) => namespaces, + Err(e) => { + return InlinePluginResult { code: None, diagnostics: vec![e] }; + } + } + } else { + vec![] + }; + + let (_namespaces, models) = + match load_manifest_models_and_namespaces(metadata.cfg_set, &whitelisted_namespaces) { + Ok((namespaces, models)) => (namespaces, models), + Err(_e) => { + return InlinePluginResult { + code: None, + diagnostics: vec![PluginDiagnostic { + stable_ptr: syntax.stable_ptr().untyped(), + message: "Failed to load models and namespaces, ensure you have run \ `sozo build` first." + .to_string(), + severity: Severity::Error, + }], + }; + } + }; + + let mut builder = PatchBuilder::new(db, syntax); + + // Use the TEST_CLASS_HASH for each model, which is already a qualified path, no `use` + // required. + builder.add_str(&format!( + "[{}].span()", + models + .iter() + .map(|m| format!("{}::TEST_CLASS_HASH", m)) + .collect::<Vec<_>>() + .join(", ") + )); + + let (code, code_mappings) = builder.build(); + + InlinePluginResult { + code: Some(PluginGeneratedFile { + name: "get_models_test_class_hashes_macro".into(), + content: code, + code_mappings, + aux_data: None, + }), + diagnostics: vec![], + } + } +} diff --git a/crates/dojo-lang/src/inline_macros/mod.rs b/crates/dojo-lang/src/inline_macros/mod.rs index 35bef33321..5c90d35004 100644 --- a/crates/dojo-lang/src/inline_macros/mod.rs +++ b/crates/dojo-lang/src/inline_macros/mod.rs @@ -7,8 +7,10 @@ use smol_str::SmolStr; pub mod delete; pub mod emit; pub mod get; +pub mod get_models_test_class_hashes; pub mod selector_from_tag; pub mod set; +pub mod spawn_test_world; pub mod utils; const CAIRO_ERR_MSG_LEN: usize = 31; diff --git a/crates/dojo-lang/src/inline_macros/set.rs b/crates/dojo-lang/src/inline_macros/set.rs index ad1f3c269d..b027d22c58 100644 --- a/crates/dojo-lang/src/inline_macros/set.rs +++ b/crates/dojo-lang/src/inline_macros/set.rs @@ -106,7 +106,7 @@ impl InlineMacroExprPlugin for SetMacro { builder.add_str(&format!( " let __set_model_instance__ = {}; - dojo::model::Model::set(@__set_model_instance__, {}); + dojo::model::Model::set_model(@__set_model_instance__, {}); ", entity, world.as_syntax_node().get_text(db), diff --git a/crates/dojo-lang/src/inline_macros/spawn_test_world.rs b/crates/dojo-lang/src/inline_macros/spawn_test_world.rs new file mode 100644 index 0000000000..aa21fd8290 --- /dev/null +++ b/crates/dojo-lang/src/inline_macros/spawn_test_world.rs @@ -0,0 +1,102 @@ +use cairo_lang_defs::patcher::PatchBuilder; +use cairo_lang_defs::plugin::{ + InlineMacroExprPlugin, InlinePluginResult, MacroPluginMetadata, NamedPlugin, PluginDiagnostic, + PluginGeneratedFile, +}; +use cairo_lang_defs::plugin_utils::unsupported_bracket_diagnostic; +use cairo_lang_diagnostics::Severity; +use cairo_lang_syntax::node::{ast, TypedStablePtr, TypedSyntaxNode}; + +use
super::unsupported_arg_diagnostic; +use super::utils::{extract_namespaces, load_manifest_models_and_namespaces}; + +#[derive(Debug, Default)] +pub struct SpawnTestWorld; + +impl NamedPlugin for SpawnTestWorld { + const NAME: &'static str = "spawn_test_world"; +} + +impl InlineMacroExprPlugin for SpawnTestWorld { + fn generate_code( + &self, + db: &dyn cairo_lang_syntax::node::db::SyntaxGroup, + syntax: &ast::ExprInlineMacro, + metadata: &MacroPluginMetadata<'_>, + ) -> InlinePluginResult { + let ast::WrappedArgList::ParenthesizedArgList(arg_list) = syntax.arguments(db) else { + return unsupported_bracket_diagnostic(db, syntax); + }; + + let args = arg_list.arguments(db).elements(db); + + if args.len() > 1 { + return InlinePluginResult { + code: None, + diagnostics: vec![PluginDiagnostic { + stable_ptr: syntax.stable_ptr().untyped(), + message: "Invalid arguments. Expected \"spawn_test_world!()\" or \ \"spawn_test_world!([\"ns1\"])\"." + .to_string(), + severity: Severity::Error, + }], + }; + } + + let whitelisted_namespaces = if args.len() == 1 { + let ast::ArgClause::Unnamed(expected_array) = args[0].arg_clause(db) else { + return unsupported_arg_diagnostic(db, syntax); + }; + + match extract_namespaces(db, &expected_array.value(db)) { + Ok(namespaces) => namespaces, + Err(e) => { + return InlinePluginResult { code: None, diagnostics: vec![e] }; + } + } + } else { + vec![] + }; + + let (namespaces, models) = + match load_manifest_models_and_namespaces(metadata.cfg_set, &whitelisted_namespaces) { + Ok((namespaces, models)) => (namespaces, models), + Err(_e) => { + return InlinePluginResult { + code: None, + diagnostics: vec![PluginDiagnostic { + stable_ptr: syntax.stable_ptr().untyped(), + message: "Failed to load models and namespaces, ensure you have run \ `sozo build` first." + .to_string(), + severity: Severity::Error, + }], + }; + } + }; + + let mut builder = PatchBuilder::new(db, syntax); + + builder.add_str(&format!( + "dojo::utils::test::spawn_test_world([{}].span(), [{}].span())", + namespaces.iter().map(|n| format!("\"{}\"", n)).collect::<Vec<_>>().join(", "), + models + .iter() + .map(|m| format!("{}::TEST_CLASS_HASH", m)) + .collect::<Vec<_>>() + .join(", ") + )); + + let (code, code_mappings) = builder.build(); + + InlinePluginResult { + code: Some(PluginGeneratedFile { + name: "spawn_test_world_macro".into(), + content: code, + code_mappings, + aux_data: None, + }), + diagnostics: vec![], + } + } +} diff --git a/crates/dojo-lang/src/inline_macros/utils.rs b/crates/dojo-lang/src/inline_macros/utils.rs index 00dee3a875..53bb8342fa 100644 --- a/crates/dojo-lang/src/inline_macros/utils.rs +++ b/crates/dojo-lang/src/inline_macros/utils.rs @@ -1,6 +1,16 @@ -use cairo_lang_syntax::node::ast::{ExprPath, ExprStructCtorCall}; +use std::collections::HashSet; + +use cairo_lang_defs::plugin::PluginDiagnostic; +use cairo_lang_diagnostics::Severity; +use cairo_lang_filesystem::cfg::CfgSet; +use cairo_lang_syntax::node::ast::{self, ExprPath, ExprStructCtorCall}; +use cairo_lang_syntax::node::db::SyntaxGroup; use cairo_lang_syntax::node::kind::SyntaxKind; -use cairo_lang_syntax::node::SyntaxNode; +use cairo_lang_syntax::node::{SyntaxNode, TypedStablePtr, TypedSyntaxNode}; +use camino::Utf8PathBuf; +use dojo_world::config::namespace_config::DOJO_MANIFESTS_DIR_CFG_KEY; +use dojo_world::contracts::naming; +use dojo_world::manifest::BaseManifest; #[derive(Debug)] pub enum SystemRWOpRecord { @@ -22,3 +32,79 @@ pub fn parent_of_kind( } None } + +/// Reads all the models and namespaces from the base manifest files.
+pub fn load_manifest_models_and_namespaces( + cfg_set: &CfgSet, + whitelisted_namespaces: &[String], +) -> anyhow::Result<(Vec<String>, Vec<String>)> { + let dojo_manifests_dir = get_dojo_manifests_dir(cfg_set.clone())?; + + let base_dir = dojo_manifests_dir.join("base"); + let base_abstract_manifest = BaseManifest::load_from_path(&base_dir)?; + + let mut models = HashSet::new(); + let mut namespaces = HashSet::new(); + + for model in base_abstract_manifest.models { + let qualified_path = model.inner.qualified_path; + let namespace = naming::split_tag(&model.inner.tag)?.0; + + if !whitelisted_namespaces.is_empty() && !whitelisted_namespaces.contains(&namespace) { + continue; + } + + models.insert(qualified_path); + namespaces.insert(namespace); + } + + let models_vec: Vec<String> = models.into_iter().collect(); + let namespaces_vec: Vec<String> = namespaces.into_iter().collect(); + + Ok((namespaces_vec, models_vec)) +} + +/// Gets the dojo_manifests_dir from the cfg_set. +pub fn get_dojo_manifests_dir(cfg_set: CfgSet) -> anyhow::Result<Utf8PathBuf> { + for cfg in cfg_set.into_iter() { + if cfg.key == DOJO_MANIFESTS_DIR_CFG_KEY { + return Ok(Utf8PathBuf::from(cfg.value.unwrap().as_str().to_string())); + } + } + + Err(anyhow::anyhow!("dojo_manifests_dir not found")) +} + +/// Extracts the namespaces from a fixed size array of strings. +pub fn extract_namespaces( + db: &dyn SyntaxGroup, + expression: &ast::Expr, +) -> Result<Vec<String>, PluginDiagnostic> { + let mut namespaces = vec![]; + + match expression { + ast::Expr::FixedSizeArray(array) => { + for element in array.exprs(db).elements(db) { + if let ast::Expr::String(string_literal) = element { + namespaces.push(string_literal.as_syntax_node().get_text(db).replace('\"', "")); + } else { + return Err(PluginDiagnostic { + stable_ptr: element.stable_ptr().untyped(), + message: "Expected a string literal".to_string(), + severity: Severity::Error, + }); + } + } + } + _ => { + return Err(PluginDiagnostic { + stable_ptr: expression.stable_ptr().untyped(), + message: "The list of namespaces should be a fixed size array of strings."
+ .to_string(), + severity: Severity::Error, + }); + } + } + + Ok(namespaces) +} diff --git a/crates/dojo-lang/src/manifest_test_data/compiler_cairo/manifests/dev/base/abis/dojo-world.json b/crates/dojo-lang/src/manifest_test_data/compiler_cairo/manifests/dev/base/abis/dojo-world.json index 897b4d959c..8553809311 100644 --- a/crates/dojo-lang/src/manifest_test_data/compiler_cairo/manifests/dev/base/abis/dojo-world.json +++ b/crates/dojo-lang/src/manifest_test_data/compiler_cairo/manifests/dev/base/abis/dojo-world.json @@ -939,6 +939,11 @@ "type": "core::felt252", "kind": "data" }, + { + "name": "entity_id", + "type": "core::felt252", + "kind": "data" + }, { "name": "keys", "type": "core::array::Span::", diff --git a/crates/dojo-lang/src/manifest_test_data/compiler_cairo/manifests/dev/base/dojo-world.toml b/crates/dojo-lang/src/manifest_test_data/compiler_cairo/manifests/dev/base/dojo-world.toml index 38a401a6d8..ff32465d06 100644 --- a/crates/dojo-lang/src/manifest_test_data/compiler_cairo/manifests/dev/base/dojo-world.toml +++ b/crates/dojo-lang/src/manifest_test_data/compiler_cairo/manifests/dev/base/dojo-world.toml @@ -1,6 +1,6 @@ kind = "Class" -class_hash = "0x3715f072aa1c07be724249fcda8b0322687f6c5c585eebc4402d162649c707c" -original_class_hash = "0x3715f072aa1c07be724249fcda8b0322687f6c5c585eebc4402d162649c707c" +class_hash = "0x6f4515274ee23404789c3351a77107d0ec07508530119822046600ca6948d6e" +original_class_hash = "0x6f4515274ee23404789c3351a77107d0ec07508530119822046600ca6948d6e" abi = "manifests/dev/base/abis/dojo-world.json" tag = "dojo-world" manifest_name = "dojo-world" diff --git a/crates/dojo-lang/src/model.rs b/crates/dojo-lang/src/model.rs index a5dcc99631..88e21fa69a 100644 --- a/crates/dojo-lang/src/model.rs +++ b/crates/dojo-lang/src/model.rs @@ -332,6 +332,14 @@ pub impl $type_name$EntityStoreImpl of $type_name$EntityStore { $type_name$ModelEntityImpl::get(world, entity_id) } + fn update(self: @$type_name$Entity, world: dojo::world::IWorldDispatcher) { + dojo::model::ModelEntity::<$type_name$Entity>::update_entity(self, world); + } + + fn delete(self: @$type_name$Entity, world: dojo::world::IWorldDispatcher) { + dojo::model::ModelEntity::<$type_name$Entity>::delete_entity(self, world); + } + $entity_field_accessors$ } @@ -368,6 +376,14 @@ pub impl $type_name$StoreImpl of $type_name$Store { dojo::model::Model::<$type_name$>::get(world, serialized.span()) } + fn set(self: @$type_name$, world: dojo::world::IWorldDispatcher) { + dojo::model::Model::<$type_name$>::set_model(self, world); + } + + fn delete(self: @$type_name$, world: dojo::world::IWorldDispatcher) { + dojo::model::Model::<$type_name$>::delete_model(self, world); + } + $field_accessors$ } @@ -406,7 +422,7 @@ pub impl $type_name$ModelEntityImpl of dojo::model::ModelEntity<$type_name$Entit Self::from_values(entity_id, ref values) } - fn update(self: @$type_name$Entity, world: dojo::world::IWorldDispatcher) { + fn update_entity(self: @$type_name$Entity, world: dojo::world::IWorldDispatcher) { dojo::world::IWorldDispatcherTrait::set_entity( world, dojo::model::Model::<$type_name$>::selector(), @@ -416,7 +432,7 @@ pub impl $type_name$ModelEntityImpl of dojo::model::ModelEntity<$type_name$Entit ); } - fn delete(self: @$type_name$Entity, world: dojo::world::IWorldDispatcher) { + fn delete_entity(self: @$type_name$Entity, world: dojo::world::IWorldDispatcher) { dojo::world::IWorldDispatcherTrait::delete_entity( world, dojo::model::Model::<$type_name$>::selector(), @@ -507,7 +523,7 @@ pub impl $type_name$ModelImpl of 
dojo::model::Model<$type_name$> { $type_name$Store::from_values(ref _keys, ref values) } - fn set( + fn set_model( self: @$type_name$, world: dojo::world::IWorldDispatcher ) { @@ -520,7 +536,7 @@ pub impl $type_name$ModelImpl of dojo::model::Model<$type_name$> { ); } - fn delete( + fn delete_model( self: @$type_name$, world: dojo::world::IWorldDispatcher ) { diff --git a/crates/dojo-lang/src/plugin.rs b/crates/dojo-lang/src/plugin.rs index 237c349926..317bf2cb87 100644 --- a/crates/dojo-lang/src/plugin.rs +++ b/crates/dojo-lang/src/plugin.rs @@ -30,8 +30,10 @@ use crate::event::handle_event_struct; use crate::inline_macros::delete::DeleteMacro; use crate::inline_macros::emit::EmitMacro; use crate::inline_macros::get::GetMacro; +use crate::inline_macros::get_models_test_class_hashes::GetModelsTestClassHashes; use crate::inline_macros::selector_from_tag::SelectorFromTagMacro; use crate::inline_macros::set::SetMacro; +use crate::inline_macros::spawn_test_world::SpawnTestWorld; use crate::interface::DojoInterface; use crate::introspect::{handle_introspect_enum, handle_introspect_struct}; use crate::model::handle_model_struct; @@ -155,7 +157,9 @@ pub fn dojo_plugin_suite() -> PluginSuite { .add_inline_macro_plugin::() .add_inline_macro_plugin::() .add_inline_macro_plugin::() - .add_inline_macro_plugin::(); + .add_inline_macro_plugin::() + .add_inline_macro_plugin::() + .add_inline_macro_plugin::(); suite } diff --git a/crates/dojo-lang/src/scarb_internal/debug.rs b/crates/dojo-lang/src/scarb_internal/debug.rs new file mode 100644 index 0000000000..50b9ba7e0a --- /dev/null +++ b/crates/dojo-lang/src/scarb_internal/debug.rs @@ -0,0 +1,155 @@ +use std::collections::HashMap; +use std::env; +use std::sync::Arc; + +use anyhow::{Context, Result}; +use cairo_lang_compiler::db::RootDatabase; +use cairo_lang_diagnostics::ToOption; +use cairo_lang_filesystem::db::{get_originating_location, FilesGroup}; +use cairo_lang_filesystem::ids::{FileId, FileLongId}; +use cairo_lang_filesystem::span::TextSpan; +use cairo_lang_sierra_generator::db::SierraGenGroup; +use cairo_lang_sierra_generator::program_generator::{ + SierraProgramDebugInfo, SierraProgramWithDebug, +}; +use cairo_lang_starknet::compile::{extract_semantic_entrypoints, SemanticEntryPoints}; +use cairo_lang_starknet::contract::ContractDeclaration; +use itertools::{chain, Itertools}; +use serde::Serialize; + +pub fn compile_prepared_db_to_debug_info( + db: &RootDatabase, + contracts: &[&ContractDeclaration], + // mut compiler_config: CompilerConfig<'_>, +) -> Result> { + // compiler_config.diagnostics_reporter.ensure(db)?; + + contracts + .iter() + .map(|contract| compile_contract_with_prepared_and_checked_db_to_debug_info(db, contract)) + .try_collect() +} + +/// Compile declared Starknet contract. +/// +/// The `contract` value **must** come from `db`, for example as a result of calling +/// [`find_contracts`]. Does not check diagnostics, it is expected that they are checked by caller +/// of this function. 
+fn compile_contract_with_prepared_and_checked_db_to_debug_info( + db: &RootDatabase, + contract: &ContractDeclaration, +) -> Result<SierraProgramDebugInfo> { + let SemanticEntryPoints { external, l1_handler, constructor } = + extract_semantic_entrypoints(db, contract)?; + let SierraProgramWithDebug { program: _sierra_program, debug_info } = Arc::unwrap_or_clone( + db.get_sierra_program_for_functions( + chain!(&external, &l1_handler, &constructor).map(|f| f.value).collect(), + ) + .to_option() + .with_context(|| "Compilation failed without any diagnostics.")?, + ); + + Ok(debug_info) +} + +#[derive(Debug, Clone, Serialize)] +pub struct SierraToCairoDebugInfo { + pub sierra_statements_to_cairo_info: HashMap<usize, SierraStatementToCairoDebugInfo>, +} + +/// Human readable position inside a file, in lines and characters. +#[derive(Debug, Serialize, Clone)] +pub struct TextPosition { + /// Line index, 0 based. + pub line: usize, + /// Character index inside the line, 0 based. + pub col: usize, +} + +#[derive(Debug, Serialize, Clone)] +pub struct Location { + pub start: TextPosition, + pub end: TextPosition, + pub file_path: String, +} + +#[derive(Debug, Clone, Serialize)] +pub struct SierraStatementToCairoDebugInfo { + pub cairo_locations: Vec<Location>, +} + +/// Returns a map from Sierra statement indexes to Cairo source locations. +pub fn get_sierra_to_cairo_debug_info( + sierra_program_debug_info: &SierraProgramDebugInfo, + compiler_db: &RootDatabase, +) -> SierraToCairoDebugInfo { + let mut sierra_statements_to_cairo_info: HashMap<usize, SierraStatementToCairoDebugInfo> = + HashMap::new(); + + for (statement_idx, locations) in + sierra_program_debug_info.statements_locations.locations.iter_sorted() + { + let mut cairo_locations: Vec<Location> = Vec::new(); + for location in locations { + let syntax_node = location.syntax_node(compiler_db); + let file_id = syntax_node.stable_ptr().file_id(compiler_db); + let _file_name = file_id.file_name(compiler_db); + let syntax_node_location_span = syntax_node.span_without_trivia(compiler_db); + + let (originating_file_id, originating_text_span) = + get_originating_location(compiler_db, file_id, syntax_node_location_span); + let cairo_location = get_location_from_text_span( + originating_text_span, + originating_file_id, + compiler_db, + ); + if let Some(cl) = cairo_location { + cairo_locations.push(cl); + } + } + sierra_statements_to_cairo_info + .insert(statement_idx.0, SierraStatementToCairoDebugInfo { cairo_locations }); + } + + SierraToCairoDebugInfo { sierra_statements_to_cairo_info } +} + +pub fn get_location_from_text_span( + text_span: TextSpan, + file_id: FileId, + compiler_db: &RootDatabase, +) -> Option<Location> { + let current_dir = env::current_dir().expect("Failed to get current directory"); + // dbg!(&current_dir); + // let file_path = match compiler_db.lookup_intern_file(file_id) { + // FileLongId::OnDisk(path) => { + // path.strip_prefix(current_dir).expect("Failed to get relative + // path").to_path_buf().to_str().unwrap_or("").to_string() }, + // FileLongId::Virtual(_) => file_id.full_path(compiler_db), + // }; + let file_path = match compiler_db.lookup_intern_file(file_id) { + FileLongId::OnDisk(path) => match path.strip_prefix(&current_dir) { + Ok(relative_path) => relative_path.to_str().unwrap_or("").to_string(), + Err(_) => { + return None; + } + }, + FileLongId::Virtual(_) => { + return None; + } + }; + + // let file_path = file_id.full_path(compiler_db); + + let start: Option<TextPosition> = text_span + .start + .position_in_file(compiler_db, file_id) + .map(|s| TextPosition { line: s.line, col: s.col }); + + let end = text_span + .end + .position_in_file(compiler_db, file_id)
.map(|e| TextPosition { line: e.line, col: e.col }); + + start.zip(end).map(|(start, end)| Location { start, end, file_path }) +} diff --git a/crates/dojo-lang/src/scarb_internal/mod.rs b/crates/dojo-lang/src/scarb_internal/mod.rs index 4c313fb62c..15ba53e467 100644 --- a/crates/dojo-lang/src/scarb_internal/mod.rs +++ b/crates/dojo-lang/src/scarb_internal/mod.rs @@ -15,7 +15,9 @@ use cairo_lang_starknet::starknet_plugin_suite; use cairo_lang_test_plugin::test_plugin_suite; use cairo_lang_utils::ordered_hash_map::OrderedHashMap; use camino::Utf8PathBuf; -use dojo_world::config::{NamespaceConfig, DEFAULT_NAMESPACE_CFG_KEY, NAMESPACE_CFG_PREFIX}; +use dojo_world::config::{ + NamespaceConfig, DEFAULT_NAMESPACE_CFG_KEY, DOJO_MANIFESTS_DIR_CFG_KEY, NAMESPACE_CFG_PREFIX, +}; use dojo_world::metadata::dojo_metadata_from_package; use regex::Regex; use scarb::compiler::{ @@ -30,6 +32,7 @@ use tracing::trace; use crate::plugin::dojo_plugin_suite; pub(crate) const LOG_TARGET: &str = "dojo_lang::scarb_internal"; +pub mod debug; /// Compilation information of all the units found in the workspace. #[derive(Debug, Default)] @@ -110,6 +113,7 @@ pub fn compile_workspace( .collect::<Vec<_>>(); let mut compile_error_units = vec![]; + for unit in compilation_units { trace!(target: LOG_TARGET, unit_name = %unit.name(), target_kind = %unit.main_component().target_kind(), "Compiling unit."); @@ -208,6 +212,14 @@ pub fn cfg_set_from_component( let cname = c.cairo_package_name().clone(); let package_dojo_metadata = dojo_metadata_from_package(&c.package, ws)?; + let dojo_manifests_dir = ws + .config() + .manifest_path() + .parent() + .expect("Scarb.toml manifest should always have parent") + .join("manifests") + .join(ws.current_profile().expect("profile should be set").to_string()); + ui.verbose(format!("component: {} ({})", cname, c.package.manifest_path())); tracing::debug!(target: LOG_TARGET, ?c, ?package_dojo_metadata); @@ -226,6 +238,11 @@ pub fn cfg_set_from_component( // Add its name for debugging on the plugin side. cfg_set.insert(component_cfg); + cfg_set.insert(Cfg { + key: DOJO_MANIFESTS_DIR_CFG_KEY.into(), + value: Some(dojo_manifests_dir.to_string().into()), + }); + cfg_set.insert(Cfg { key: DEFAULT_NAMESPACE_CFG_KEY.into(), value: Some(package_dojo_metadata.namespace.default.into()), diff --git a/crates/dojo-lang/src/semantics/test_data/get_models_test_class_hashes b/crates/dojo-lang/src/semantics/test_data/get_models_test_class_hashes new file mode 100644 index 0000000000..b70f7b851f --- /dev/null +++ b/crates/dojo-lang/src/semantics/test_data/get_models_test_class_hashes @@ -0,0 +1,89 @@ +//! > Test ok empty (expected to fail due to missing dojo_manifests_dir) + +//! > test_runner_name +test_semantics + +//! > expression +get_models_test_class_hashes!() + +//! > expected +Missing( + ExprMissing { + ty: <missing>, + }, +) + +//! > semantic_diagnostics +error: Plugin diagnostic: Failed to load models and namespaces, ensure you have run `sozo build` first. + --> lib.cairo:2:1 +get_models_test_class_hashes!() +^*****************************^ + +//! > ========================================================================== + +//! > Test bad params + +//! > test_runner_name +test_semantics + +//! > expression +get_models_test_class_hashes!("ns1") + +//! > expected +Missing( + ExprMissing { + ty: <missing>, + }, +) + +//! > semantic_diagnostics +error: Plugin diagnostic: The list of namespaces should be a fixed size array of strings. + --> lib.cairo:2:31 +get_models_test_class_hashes!("ns1") + ^***^ + +//!
> ========================================================================== + +//! > Test too many params + +//! > test_runner_name +test_semantics + +//! > expression +get_models_test_class_hashes!("ns1", ["ns2"]) + +//! > expected +Missing( + ExprMissing { + ty: <missing>, + }, +) + +//! > semantic_diagnostics +error: Plugin diagnostic: Invalid arguments. Expected "get_models_test_class_hashes!(["ns1", "ns2"])" or "get_models_test_class_hashes!()". + --> lib.cairo:2:1 +get_models_test_class_hashes!("ns1", ["ns2"]) +^*******************************************^ + +//! > ========================================================================== + +//! > Test ok array (expected to fail due to missing dojo_manifests_dir) + +//! > test_runner_name +test_semantics + +//! > expression +get_models_test_class_hashes!(["ns1", "ns2"]) + +//! > expected +Missing( + ExprMissing { + ty: <missing>, + }, +) + +//! > semantic_diagnostics +error: Plugin diagnostic: Failed to load models and namespaces, ensure you have run `sozo build` first. + --> lib.cairo:2:1 +get_models_test_class_hashes!(["ns1", "ns2"]) +^*******************************************^ diff --git a/crates/dojo-lang/src/semantics/test_data/set b/crates/dojo-lang/src/semantics/test_data/set index 2a9249f47c..ddbd0b8287 100644 --- a/crates/dojo-lang/src/semantics/test_data/set +++ b/crates/dojo-lang/src/semantics/test_data/set @@ -120,7 +120,7 @@ Block( StatementExpr { expr: FunctionCall( ExprFunctionCall { - function: ?6::set, + function: ?6::set_model, args: [ Value( Snapshot( diff --git a/crates/dojo-lang/src/semantics/test_data/spawn_test_world b/crates/dojo-lang/src/semantics/test_data/spawn_test_world new file mode 100644 index 0000000000..70149dc31c --- /dev/null +++ b/crates/dojo-lang/src/semantics/test_data/spawn_test_world @@ -0,0 +1,66 @@ +//! > Test ok empty (expected to fail due to missing dojo_manifests_dir) + +//! > test_runner_name +test_semantics + +//! > expression +spawn_test_world!() + +//! > expected +Missing( + ExprMissing { + ty: <missing>, + }, +) + +//! > semantic_diagnostics +error: Plugin diagnostic: Failed to load models and namespaces, ensure you have run `sozo build` first. + --> lib.cairo:2:1 +spawn_test_world!() +^*****************^ + +//! > ========================================================================== + +//! > Test bad params + +//! > test_runner_name +test_semantics + +//! > expression +spawn_test_world!("ns1", "ns2") + +//! > expected +Missing( + ExprMissing { + ty: <missing>, + }, +) + +//! > semantic_diagnostics +error: Plugin diagnostic: Invalid arguments. Expected "spawn_test_world!()" or "spawn_test_world!(["ns1"])". + --> lib.cairo:2:1 +spawn_test_world!("ns1", "ns2") +^*****************************^ + +//! > ========================================================================== + +//! > Test ok namespaces + +//! > test_runner_name +test_semantics + +//! > expression +spawn_test_world!(["ns1", "ns2"]) + +//! > expected +Missing( + ExprMissing { + ty: <missing>, + }, +) + +//! > semantic_diagnostics +error: Plugin diagnostic: Failed to load models and namespaces, ensure you have run `sozo build` first.
+ --> lib.cairo:2:1 +spawn_test_world!(["ns1", "ns2"]) +^*******************************^ diff --git a/crates/dojo-lang/src/semantics/tests.rs b/crates/dojo-lang/src/semantics/tests.rs index 518def9ba7..2e8fba8a44 100644 --- a/crates/dojo-lang/src/semantics/tests.rs +++ b/crates/dojo-lang/src/semantics/tests.rs @@ -18,6 +18,10 @@ test_file_test!( set: "set", selector_from_tag: "selector_from_tag", + + get_models_test_class_hashes: "get_models_test_class_hashes", + + spawn_test_world: "spawn_test_world", }, test_semantics ); diff --git a/crates/dojo-test-utils/src/compiler.rs b/crates/dojo-test-utils/src/compiler.rs index 6fe6340e2f..53fa5269de 100644 --- a/crates/dojo-test-utils/src/compiler.rs +++ b/crates/dojo-test-utils/src/compiler.rs @@ -271,7 +271,7 @@ pub fn copy_project_temp( /// * `profile` - The profile to use for the config. pub fn build_test_config(path: &str, profile: Profile) -> anyhow::Result { let mut compilers = CompilerRepository::empty(); - compilers.add(Box::new(DojoCompiler)).unwrap(); + compilers.add(Box::new(DojoCompiler::default())).unwrap(); let cairo_plugins = CairoPluginRepository::default(); diff --git a/crates/dojo-test-utils/src/migration.rs b/crates/dojo-test-utils/src/migration.rs index 7f8af66492..1ee5d561f6 100644 --- a/crates/dojo-test-utils/src/migration.rs +++ b/crates/dojo-test-utils/src/migration.rs @@ -36,7 +36,7 @@ pub fn prepare_migration( ) .unwrap(); - if let Some(skip_manifests) = skip_migration { + if let Some(skip_manifests) = &skip_migration { manifest.remove_tags(skip_manifests); } diff --git a/crates/dojo-test-utils/src/sequencer.rs b/crates/dojo-test-utils/src/sequencer.rs index 334eab9a08..039b26fab1 100644 --- a/crates/dojo-test-utils/src/sequencer.rs +++ b/crates/dojo-test-utils/src/sequencer.rs @@ -3,10 +3,11 @@ use std::sync::Arc; use jsonrpsee::core::Error; pub use katana_core::backend::config::{Environment, StarknetConfig}; use katana_core::backend::Backend; +use katana_core::constants::DEFAULT_SEQUENCER_ADDRESS; #[allow(deprecated)] pub use katana_core::sequencer::SequencerConfig; use katana_executor::implementation::blockifier::BlockifierFactory; -use katana_node::NodeHandle; +use katana_node::Handle; use katana_primitives::chain::ChainId; use katana_rpc::config::ServerConfig; use katana_rpc_api::ApiKind; @@ -28,9 +29,8 @@ pub struct TestAccount { #[allow(missing_debug_implementations)] pub struct TestSequencer { url: Url, - handle: NodeHandle, + handle: Handle, account: TestAccount, - backend: Arc>, } impl TestSequencer { @@ -42,28 +42,22 @@ impl TestSequencer { host: "127.0.0.1".into(), max_connections: 100, allowed_origins: None, - apis: vec![ - ApiKind::Starknet, - ApiKind::Katana, - ApiKind::Dev, - ApiKind::Saya, - ApiKind::Torii, - ], + apis: vec![ApiKind::Starknet, ApiKind::Dev, ApiKind::Saya, ApiKind::Torii], }; - let (handle, backend) = katana_node::start(server_config, config, starknet_config) + let node = katana_node::start(server_config, config, starknet_config) .await .expect("Failed to build node components"); - let url = Url::parse(&format!("http://{}", handle.addr)).expect("Failed to parse URL"); + let url = Url::parse(&format!("http://{}", node.rpc.addr)).expect("Failed to parse URL"); - let account = backend.config.genesis.accounts().next().unwrap(); + let account = node.backend.config.genesis.accounts().next().unwrap(); let account = TestAccount { private_key: Felt::from_bytes_be(&account.1.private_key().unwrap().to_bytes_be()), account_address: Felt::from_bytes_be(&account.0.to_bytes_be()), }; - TestSequencer { 
backend, account, handle, url } + TestSequencer { handle: node, account, url } } pub fn account(&self) -> SingleOwnerAccount, LocalWallet> { @@ -85,7 +79,7 @@ impl TestSequencer { } pub fn backend(&self) -> &Arc> { - &self.backend + &self.handle.backend } pub fn account_at_index( @@ -93,7 +87,7 @@ impl TestSequencer { index: usize, ) -> SingleOwnerAccount, LocalWallet> { #[allow(deprecated)] - let accounts: Vec<_> = self.backend.config.genesis.accounts().collect::<_>(); + let accounts: Vec<_> = self.handle.backend.config.genesis.accounts().collect::<_>(); let account = accounts[index]; let private_key = Felt::from_bytes_be(&account.1.private_key().unwrap().to_bytes_be()); @@ -117,7 +111,7 @@ impl TestSequencer { } pub fn stop(self) -> Result<(), Error> { - self.handle.handle.stop() + self.handle.rpc.handle.stop() } pub fn url(&self) -> Url { @@ -126,9 +120,12 @@ impl TestSequencer { } pub fn get_default_test_starknet_config() -> StarknetConfig { - StarknetConfig { + let mut cfg = StarknetConfig { disable_fee: true, env: Environment { chain_id: ChainId::SEPOLIA, ..Default::default() }, ..Default::default() - } + }; + + cfg.genesis.sequencer_address = *DEFAULT_SEQUENCER_ADDRESS; + cfg } diff --git a/crates/dojo-types/src/schema.rs b/crates/dojo-types/src/schema.rs index c1783f496e..810a73de58 100644 --- a/crates/dojo-types/src/schema.rs +++ b/crates/dojo-types/src/schema.rs @@ -56,7 +56,13 @@ impl Ty { Ty::Struct(s) => s.name.clone(), Ty::Enum(e) => e.name.clone(), Ty::Tuple(tys) => format!("({})", tys.iter().map(|ty| ty.name()).join(", ")), - Ty::Array(ty) => format!("Array<{}>", ty[0].name()), + Ty::Array(ty) => { + if let Some(inner) = ty.first() { + format!("Array<{}>", inner.name()) + } else { + "Array".to_string() + } + } Ty::ByteArray(_) => "ByteArray".to_string(), } } diff --git a/crates/dojo-utils/Cargo.toml b/crates/dojo-utils/Cargo.toml index 448a92653e..ae9889a37f 100644 --- a/crates/dojo-utils/Cargo.toml +++ b/crates/dojo-utils/Cargo.toml @@ -11,6 +11,7 @@ futures.workspace = true reqwest.workspace = true starknet.workspace = true thiserror.workspace = true +rpassword.workspace = true tokio = { version = "1", features = [ "time" ], default-features = false } [dev-dependencies] diff --git a/crates/dojo-utils/src/env.rs b/crates/dojo-utils/src/env.rs new file mode 100644 index 0000000000..3229129dfb --- /dev/null +++ b/crates/dojo-utils/src/env.rs @@ -0,0 +1,6 @@ +pub const STARKNET_RPC_URL_ENV_VAR: &str = "STARKNET_RPC_URL"; +pub const DOJO_PRIVATE_KEY_ENV_VAR: &str = "DOJO_PRIVATE_KEY"; +pub const DOJO_KEYSTORE_PATH_ENV_VAR: &str = "DOJO_KEYSTORE_PATH"; +pub const DOJO_KEYSTORE_PASSWORD_ENV_VAR: &str = "DOJO_KEYSTORE_PASSWORD"; +pub const DOJO_ACCOUNT_ADDRESS_ENV_VAR: &str = "DOJO_ACCOUNT_ADDRESS"; +pub const DOJO_WORLD_ADDRESS_ENV_VAR: &str = "DOJO_WORLD_ADDRESS"; diff --git a/crates/dojo-utils/src/keystore.rs b/crates/dojo-utils/src/keystore.rs new file mode 100644 index 0000000000..d8ee2d9ee7 --- /dev/null +++ b/crates/dojo-utils/src/keystore.rs @@ -0,0 +1,13 @@ +use anyhow::{anyhow, Result}; + +/// Prompts the user for a password if no password is provided and `no_wait` is not set. +/// The `no_wait` is used for non-interactive commands. +pub fn prompt_password_if_needed(maybe_password: Option<&str>, no_wait: bool) -> Result { + if let Some(password) = maybe_password { + Ok(password.to_owned()) + } else if no_wait { + Err(anyhow!("Could not find password. 
Please specify the password.")) + } else { + Ok(rpassword::prompt_password("Enter password: ")?.to_owned()) + } +} diff --git a/crates/dojo-utils/src/lib.rs b/crates/dojo-utils/src/lib.rs index 28cfc3cb07..eace35faf7 100644 --- a/crates/dojo-utils/src/lib.rs +++ b/crates/dojo-utils/src/lib.rs @@ -5,3 +5,6 @@ mod tx; pub use tx::waiter::*; pub use tx::{TransactionExt, TxnAction, TxnConfig}; + +pub mod env; +pub mod keystore; diff --git a/crates/dojo-utils/src/tx/mod.rs b/crates/dojo-utils/src/tx/mod.rs index 0d77fc1a1f..d1d5f90110 100644 --- a/crates/dojo-utils/src/tx/mod.rs +++ b/crates/dojo-utils/src/tx/mod.rs @@ -18,6 +18,7 @@ pub struct TxnConfig { pub wait: bool, pub receipt: bool, pub max_fee_raw: Option<Felt>, + pub walnut: bool, } #[derive(Debug, Copy, Clone)] @@ -29,6 +30,7 @@ pub enum TxnAction { /// The multiplier for how much the actual transaction max fee should be relative to the /// estimated fee. If `None` is provided, the multiplier is set to `1.1`. fee_estimate_multiplier: Option<f64>, + walnut: bool, }, Estimate, Simulate, diff --git a/crates/dojo-utils/src/tx/waiter.rs b/crates/dojo-utils/src/tx/waiter.rs index c45b9b6ac7..76e37637dd 100644 --- a/crates/dojo-utils/src/tx/waiter.rs +++ b/crates/dojo-utils/src/tx/waiter.rs @@ -30,9 +30,10 @@ pub enum TransactionWaitingError { Provider(ProviderError), } -/// A type that waits for a transaction to achieve the desired status. The waiter will poll for the -/// transaction receipt every `interval` miliseconds until it achieves the desired status or until -/// `timeout` is reached. +/// Utility for waiting on a transaction. +/// +/// The waiter will poll for the transaction receipt every `interval` milliseconds until it achieves +/// the desired status or until `timeout` is reached. /// /// The waiter can be configured to wait for a specific finality status (e.g., `ACCEPTED_ON_L2`), by /// default, it only waits until the transaction is included in the _pending_ block.
It can also be diff --git a/crates/dojo-world/Cargo.toml b/crates/dojo-world/Cargo.toml index 97ab75379e..b380b04615 100644 --- a/crates/dojo-world/Cargo.toml +++ b/crates/dojo-world/Cargo.toml @@ -36,7 +36,7 @@ scarb = { workspace = true, optional = true } tokio = { version = "1.32.0", features = [ "time" ], default-features = false, optional = true } toml.workspace = true url = { workspace = true, optional = true } -walkdir = "2.5.0" +walkdir.workspace = true [dev-dependencies] assert_fs.workspace = true @@ -52,4 +52,4 @@ tokio.workspace = true contracts = [ "dep:dojo-types", "dep:http", "dep:num-traits" ] manifest = [ "contracts", "dep:dojo-types", "dep:scarb", "dep:url" ] metadata = [ "dep:ipfs-api-backend-hyper", "dep:scarb", "dep:url" ] -migration = [ "dep:dojo-utils", "dep:scarb", "dep:tokio" ] +migration = [ "dep:dojo-utils", "dep:scarb", "dep:tokio", "manifest" ] diff --git a/crates/dojo-world/src/config/mod.rs b/crates/dojo-world/src/config/mod.rs index ce429202b0..c2d1393da5 100644 --- a/crates/dojo-world/src/config/mod.rs +++ b/crates/dojo-world/src/config/mod.rs @@ -6,6 +6,8 @@ pub mod world_config; pub use environment::Environment; pub use migration_config::MigrationConfig; -pub use namespace_config::{NamespaceConfig, DEFAULT_NAMESPACE_CFG_KEY, NAMESPACE_CFG_PREFIX}; +pub use namespace_config::{ + NamespaceConfig, DEFAULT_NAMESPACE_CFG_KEY, DOJO_MANIFESTS_DIR_CFG_KEY, NAMESPACE_CFG_PREFIX, +}; pub use profile_config::ProfileConfig; pub use world_config::WorldConfig; diff --git a/crates/dojo-world/src/config/namespace_config.rs b/crates/dojo-world/src/config/namespace_config.rs index 596a84d404..25c637496d 100644 --- a/crates/dojo-world/src/config/namespace_config.rs +++ b/crates/dojo-world/src/config/namespace_config.rs @@ -7,6 +7,7 @@ use serde::Deserialize; pub const NAMESPACE_CFG_PREFIX: &str = "nm|"; pub const DEFAULT_NAMESPACE_CFG_KEY: &str = "namespace_default"; +pub const DOJO_MANIFESTS_DIR_CFG_KEY: &str = "dojo_manifests_dir"; /// Namespace configuration. #[derive(Debug, Clone, Default, Deserialize)] diff --git a/crates/dojo-world/src/config/profile_config.rs b/crates/dojo-world/src/config/profile_config.rs index 1595255b02..b1f21d1f2d 100644 --- a/crates/dojo-world/src/config/profile_config.rs +++ b/crates/dojo-world/src/config/profile_config.rs @@ -12,6 +12,7 @@ use super::namespace_config::NamespaceConfig; use super::world_config::WorldConfig; /// Profile configuration that is used to configure the world and the environment. +/// /// This [`ProfileConfig`] is expected to be loaded from a TOML file that is located /// next to the `Scarb.toml` file, named with the profile name. 
#[derive(Debug, Clone, Default, Deserialize)] diff --git a/crates/dojo-world/src/contracts/abi/world.rs b/crates/dojo-world/src/contracts/abi/world.rs index 21f17dcef3..fbf7df1a05 100644 --- a/crates/dojo-world/src/contracts/abi/world.rs +++ b/crates/dojo-world/src/contracts/abi/world.rs @@ -945,6 +945,11 @@ abigen!( "type": "core::felt252", "kind": "data" }, + { + "name": "entity_id", + "type": "core::felt252", + "kind": "data" + }, { "name": "keys", "type": "core::array::Span::", diff --git a/crates/dojo-world/src/manifest/manifest_test.rs b/crates/dojo-world/src/manifest/manifest_test.rs index 33d4f69009..ed166325b3 100644 --- a/crates/dojo-world/src/manifest/manifest_test.rs +++ b/crates/dojo-world/src/manifest/manifest_test.rs @@ -318,7 +318,7 @@ fn fetch_remote_manifest() { .unwrap(); if let Some(m) = dojo_metadata.migration { - local_manifest.remove_tags(m.skip_contracts); + local_manifest.remove_tags(&m.skip_contracts); } let overlay_dir = manifest_path.join(OVERLAYS_DIR).join(&profile_name); @@ -597,7 +597,7 @@ fn base_manifest_remove_items_work_as_expected() { let mut base = BaseManifest { contracts, models, world, base }; - base.remove_tags(vec!["ns:c1".to_string(), "ns:c3".to_string(), "ns:m2".to_string()]); + base.remove_tags(&["ns:c1".to_string(), "ns:c3".to_string(), "ns:m2".to_string()]); assert_eq!(base.contracts.len(), 1); assert_eq!( diff --git a/crates/dojo-world/src/manifest/mod.rs b/crates/dojo-world/src/manifest/mod.rs index 602ecdc96b..e7a067746a 100644 --- a/crates/dojo-world/src/manifest/mod.rs +++ b/crates/dojo-world/src/manifest/mod.rs @@ -127,7 +127,7 @@ impl BaseManifest { } /// Given a list of contract or model tags, remove those from the manifest. - pub fn remove_tags(&mut self, tags: Vec) { + pub fn remove_tags(&mut self, tags: &[String]) { self.contracts.retain(|contract| !tags.contains(&contract.inner.tag)); self.models.retain(|model| !tags.contains(&model.inner.tag)); } @@ -551,9 +551,14 @@ async fn get_events( loop { let res = provider.get_events(filter.clone(), continuation_token, DEFAULT_CHUNK_SIZE).await?; - continuation_token = res.continuation_token; - events.extend(res.events); + + // stop when there are no more events being returned + if res.events.is_empty() { + break; + } else { + events.extend(res.events); + } if continuation_token.is_none() { break; diff --git a/crates/katana/core/src/constants.rs b/crates/katana/core/src/constants.rs index 270c180fb1..7cfe3555b5 100644 --- a/crates/katana/core/src/constants.rs +++ b/crates/katana/core/src/constants.rs @@ -6,7 +6,7 @@ use starknet::macros::felt; pub const DEFAULT_ETH_L1_GAS_PRICE: u128 = 100 * u128::pow(10, 9); // Given in units of Wei. pub const DEFAULT_STRK_L1_GAS_PRICE: u128 = 100 * u128::pow(10, 9); // Given in units of STRK. 
-pub const DEFAULT_INVOKE_MAX_STEPS: u32 = 1_000_000; +pub const DEFAULT_INVOKE_MAX_STEPS: u32 = 10_000_000; pub const DEFAULT_VALIDATE_MAX_STEPS: u32 = 1_000_000; pub const MAX_RECURSION_DEPTH: usize = 1000; diff --git a/crates/katana/core/src/service/block_producer.rs b/crates/katana/core/src/service/block_producer.rs index 2d72cefa3a..618d0f2c03 100644 --- a/crates/katana/core/src/service/block_producer.rs +++ b/crates/katana/core/src/service/block_producer.rs @@ -9,6 +9,7 @@ use futures::channel::mpsc::{channel, Receiver, Sender}; use futures::stream::{Stream, StreamExt}; use futures::FutureExt; use katana_executor::{BlockExecutor, ExecutionResult, ExecutionStats, ExecutorFactory}; +use katana_pool::validation::stateful::TxValidator; use katana_primitives::block::{BlockHashOrNumber, ExecutableBlock, PartialHeader}; use katana_primitives::receipt::Receipt; use katana_primitives::trace::TxExecInfo; @@ -19,7 +20,8 @@ use katana_provider::traits::block::{BlockHashProvider, BlockNumberProvider}; use katana_provider::traits::env::BlockEnvProvider; use katana_provider::traits::state::StateFactoryProvider; use katana_tasks::{BlockingTaskPool, BlockingTaskResult}; -use parking_lot::RwLock; +use parking_lot::lock_api::RawMutex; +use parking_lot::{Mutex, RwLock}; use tokio::time::{interval_at, Instant, Interval}; use tracing::{error, info, trace, warn}; @@ -59,7 +61,7 @@ pub struct TxWithOutcome { type ServiceFuture = Pin> + Send + Sync>>; type BlockProductionResult = Result; -type BlockProductionFuture = ServiceFuture; +type BlockProductionFuture = ServiceFuture>; type TxExecutionResult = Result, BlockProductionError>; type TxExecutionFuture = ServiceFuture; @@ -72,57 +74,60 @@ type BlockProductionWithTxnsFuture = #[allow(missing_debug_implementations)] pub struct BlockProducer { /// The inner mode of mining. - pub inner: RwLock>, + pub producer: RwLock>, } impl BlockProducer { /// Creates a block producer that mines a new block every `interval` milliseconds. pub fn interval(backend: Arc>, interval: u64) -> Self { - Self { - inner: RwLock::new(BlockProducerMode::Interval(IntervalBlockProducer::new( - backend, interval, - ))), - } + let prod = IntervalBlockProducer::new(backend, Some(interval)); + Self { producer: BlockProducerMode::Interval(prod).into() } } /// Creates a new block producer that will only be possible to mine by calling the /// `katana_generateBlock` RPC method. pub fn on_demand(backend: Arc>) -> Self { - Self { - inner: RwLock::new(BlockProducerMode::Interval(IntervalBlockProducer::new_no_mining( - backend, - ))), - } + let prod = IntervalBlockProducer::new(backend, None); + Self { producer: BlockProducerMode::Interval(prod).into() } } /// Creates a block producer that mines a new block as soon as there are ready transactions in /// the transactions pool. 
pub fn instant(backend: Arc<Backend<EF>>) -> Self { - Self { inner: RwLock::new(BlockProducerMode::Instant(InstantBlockProducer::new(backend))) } + let prod = InstantBlockProducer::new(backend); + Self { producer: BlockProducerMode::Instant(prod).into() } } pub(super) fn queue(&self, transactions: Vec<ExecutableTxWithHash>) { - let mut mode = self.inner.write(); + let mut mode = self.producer.write(); match &mut *mode { BlockProducerMode::Instant(producer) => producer.queued.push_back(transactions), BlockProducerMode::Interval(producer) => producer.queued.push_back(transactions), } } + pub fn validator(&self) -> TxValidator { + let mode = self.producer.read(); + match &*mode { + BlockProducerMode::Instant(pd) => pd.validator.clone(), + BlockProducerMode::Interval(pd) => pd.validator.clone(), + } + } + /// Returns `true` if the block producer is running in _interval_ mode. Otherwise, `false`. pub fn is_interval_mining(&self) -> bool { - matches!(*self.inner.read(), BlockProducerMode::Interval(_)) + matches!(*self.producer.read(), BlockProducerMode::Interval(_)) } /// Returns `true` if the block producer is running in _instant_ mode. Otherwise, `false`. pub fn is_instant_mining(&self) -> bool { - matches!(*self.inner.read(), BlockProducerMode::Instant(_)) + matches!(*self.producer.read(), BlockProducerMode::Instant(_)) } // Handler for the `katana_generateBlock` RPC method. pub fn force_mine(&self) { trace!(target: LOG_TARGET, "Scheduling force block mining."); - let mut mode = self.inner.write(); + let mut mode = self.producer.write(); match &mut *mode { BlockProducerMode::Instant(producer) => producer.force_mine(), BlockProducerMode::Interval(producer) => producer.force_mine(), @@ -130,7 +135,7 @@ impl<EF: ExecutorFactory> BlockProducer<EF> { } pub(super) fn poll_next(&self, cx: &mut Context<'_>) -> Poll<Option<BlockProductionResult>> { - let mut mode = self.inner.write(); + let mut mode = self.producer.write(); match &mut *mode { BlockProducerMode::Instant(producer) => producer.poll_next_unpin(cx), BlockProducerMode::Interval(producer) => producer.poll_next_unpin(cx), @@ -181,16 +186,23 @@ pub struct IntervalBlockProducer<EF: ExecutorFactory> { ongoing_execution: Option<TxExecutionFuture>, /// Listeners notified when a new executed tx is added. tx_execution_listeners: RwLock<Vec<Sender<TxWithOutcome>>>, + + permit: Arc<Mutex<()>>, + + /// validator used in the tx pool + // the validator needs to always be built against the state of the block producer, so + // I'm putting it here for now until we find a better way to handle this.
+ validator: TxValidator, } impl<EF: ExecutorFactory> IntervalBlockProducer<EF> { - pub fn new(backend: Arc<Backend<EF>>, interval: u64) -> Self { - let interval = { - let duration = Duration::from_millis(interval); + pub fn new(backend: Arc<Backend<EF>>, interval: Option<u64>) -> Self { + let interval = interval.map(|time| { + let duration = Duration::from_millis(time); let mut interval = interval_at(Instant::now() + duration, duration); interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Delay); interval - }; + }); let provider = backend.blockchain.provider(); @@ -199,20 +211,28 @@ impl<EF: ExecutorFactory> IntervalBlockProducer<EF> { backend.update_block_env(&mut block_env); let state = provider.latest().unwrap(); - let executor = backend.executor_factory.with_state_and_block_env(state, block_env); - let executor = PendingExecutor::new(executor); + let executor = backend.executor_factory.with_state_and_block_env(state, block_env.clone()); - let blocking_task_spawner = BlockingTaskPool::new().unwrap(); + let permit = Arc::new(Mutex::new(())); + + // -- build the validator using the same state and envs as the executor + let state = executor.state(); + let cfg = backend.executor_factory.cfg(); + let flags = backend.executor_factory.execution_flags(); + let validator = + TxValidator::new(state, flags.clone(), cfg.clone(), block_env, permit.clone()); Self { + validator, + permit, backend, - executor, + interval, ongoing_mining: None, - blocking_task_spawner, ongoing_execution: None, - interval: Some(interval), queued: VecDeque::default(), + executor: PendingExecutor::new(executor), tx_execution_listeners: RwLock::new(vec![]), + blocking_task_spawner: BlockingTaskPool::new().unwrap(), } } @@ -220,28 +240,7 @@ impl<EF: ExecutorFactory> IntervalBlockProducer<EF> { /// for every fixed interval, although it will still execute all queued transactions and /// keep hold of the pending state. pub fn new_no_mining(backend: Arc<Backend<EF>>) -> Self { - let provider = backend.blockchain.provider(); - - let latest_num = provider.latest_number().unwrap(); - let mut block_env = provider.block_env_at(latest_num.into()).unwrap().unwrap(); - backend.update_block_env(&mut block_env); - - let state = provider.latest().unwrap(); - let executor = backend.executor_factory.with_state_and_block_env(state, block_env); - let executor = PendingExecutor::new(executor); - - let blocking_task_spawner = BlockingTaskPool::new().unwrap(); - - Self { - backend, - executor, - interval: None, - ongoing_mining: None, - queued: VecDeque::default(), - blocking_task_spawner, - ongoing_execution: None, - tx_execution_listeners: RwLock::new(vec![]), - } + Self::new(backend, None) } pub fn executor(&self) -> PendingExecutor { @@ -250,11 +249,24 @@ impl<EF: ExecutorFactory> IntervalBlockProducer<EF> { /// Force mine a new block. It will only be able to mine if there is no ongoing mining process.
pub fn force_mine(&mut self) { - match Self::do_mine(self.executor.clone(), self.backend.clone()) { + match Self::do_mine(self.permit.clone(), self.executor.clone(), self.backend.clone()) { Ok(outcome) => { info!(target: LOG_TARGET, block_number = %outcome.block_number, "Force mined block."); self.executor = self.create_new_executor_for_next_block().expect("fail to create executor"); + + // update pool validator state here --------- + + let provider = self.backend.blockchain.provider(); + let state = self.executor.0.read().state(); + let num = provider.latest_number().unwrap(); + let block_env = provider.block_env_at(num.into()).unwrap().unwrap(); + + self.validator.update(state, block_env); + + // ------------------------------------------- + + unsafe { self.permit.raw().unlock() }; } Err(e) => { error!(target: LOG_TARGET, error = %e, "On force mine."); @@ -263,9 +275,11 @@ impl<EF: ExecutorFactory> IntervalBlockProducer<EF> { } fn do_mine( + permit: Arc<Mutex<()>>, executor: PendingExecutor, backend: Arc<Backend<EF>>, ) -> Result<MinedBlockOutcome, BlockProductionError> { + unsafe { permit.raw() }.lock(); let executor = &mut executor.write(); trace!(target: LOG_TARGET, "Creating new block."); @@ -358,7 +372,7 @@ impl<EF: ExecutorFactory> Stream for IntervalBlockProducer<EF> { // mined block outcome and the new state - type Item = BlockProductionResult; + type Item = Result<MinedBlockOutcome, BlockProductionError>; fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let pin = self.get_mut(); @@ -366,10 +380,13 @@ impl<EF: ExecutorFactory> Stream for IntervalBlockProducer<EF> { if let Some(interval) = &mut pin.interval { // mine block if the interval is over if interval.poll_tick(cx).is_ready() && pin.ongoing_mining.is_none() { - let executor = pin.executor.clone(); - let backend = pin.backend.clone(); - let fut = pin.blocking_task_spawner.spawn(|| Self::do_mine(executor, backend)); - pin.ongoing_mining = Some(Box::pin(fut)); + pin.ongoing_mining = Some(Box::pin({ + let executor = pin.executor.clone(); + let backend = pin.backend.clone(); + let permit = pin.permit.clone(); + + pin.blocking_task_spawner.spawn(|| Self::do_mine(permit, executor, backend)) + })); } } @@ -424,7 +441,19 @@ impl<EF: ExecutorFactory> Stream for IntervalBlockProducer<EF> { Ok(outcome) => { match pin.create_new_executor_for_next_block() { Ok(executor) => { + // update pool validator state here --------- + + let provider = pin.backend.blockchain.provider(); + let state = executor.0.read().state(); + let num = provider.latest_number()?; + let block_env = provider.block_env_at(num.into()).unwrap().unwrap(); + + pin.validator.update(state, block_env); + + // ------------------------------------------- + pin.executor = executor; + unsafe { pin.permit.raw().unlock() }; } Err(e) => return Poll::Ready(Some(Err(e))), @@ -435,7 +464,7 @@ impl<EF: ExecutorFactory> Stream for IntervalBlockProducer<EF> { Err(_) => { return Poll::Ready(Some(Err( - BlockProductionError::BlockMiningTaskCancelled, + BlockProductionError::ExecutionTaskCancelled, ))); } } @@ -460,12 +489,37 @@ pub struct InstantBlockProducer<EF: ExecutorFactory> { blocking_task_pool: BlockingTaskPool, /// Listeners notified when a new executed tx is added. tx_execution_listeners: RwLock<Vec<Sender<TxWithOutcome>>>, + + permit: Arc<Mutex<()>>, + + /// validator used in the tx pool + // the validator needs to always be built against the state of the block producer, so + // I'm putting it here for now until we find a better way to handle this.
+ validator: TxValidator, } impl InstantBlockProducer { pub fn new(backend: Arc>) -> Self { + let provider = backend.blockchain.provider(); + + let permit = Arc::new(Mutex::new(())); + + let latest_num = provider.latest_number().expect("latest block num"); + let block_env = provider + .block_env_at(latest_num.into()) + .expect("provider error") + .expect("latest block env"); + + let state = provider.latest().expect("latest state"); + let cfg = backend.executor_factory.cfg(); + let flags = backend.executor_factory.execution_flags(); + let validator = + TxValidator::new(state, flags.clone(), cfg.clone(), block_env, permit.clone()); + Self { + permit, backend, + validator, block_mining: None, queued: VecDeque::default(), blocking_task_pool: BlockingTaskPool::new().unwrap(), @@ -475,19 +529,30 @@ impl InstantBlockProducer { pub fn force_mine(&mut self) { if self.block_mining.is_none() { - let txs = self.queued.pop_front().unwrap_or_default(); - let _ = Self::do_mine(self.backend.clone(), txs); + let txs = std::mem::take(&mut self.queued); + let _ = Self::do_mine( + self.validator.clone(), + self.permit.clone(), + self.backend.clone(), + txs, + ); } else { trace!(target: LOG_TARGET, "Unable to force mine while a mining process is running.") } } fn do_mine( + validator: TxValidator, + permit: Arc>, backend: Arc>, - transactions: Vec, + transactions: VecDeque>, ) -> Result<(MinedBlockOutcome, Vec), BlockProductionError> { + let _permit = permit.lock(); + trace!(target: LOG_TARGET, "Creating new block."); + let transactions = transactions.into_iter().flatten().collect::>(); + let provider = backend.blockchain.provider(); let latest_num = provider.latest_number()?; @@ -528,6 +593,16 @@ impl InstantBlockProducer { let outcome = backend.do_mine_block(&block_env, execution_output)?; + // update pool validator state here --------- + + let provider = backend.blockchain.provider(); + let state = provider.latest()?; + let latest_num = provider.latest_number()?; + let block_env = provider.block_env_at(latest_num.into())?.expect("latest"); + validator.update(state, block_env); + + // ------------------------------------------- + trace!(target: LOG_TARGET, block_number = %outcome.block_number, "Created new block."); Ok((outcome, txs_outcomes)) @@ -575,12 +650,16 @@ impl Stream for InstantBlockProducer { let pin = self.get_mut(); if !pin.queued.is_empty() && pin.block_mining.is_none() { - let transactions = pin.queued.pop_front().expect("not empty; qed"); - let backend = pin.backend.clone(); + pin.block_mining = Some(Box::pin({ + // take everything that is already in the queue + let transactions = std::mem::take(&mut pin.queued); + let validator = pin.validator.clone(); + let backend = pin.backend.clone(); + let permit = pin.permit.clone(); - pin.block_mining = Some(Box::pin( - pin.blocking_task_pool.spawn(|| Self::do_mine(backend, transactions)), - )); + pin.blocking_task_pool + .spawn(|| Self::do_mine(validator, permit, backend, transactions)) + })); } // poll the mining future diff --git a/crates/katana/core/src/service/messaging/ethereum.rs b/crates/katana/core/src/service/messaging/ethereum.rs index 6b4c5556eb..9ab91757e4 100644 --- a/crates/katana/core/src/service/messaging/ethereum.rs +++ b/crates/katana/core/src/service/messaging/ethereum.rs @@ -1,6 +1,5 @@ #![allow(dead_code)] -use std::collections::HashMap; use std::str::FromStr; use std::sync::Arc; @@ -72,14 +71,10 @@ impl EthereumMessaging { /// /// * `from_block` - The first block of which logs must be fetched. 
/// * `to_block` - The last block of which logs must be fetched. - pub async fn fetch_logs( - &self, - from_block: u64, - to_block: u64, - ) -> MessengerResult>> { + pub async fn fetch_logs(&self, from_block: u64, to_block: u64) -> MessengerResult> { trace!(target: LOG_TARGET, from_block = ?from_block, to_block = ?to_block, "Fetching logs."); - let mut block_to_logs: HashMap> = HashMap::new(); + let mut logs = vec![]; let filters = Filter { block_option: FilterBlockOption::Range { @@ -107,15 +102,11 @@ impl EthereumMessaging { .await? .into_iter() .filter(|log| log.block_number.is_some()) - .map(|log| (log.block_number.unwrap(), log)) - .for_each(|(block_num, log)| { - block_to_logs - .entry(block_num) - .and_modify(|v| v.push(log.clone())) - .or_insert(vec![log]); + .for_each(|log| { + logs.push(log); }); - Ok(block_to_logs) + Ok(logs) } } @@ -143,22 +134,17 @@ impl Messenger for EthereumMessaging { let mut l1_handler_txs = vec![]; trace!(target: LOG_TARGET, from_block, to_block, "Fetching logs from {from_block} to {to_block}."); - self.fetch_logs(from_block, to_block).await?.into_iter().for_each( - |(block_number, block_logs)| { - debug!( - target: LOG_TARGET, - block_number = %block_number, - logs_found = %block_logs.len(), - "Converting logs into L1HandlerTx.", - ); - - block_logs.into_iter().for_each(|log| { - if let Ok(tx) = l1_handler_tx_from_log(log, chain_id) { - l1_handler_txs.push(tx) - } - }) - }, - ); + self.fetch_logs(from_block, to_block).await?.iter().for_each(|l| { + debug!( + target: LOG_TARGET, + log = ?l, + "Converting log into L1HandlerTx.", + ); + + if let Ok(tx) = l1_handler_tx_from_log(l.clone(), chain_id) { + l1_handler_txs.push(tx) + } + }); Ok((to_block, l1_handler_txs)) } diff --git a/crates/katana/core/src/service/messaging/mod.rs b/crates/katana/core/src/service/messaging/mod.rs index 7c40028b9b..cd064f44be 100644 --- a/crates/katana/core/src/service/messaging/mod.rs +++ b/crates/katana/core/src/service/messaging/mod.rs @@ -37,13 +37,18 @@ mod service; #[cfg(feature = "starknet-messaging")] mod starknet; +use std::future::Future; use std::path::Path; +use std::pin::Pin; +use std::task::{Context, Poll}; use ::starknet::providers::ProviderError as StarknetProviderError; use alloy_transport::TransportError; use anyhow::Result; use async_trait::async_trait; use ethereum::EthereumMessaging; +use futures::StreamExt; +use katana_executor::ExecutorFactory; use katana_primitives::chain::ChainId; use katana_primitives::receipt::MessageToL1; use serde::Deserialize; @@ -202,3 +207,37 @@ impl MessengerMode { } } } + +#[allow(missing_debug_implementations)] +#[must_use = "MessagingTask does nothing unless polled"] +pub struct MessagingTask { + messaging: MessagingService, +} + +impl MessagingTask { + pub fn new(messaging: MessagingService) -> Self { + Self { messaging } + } +} + +impl Future for MessagingTask { + type Output = (); + + fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + let this = self.get_mut(); + + while let Poll::Ready(Some(outcome)) = this.messaging.poll_next_unpin(cx) { + match outcome { + MessagingOutcome::Gather { msg_count, .. } => { + info!(target: LOG_TARGET, %msg_count, "Collected messages from settlement chain."); + } + + MessagingOutcome::Send { msg_count, .. 
} => { + info!(target: LOG_TARGET, %msg_count, "Sent messages to the settlement chain."); + } + } + } + + Poll::Pending + } +} diff --git a/crates/katana/core/src/service/messaging/service.rs b/crates/katana/core/src/service/messaging/service.rs index 2850a11cf8..df7bc6f8a7 100644 --- a/crates/katana/core/src/service/messaging/service.rs +++ b/crates/katana/core/src/service/messaging/service.rs @@ -91,7 +91,10 @@ impl MessagingService { txs.into_iter().for_each(|tx| { let hash = tx.calculate_hash(); trace_l1_handler_tx_exec(hash, &tx); - pool.add_transaction(ExecutableTxWithHash { hash, transaction: tx.into() }) + + // ignore result because L1Handler tx will always be valid + let _ = + pool.add_transaction(ExecutableTxWithHash { hash, transaction: tx.into() }); }); Ok((block_num, txs_count)) @@ -106,7 +109,10 @@ impl MessagingService { txs.into_iter().for_each(|tx| { let hash = tx.calculate_hash(); trace_l1_handler_tx_exec(hash, &tx); - pool.add_transaction(ExecutableTxWithHash { hash, transaction: tx.into() }) + + // ignore result because L1Handler tx will always be valid + let tx = ExecutableTxWithHash { hash, transaction: tx.into() }; + let _ = pool.add_transaction(tx); }); Ok((block_num, txs_count)) diff --git a/crates/katana/core/src/service/messaging/starknet.rs b/crates/katana/core/src/service/messaging/starknet.rs index cf98b58a5f..7c3f2bb3db 100644 --- a/crates/katana/core/src/service/messaging/starknet.rs +++ b/crates/katana/core/src/service/messaging/starknet.rs @@ -1,4 +1,3 @@ -use std::collections::HashMap; use std::sync::Arc; use anyhow::Result; @@ -61,15 +60,14 @@ impl StarknetMessaging { }) } - /// Fetches events for the given blocks range. pub async fn fetch_events( &self, from_block: BlockId, to_block: BlockId, - ) -> Result>> { + ) -> Result> { trace!(target: LOG_TARGET, from_block = ?from_block, to_block = ?to_block, "Fetching logs."); - let mut block_to_events: HashMap> = HashMap::new(); + let mut events = vec![]; let filter = EventFilter { from_block: Some(from_block), @@ -89,11 +87,10 @@ impl StarknetMessaging { event_page.events.into_iter().for_each(|event| { // We ignore events without the block number - if let Some(block_number) = event.block_number { - block_to_events - .entry(block_number) - .and_modify(|v| v.push(event.clone())) - .or_insert(vec![event]); + if event.block_number.is_some() { + // Blocks are processed in order as retrieved by `get_events`. + // This way we keep the order and ensure the messages are executed in order. + events.push(event); } }); @@ -104,7 +101,7 @@ impl StarknetMessaging { } } - Ok(block_to_events) + Ok(events) } /// Sends an invoke TX on starknet. @@ -201,19 +198,16 @@ impl Messenger for StarknetMessaging { .map_err(|_| Error::SendError) .unwrap() .iter() - .for_each(|(block_number, block_events)| { + .for_each(|e| { debug!( target: LOG_TARGET, - block_number = %block_number, - events_count = %block_events.len(), - "Converting events of block into L1HandlerTx." + event = ?e, + "Converting event into L1HandlerTx." 
); - block_events.iter().for_each(|e| { - if let Ok(tx) = l1_handler_tx_from_event(e, chain_id) { - l1_handler_txs.push(tx) - } - }) + if let Ok(tx) = l1_handler_tx_from_event(e, chain_id) { + l1_handler_txs.push(tx) + } }); Ok((to_block, l1_handler_txs)) diff --git a/crates/katana/core/src/service/metrics.rs b/crates/katana/core/src/service/metrics.rs index e773a1630f..c6ce43373e 100644 --- a/crates/katana/core/src/service/metrics.rs +++ b/crates/katana/core/src/service/metrics.rs @@ -1,11 +1,6 @@ use dojo_metrics::Metrics; use metrics::Counter; -#[derive(Debug)] -pub(crate) struct ServiceMetrics { - pub(crate) block_producer: BlockProducerMetrics, -} - #[derive(Metrics)] #[metrics(scope = "block_producer")] pub(crate) struct BlockProducerMetrics { diff --git a/crates/katana/core/src/service/mod.rs b/crates/katana/core/src/service/mod.rs index 0dce5669cd..be12368f45 100644 --- a/crates/katana/core/src/service/mod.rs +++ b/crates/katana/core/src/service/mod.rs @@ -1,4 +1,5 @@ -//! background service +// TODO: remove the messaging feature flag +// TODO: move the tasks to a separate module use std::future::Future; use std::pin::Pin; @@ -14,102 +15,72 @@ use katana_primitives::FieldElement; use tracing::{error, info}; use self::block_producer::BlockProducer; -use self::metrics::{BlockProducerMetrics, ServiceMetrics}; +use self::metrics::BlockProducerMetrics; pub mod block_producer; #[cfg(feature = "messaging")] pub mod messaging; mod metrics; -#[cfg(feature = "messaging")] -use self::messaging::{MessagingOutcome, MessagingService}; pub(crate) const LOG_TARGET: &str = "node"; /// The type that drives the blockchain's state /// -/// This service is basically an endless future that continuously polls the miner which returns +/// This task is basically an endless future that continuously polls the miner which returns /// transactions for the next block, then those transactions are handed off to the [BlockProducer] /// to construct a new block. +#[must_use = "BlockProductionTask does nothing unless polled"] #[allow(missing_debug_implementations)] -pub struct NodeService { - /// the pool that holds all transactions - pub(crate) pool: TxPool, +pub struct BlockProductionTask { /// creates new blocks pub(crate) block_producer: Arc>, /// the miner responsible for selecting transactions from the `pool` pub(crate) miner: TransactionMiner, - /// The messaging service - #[cfg(feature = "messaging")] - pub(crate) messaging: Option>, + /// the pool that holds all transactions + pub(crate) pool: TxPool, /// Metrics for recording the service operations - metrics: ServiceMetrics, + metrics: BlockProducerMetrics, } -impl NodeService { +impl BlockProductionTask { pub fn new( pool: TxPool, miner: TransactionMiner, block_producer: Arc>, - #[cfg(feature = "messaging")] messaging: Option>, ) -> Self { - let metrics = ServiceMetrics { block_producer: BlockProducerMetrics::default() }; - - Self { - pool, - miner, - block_producer, - metrics, - #[cfg(feature = "messaging")] - messaging, - } + Self { block_producer, miner, pool, metrics: BlockProducerMetrics::default() } } } -impl Future for NodeService { +impl Future for BlockProductionTask { type Output = (); fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { - let pin = self.get_mut(); - - #[cfg(feature = "messaging")] - if let Some(messaging) = pin.messaging.as_mut() { - while let Poll::Ready(Some(outcome)) = messaging.poll_next_unpin(cx) { - match outcome { - MessagingOutcome::Gather { msg_count, ..
} => { - info!(target: LOG_TARGET, msg_count = %msg_count, "Collected messages from settlement chain."); - } - MessagingOutcome::Send { msg_count, .. } => { - info!(target: LOG_TARGET, msg_count = %msg_count, "Sent messages to the settlement chain."); - } - } - } - } + let this = self.get_mut(); // this drives block production and feeds new sets of ready transactions to the block // producer loop { - while let Poll::Ready(Some(res)) = pin.block_producer.poll_next(cx) { + while let Poll::Ready(Some(res)) = this.block_producer.poll_next(cx) { match res { Ok(outcome) => { info!(target: LOG_TARGET, block_number = %outcome.block_number, "Mined block."); - let metrics = &pin.metrics.block_producer; let gas_used = outcome.stats.l1_gas_used; let steps_used = outcome.stats.cairo_steps_used; - metrics.l1_gas_processed_total.increment(gas_used as u64); - metrics.cairo_steps_processed_total.increment(steps_used as u64); + this.metrics.l1_gas_processed_total.increment(gas_used as u64); + this.metrics.cairo_steps_processed_total.increment(steps_used as u64); } - Err(err) => { - error!(target: LOG_TARGET, error = %err, "Mining block."); + Err(error) => { + error!(target: LOG_TARGET, %error, "Mining block."); } } } - if let Poll::Ready(pool_txs) = pin.miner.poll(&pin.pool, cx) { + if let Poll::Ready(pool_txs) = this.miner.poll(&this.pool, cx) { // miner returned a set of transactions that we feed to the producer - pin.block_producer.queue(pool_txs); + this.block_producer.queue(pool_txs); } else { // no progress made break; diff --git a/crates/katana/executor/benches/execution.rs b/crates/katana/executor/benches/execution.rs index d68c6a263e..932393d4b5 100644 --- a/crates/katana/executor/benches/execution.rs +++ b/crates/katana/executor/benches/execution.rs @@ -45,7 +45,7 @@ fn blockifier( || { // setup state let state = provider.latest().expect("failed to get latest state"); - let state = CachedState::new(StateProviderDb::from(state)); + let state = CachedState::new(StateProviderDb::new(state)); (state, &block_context, execution_flags, tx.clone()) }, diff --git a/crates/katana/executor/src/abstraction/executor.rs b/crates/katana/executor/src/abstraction/executor.rs index 7395c03882..fee93b0ee5 100644 --- a/crates/katana/executor/src/abstraction/executor.rs +++ b/crates/katana/executor/src/abstraction/executor.rs @@ -28,6 +28,9 @@ pub trait ExecutorFactory: Send + Sync + 'static + core::fmt::Debug { /// Returns the configuration environment of the factory. fn cfg(&self) -> &CfgEnv; + + /// Returns the execution flags set by the factory. + fn execution_flags(&self) -> &SimulationFlag; } /// An executor that can execute a block of transactions. diff --git a/crates/katana/executor/src/abstraction/mod.rs b/crates/katana/executor/src/abstraction/mod.rs index 0dcf0e9b50..b98c5599bb 100644 --- a/crates/katana/executor/src/abstraction/mod.rs +++ b/crates/katana/executor/src/abstraction/mod.rs @@ -149,10 +149,10 @@ pub struct ResultAndStates { /// A wrapper around a boxed [StateProvider] for implementing the executor's own state reader /// traits.
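// Editor's note: BlockProductionTask above (and MessagingTask earlier) share the same
// "endless future" shape. A compact sketch of that pattern with illustrative names (not
// from this diff): drain the inner stream while it is ready, then return Pending and rely
// on the waker that the stream registered.
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};

use futures::{Stream, StreamExt};

struct DrainTask<S> {
    inner: S,
}

impl<S: Stream + Unpin> Future for DrainTask<S> {
    // Never resolves; it is expected to run for the lifetime of the node.
    type Output = ();

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let this = self.get_mut();
        // Consume every item that is ready right now.
        while let Poll::Ready(Some(_item)) = this.inner.poll_next_unpin(cx) {
            // ... handle the item: record metrics, queue transactions, log ...
        }
        // No more progress can be made; the inner stream has registered the waker,
        // so this task will be polled again once new items arrive.
        Poll::Pending
    }
}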
#[derive(Debug)] -pub struct StateProviderDb<'a>(pub(crate) Box); +pub struct StateProviderDb<'a>(Box); -impl From> for StateProviderDb<'_> { - fn from(provider: Box) -> Self { +impl<'a> StateProviderDb<'a> { + pub fn new(provider: Box) -> Self { Self(provider) } } diff --git a/crates/katana/executor/src/implementation/blockifier/mod.rs b/crates/katana/executor/src/implementation/blockifier/mod.rs index ff25f043d0..eaeb94951b 100644 --- a/crates/katana/executor/src/implementation/blockifier/mod.rs +++ b/crates/katana/executor/src/implementation/blockifier/mod.rs @@ -1,3 +1,6 @@ +// Re-export the blockifier crate. +pub use blockifier; + mod error; mod state; pub mod utils; @@ -63,6 +66,11 @@ impl ExecutorFactory for BlockifierFactory { fn cfg(&self) -> &CfgEnv { &self.cfg } + + /// Returns the execution flags set by the factory. + fn execution_flags(&self) -> &SimulationFlag { + &self.flags + } } #[derive(Debug)] @@ -83,7 +91,7 @@ impl<'a> StarknetVMProcessor<'a> { ) -> Self { let transactions = Vec::new(); let block_context = utils::block_context_from_envs(&block_env, &cfg_env); - let state = state::CachedState::new(StateProviderDb(state)); + let state = state::CachedState::new(StateProviderDb::new(state)); Self { block_context, state, transactions, simulation_flags, stats: Default::default() } } diff --git a/crates/katana/executor/src/implementation/blockifier/state.rs b/crates/katana/executor/src/implementation/blockifier/state.rs index 3258f94a72..af801d1ebe 100644 --- a/crates/katana/executor/src/implementation/blockifier/state.rs +++ b/crates/katana/executor/src/implementation/blockifier/state.rs @@ -27,8 +27,7 @@ impl<'a> StateReader for StateProviderDb<'a> { &self, contract_address: katana_cairo::starknet_api::core::ContractAddress, ) -> StateResult { - self.0 - .class_hash_of_contract(utils::to_address(contract_address)) + self.class_hash_of_contract(utils::to_address(contract_address)) .map(|v| ClassHash(v.unwrap_or_default())) .map_err(|e| StateError::StateReadError(e.to_string())) } @@ -38,7 +37,6 @@ impl<'a> StateReader for StateProviderDb<'a> { class_hash: katana_cairo::starknet_api::core::ClassHash, ) -> StateResult { if let Some(hash) = self - .0 .compiled_class_hash_of_class_hash(class_hash.0) .map_err(|e| StateError::StateReadError(e.to_string()))? { @@ -53,7 +51,7 @@ impl<'a> StateReader for StateProviderDb<'a> { class_hash: ClassHash, ) -> StateResult { if let Some(class) = - self.0.class(class_hash.0).map_err(|e| StateError::StateReadError(e.to_string()))? + self.class(class_hash.0).map_err(|e| StateError::StateReadError(e.to_string()))? 
{ let class = utils::to_class(class).map_err(|e| StateError::StateReadError(e.to_string()))?; @@ -68,8 +66,7 @@ impl<'a> StateReader for StateProviderDb<'a> { &self, contract_address: katana_cairo::starknet_api::core::ContractAddress, ) -> StateResult { - self.0 - .nonce(utils::to_address(contract_address)) + self.nonce(utils::to_address(contract_address)) .map(|n| Nonce(n.unwrap_or_default())) .map_err(|e| StateError::StateReadError(e.to_string())) } @@ -79,15 +76,14 @@ impl<'a> StateReader for StateProviderDb<'a> { contract_address: katana_cairo::starknet_api::core::ContractAddress, key: katana_cairo::starknet_api::state::StorageKey, ) -> StateResult { - self.0 - .storage(utils::to_address(contract_address), *key.0.key()) + self.storage(utils::to_address(contract_address), *key.0.key()) .map(|v| v.unwrap_or_default()) .map_err(|e| StateError::StateReadError(e.to_string())) } } #[derive(Debug)] -pub(super) struct CachedState(pub(super) Arc>>); +pub struct CachedState(pub(super) Arc>>); impl Clone for CachedState { fn clone(&self) -> Self { @@ -286,7 +282,7 @@ mod tests { #[test] fn can_fetch_from_inner_state_provider() -> anyhow::Result<()> { let state = state_provider(); - let cached_state = CachedState::new(StateProviderDb(state)); + let cached_state = CachedState::new(StateProviderDb::new(state)); let address = ContractAddress::from(felt!("0x67")); let legacy_class_hash = felt!("0x111"); @@ -357,7 +353,7 @@ mod tests { assert_eq!(actual_new_compiled_class_hash, None, "data shouldn't exist"); assert_eq!(actual_new_legacy_compiled_hash, None, "data shouldn't exist"); - let cached_state = CachedState::new(StateProviderDb(sp)); + let cached_state = CachedState::new(StateProviderDb::new(sp)); // insert some data to the cached state { @@ -472,7 +468,7 @@ mod tests { let sp = db.latest()?; - let cached_state = CachedState::new(StateProviderDb(sp)); + let cached_state = CachedState::new(StateProviderDb::new(sp)); let api_address = utils::to_blk_address(address); let api_storage_key = StorageKey(storage_key.try_into().unwrap()); diff --git a/crates/katana/executor/src/implementation/blockifier/utils.rs b/crates/katana/executor/src/implementation/blockifier/utils.rs index 6a6fb51b5c..8dffe1a1f6 100644 --- a/crates/katana/executor/src/implementation/blockifier/utils.rs +++ b/crates/katana/executor/src/implementation/blockifier/utils.rs @@ -3,6 +3,7 @@ use std::num::NonZeroU128; use std::sync::Arc; use blockifier::blockifier::block::{BlockInfo, GasPrices}; +use blockifier::bouncer::BouncerConfig; use blockifier::context::{BlockContext, ChainInfo, FeeTokenAddresses, TransactionContext}; use blockifier::execution::call_info::{ CallExecution, CallInfo, OrderedEvent, OrderedL2ToL1Message, @@ -74,14 +75,20 @@ pub fn transact( ) -> Result<(TransactionExecutionInfo, TxFeeInfo), ExecutionError> { let validate = !simulation_flags.skip_validate; let charge_fee = !simulation_flags.skip_fee_transfer; + // Blockifier doesn't provide a way to fully skip the nonce check during the tx validation + // stage. The `nonce_check` flag in `tx.execute()` only 'relaxes' the check for + // a nonce that is equal to or higher than the current (expected) account nonce.
+ // + // Related commit on Blockifier: https://github.com/dojoengine/blockifier/commit/2410b6055453f247d48759f223c34b3fb5fa777 + let nonce_check = !simulation_flags.skip_nonce_check; let fee_type = get_fee_type_from_tx(&tx); let info = match tx { Transaction::AccountTransaction(tx) => { - tx.execute(state, block_context, charge_fee, validate) + tx.execute(state, block_context, charge_fee, validate, nonce_check) } Transaction::L1HandlerTransaction(tx) => { - tx.execute(state, block_context, charge_fee, validate) + tx.execute(state, block_context, charge_fee, validate, nonce_check) } }?; @@ -173,7 +180,7 @@ pub fn call( Ok(res.execution.retdata.0) } -fn to_executor_tx(tx: ExecutableTxWithHash) -> Transaction { +pub fn to_executor_tx(tx: ExecutableTxWithHash) -> Transaction { let hash = tx.hash; match tx.transaction { @@ -386,7 +393,7 @@ pub fn block_context_from_envs(block_env: &BlockEnv, cfg_env: &CfgEnv) -> BlockC versioned_constants.validate_max_n_steps = cfg_env.validate_max_n_steps; versioned_constants.invoke_tx_max_n_steps = cfg_env.invoke_tx_max_n_steps; - BlockContext::new(block_info, chain_info, versioned_constants, Default::default()) + BlockContext::new(block_info, chain_info, versioned_constants, BouncerConfig::max()) } pub(super) fn state_update_from_cached_state( diff --git a/crates/katana/executor/src/implementation/noop.rs b/crates/katana/executor/src/implementation/noop.rs index d9f1e205e0..e8551c4e48 100644 --- a/crates/katana/executor/src/implementation/noop.rs +++ b/crates/katana/executor/src/implementation/noop.rs @@ -19,6 +19,7 @@ use crate::ExecutionError; #[derive(Debug, Default)] pub struct NoopExecutorFactory { cfg: CfgEnv, + execution_flags: SimulationFlag, } impl NoopExecutorFactory { @@ -53,6 +54,10 @@ impl ExecutorFactory for NoopExecutorFactory { fn cfg(&self) -> &CfgEnv { &self.cfg } + + fn execution_flags(&self) -> &SimulationFlag { + &self.execution_flags + } } #[derive(Debug, Default)] diff --git a/crates/katana/node/Cargo.toml b/crates/katana/node/Cargo.toml index 011bd8db62..c74b8e1380 100644 --- a/crates/katana/node/Cargo.toml +++ b/crates/katana/node/Cargo.toml @@ -14,6 +14,7 @@ katana-primitives.workspace = true katana-provider.workspace = true katana-rpc.workspace = true katana-rpc-api.workspace = true +katana-tasks.workspace = true anyhow.workspace = true dojo-metrics.workspace = true @@ -22,7 +23,6 @@ jsonrpsee.workspace = true num-traits.workspace = true serde_json.workspace = true starknet.workspace = true -tokio.workspace = true tower = { workspace = true, features = [ "full" ] } tower-http = { workspace = true, features = [ "full" ] } tracing.workspace = true diff --git a/crates/katana/node/src/lib.rs b/crates/katana/node/src/lib.rs index fe72ac1aaa..55dab949f0 100644 --- a/crates/katana/node/src/lib.rs +++ b/crates/katana/node/src/lib.rs @@ -19,12 +19,12 @@ use katana_core::env::BlockContextGenerator; use katana_core::sequencer::SequencerConfig; use katana_core::service::block_producer::BlockProducer; #[cfg(feature = "messaging")] -use katana_core::service::messaging::MessagingService; -use katana_core::service::{NodeService, TransactionMiner}; +use katana_core::service::messaging::{MessagingService, MessagingTask}; +use katana_core::service::{BlockProductionTask, TransactionMiner}; use katana_executor::implementation::blockifier::BlockifierFactory; use katana_executor::{ExecutorFactory, SimulationFlag}; use katana_pool::ordering::FiFo; -use katana_pool::validation::NoopValidator; +use katana_pool::validation::stateful::TxValidator; use 
katana_pool::{TransactionPool, TxPool}; use katana_primitives::block::FinalityStatus; use katana_primitives::env::{CfgEnv, FeeTokenAddressses}; @@ -32,17 +32,16 @@ use katana_provider::providers::fork::ForkedProvider; use katana_provider::providers::in_memory::InMemoryProvider; use katana_rpc::config::ServerConfig; use katana_rpc::dev::DevApi; -use katana_rpc::katana::KatanaApi; use katana_rpc::metrics::RpcServerMetrics; use katana_rpc::saya::SayaApi; use katana_rpc::starknet::StarknetApi; use katana_rpc::torii::ToriiApi; use katana_rpc_api::dev::DevApiServer; -use katana_rpc_api::katana::KatanaApiServer; use katana_rpc_api::saya::SayaApiServer; use katana_rpc_api::starknet::{StarknetApiServer, StarknetTraceApiServer, StarknetWriteApiServer}; use katana_rpc_api::torii::ToriiApiServer; use katana_rpc_api::ApiKind; +use katana_tasks::TaskManager; use num_traits::ToPrimitive; use starknet::core::types::{BlockId, BlockStatus, MaybePendingBlockWithTxHashes}; use starknet::core::utils::parse_cairo_short_string; @@ -51,6 +50,26 @@ use starknet::providers::{JsonRpcClient, Provider}; use tower_http::cors::{AllowOrigin, CorsLayer}; use tracing::{info, trace}; +/// A handle to the instantiated Katana node. +#[allow(missing_debug_implementations)] +pub struct Handle { + pub pool: TxPool, + pub rpc: RpcServer, + pub task_manager: TaskManager, + pub backend: Arc>, + pub block_producer: Arc>, +} + +impl Handle { + /// Stops the Katana node. + pub async fn stop(self) -> Result<()> { + // TODO: wait for the rpc server to stop + self.rpc.handle.stop()?; + self.task_manager.shutdown().await; + Ok(()) + } +} + /// Build the core Katana components from the given configurations and start running the node. // TODO: placeholder until we implement a dedicated class that encapsulates building the node // components @@ -65,7 +84,7 @@ pub async fn start( server_config: ServerConfig, sequencer_config: SequencerConfig, mut starknet_config: StarknetConfig, -) -> anyhow::Result<(NodeHandle, Arc>)> { +) -> Result { // --- build executor factory let cfg_env = CfgEnv { @@ -154,12 +173,7 @@ pub async fn start( config: starknet_config, }); - // --- build transaction pool and miner - - let pool = TxPool::new(NoopValidator::new(), FiFo::new()); - let miner = TransactionMiner::new(pool.add_listener()); - - // --- build block producer service + // --- build block producer let block_producer = if sequencer_config.block_time.is_some() || sequencer_config.no_mining { if let Some(interval) = sequencer_config.block_time { @@ -171,6 +185,12 @@ pub async fn start( BlockProducer::instant(Arc::clone(&backend)) }; + // --- build transaction pool and miner + + let validator = block_producer.validator(); + let pool = TxPool::new(validator.clone(), FiFo::new()); + let miner = TransactionMiner::new(pool.add_listener()); + // --- build metrics service // Metrics recorder must be initialized before calling any of the metrics macros, in order for @@ -190,40 +210,40 @@ pub async fn start( info!(%addr, "Metrics endpoint started."); } - // --- build messaging service + // --- create a TaskManager using the ambient Tokio runtime + + let task_manager = TaskManager::current(); + + // --- build and spawn the messaging task #[cfg(feature = "messaging")] - let messaging = if let Some(config) = sequencer_config.messaging.clone() { - MessagingService::new(config, pool.clone(), Arc::clone(&backend)).await.ok() - } else { - None - }; + if let Some(config) = sequencer_config.messaging.clone() { + let messaging = MessagingService::new(config, pool.clone(),
Arc::clone(&backend)).await?; + let task = MessagingTask::new(messaging); + task_manager.build_task().critical().name("Messaging").spawn(task); + } let block_producer = Arc::new(block_producer); - // TODO: avoid dangling task, or at least store the handle to the NodeService - tokio::spawn(NodeService::new( - pool.clone(), - miner, - block_producer.clone(), - #[cfg(feature = "messaging")] - messaging, - )); + // --- build and spawn the block production task + + let task = BlockProductionTask::new(pool.clone(), miner, block_producer.clone()); + task_manager.build_task().critical().name("BlockProduction").spawn(task); // --- spawn rpc server - let node_components = (pool, backend.clone(), block_producer); - let rpc_handle = spawn(node_components, server_config).await?; + let node_components = (pool.clone(), backend.clone(), block_producer.clone(), validator); + let rpc = spawn(node_components, server_config).await?; - Ok((rpc_handle, backend)) + Ok(Handle { backend, block_producer, pool, rpc, task_manager }) } // Moved from `katana_rpc` crate pub async fn spawn( - node_components: (TxPool, Arc>, Arc>), + node_components: (TxPool, Arc>, Arc>, TxValidator), config: ServerConfig, -) -> Result { - let (pool, backend, block_producer) = node_components; +) -> Result { + let (pool, backend, block_producer, validator) = node_components; let mut methods = RpcModule::new(()); methods.register_method("health", |_, _| Ok(serde_json::json!({ "health": true })))?; @@ -232,15 +252,16 @@ pub async fn spawn( match api { ApiKind::Starknet => { // TODO: merge these into a single logic. - let server = - StarknetApi::new(backend.clone(), pool.clone(), block_producer.clone()); + let server = StarknetApi::new( + backend.clone(), + pool.clone(), + block_producer.clone(), + validator.clone(), + ); methods.merge(StarknetApiServer::into_rpc(server.clone()))?; methods.merge(StarknetWriteApiServer::into_rpc(server.clone()))?; methods.merge(StarknetTraceApiServer::into_rpc(server))?; } - ApiKind::Katana => { - methods.merge(KatanaApi::new(backend.clone()).into_rpc())?; - } ApiKind::Dev => { methods.merge(DevApi::new(backend.clone(), block_producer.clone()).into_rpc())?; } @@ -291,12 +312,11 @@ pub async fn spawn( let addr = server.local_addr()?; let handle = server.start(methods)?; - Ok(NodeHandle { config, handle, addr }) + Ok(RpcServer { handle, addr }) } -#[derive(Debug, Clone)] -pub struct NodeHandle { +#[derive(Debug)] +pub struct RpcServer { pub addr: SocketAddr, - pub config: ServerConfig, pub handle: ServerHandle, } diff --git a/crates/katana/pool/Cargo.toml b/crates/katana/pool/Cargo.toml index 824edc7de8..256f6ce928 100644 --- a/crates/katana/pool/Cargo.toml +++ b/crates/katana/pool/Cargo.toml @@ -10,6 +10,7 @@ version.workspace = true futures.workspace = true katana-executor.workspace = true katana-primitives.workspace = true +katana-provider.workspace = true parking_lot.workspace = true thiserror.workspace = true tracing.workspace = true diff --git a/crates/katana/pool/src/lib.rs b/crates/katana/pool/src/lib.rs index 3a9f484864..465f679732 100644 --- a/crates/katana/pool/src/lib.rs +++ b/crates/katana/pool/src/lib.rs @@ -12,11 +12,21 @@ use katana_primitives::transaction::{ExecutableTxWithHash, TxHash}; use ordering::{FiFo, PoolOrd}; use pool::Pool; use tx::{PendingTx, PoolTransaction}; -use validation::{NoopValidator, Validator}; +use validation::stateful::TxValidator; +use validation::{InvalidTransactionError, Validator}; /// Katana default transaction pool type.
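// Editor's note: a hedged sketch of how an embedder is meant to consume the new
// Handle/TaskManager pair above, using only the calls visible in this diff
// (TaskManager::current, build_task().critical().name(..).spawn(..), shutdown);
// the `run_forever` task is hypothetical.
use katana_tasks::TaskManager;

async fn run_forever() {
    // ... some long-running node task ...
}

async fn run_node() -> anyhow::Result<()> {
    // Binds the manager to the ambient Tokio runtime.
    let task_manager = TaskManager::current();

    // A "critical" task is one whose failure should bring the node down.
    task_manager.build_task().critical().name("Example").spawn(run_forever());

    // ... serve requests; on shutdown (e.g. via Handle::stop) ...
    task_manager.shutdown().await;
    Ok(())
}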
-pub type TxPool = - Pool, FiFo>; +pub type TxPool = Pool>; + +pub type PoolResult = Result; + +#[derive(Debug, thiserror::Error)] +pub enum PoolError { + #[error("Invalid transaction: {0}")] + InvalidTransaction(Box), + #[error("Internal error: {0}")] + Internal(Box), +} /// Represents a complete transaction pool. pub trait TransactionPool { @@ -31,7 +41,7 @@ pub trait TransactionPool { type Validator: Validator; /// Add a new transaction to the pool. - fn add_transaction(&self, tx: Self::Transaction); + fn add_transaction(&self, tx: Self::Transaction) -> PoolResult; fn take_transactions( &self, diff --git a/crates/katana/pool/src/ordering.rs b/crates/katana/pool/src/ordering.rs index ee714bb93d..25a7794082 100644 --- a/crates/katana/pool/src/ordering.rs +++ b/crates/katana/pool/src/ordering.rs @@ -53,7 +53,7 @@ pub struct TxSubmissionNonce(u64); impl Ord for TxSubmissionNonce { fn cmp(&self, other: &Self) -> std::cmp::Ordering { // Lower values (earlier submissions) have higher priority, hence the natural ordering - other.0.cmp(&self.0) + self.0.cmp(&other.0) } } @@ -76,25 +76,136 @@ impl PartialEq for TxSubmissionNonce { /// This ordering implementation uses the transaction's tip as the priority value. We don't have a /// use case for this ordering implementation yet, but it's mostly used for testing. #[derive(Debug)] -pub struct Tip(PhantomData); +pub struct TipOrdering(PhantomData); -impl Tip { +impl TipOrdering { pub fn new() -> Self { Self(PhantomData) } } -impl PoolOrd for Tip { +#[derive(Debug, Clone)] +pub struct Tip(u64); + +impl Ord for Tip { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + other.0.cmp(&self.0) + } +} + +impl PartialOrd for Tip { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl PartialEq for Tip { + fn eq(&self, other: &Self) -> bool { + self.0 == other.0 + } +} + +impl Eq for Tip {} + +impl PoolOrd for TipOrdering { type Transaction = T; - type PriorityValue = u64; + type PriorityValue = Tip; fn priority(&self, tx: &Self::Transaction) -> Self::PriorityValue { - tx.tip() + Tip(tx.tip()) } } -impl Default for Tip { +impl Default for TipOrdering { fn default() -> Self { Self::new() } } + +#[cfg(test)] +mod tests { + + use crate::ordering::{self, FiFo}; + use crate::pool::test_utils::*; + use crate::tx::PoolTransaction; + use crate::validation::NoopValidator; + use crate::{Pool, TransactionPool}; + + #[test] + fn fifo_ordering() { + // Create mock transactions + let txs = [PoolTx::new(), PoolTx::new(), PoolTx::new(), PoolTx::new(), PoolTx::new()]; + + // Create a pool with FiFo ordering + let pool = Pool::new(NoopValidator::new(), FiFo::new()); + + // Add transactions to the pool + txs.iter().for_each(|tx| { + let _ = pool.add_transaction(tx.clone()); + }); + + // Get pending transactions + let pendings = pool.take_transactions().collect::>(); + + // Assert that the transactions are in the order they were added (first to last) + pendings.iter().zip(txs).for_each(|(pending, tx)| { + assert_eq!(pending.tx.as_ref(), &tx); + }); + } + + #[test] + fn tip_based_ordering() { + // Create mock transactions with different tips and in random order + let txs = [ + PoolTx::new().with_tip(2), + PoolTx::new().with_tip(1), + PoolTx::new().with_tip(6), + PoolTx::new().with_tip(3), + PoolTx::new().with_tip(2), + PoolTx::new().with_tip(2), + PoolTx::new().with_tip(5), + PoolTx::new().with_tip(4), + PoolTx::new().with_tip(7), + ]; + + // Create a pool with tip-based ordering + let pool = Pool::new(NoopValidator::new(), ordering::TipOrdering::new()); + + // Add
transactions to the pool + txs.iter().for_each(|tx| { + let _ = pool.add_transaction(tx.clone()); + }); + + // Get pending transactions + let pending = pool.take_transactions().collect::>(); + assert_eq!(pending.len(), txs.len()); + + // Assert that the transactions are ordered by tip (highest to lowest) + assert_eq!(pending[0].tx.tip(), 7); + assert_eq!(pending[0].tx.hash(), txs[8].hash()); + + assert_eq!(pending[1].tx.tip(), 6); + assert_eq!(pending[1].tx.hash(), txs[2].hash()); + + assert_eq!(pending[2].tx.tip(), 5); + assert_eq!(pending[2].tx.hash(), txs[6].hash()); + + assert_eq!(pending[3].tx.tip(), 4); + assert_eq!(pending[3].tx.hash(), txs[7].hash()); + + assert_eq!(pending[4].tx.tip(), 3); + assert_eq!(pending[4].tx.hash(), txs[3].hash()); + + assert_eq!(pending[5].tx.tip(), 2); + assert_eq!(pending[5].tx.hash(), txs[0].hash()); + + assert_eq!(pending[6].tx.tip(), 2); + assert_eq!(pending[6].tx.hash(), txs[4].hash()); + + assert_eq!(pending[7].tx.tip(), 2); + assert_eq!(pending[7].tx.hash(), txs[5].hash()); + + assert_eq!(pending[8].tx.tip(), 1); + assert_eq!(pending[8].tx.hash(), txs[1].hash()); + } +} diff --git a/crates/katana/pool/src/pool.rs b/crates/katana/pool/src/pool.rs index eb4722ae93..883fdf746d 100644 --- a/crates/katana/pool/src/pool.rs +++ b/crates/katana/pool/src/pool.rs @@ -1,5 +1,7 @@ +use core::fmt; +use std::collections::btree_set::IntoIter; +use std::collections::BTreeSet; use std::sync::Arc; -use std::vec::IntoIter; use futures::channel::mpsc::{channel, Receiver, Sender}; use katana_primitives::transaction::TxHash; @@ -8,8 +10,8 @@ use tracing::{error, info, warn}; use crate::ordering::PoolOrd; use crate::tx::{PendingTx, PoolTransaction, TxId}; -use crate::validation::{ValidationOutcome, Validator}; -use crate::TransactionPool; +use crate::validation::{InvalidTransactionError, ValidationOutcome, Validator}; +use crate::{PoolError, PoolResult, TransactionPool}; #[derive(Debug)] pub struct Pool @@ -23,8 +25,8 @@ where #[derive(Debug)] struct Inner { - /// List of all valid txs in the pool - transactions: RwLock>>, + /// List of all valid txs in the pool. + transactions: RwLock>>, /// listeners for incoming txs listeners: RwLock>>, @@ -84,7 +86,7 @@ where impl TransactionPool for Pool where - T: PoolTransaction, + T: PoolTransaction + fmt::Debug, V: Validator, O: PoolOrd, { @@ -92,34 +94,49 @@ where type Validator = V; type Ordering = O; - fn add_transaction(&self, tx: T) { + fn add_transaction(&self, tx: T) -> PoolResult { + let hash = tx.hash(); let id = TxId::new(tx.sender(), tx.nonce()); + info!(hash = format!("{hash:#x}"), "Transaction received."); + match self.inner.validator.validate(tx) { Ok(outcome) => { - let hash = match outcome { + match outcome { ValidationOutcome::Valid(tx) => { // get the priority of the validated tx let priority = self.inner.ordering.priority(&tx); - - let tx = PendingTx::new(id.clone(), tx, priority); - let hash = tx.tx.hash(); + let tx = PendingTx::new(id, tx, priority); // insert the tx in the pool - self.inner.transactions.write().push(tx); + self.inner.transactions.write().insert(tx); self.notify_listener(hash); - hash + + Ok(hash) } - // for now, this variant is a no-op - ValidationOutcome::Invalid { tx, .. } => tx.hash(), - }; + ValidationOutcome::Invalid { error, .. 
} => { + warn!(hash = format!("{hash:#x}"), "Invalid transaction."); + Err(PoolError::InvalidTransaction(Box::new(error))) + } - info!(hash = format!("{hash:#x}"), "Transaction received."); + // return as error for now but ideally we should keep the tx in a separate + // queue and revalidate it when the parent tx is added to the pool + ValidationOutcome::Dependent { tx, tx_nonce, current_nonce } => { + info!(hash = format!("{hash:#x}"), "Dependent transaction."); + let err = InvalidTransactionError::InvalidNonce { + address: tx.sender(), + current_nonce, + tx_nonce, + }; + Err(PoolError::InvalidTransaction(Box::new(err))) + } + } } - Err(error @ crate::validation::Error { hash, .. }) => { - error!(hash = format!("{hash:#x}"), %error, "Failed to validate transaction."); + Err(e @ crate::validation::Error { hash, .. }) => { + error!(hash = format!("{hash:#x}"), %e, "Failed to validate transaction."); + Err(PoolError::Internal(e.error)) } } } @@ -193,14 +210,12 @@ where #[cfg(test)] pub(crate) mod test_utils { - use katana_executor::ExecutionError; use katana_primitives::contract::{ContractAddress, Nonce}; use katana_primitives::FieldElement; use rand::Rng; use super::*; use crate::tx::PoolTransaction; - use crate::validation::{ValidationOutcome, ValidationResult, Validator}; fn random_bytes() -> [u8; SIZE] { let mut bytes = [0u8; SIZE]; @@ -208,7 +223,7 @@ pub(crate) mod test_utils { bytes } - #[derive(Clone, Debug)] + #[derive(Clone, Debug, PartialEq, Eq)] pub struct PoolTx { tip: u64, nonce: Nonce, @@ -269,33 +284,6 @@ pub(crate) mod test_utils { self.tip } } - - /// A tip-based validator that flags transactions as invalid if they have less than 10 tip. - pub struct TipValidator { - threshold: u64, - t: std::marker::PhantomData, - } - - impl TipValidator { - pub fn new(threshold: u64) -> Self { - Self { threshold, t: std::marker::PhantomData } - } - } - - impl Validator for TipValidator { - type Transaction = T; - - fn validate(&self, tx: Self::Transaction) -> ValidationResult { - if tx.tip() < self.threshold { - return ValidationResult::Ok(ValidationOutcome::Invalid { - tx, - error: ExecutionError::Other("tip too low".to_string()), - }); - } - - ValidationResult::Ok(ValidationOutcome::Valid(tx)) - } - } } #[cfg(test)] @@ -306,10 +294,9 @@ mod tests { use super::test_utils::*; use super::Pool; - use crate::ordering::{self, FiFo}; - use crate::pool::test_utils; + use crate::ordering::FiFo; use crate::tx::PoolTransaction; - use crate::validation::{NoopValidator, ValidationOutcome, Validator}; + use crate::validation::NoopValidator; use crate::TransactionPool; /// Tx pool that uses a noop validator and a first-come-first-serve ordering.
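// Editor's note: with `add_transaction` now returning `PoolResult<TxHash>` (see
// pool/src/lib.rs above), callers can branch on why a submission was rejected. A
// hedged usage sketch against the API shown in this diff; `pool` is the default
// `TxPool` and `tx` an `ExecutableTxWithHash`:
use katana_pool::{PoolError, TransactionPool, TxPool};
use katana_primitives::transaction::ExecutableTxWithHash;

fn submit(pool: &TxPool, tx: ExecutableTxWithHash) {
    match pool.add_transaction(tx) {
        // The tx passed stateful validation and was inserted into the pool.
        Ok(hash) => println!("queued tx {hash:#x}"),
        // Definitively invalid: bad nonce, insufficient fee, failed __validate__, ...
        Err(PoolError::InvalidTransaction(err)) => eprintln!("rejected: {err}"),
        // Infrastructure failure inside the validator/provider.
        Err(PoolError::Internal(err)) => eprintln!("pool error: {err}"),
    }
}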
@@ -341,7 +328,9 @@ mod tests { assert!(pool.inner.transactions.read().is_empty()); // add all the txs to the pool - txs.iter().for_each(|tx| pool.add_transaction(tx.clone())); + txs.iter().for_each(|tx| { + let _ = pool.add_transaction(tx.clone()); + }); // all the txs should be in the pool assert_eq!(pool.size(), txs.len()); @@ -389,7 +378,9 @@ mod tests { let mut listener = pool.add_listener(); // start adding txs to the pool - txs.iter().for_each(|tx| pool.add_transaction(tx.clone())); + txs.iter().for_each(|tx| { + let _ = pool.add_transaction(tx.clone()); + }); // the channel should contain all the added txs let mut counter = 0; @@ -402,100 +393,6 @@ mod tests { assert_eq!(counter, txs.len()); } - #[test] - #[ignore = "Rejected pool not implemented yet"] - fn transactions_rejected() { - let all = [ - PoolTx::new().with_tip(5), - PoolTx::new().with_tip(0), - PoolTx::new().with_tip(15), - PoolTx::new().with_tip(8), - PoolTx::new().with_tip(12), - PoolTx::new().with_tip(10), - PoolTx::new().with_tip(1), - ]; - - // create a pool with a validator that rejects txs with tip < 10 - let pool = Pool::new(test_utils::TipValidator::new(10), FiFo::new()); - - // Extract the expected valid and invalid transactions from the all list - let (expected_valids, expected_invalids) = pool - .validator() - .validate_all(all.to_vec()) - .into_iter() - .filter_map(|res| res.ok()) - .fold((Vec::new(), Vec::new()), |mut acc, res| match res { - ValidationOutcome::Valid(tx) => { - acc.0.push(tx); - acc - } - - ValidationOutcome::Invalid { tx, .. } => { - acc.1.push(tx); - acc - } - }); - - assert_eq!(expected_valids.len(), 3); - assert_eq!(expected_invalids.len(), 4); - - // Add all transactions to the pool - all.iter().for_each(|tx| pool.add_transaction(tx.clone())); - - // Check that all transactions should be in the pool regardless of validity - assert!(all.iter().all(|tx| pool.get(tx.hash()).is_some())); - assert_eq!(pool.size(), all.len()); - - // Pending transactions should only contain the valid transactions - let pendings = pool.take_transactions().collect::>(); - assert_eq!(pendings.len(), expected_valids.len()); - - // bcs its a fcfs pool, the order of the pending txs should be the as its order of insertion - // (position in the array) - for (actual, expected) in pendings.iter().zip(expected_valids.iter()) { - assert_eq!(actual.tx.hash(), expected.hash()); - } - - // // rejected_txs should contain all the invalid txs - // assert_eq!(pool.inner.rejected.read().len(), expected_invalids.len()); - // for tx in expected_invalids.iter() { - // assert!(pool.inner.rejected.read().contains_key(&tx.hash())); - // } - } - - #[test] - #[ignore = "Txs ordering not fully implemented yet"] - fn txs_ordering() { - // Create mock transactions with different tips and in random order - let txs = [ - PoolTx::new().with_tip(1), - PoolTx::new().with_tip(6), - PoolTx::new().with_tip(3), - PoolTx::new().with_tip(2), - PoolTx::new().with_tip(5), - PoolTx::new().with_tip(4), - PoolTx::new().with_tip(7), - ]; - - // Create a pool with tip-based ordering - let pool = Pool::new(NoopValidator::new(), ordering::Tip::new()); - - // Add transactions to the pool - txs.iter().for_each(|tx| pool.add_transaction(tx.clone())); - - // Get pending transactions - let pending = pool.take_transactions().collect::>(); - - // Assert that the transactions are ordered by tip (highest to lowest) - assert_eq!(pending[0].tx.tip(), 7); - assert_eq!(pending[1].tx.tip(), 6); - assert_eq!(pending[2].tx.tip(), 5); - assert_eq!(pending[3].tx.tip(), 4); - 
assert_eq!(pending[4].tx.tip(), 3); - assert_eq!(pending[5].tx.tip(), 2); - assert_eq!(pending[6].tx.tip(), 1); - } - #[test] #[ignore = "Txs dependency management not fully implemented yet"] fn dependent_txs_linear_insertion() { @@ -509,7 +406,9 @@ mod tests { .collect(); // Add all transactions to the pool - txs.iter().for_each(|tx| pool.add_transaction(tx.clone())); + txs.iter().for_each(|tx| { + let _ = pool.add_transaction(tx.clone()); + }); // Get pending transactions let pending = pool.take_transactions().collect::>(); diff --git a/crates/katana/pool/src/tx.rs b/crates/katana/pool/src/tx.rs index 2487aa3e72..ca7117d292 100644 --- a/crates/katana/pool/src/tx.rs +++ b/crates/katana/pool/src/tx.rs @@ -1,4 +1,5 @@ use std::sync::Arc; +use std::time::Instant; use katana_primitives::contract::{ContractAddress, Nonce}; use katana_primitives::transaction::{ @@ -56,11 +57,12 @@ pub struct PendingTx { pub id: TxId, pub tx: Arc, pub priority: O::PriorityValue, + pub added_at: std::time::Instant, } impl PendingTx { pub fn new(id: TxId, tx: T, priority: O::PriorityValue) -> Self { - Self { id, tx: Arc::new(tx), priority } + Self { id, tx: Arc::new(tx), priority, added_at: Instant::now() } } } @@ -69,7 +71,12 @@ impl PendingTx { impl Clone for PendingTx { fn clone(&self) -> Self { - Self { id: self.id.clone(), tx: Arc::clone(&self.tx), priority: self.priority.clone() } + Self { + id: self.id.clone(), + added_at: self.added_at, + tx: Arc::clone(&self.tx), + priority: self.priority.clone(), + } } } @@ -87,9 +94,17 @@ impl PartialOrd for PendingTx { } } +// When two transactions have the same priority, we want to prioritize the one that was added +// first. So, when an incoming transaction with a similar priority value is added to the +// [BTreeSet](std::collections::BTreeSet), the transaction is assigned a 'greater' +// [Ordering](std::cmp::Ordering) so that it will be placed after the existing ones. This is +// because items in a BTreeSet are ordered from lowest to highest. impl Ord for PendingTx { fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.priority.cmp(&other.priority) + match self.priority.cmp(&other.priority) { + std::cmp::Ordering::Equal => std::cmp::Ordering::Greater, + other => other, + } } } diff --git a/crates/katana/pool/src/validation.rs b/crates/katana/pool/src/validation.rs deleted file mode 100644 index ad7ba895eb..0000000000 --- a/crates/katana/pool/src/validation.rs +++ /dev/null @@ -1,65 +0,0 @@ -use katana_executor::ExecutionError; -use katana_primitives::transaction::TxHash; - -use crate::tx::PoolTransaction; - -#[derive(Debug, thiserror::Error)] -#[error("{error}")] -pub struct Error { - /// The hash of the transaction that failed validation. - pub hash: TxHash, - /// The error that caused the transaction to fail validation. - pub error: Box, -} - -pub type ValidationResult = Result, Error>; - -/// A trait for validating transactions before they are added to the transaction pool. -pub trait Validator { - type Transaction: PoolTransaction; - - /// Validate a transaction. - fn validate(&self, tx: Self::Transaction) -> ValidationResult; - - /// Validate a batch of transactions. - fn validate_all( - &self, - txs: Vec, - ) -> Vec> { - txs.into_iter().map(|tx| self.validate(tx)).collect() - } -} - -// outcome of the validation phase. the variant of this enum determines on which pool -// the tx should be inserted into. -#[derive(Debug)] -pub enum ValidationOutcome { - /// tx that is or may eventually be valid after some nonce changes.
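// Editor's note: a self-contained illustration (not from this diff) of the tie-breaking
// trick documented for `PendingTx` above: by never returning `Ordering::Equal`, two
// entries with the same priority are both kept in the `BTreeSet`, and the newer one always
// sorts after the older one. This deliberately bends the usual `Ord` contract (an item no
// longer compares equal to itself), which is tolerable here only because entries are never
// looked up by equality.
use std::cmp::Ordering;
use std::collections::BTreeSet;

#[derive(PartialEq, Eq)]
struct Entry {
    priority: u64,
    seq: u64, // illustrative insertion sequence, standing in for `added_at`
}

impl PartialOrd for Entry {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Entry {
    fn cmp(&self, other: &Self) -> Ordering {
        match self.priority.cmp(&other.priority) {
            // Same priority: place the incoming entry after the existing one.
            Ordering::Equal => Ordering::Greater,
            ordering => ordering,
        }
    }
}

fn main() {
    let mut set = BTreeSet::new();
    set.insert(Entry { priority: 1, seq: 0 });
    set.insert(Entry { priority: 1, seq: 1 }); // kept, not deduplicated
    set.insert(Entry { priority: 0, seq: 2 });

    let seqs: Vec<u64> = set.into_iter().map(|e| e.seq).collect();
    assert_eq!(seqs, vec![2, 0, 1]); // ascending priority first, then insertion order
}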
- Valid(T), - /// tx that will never be valid, eg. due to invalid signature, nonce lower than current, etc. - Invalid { tx: T, error: ExecutionError }, -} - -/// A no-op validator that does nothing and assume all incoming transactions are valid. -#[derive(Debug)] -pub struct NoopValidator(std::marker::PhantomData); - -impl NoopValidator { - pub fn new() -> Self { - Self(std::marker::PhantomData) - } -} - -impl Validator for NoopValidator { - type Transaction = T; - - fn validate(&self, tx: Self::Transaction) -> ValidationResult { - ValidationResult::Ok(ValidationOutcome::Valid(tx)) - } -} - -impl Default for NoopValidator { - fn default() -> Self { - Self::new() - } -} diff --git a/crates/katana/pool/src/validation/mod.rs b/crates/katana/pool/src/validation/mod.rs new file mode 100644 index 0000000000..68943ecd73 --- /dev/null +++ b/crates/katana/pool/src/validation/mod.rs @@ -0,0 +1,130 @@ +pub mod stateful; + +use katana_primitives::class::ClassHash; +use katana_primitives::contract::{ContractAddress, Nonce}; +use katana_primitives::transaction::TxHash; +use katana_primitives::FieldElement; + +use crate::tx::PoolTransaction; + +#[derive(Debug, thiserror::Error)] +#[error("{error}")] +pub struct Error { + /// The hash of the transaction that failed validation. + pub hash: TxHash, + /// The actual error object. + pub error: Box, +} + +// TODO: figure out how to combine this with ExecutionError +#[derive(Debug, thiserror::Error)] +pub enum InvalidTransactionError { + /// Error when the account's balance is insufficient to cover the specified transaction fee. + #[error("Max fee ({max_fee}) exceeds balance ({balance}).")] + InsufficientFunds { + /// The specified transaction fee. + max_fee: u128, + /// The account's balance of the fee token. + balance: FieldElement, + }, + + /// Error when the specified transaction fee is insufficient to cover the minimum fee required. + #[error("The specified tx max fee is insufficient")] + InsufficientMaxFee { min_fee: u128, max_fee: u128 }, + + /// Error when the account's validation logic fails (i.e. the __validate__ function). + #[error("{error}")] + ValidationFailure { + /// The address of the contract that failed validation. + address: ContractAddress, + /// The class hash of the account contract. + class_hash: ClassHash, + /// The error message returned by Blockifier. + // TODO: this should be a more specific error type. + error: String, + }, + + /// Error when the transaction's sender is not an account contract. + #[error("sender is not an account")] + NonAccount { + /// The address of the contract that is not an account. + address: ContractAddress, + }, + + /// Error when the transaction is using an unexpected nonce. + #[error( + "Invalid transaction nonce of contract at address {address}. Account nonce: \ {current_nonce:#x}; got: {tx_nonce:#x}." + )] + InvalidNonce { + /// The address of the contract that has the invalid nonce. + address: ContractAddress, + /// The current nonce of the sender's account. + current_nonce: Nonce, + /// The nonce that the tx is using. + tx_nonce: Nonce, + }, +} + +pub type ValidationResult = Result, Error>; + +/// A trait for validating transactions before they are added to the transaction pool. +pub trait Validator { + type Transaction: PoolTransaction; + + /// Validate a transaction. + fn validate(&self, tx: Self::Transaction) -> ValidationResult; + + /// Validate a batch of transactions.
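// Editor's note: a hedged sketch (an editor's illustration, not part of this diff) of
// implementing the `Validator` trait above with a simple nonce-window policy, to show how
// the new `Dependent` and `Invalid` outcomes are meant to be produced. It assumes the items
// of this module are in scope; the real `TxValidator` below reads the expected nonce from
// the pool/state instead of a fixed field.
use katana_primitives::contract::Nonce;

struct NonceWindowValidator<T> {
    expected_nonce: Nonce,
    _tx: std::marker::PhantomData<T>,
}

impl<T: PoolTransaction> Validator for NonceWindowValidator<T> {
    type Transaction = T;

    fn validate(&self, tx: T) -> ValidationResult<T> {
        let tx_nonce = tx.nonce();
        if tx_nonce > self.expected_nonce {
            // Nonce gap: park the tx until its predecessor arrives.
            Ok(ValidationOutcome::Dependent {
                tx_nonce,
                current_nonce: self.expected_nonce,
                tx,
            })
        } else if tx_nonce < self.expected_nonce {
            // Nonce already consumed: the tx can never become valid.
            let error = InvalidTransactionError::InvalidNonce {
                address: tx.sender(),
                current_nonce: self.expected_nonce,
                tx_nonce,
            };
            Ok(ValidationOutcome::Invalid { tx, error })
        } else {
            Ok(ValidationOutcome::Valid(tx))
        }
    }
}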
+ fn validate_all( + &self, + txs: Vec, + ) -> Vec> { + txs.into_iter().map(|tx| self.validate(tx)).collect() + } +} + +// outcome of the validation phase. the variant of this enum determines into which pool +// the tx should be inserted. +#[derive(Debug)] +pub enum ValidationOutcome { + /// tx that is or may eventually be valid after some nonce changes. + Valid(T), + + /// tx that will never be valid, e.g. due to invalid signature, nonce lower than current, etc. + Invalid { tx: T, error: InvalidTransactionError }, + + /// tx that is dependent on another tx, i.e. when the tx nonce is higher than the current account + /// nonce. + Dependent { + tx: T, + /// The nonce that the tx is using. + tx_nonce: Nonce, + /// The current nonce of the sender's account. + current_nonce: Nonce, + }, +} + +/// A no-op validator that does nothing and assumes all incoming transactions are valid. +#[derive(Debug)] +pub struct NoopValidator(std::marker::PhantomData); + +impl NoopValidator { + pub fn new() -> Self { + Self(std::marker::PhantomData) + } +} + +impl Validator for NoopValidator { + type Transaction = T; + + fn validate(&self, tx: Self::Transaction) -> ValidationResult { + ValidationResult::Ok(ValidationOutcome::Valid(tx)) + } +} + +impl Default for NoopValidator { + fn default() -> Self { + Self::new() + } +} diff --git a/crates/katana/pool/src/validation/stateful.rs b/crates/katana/pool/src/validation/stateful.rs new file mode 100644 index 0000000000..abd8773a87 --- /dev/null +++ b/crates/katana/pool/src/validation/stateful.rs @@ -0,0 +1,224 @@ +use std::collections::HashMap; +use std::sync::Arc; + +use katana_executor::implementation::blockifier::blockifier::blockifier::stateful_validator::{ + StatefulValidator, StatefulValidatorError, +}; +use katana_executor::implementation::blockifier::blockifier::state::cached_state::CachedState; +use katana_executor::implementation::blockifier::blockifier::transaction::errors::{ + TransactionExecutionError, TransactionFeeError, TransactionPreValidationError, +}; +use katana_executor::implementation::blockifier::blockifier::transaction::transaction_execution::Transaction; +use katana_executor::implementation::blockifier::utils::{ + block_context_from_envs, to_address, to_executor_tx, +}; +use katana_executor::{SimulationFlag, StateProviderDb}; +use katana_primitives::contract::{ContractAddress, Nonce}; +use katana_primitives::env::{BlockEnv, CfgEnv}; +use katana_primitives::transaction::{ExecutableTx, ExecutableTxWithHash}; +use katana_primitives::FieldElement; +use katana_provider::error::ProviderError; +use katana_provider::traits::state::StateProvider; +use parking_lot::Mutex; + +use super::{Error, InvalidTransactionError, ValidationOutcome, ValidationResult, Validator}; +use crate::tx::PoolTransaction; + +#[allow(missing_debug_implementations)] +#[derive(Clone)] +pub struct TxValidator { + inner: Arc>, + permit: Arc>, +} + +struct Inner { + // execution context + cfg_env: CfgEnv, + block_env: BlockEnv, + execution_flags: SimulationFlag, + state: Arc>, + + pool_nonces: HashMap, +} + +impl TxValidator { + pub fn new( + state: Box, + execution_flags: SimulationFlag, + cfg_env: CfgEnv, + block_env: BlockEnv, + permit: Arc>, + ) -> Self { + let inner = Arc::new(Mutex::new(Inner { + cfg_env, + block_env, + execution_flags, + state: Arc::new(state), + pool_nonces: HashMap::new(), + })); + Self { permit, inner } + } + + /// Reset the state of the validator with the given params.
This method is used to update the + /// validator's state with a new state and block env after a block is mined. + pub fn update(&self, new_state: Box, block_env: BlockEnv) { + let mut this = self.inner.lock(); + this.block_env = block_env; + this.state = Arc::new(new_state); + } + + // NOTE: + // If you check the get_nonce method of StatefulValidator in blockifier, under the hood it + // unwraps the Option to get the state of the TransactionExecutor struct. StatefulValidator + // guarantees that the state will always be present so it is safe to unwrap. However, this + // safety is not guaranteed by TransactionExecutor itself. + pub fn pool_nonce(&self, address: ContractAddress) -> Result, ProviderError> { + let this = self.inner.lock(); + match this.pool_nonces.get(&address) { + Some(nonce) => Ok(Some(*nonce)), + None => Ok(this.state.nonce(address)?), + } + } +} + +impl Inner { + // Prepare the stateful validator with the current state and block env to be used + // for transaction validation. + fn prepare(&self) -> StatefulValidator> { + let state = Box::new(self.state.clone()); + let cached_state = CachedState::new(StateProviderDb::new(state)); + let context = block_context_from_envs(&self.block_env, &self.cfg_env); + + StatefulValidator::create(cached_state, context, Default::default()) + } +} + +impl Validator for TxValidator { + type Transaction = ExecutableTxWithHash; + + fn validate(&self, tx: Self::Transaction) -> ValidationResult { + let _permit = self.permit.lock(); + let mut this = self.inner.lock(); + + let tx_nonce = tx.nonce(); + let address = tx.sender(); + + // Get the current nonce of the account from the pool or the state + let current_nonce = if let Some(nonce) = this.pool_nonces.get(&address) { + *nonce + } else { + this.state.nonce(address).unwrap().unwrap_or_default() + }; + + // Check if the transaction nonce is higher than the current account nonce, + // if so, don't run its validation logic and tag it as a dependent tx. + if tx_nonce > current_nonce { + return Ok(ValidationOutcome::Dependent { current_nonce, tx_nonce, tx }); + } + + // Check if validation of an invoke transaction should be skipped due to deploy_account not + // being processed yet. This feature is used to improve UX for users sending + // deploy_account + invoke at once. + let skip_validate = match tx.transaction { + // validation is never skipped for deploy_account and declare txs + ExecutableTx::DeployAccount(_) | ExecutableTx::Declare(_) => false, + // we skip validation for an invoke tx with nonce 1 when the account nonce in the state is still 0, i.e. its deploy_account tx hasn't been processed yet + _ => tx.nonce() == Nonce::ONE && current_nonce == Nonce::ZERO, + }; + + // prepare a stateful validator and validate the transaction + let result = validate( + this.prepare(), + tx, + this.execution_flags.skip_validate || skip_validate, + this.execution_flags.skip_fee_transfer, + ); + + match result { + res @ Ok(ValidationOutcome::Valid { ..
}) => { + // update the nonce of the account in the pool only for valid tx + let updated_nonce = current_nonce + FieldElement::ONE; + this.pool_nonces.insert(address, updated_nonce); + res + } + _ => result, + } + } +} + +// perform validation on the pool transaction using the provided stateful validator +fn validate( + mut validator: StatefulValidator>, + pool_tx: ExecutableTxWithHash, + skip_validate: bool, + skip_fee_check: bool, +) -> ValidationResult { + match to_executor_tx(pool_tx.clone()) { + Transaction::AccountTransaction(tx) => { + match validator.perform_validations(tx, skip_validate, skip_fee_check) { + Ok(()) => Ok(ValidationOutcome::Valid(pool_tx)), + Err(e) => match map_invalid_tx_err(e) { + Ok(error) => Ok(ValidationOutcome::Invalid { tx: pool_tx, error }), + Err(error) => Err(Error { hash: pool_tx.hash, error }), + }, + } + } + + // we skip validation for L1HandlerTransaction + Transaction::L1HandlerTransaction(_) => Ok(ValidationOutcome::Valid(pool_tx)), + } +} + +fn map_invalid_tx_err( + err: StatefulValidatorError, +) -> Result> { + match err { + StatefulValidatorError::TransactionExecutionError(err) => match err { + e @ TransactionExecutionError::ValidateTransactionError { + storage_address, + class_hash, + .. + } => { + let address = to_address(storage_address); + let class_hash = class_hash.0; + let error = e.to_string(); + Ok(InvalidTransactionError::ValidationFailure { address, class_hash, error }) + } + + _ => Err(Box::new(err)), + }, + + StatefulValidatorError::TransactionPreValidationError(err) => match err { + TransactionPreValidationError::InvalidNonce { + address, + account_nonce, + incoming_tx_nonce, + } => { + let address = to_address(address); + let current_nonce = account_nonce.0; + let tx_nonce = incoming_tx_nonce.0; + Ok(InvalidTransactionError::InvalidNonce { address, current_nonce, tx_nonce }) + } + + TransactionPreValidationError::TransactionFeeError(err) => match err { + TransactionFeeError::MaxFeeExceedsBalance { max_fee, balance } => { + let max_fee = max_fee.0; + let balance = balance.into(); + Ok(InvalidTransactionError::InsufficientFunds { max_fee, balance }) + } + + TransactionFeeError::MaxFeeTooLow { min_fee, max_fee } => { + let max_fee = max_fee.0; + let min_fee = min_fee.0; + Ok(InvalidTransactionError::InsufficientMaxFee { max_fee, min_fee }) + } + + _ => Err(Box::new(err)), + }, + + _ => Err(Box::new(err)), + }, + + _ => Err(Box::new(err)), + } +} diff --git a/crates/katana/primitives/Cargo.toml b/crates/katana/primitives/Cargo.toml index 9e8c334b66..5fa597bb87 100644 --- a/crates/katana/primitives/Cargo.toml +++ b/crates/katana/primitives/Cargo.toml @@ -25,6 +25,7 @@ flate2 = { workspace = true, optional = true } katana-cairo.workspace = true [dev-dependencies] +assert_matches.workspace = true num-traits.workspace = true similar-asserts.workspace = true diff --git a/crates/katana/primitives/src/event.rs b/crates/katana/primitives/src/event.rs index 4fbbc470e9..d0b7e4c0db 100644 --- a/crates/katana/primitives/src/event.rs +++ b/crates/katana/primitives/src/event.rs @@ -11,10 +11,21 @@ pub struct OrderedEvent { pub data: Vec, } +/// Represents a continuation token for implementing paging in event queries. +/// +/// This struct stores the necessary information to resume fetching events +/// from a specific point relative to the given filter passed as parameter to the +/// `starknet_getEvents` API, [EventFilter][starknet::core::types::EventFilter]. 
+/// + /// The JSON-RPC specification does not specify the format of the continuation token, + /// so how the node should handle it is implementation-specific. #[derive(PartialEq, Eq, Debug, Default)] pub struct ContinuationToken { + /// The block number to continue from. pub block_n: u64, + /// The transaction number within the block to continue from. pub txn_n: u64, + /// The event number within the transaction to continue from. pub event_n: u64, } @@ -27,7 +38,7 @@ pub enum ContinuationTokenError { } impl ContinuationToken { - pub fn parse(token: String) -> Result { + pub fn parse(token: &str) -> Result<Self, ContinuationTokenError> { let arr: Vec<&str> = token.split(',').collect(); if arr.len() != 3 { return Err(ContinuationTokenError::InvalidToken); } @@ -66,7 +77,7 @@ mod test { #[test] fn parse_works() { fn helper(token: &str) -> ContinuationToken { - ContinuationToken::parse(token.to_owned()).unwrap() + ContinuationToken::parse(token).unwrap() } assert_eq!(helper("0,0,0"), ContinuationToken { block_n: 0, txn_n: 0, event_n: 0 }); assert_eq!(helper("1e,ff,4"), ContinuationToken { block_n: 30, txn_n: 255, event_n: 4 }); @@ -75,15 +86,15 @@ #[test] fn parse_should_fail() { assert_eq!( - ContinuationToken::parse("100".to_owned()).unwrap_err(), + ContinuationToken::parse("100").unwrap_err(), ContinuationTokenError::InvalidToken ); assert_eq!( - ContinuationToken::parse("0,".to_owned()).unwrap_err(), + ContinuationToken::parse("0,").unwrap_err(), ContinuationTokenError::InvalidToken ); assert_eq!( - ContinuationToken::parse("0,0".to_owned()).unwrap_err(), + ContinuationToken::parse("0,0").unwrap_err(), ContinuationTokenError::InvalidToken ); } @@ -91,15 +102,15 @@ #[test] fn parse_u64_should_fail() { matches!( - ContinuationToken::parse("2y,100,4".to_owned()).unwrap_err(), + ContinuationToken::parse("2y,100,4").unwrap_err(), ContinuationTokenError::ParseFailed(_) ); matches!( - ContinuationToken::parse("30,255g,4".to_owned()).unwrap_err(), + ContinuationToken::parse("30,255g,4").unwrap_err(), ContinuationTokenError::ParseFailed(_) ); matches!( - ContinuationToken::parse("244,1,fv".to_owned()).unwrap_err(), + ContinuationToken::parse("244,1,fv").unwrap_err(), ContinuationTokenError::ParseFailed(_) ); } diff --git a/crates/katana/rpc/rpc-api/src/dev.rs b/crates/katana/rpc/rpc-api/src/dev.rs index 5d0202981e..3de14ac807 100644 --- a/crates/katana/rpc/rpc-api/src/dev.rs +++ b/crates/katana/rpc/rpc-api/src/dev.rs @@ -1,6 +1,7 @@ use jsonrpsee::core::RpcResult; use jsonrpsee::proc_macros::rpc; use katana_primitives::FieldElement; +use katana_rpc_types::account::Account; #[cfg_attr(not(feature = "client"), rpc(server, namespace = "dev"))] #[cfg_attr(feature = "client", rpc(client, server, namespace = "dev"))] @@ -24,4 +25,7 @@ pub trait DevApi { key: FieldElement, value: FieldElement, ) -> RpcResult<()>; + + #[method(name = "predeployedAccounts")] + async fn predeployed_accounts(&self) -> RpcResult<Vec<Account>>; } diff --git a/crates/katana/rpc/rpc-api/src/katana.rs b/crates/katana/rpc/rpc-api/src/katana.rs deleted file mode 100644 index bff9cb0e07..0000000000 --- a/crates/katana/rpc/rpc-api/src/katana.rs +++ /dev/null @@ -1,10 +0,0 @@ -use jsonrpsee::core::RpcResult; -use jsonrpsee::proc_macros::rpc; -use katana_rpc_types::account::Account; - -#[cfg_attr(not(feature = "client"), rpc(server, namespace = "katana"))] -#[cfg_attr(feature = "client", rpc(client, server, namespace = "katana"))] -pub trait KatanaApi { - #[method(name = "predeployedAccounts")] - async fn predeployed_accounts(&self) -> RpcResult>; -} diff
--git a/crates/katana/rpc/rpc-api/src/lib.rs b/crates/katana/rpc/rpc-api/src/lib.rs index 198766158b..5f138a3d29 100644 --- a/crates/katana/rpc/rpc-api/src/lib.rs +++ b/crates/katana/rpc/rpc-api/src/lib.rs @@ -1,5 +1,4 @@ pub mod dev; -pub mod katana; pub mod saya; pub mod starknet; pub mod torii; @@ -8,7 +7,6 @@ pub mod torii; #[derive(Debug, Copy, Clone)] pub enum ApiKind { Starknet, - Katana, Torii, Dev, Saya, diff --git a/crates/katana/rpc/rpc-types/Cargo.toml b/crates/katana/rpc/rpc-types/Cargo.toml index 5a89d5c0b0..52cb37c664 100644 --- a/crates/katana/rpc/rpc-types/Cargo.toml +++ b/crates/katana/rpc/rpc-types/Cargo.toml @@ -9,6 +9,7 @@ version.workspace = true [dependencies] katana-cairo.workspace = true katana-core.workspace = true +katana-pool.workspace = true katana-executor.workspace = true katana-primitives.workspace = true katana-provider.workspace = true diff --git a/crates/katana/rpc/rpc-types/src/error/starknet.rs b/crates/katana/rpc/rpc-types/src/error/starknet.rs index 9a5d32ed68..d5aecfb5dd 100644 --- a/crates/katana/rpc/rpc-types/src/error/starknet.rs +++ b/crates/katana/rpc/rpc-types/src/error/starknet.rs @@ -1,9 +1,12 @@ use jsonrpsee::core::Error; use jsonrpsee::types::error::CallError; use jsonrpsee::types::ErrorObject; +use katana_pool::validation::InvalidTransactionError; +use katana_pool::PoolError; use katana_primitives::event::ContinuationTokenError; use katana_provider::error::ProviderError; use serde::Serialize; +use serde_json::Value; /// Possible list of errors that can be returned by the Starknet API according to the spec: . #[derive(Debug, thiserror::Error, Clone, Serialize)] @@ -45,14 +48,17 @@ pub enum StarknetApiError { InvalidContractClass, #[error("Class already declared")] ClassAlreadyDeclared, + // TEMP: adding a reason field temporarily to match what's being returned by the gateway. the + // gateway includes the information regarding the expected and actual nonce in the error + // message. but this doesn't break compatibility with the spec. #[error("Invalid transaction nonce")] - InvalidTransactionNonce, + InvalidTransactionNonce { reason: String }, #[error("Max fee is smaller than the minimal transaction cost (validation plus fee transfer)")] InsufficientMaxFee, #[error("Account balance is smaller than the transaction's max_fee")] InsufficientAccountBalance, #[error("Account validation failed")] - ValidationFailure, + ValidationFailure { reason: String }, #[error("Compilation failed")] CompilationFailed, #[error("Contract class size is too large")] @@ -97,10 +103,10 @@ impl StarknetApiError { StarknetApiError::TransactionExecutionError { .. } => 41, StarknetApiError::InvalidContractClass => 50, StarknetApiError::ClassAlreadyDeclared => 51, - StarknetApiError::InvalidTransactionNonce => 52, + StarknetApiError::InvalidTransactionNonce { .. } => 52, StarknetApiError::InsufficientMaxFee => 53, StarknetApiError::InsufficientAccountBalance => 54, - StarknetApiError::ValidationFailure => 55, + StarknetApiError::ValidationFailure { .. } => 55, StarknetApiError::CompilationFailed => 56, StarknetApiError::ContractClassSizeIsTooLarge => 57, StarknetApiError::NonAccount => 58, @@ -122,6 +128,12 @@ impl StarknetApiError { StarknetApiError::ContractError { .. } | StarknetApiError::UnexpectedError { .. } | StarknetApiError::TransactionExecutionError { .. 
} => Some(serde_json::json!(self)), + + StarknetApiError::InvalidTransactionNonce { reason } + | StarknetApiError::ValidationFailure { reason } => { + Some(Value::String(reason.to_string())) + } + _ => None, } } @@ -155,6 +167,33 @@ impl From for StarknetApiError { } } +impl From for StarknetApiError { + fn from(error: PoolError) -> Self { + match error { + PoolError::InvalidTransaction(err) => err.into(), + PoolError::Internal(err) => { + StarknetApiError::UnexpectedError { reason: err.to_string() } + } + } + } +} + +impl From> for StarknetApiError { + fn from(error: Box) -> Self { + match error.as_ref() { + InvalidTransactionError::InsufficientFunds { .. } => Self::InsufficientAccountBalance, + InvalidTransactionError::InsufficientMaxFee { .. } => Self::InsufficientMaxFee, + InvalidTransactionError::NonAccount { .. } => Self::NonAccount, + InvalidTransactionError::InvalidNonce { .. } => { + Self::InvalidTransactionNonce { reason: error.to_string() } + } + InvalidTransactionError::ValidationFailure { error, .. } => { + Self::ValidationFailure { reason: error.to_string() } + } + } + } +} + #[cfg(test)] mod tests { use rstest::rstest; @@ -171,13 +210,11 @@ mod tests { #[case(StarknetApiError::CompilationFailed, 56, "Compilation failed")] #[case(StarknetApiError::ClassHashNotFound, 28, "Class hash not found")] #[case(StarknetApiError::TxnHashNotFound, 29, "Transaction hash not found")] - #[case(StarknetApiError::ValidationFailure, 55, "Account validation failed")] #[case(StarknetApiError::ClassAlreadyDeclared, 51, "Class already declared")] #[case(StarknetApiError::InvalidContractClass, 50, "Invalid contract class")] #[case(StarknetApiError::PageSizeTooBig, 31, "Requested page size is too big")] #[case(StarknetApiError::FailedToReceiveTxn, 1, "Failed to write transaction")] #[case(StarknetApiError::InvalidMessageSelector, 21, "Invalid message selector")] - #[case(StarknetApiError::InvalidTransactionNonce, 52, "Invalid transaction nonce")] #[case(StarknetApiError::NonAccount, 58, "Sender address in not an account contract")] #[case(StarknetApiError::InvalidTxnIndex, 27, "Invalid transaction index in a block")] #[case(StarknetApiError::ProofLimitExceeded, 10000, "Too many storage keys requested")] @@ -240,6 +277,22 @@ mod tests { "reason": "Unexpected error reason".to_string() }), )] + #[case( + StarknetApiError::InvalidTransactionNonce { + reason: "Wrong nonce".to_string() + }, + 52, + "Invalid transaction nonce", + Value::String("Wrong nonce".to_string()) + )] + #[case( + StarknetApiError::ValidationFailure { + reason: "Invalid signature".to_string() + }, + 55, + "Account validation failed", + Value::String("Invalid signature".to_string()) + )] fn test_starknet_api_error_to_error_conversion_data_some( #[case] starknet_error: StarknetApiError, #[case] expected_code: i32, diff --git a/crates/katana/rpc/rpc/Cargo.toml b/crates/katana/rpc/rpc/Cargo.toml index 919d2b8e4c..b836ebb139 100644 --- a/crates/katana/rpc/rpc/Cargo.toml +++ b/crates/katana/rpc/rpc/Cargo.toml @@ -22,6 +22,7 @@ katana-rpc-types-builder.workspace = true katana-tasks.workspace = true metrics.workspace = true starknet.workspace = true +thiserror.workspace = true tracing.workspace = true [dev-dependencies] @@ -32,11 +33,13 @@ cainome.workspace = true dojo-test-utils.workspace = true dojo-utils.workspace = true dojo-world.workspace = true +indexmap.workspace = true jsonrpsee = { workspace = true, features = [ "client" ] } katana-cairo.workspace = true katana-node = { workspace = true, features = [ "messaging" ] } katana-rpc-api 
= { workspace = true, features = [ "client" ] } katana-runner.workspace = true +rstest.workspace = true num-traits.workspace = true rand.workspace = true serde.workspace = true diff --git a/crates/katana/rpc/rpc/src/dev.rs b/crates/katana/rpc/rpc/src/dev.rs index ba6d16940c..afe4e80023 100644 --- a/crates/katana/rpc/rpc/src/dev.rs +++ b/crates/katana/rpc/rpc/src/dev.rs @@ -6,6 +6,7 @@ use katana_core::service::block_producer::{BlockProducer, BlockProducerMode, Pen use katana_executor::ExecutorFactory; use katana_primitives::FieldElement; use katana_rpc_api::dev::DevApiServer; +use katana_rpc_types::account::Account; use katana_rpc_types::error::dev::DevApiError; #[allow(missing_debug_implementations)] @@ -21,7 +22,7 @@ impl DevApi { /// Returns the pending state if the sequencer is running in _interval_ mode. Otherwise `None`. fn pending_executor(&self) -> Option { - match &*self.block_producer.inner.read() { + match &*self.block_producer.producer.read() { BlockProducerMode::Instant(_) => None, BlockProducerMode::Interval(producer) => Some(producer.executor()), } @@ -90,4 +91,9 @@ impl DevApiServer for DevApi { // .map_err(|_| Error::from(KatanaApiError::FailedToUpdateStorage)) Ok(()) } + + #[allow(deprecated)] + async fn predeployed_accounts(&self) -> Result, Error> { + Ok(self.backend.config.genesis.accounts().map(|e| Account::new(*e.0, e.1)).collect()) + } } diff --git a/crates/katana/rpc/rpc/src/katana.rs b/crates/katana/rpc/rpc/src/katana.rs deleted file mode 100644 index e263a8ded5..0000000000 --- a/crates/katana/rpc/rpc/src/katana.rs +++ /dev/null @@ -1,26 +0,0 @@ -use std::sync::Arc; - -use jsonrpsee::core::{async_trait, Error}; -use katana_core::backend::Backend; -use katana_executor::ExecutorFactory; -use katana_rpc_api::katana::KatanaApiServer; -use katana_rpc_types::account::Account; - -#[allow(missing_debug_implementations)] -pub struct KatanaApi { - backend: Arc>, -} - -impl KatanaApi { - pub fn new(backend: Arc>) -> Self { - Self { backend } - } -} - -#[async_trait] -impl KatanaApiServer for KatanaApi { - #[allow(deprecated)] - async fn predeployed_accounts(&self) -> Result, Error> { - Ok(self.backend.config.genesis.accounts().map(|e| Account::new(*e.0, e.1)).collect()) - } -} diff --git a/crates/katana/rpc/rpc/src/lib.rs b/crates/katana/rpc/rpc/src/lib.rs index a2c49ad24c..6abe5d449e 100644 --- a/crates/katana/rpc/rpc/src/lib.rs +++ b/crates/katana/rpc/rpc/src/lib.rs @@ -5,8 +5,9 @@ pub mod config; pub mod dev; -pub mod katana; pub mod metrics; pub mod saya; pub mod starknet; pub mod torii; + +mod utils; diff --git a/crates/katana/rpc/rpc/src/saya.rs b/crates/katana/rpc/rpc/src/saya.rs index 9c29b92382..c93d4188ae 100644 --- a/crates/katana/rpc/rpc/src/saya.rs +++ b/crates/katana/rpc/rpc/src/saya.rs @@ -44,7 +44,7 @@ impl SayaApi { /// Returns the pending state if the sequencer is running in _interval_ mode. Otherwise `None`. 
fn pending_executor(&self) -> Option { - match &*self.block_producer.inner.read() { + match &*self.block_producer.producer.read() { BlockProducerMode::Instant(_) => None, BlockProducerMode::Interval(producer) => Some(producer.executor()), } diff --git a/crates/katana/rpc/rpc/src/starknet/mod.rs b/crates/katana/rpc/rpc/src/starknet/mod.rs index a78c31415f..00f68a4d6c 100644 --- a/crates/katana/rpc/rpc/src/starknet/mod.rs +++ b/crates/katana/rpc/rpc/src/starknet/mod.rs @@ -4,15 +4,12 @@ mod read; mod trace; mod write; -use std::cmp::Ordering; -use std::iter::Skip; -use std::slice::Iter; use std::sync::Arc; -use anyhow::Result; use katana_core::backend::Backend; use katana_core::service::block_producer::{BlockProducer, BlockProducerMode, PendingExecutor}; use katana_executor::{ExecutionResult, ExecutorFactory}; +use katana_pool::validation::stateful::TxValidator; use katana_pool::TxPool; use katana_primitives::block::{ BlockHash, BlockHashOrNumber, BlockIdOrTag, BlockNumber, BlockTag, FinalityStatus, @@ -22,25 +19,25 @@ use katana_primitives::contract::{ContractAddress, Nonce, StorageKey, StorageVal use katana_primitives::conversion::rpc::legacy_inner_to_rpc_class; use katana_primitives::env::BlockEnv; use katana_primitives::event::ContinuationToken; -use katana_primitives::receipt::Event; use katana_primitives::transaction::{ExecutableTxWithHash, TxHash, TxWithHash}; use katana_primitives::FieldElement; -use katana_provider::traits::block::{ - BlockHashProvider, BlockIdReader, BlockNumberProvider, BlockProvider, -}; +use katana_provider::traits::block::{BlockHashProvider, BlockIdReader, BlockNumberProvider}; use katana_provider::traits::contract::ContractClassProvider; use katana_provider::traits::env::BlockEnvProvider; use katana_provider::traits::state::{StateFactoryProvider, StateProvider}; use katana_provider::traits::transaction::{ - ReceiptProvider, TransactionProvider, TransactionStatusProvider, TransactionsProviderExt, + ReceiptProvider, TransactionProvider, TransactionStatusProvider, }; use katana_rpc_types::error::starknet::StarknetApiError; use katana_rpc_types::FeeEstimate; use katana_tasks::{BlockingTaskPool, TokioTaskSpawner}; use starknet::core::types::{ - ContractClass, EmittedEvent, EventsPage, TransactionExecutionStatus, TransactionStatus, + ContractClass, EventsPage, TransactionExecutionStatus, TransactionStatus, }; +use crate::utils; +use crate::utils::events::{Cursor, EventBlockId}; + #[allow(missing_debug_implementations)] pub struct StarknetApi { inner: Arc>, @@ -53,6 +50,7 @@ impl Clone for StarknetApi { } struct Inner { + validator: TxValidator, pool: TxPool, backend: Arc>, block_producer: Arc>, @@ -64,11 +62,12 @@ impl StarknetApi { backend: Arc>, pool: TxPool, block_producer: Arc>, + validator: TxValidator, ) -> Self { let blocking_task_pool = BlockingTaskPool::new().expect("failed to create blocking task pool"); - let inner = Inner { pool, backend, block_producer, blocking_task_pool }; + let inner = Inner { pool, backend, block_producer, blocking_task_pool, validator }; Self { inner: Arc::new(inner) } } @@ -131,7 +130,7 @@ impl StarknetApi { /// Returns the pending state if the sequencer is running in _interval_ mode. Otherwise `None`. 
fn pending_executor(&self) -> Option { - match &*self.inner.block_producer.inner.read() { + match &*self.inner.block_producer.producer.read() { BlockProducerMode::Instant(_) => None, BlockProducerMode::Interval(producer) => Some(producer.executor()), } @@ -291,8 +290,18 @@ contract_address: ContractAddress, ) -> Result<Nonce, StarknetApiError> { self.on_io_blocking_task(move |this| { - let state = this.state(&block_id)?; - let nonce = state.nonce(contract_address)?.ok_or(StarknetApiError::ContractNotFound)?; + // read from the pool state if the block is pending + // + // TODO: this is a temporary solution, we should have a better way to handle this. + // perhaps a pending/pool state provider that implements all the state provider traits. + let result = if let BlockIdOrTag::Tag(BlockTag::Pending) = block_id { + this.inner.validator.pool_nonce(contract_address)? + } else { + let state = this.state(&block_id)?; + state.nonce(contract_address)? + }; + + let nonce = result.ok_or(StarknetApiError::ContractNotFound)?; Ok(nonce) }) .await @@ -321,6 +330,7 @@ .await } + // TODO: should document more and possibly find a simpler solution(?) fn events( &self, from_block: BlockIdOrTag, to_block: BlockIdOrTag, address: Option<ContractAddress>, keys: Option<Vec<Vec<FieldElement>>>, continuation_token: Option<String>, chunk_size: u64, ) -> Result<EventsPage, StarknetApiError> { let provider = self.inner.backend.blockchain.provider(); - let mut current_block = 0; - - let mut from = - provider.convert_block_id(from_block)?.ok_or(StarknetApiError::BlockNotFound)?; - let to = provider.convert_block_id(to_block)?.ok_or(StarknetApiError::BlockNotFound)?; - let mut continuation_token = match continuation_token { - Some(token) => ContinuationToken::parse(token)?, - None => ContinuationToken::default(), + let from = if BlockIdOrTag::Tag(BlockTag::Pending) == from_block { + EventBlockId::Pending + } else { + let num = provider.convert_block_id(from_block)?; + EventBlockId::Num(num.ok_or(StarknetApiError::BlockNotFound)?) + }; - // skip blocks that have been already read - from += continuation_token.block_n; - - let mut filtered_events = Vec::with_capacity(chunk_size as usize); - - for i in from..=to { - let block_hash = - provider.block_hash_by_num(i)?.ok_or(StarknetApiError::BlockNotFound)?; - - let receipts = - provider.receipts_by_block(i.into())?.ok_or(StarknetApiError::BlockNotFound)?; - - let tx_range = - provider.block_body_indices(i.into())?.ok_or(StarknetApiError::BlockNotFound)?; + let to = if BlockIdOrTag::Tag(BlockTag::Pending) == to_block { + EventBlockId::Pending + } else { + let num = provider.convert_block_id(to_block)?; + EventBlockId::Num(num.ok_or(StarknetApiError::BlockNotFound)?) + };
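+ + // summary of the arms in the `match (from, to)` further below: (Num, Num) reads only + // historical blocks, (Num, Pending) reads historical blocks then the pending block, + // (Pending, Pending) reads only the pending block, and (Pending, Num) is an invalid range.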
- let tx_hashes = provider.transaction_hashes_in_range(tx_range.into())?; + let token: Option<Cursor> = match continuation_token { + Some(token) => Some(ContinuationToken::parse(&token)?.into()), + None => None, + }; - let txn_n = receipts.len(); - if (txn_n as u64) < continuation_token.txn_n { - return Err(StarknetApiError::InvalidContinuationToken); + // pre-allocated buffer to fill with events, to avoid reallocations + let mut buffer = Vec::with_capacity(chunk_size as usize); + let filter = utils::events::Filter { address, keys }; + + match (from, to) { + (EventBlockId::Num(from), EventBlockId::Num(to)) => { + let cursor = utils::events::fetch_events_at_blocks( + provider, + from..=to, + &filter, + chunk_size, + token, + &mut buffer, + )?; + + Ok(EventsPage { + events: buffer, + continuation_token: cursor.map(|c| c.into_rpc_cursor().to_string()), + }) } - for (tx_hash, events) in tx_hashes - .into_iter() - .zip(receipts.iter().map(|r| r.events())) - .skip(continuation_token.txn_n as usize) - { - let txn_events_len: usize = events.len(); - - // check if continuation_token.event_n is correct - match (txn_events_len as u64).cmp(&continuation_token.event_n) { - Ordering::Greater => (), - Ordering::Less => { - return Err(StarknetApiError::InvalidContinuationToken); - } - Ordering::Equal => { - continuation_token.txn_n += 1; - continuation_token.event_n = 0; - continue; - } + (EventBlockId::Num(from), EventBlockId::Pending) => { + let latest = provider.latest_number()?; + let int_cursor = utils::events::fetch_events_at_blocks( + provider, + from..=latest, + &filter, + chunk_size, + token.clone(), + &mut buffer, + )?; + + // if the internal cursor is Some, the buffer is full and we haven't + // reached the latest block. + if let Some(c) = int_cursor { + return Ok(EventsPage { + events: buffer, + continuation_token: Some(c.into_rpc_cursor().to_string()), + }); } - // skip events - let txn_events = events.iter().skip(continuation_token.event_n as usize); - - let (new_filtered_events, continuation_index) = filter_events_by_params( - txn_events, - address, - keys.clone(), - Some((chunk_size as usize) - filtered_events.len()), - ); - - filtered_events.extend(new_filtered_events.iter().map(|e| EmittedEvent { - from_address: e.from_address.into(), - keys: e.keys.clone(), - data: e.data.clone(), - block_hash: Some(block_hash), - block_number: Some(i), - transaction_hash: tx_hash, - })); - - if filtered_events.len() >= chunk_size as usize { - let token = if current_block < to - || continuation_token.txn_n < txn_n as u64 - 1 - || continuation_index < txn_events_len - { - continuation_token.event_n = continuation_index as u64; - Some(continuation_token.to_string()) - } else { - None - }; - return Ok(EventsPage { events: filtered_events, continuation_token: token }); + if let Some(executor) = self.pending_executor() { + let cursor = utils::events::fetch_pending_events( + &executor, + &filter, + chunk_size, + token, + &mut buffer, + )?; + + Ok(EventsPage { + events: buffer, + continuation_token: Some(cursor.into_rpc_cursor().to_string()), + }) + } else { + let cursor = Cursor::new_block(latest + 1); + Ok(EventsPage { + events: buffer, + continuation_token: Some(cursor.into_rpc_cursor().to_string()), + }) } + } + + (EventBlockId::Pending, EventBlockId::Pending) => { + if let Some(executor) = self.pending_executor() { + let cursor = utils::events::fetch_pending_events( + &executor, + &filter, + chunk_size, + token, + &mut buffer, + )?; + + Ok(EventsPage { + events: buffer, + continuation_token: 
Some(cursor.into_rpc_cursor().to_string()), + }) + } else { + let latest = provider.latest_number()?; + let cursor = Cursor::new_block(latest); - continuation_token.txn_n += 1; - continuation_token.event_n = 0; + Ok(EventsPage { + events: buffer, + continuation_token: Some(cursor.into_rpc_cursor().to_string()), + }) + } } - current_block += 1; - continuation_token.block_n += 1; - continuation_token.txn_n = 0; + (EventBlockId::Pending, EventBlockId::Num(_)) => { + Err(StarknetApiError::UnexpectedError { + reason: "Invalid block range; `from` block must be lower than `to`".to_string(), + }) + } } - - Ok(EventsPage { events: filtered_events, continuation_token: None }) } async fn transaction_status( @@ -486,53 +515,3 @@ impl StarknetApi { .await } } - -fn filter_events_by_params( - events: Skip>, - address: Option, - filter_keys: Option>>, - max_results: Option, -) -> (Vec, usize) { - let mut filtered_events = vec![]; - let mut index = 0; - - // Iterate on block events. - for event in events { - index += 1; - if !address.map_or(true, |addr| addr == event.from_address) { - continue; - } - - let match_keys = match filter_keys { - // From starknet-api spec: - // Per key (by position), designate the possible values to be matched for events to be - // returned. Empty array designates 'any' value" - Some(ref filter_keys) => filter_keys.iter().enumerate().all(|(i, keys)| { - // Lets say we want to filter events which are either named `Event1` or `Event2` and - // custom key `0x1` or `0x2` Filter: [[sn_keccack("Event1"), - // sn_keccack("Event2")], ["0x1", "0x2"]] - - // This checks: number of keys in event >= number of keys in filter (we check > i - // and not >= i because i is zero indexed) because otherwise this - // event doesn't contain all the keys we requested - event.keys.len() > i && - // This checks: Empty array desginates 'any' value - (keys.is_empty() - || - // This checks: If this events i'th value is one of the requested value in filter_keys[i] - keys.contains(&event.keys[i])) - }), - None => true, - }; - - if match_keys { - filtered_events.push(event.clone()); - if let Some(max_results) = max_results { - if filtered_events.len() >= max_results { - break; - } - } - } - } - (filtered_events, index) -} diff --git a/crates/katana/rpc/rpc/src/starknet/read.rs b/crates/katana/rpc/rpc/src/starknet/read.rs index c8494854f1..d1bd8d9e33 100644 --- a/crates/katana/rpc/rpc/src/starknet/read.rs +++ b/crates/katana/rpc/rpc/src/starknet/read.rs @@ -356,20 +356,27 @@ impl StarknetApiServer for StarknetApi { async fn get_events(&self, filter: EventFilterWithPage) -> RpcResult { self.on_io_blocking_task(move |this| { - let from_block = filter.event_filter.from_block.unwrap_or(BlockIdOrTag::Number(0)); - let to_block = - filter.event_filter.to_block.unwrap_or(BlockIdOrTag::Tag(BlockTag::Latest)); + let EventFilterWithPage { event_filter, result_page_request } = filter; - let keys = filter.event_filter.keys; - let keys = keys.filter(|keys| !(keys.len() == 1 && keys.is_empty())); + let from = match event_filter.from_block { + Some(id) => id, + None => BlockIdOrTag::Number(0), + }; + + let to = match event_filter.to_block { + Some(id) => id, + None => BlockIdOrTag::Tag(BlockTag::Pending), + }; + + let keys = event_filter.keys.filter(|keys| !(keys.len() == 1 && keys.is_empty())); let events = this.events( - from_block, - to_block, - filter.event_filter.address.map(|f| f.into()), + from, + to, + event_filter.address.map(|f| f.into()), keys, - filter.result_page_request.continuation_token, - 
filter.result_page_request.chunk_size, + result_page_request.continuation_token, + result_page_request.chunk_size, )?; Ok(events) @@ -467,6 +474,12 @@ impl StarknetApiServer for StarknetApi { let should_validate = !(skip_validate || this.inner.backend.config.disable_validate); let flags = katana_executor::SimulationFlag { skip_validate: !should_validate, + // We don't care about the nonce when estimating the fee as the nonce value + // doesn't affect transaction execution. + // + // This doesn't completely disregard the nonce as nonce < account nonce will + // return an error. It only 'relaxes' the check for nonce >= account nonce. + skip_nonce_check: true, ..Default::default() }; diff --git a/crates/katana/rpc/rpc/src/starknet/write.rs b/crates/katana/rpc/rpc/src/starknet/write.rs index 7ecf445db0..158b0212a8 100644 --- a/crates/katana/rpc/rpc/src/starknet/write.rs +++ b/crates/katana/rpc/rpc/src/starknet/write.rs @@ -23,10 +23,10 @@ impl StarknetApi { let tx = tx.into_tx_with_chain_id(this.inner.backend.chain_id); let tx = ExecutableTxWithHash::new(ExecutableTx::Invoke(tx)); - let tx_hash = tx.hash; + let hash = + this.inner.pool.add_transaction(tx).inspect_err(|e| println!("Error: {:?}", e))?; - this.inner.pool.add_transaction(tx); - Ok(tx_hash.into()) + Ok(hash.into()) }) .await } @@ -46,10 +46,9 @@ impl StarknetApi { let class_hash = tx.class_hash(); let tx = ExecutableTxWithHash::new(ExecutableTx::Declare(tx)); - let tx_hash = tx.hash; + let hash = this.inner.pool.add_transaction(tx)?; - this.inner.pool.add_transaction(tx); - Ok((tx_hash, class_hash).into()) + Ok((hash, class_hash).into()) }) .await } @@ -67,10 +66,9 @@ impl StarknetApi { let contract_address = tx.contract_address(); let tx = ExecutableTxWithHash::new(ExecutableTx::DeployAccount(tx)); - let tx_hash = tx.hash; + let hash = this.inner.pool.add_transaction(tx)?; - this.inner.pool.add_transaction(tx); - Ok((tx_hash, contract_address).into()) + Ok((hash, contract_address).into()) }) .await } diff --git a/crates/katana/rpc/rpc/src/torii.rs b/crates/katana/rpc/rpc/src/torii.rs index 63e09f8b7f..e8db499384 100644 --- a/crates/katana/rpc/rpc/src/torii.rs +++ b/crates/katana/rpc/rpc/src/torii.rs @@ -55,7 +55,7 @@ impl ToriiApi { /// Returns the pending state if the sequencer is running in _interval_ mode. Otherwise `None`. 
fn pending_executor(&self) -> Option { - match &*self.block_producer.inner.read() { + match &*self.block_producer.producer.read() { BlockProducerMode::Instant(_) => None, BlockProducerMode::Interval(producer) => Some(producer.executor()), } @@ -152,7 +152,7 @@ impl ToriiApiServer for ToriiApi { // If there are no transactions after the index in the pending block if pending_transactions.is_empty() { // Wait for a new transaction to be executed - let inner = this.block_producer.inner.read(); + let inner = this.block_producer.producer.read(); let block_producer = match &*inner { BlockProducerMode::Interval(block_producer) => block_producer, _ => panic!( @@ -204,7 +204,7 @@ if transactions.is_empty() { // Wait for a new transaction to be executed - let inner = this.block_producer.inner.read(); + let inner = this.block_producer.producer.read(); let block_producer = match &*inner { BlockProducerMode::Instant(block_producer) => block_producer, _ => { diff --git a/crates/katana/rpc/rpc/src/utils/events.rs b/crates/katana/rpc/rpc/src/utils/events.rs new file mode 100644 index 0000000000..a92f3ec1bf --- /dev/null +++ b/crates/katana/rpc/rpc/src/utils/events.rs @@ -0,0 +1,347 @@ +use std::cmp::Ordering; +use std::ops::RangeInclusive; + +use anyhow::Context; +use katana_core::service::block_producer::PendingExecutor; +use katana_primitives::block::{BlockHash, BlockNumber}; +use katana_primitives::contract::ContractAddress; +use katana_primitives::event::ContinuationToken; +use katana_primitives::receipt::Event; +use katana_primitives::transaction::TxHash; +use katana_primitives::FieldElement; +use katana_provider::error::ProviderError; +use katana_provider::traits::block::BlockProvider; +use katana_provider::traits::transaction::ReceiptProvider; +use katana_rpc_types::error::starknet::StarknetApiError; +use starknet::core::types::EmittedEvent; + +pub type EventQueryResult<T> = Result<T, Error>; + +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("Invalid cursor")] + InvalidCursor, + #[error(transparent)] + Provider(#[from] ProviderError), + #[error(transparent)] + Other(#[from] anyhow::Error), +} + +#[derive(Debug)] +pub enum EventBlockId { + Pending, + Num(BlockNumber), +} + +/// An object to specify how events should be filtered. +#[derive(Debug, Default, Clone)] +pub struct Filter { + /// The contract address to filter by. + /// + /// If `None`, all events are considered. If `Some`, only events emitted by the specified + /// contract are considered. + pub address: Option<ContractAddress>, + /// The keys to filter by. + pub keys: Option<Vec<Vec<FieldElement>>>, +} + +/// Internal cursor. +#[derive(Debug, Clone, PartialEq)] +pub struct Cursor { + block: u64, + txn: PartialCursor, +} + +impl Cursor { + pub fn new(block: u64, txn: usize, event: usize) -> Self { + Self { block, txn: PartialCursor { idx: txn, event } } + } + + pub fn new_block(block: u64) -> Self { + Self { block, txn: PartialCursor::default() } + } + + pub fn into_rpc_cursor(self) -> ContinuationToken { + ContinuationToken { + block_n: self.block, + txn_n: self.txn.idx as u64, + event_n: self.txn.event as u64, + } + } +} + +/// A partial cursor that points to a specific event within a transaction. +#[derive(Debug, Clone, PartialEq, Default)] +struct PartialCursor { + /// The transaction index within a block. + idx: usize, + /// The event index within a transaction. + event: usize, +} + +impl PartialCursor { + fn into_full(self, block: BlockNumber) -> Cursor { + Cursor { block, txn: self } + } +}
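+ +// For example, `Cursor::new(30, 255, 4).into_rpc_cursor()` yields `ContinuationToken { block_n: 30, txn_n: 255, event_n: 4 }` (the RPC token "1e,ff,4"), and the `From<ContinuationToken>` impl at the bottom of this file reverses the mapping.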
+ +pub fn fetch_pending_events( + pending_executor: &PendingExecutor, + filter: &Filter, + chunk_size: u64, + cursor: Option<Cursor>, + buffer: &mut Vec<EmittedEvent>, +) -> EventQueryResult<Cursor> { + let pending_block = pending_executor.read(); + + let block_env = pending_block.block_env(); + let txs = pending_block.transactions(); + let cursor = cursor.unwrap_or(Cursor::new_block(block_env.number)); + + // process individual transactions in the block. + // the iterator will start with txn index == cursor.txn.idx + for (tx_idx, (tx_hash, events)) in txs + .iter() + .filter_map(|(tx, res)| res.receipt().map(|receipt| (tx.hash, receipt.events()))) + .enumerate() + .skip(cursor.txn.idx) + { + if tx_idx == cursor.txn.idx { + match events.len().cmp(&cursor.txn.event) { + Ordering::Equal | Ordering::Greater => {} + Ordering::Less => continue, + } + } + + // we should only skip for the last txn pointed by the cursor. + let next_event = if tx_idx == cursor.txn.idx { cursor.txn.event } else { 0 }; + let partial_cursor = fetch_tx_events( + next_event, + None, + None, + tx_idx, + tx_hash, + events, + filter, + chunk_size as usize, + buffer, + )?; + + if let Some(c) = partial_cursor { + return Ok(c.into_full(block_env.number)); + } + } + + // if we reach here, it means we have processed all the transactions in the pending block. + // we return a cursor that points to the next tx in the pending block. + let next_pending_tx_idx = txs.len(); + Ok(Cursor::new(block_env.number, next_pending_tx_idx, 0)) +} + +/// Returns `None` once the end of the block range has been reached, otherwise the cursor from +/// which the next page should resume. +pub fn fetch_events_at_blocks( + provider: impl BlockProvider + ReceiptProvider, + block_range: RangeInclusive<BlockNumber>, + filter: &Filter, + chunk_size: u64, + cursor: Option<Cursor>, + buffer: &mut Vec<EmittedEvent>, +) -> EventQueryResult<Option<Cursor>> { + let cursor = cursor.unwrap_or(Cursor::new_block(*block_range.start())); + + // update the block range to start from the block pointed by the cursor. + let block_range = cursor.block..=*block_range.end(); + + for block_num in block_range { + let block_hash = provider.block_hash_by_num(block_num)?.context("Missing block hash")?; + let receipts = provider.receipts_by_block(block_num.into())?.context("Missing receipts")?; + + let body_index = + provider.block_body_indices(block_num.into())?.context("Missing block body index")?; + + let tx_hashes = provider.transaction_hashes_in_range(body_index.into())?; + + if block_num == cursor.block { + match receipts.len().cmp(&cursor.txn.idx) { + Ordering::Equal | Ordering::Greater => {} + Ordering::Less => continue, + } + } + + // we should only skip for the last block pointed by the cursor. + let total_tx_to_skip = if block_num == cursor.block { cursor.txn.idx } else { 0 }; + + // skip number of transactions as specified in the continuation token + for (tx_idx, (tx_hash, events)) in tx_hashes + .into_iter() + .zip(receipts.iter().map(|r| r.events())) + .enumerate() + .skip(total_tx_to_skip) + { + // we should only skip for the last txn pointed by the cursor. + if block_num == cursor.block && tx_idx == cursor.txn.idx { + match events.len().cmp(&cursor.txn.event) { + Ordering::Greater => {} + Ordering::Less | Ordering::Equal => continue, + } + } + + // we should only skip for the last txn pointed by the cursor. 
+ let next_event = if tx_idx == cursor.txn.idx { cursor.txn.event } else { 0 }; + let partial_cursor = fetch_tx_events( + next_event, + Some(block_num), + Some(block_hash), + tx_idx, + tx_hash, + events, + filter, + chunk_size as usize, + buffer, + )?; + + if let Some(c) = partial_cursor { + return Ok(Some(c.into_full(block_num))); + } + } + } + + // if we reach here, it means we have processed all the blocks in the range. + // therefore we don't need to return a cursor. + Ok(None) +} + +/// An iterator that yields events that match the given filters. +#[derive(Debug)] +struct FilteredEvents<'a, I: Iterator<Item = &'a Event>> { + iter: I, + filter: &'a Filter, +} + +impl<'a, I: Iterator<Item = &'a Event>> FilteredEvents<'a, I> { + fn new(iter: I, filter: &'a Filter) -> Self { + Self { iter, filter } + } +} + +impl<'a, I: Iterator<Item = &'a Event>> Iterator for FilteredEvents<'a, I> { + type Item = &'a Event; + + fn next(&mut self) -> Option<Self::Item> { + for event in self.iter.by_ref() { + // Check if the event matches the address filter + if !self.filter.address.map_or(true, |addr| addr == event.from_address) { + continue; + } + + // Check if the event matches the keys filter + let is_matched = match &self.filter.keys { + None => true, + // From starknet-api spec: + // "Per key (by position), designate the possible values to be matched for events to + // be returned. Empty array designates 'any' value" + Some(filters) => filters.iter().enumerate().all(|(i, keys)| { + // Let's say we want to filter events which are either named `Event1` or `Event2` + // and custom key `0x1` or `0x2` Filter: + // [[sn_keccak("Event1"), sn_keccak("Event2")], ["0x1", "0x2"]] + + // This checks: number of keys in event >= number of keys in filter (we check > + // i and not >= i because i is zero indexed) because + // otherwise this event doesn't contain all the keys we + // requested + event.keys.len() > i && + // This checks: Empty array designates 'any' value + (keys.is_empty() + || + // This checks: If this event's i-th key is one of the requested values in filter_keys[i] + keys.contains(&event.keys[i])) + }), + }; + + if is_matched { + return Some(event); + } + } + + None + } +}
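+ +// For example, with the filter keys `[[sn_keccak("Event1"), sn_keccak("Event2")], []]`, an event whose keys are `[sn_keccak("Event2"), 0x1]` matches (the empty second position matches any value), while an event with a single key is rejected because it cannot satisfy both key positions.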
+ +// returns a cursor if it couldn't include all the events of the current transaction because +// the buffer is already full. otherwise none. +#[allow(clippy::too_many_arguments)] +fn fetch_tx_events( + next_event_idx: usize, + block_number: Option<BlockNumber>, + block_hash: Option<BlockHash>, + tx_idx: usize, + tx_hash: TxHash, + events: &[Event], + filter: &Filter, + chunk_size: usize, + buffer: &mut Vec<EmittedEvent>, +) -> EventQueryResult<Option<PartialCursor>> { + // calculate the remaining capacity based on the chunk size and the current + // number of events we have taken. + let total_can_take = chunk_size.saturating_sub(buffer.len()); + + // skip events according to the continuation token. + let filtered = FilteredEvents::new(events.iter(), filter) + .map(|e| EmittedEvent { + block_hash, + block_number, + keys: e.keys.clone(), + data: e.data.clone(), + transaction_hash: tx_hash, + from_address: e.from_address.into(), + }) + .enumerate() + .skip(next_event_idx) + .take(total_can_take) + .collect::<Vec<_>>(); + + // remaining possible events that we haven't seen due to the chunk size limit. + let total_events_traversed = next_event_idx + total_can_take; + + // get the index of the last matching event that we have reached. if there are no + // matching events (i.e. `filtered` is empty), we point to the end of the chunk + // we've covered thus far with the iterator. + let last_event_idx = filtered.last().map(|(idx, _)| *idx).unwrap_or(total_events_traversed); + + buffer.extend(filtered.into_iter().map(|(_, event)| event)); + + if buffer.len() >= chunk_size { + // the next time we have to fetch the events, we will start from this index. + let new_last_event = if total_can_take == 0 { + // start from the same event pointed by the + // current cursor. + last_event_idx + } else { + // start at the next event of the last event we've filtered out. + last_event_idx + 1 + }; + + // if there are still more events that we haven't fetched yet for this tx. + if new_last_event < events.len() { + return Ok(Some(PartialCursor { idx: tx_idx, event: new_last_event })); + } + } + + Ok(None) +} + +impl From<Error> for StarknetApiError { + fn from(error: Error) -> Self { + match error { + Error::InvalidCursor => Self::InvalidContinuationToken, + Error::Provider(e) => e.into(), + Error::Other(e) => e.into(), + } + } +} + +impl From<ContinuationToken> for Cursor { + fn from(token: ContinuationToken) -> Self { + Cursor::new(token.block_n, token.txn_n as usize, token.event_n as usize) + } +} diff --git a/crates/katana/rpc/rpc/src/utils/mod.rs b/crates/katana/rpc/rpc/src/utils/mod.rs new file mode 100644 index 0000000000..a9970c28f8 --- /dev/null +++ b/crates/katana/rpc/rpc/src/utils/mod.rs @@ -0,0 +1 @@ +pub mod events; diff --git a/crates/katana/rpc/rpc/tests/common/mod.rs b/crates/katana/rpc/rpc/tests/common/mod.rs index f0e87d0948..fb452a29b4 100644 --- a/crates/katana/rpc/rpc/tests/common/mod.rs +++ b/crates/katana/rpc/rpc/tests/common/mod.rs @@ -61,3 +61,11 @@ pub fn build_deploy_cairo1_contract_call(class_hash: Felt, salt: Felt) -> Call { selector: get_selector_from_name("deployContract").unwrap(), } } + +/// Splits a Felt into two Felts, representing its lower and upper 128 bits. 
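+/// +/// For example, `split_felt(Felt::from(u128::MAX) + Felt::ONE)` returns `(Felt::ZERO, Felt::ONE)`.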
+#[allow(unused)] +pub fn split_felt(felt: Felt) -> (Felt, Felt) { + let low: Felt = (felt.to_biguint() & Felt::from(u128::MAX).to_biguint()).into(); + let high = felt.to_biguint() >> 128; + (low, Felt::from(high)) +} diff --git a/crates/katana/rpc/rpc/tests/starknet.rs b/crates/katana/rpc/rpc/tests/starknet.rs index 6d69160f0f..da22973f3e 100644 --- a/crates/katana/rpc/rpc/tests/starknet.rs +++ b/crates/katana/rpc/rpc/tests/starknet.rs @@ -3,175 +3,724 @@ use std::fs::{self}; use std::path::PathBuf; use std::sync::Arc; -use std::time::Duration; +use anyhow::Result; +use assert_matches::assert_matches; +use cainome::rs::abigen_legacy; +use common::split_felt; use dojo_test_utils::sequencer::{get_default_test_starknet_config, TestSequencer}; +use indexmap::IndexSet; +use jsonrpsee::http_client::HttpClientBuilder; use katana_core::sequencer::SequencerConfig; -use katana_rpc_types::receipt::ReceiptBlock; -use starknet::accounts::{Account, ConnectedAccount}; +use katana_primitives::event::ContinuationToken; +use katana_primitives::genesis::constant::{ + DEFAULT_FEE_TOKEN_ADDRESS, DEFAULT_OZ_ACCOUNT_CONTRACT_CLASS_HASH, + DEFAULT_PREFUNDED_ACCOUNT_BALANCE, DEFAULT_UDC_ADDRESS, +}; +use katana_rpc_api::dev::DevApiClient; +use starknet::accounts::{ + Account, AccountError, AccountFactory, ConnectedAccount, ExecutionEncoding, + OpenZeppelinAccountFactory, SingleOwnerAccount, +}; use starknet::core::types::contract::legacy::LegacyContractClass; use starknet::core::types::{ - BlockId, BlockTag, Call, DeclareTransactionReceipt, Felt, TransactionFinalityStatus, + BlockId, BlockTag, Call, DeclareTransactionReceipt, DeployAccountTransactionReceipt, + EventFilter, EventsPage, ExecutionResult, Felt, StarknetError, TransactionFinalityStatus, TransactionReceipt, }; -use starknet::core::utils::{get_contract_address, get_selector_from_name}; -use starknet::providers::Provider; +use starknet::core::utils::get_contract_address; +use starknet::macros::{felt, selector}; +use starknet::providers::{Provider, ProviderError}; +use starknet::signers::{LocalWallet, Signer, SigningKey}; +use tokio::sync::Mutex; mod common; -const WAIT_TX_DELAY_MILLIS: u64 = 1000; - -#[tokio::test(flavor = "multi_thread")] -async fn test_send_declare_and_deploy_contract() { +#[tokio::test] +async fn declare_and_deploy_contract() -> Result<()> { let sequencer = TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + let account = sequencer.account(); + let provider = sequencer.provider(); let path: PathBuf = PathBuf::from("tests/test_data/cairo1_contract.json"); - let (contract, compiled_class_hash) = - common::prepare_contract_declaration_params(&path).unwrap(); + let (contract, compiled_class_hash) = common::prepare_contract_declaration_params(&path)?; let class_hash = contract.class_hash(); - let res = account.declare_v2(Arc::new(contract), compiled_class_hash).send().await.unwrap(); + let res = account.declare_v2(contract.into(), compiled_class_hash).send().await?; + + // check that the tx is executed successfully and return the correct receipt + let receipt = dojo_utils::TransactionWaiter::new(res.transaction_hash, &provider).await?; + assert_matches!(receipt.receipt, TransactionReceipt::Declare(DeclareTransactionReceipt { .. 
})); + + // check that the class is actually declared + assert!(provider.get_class(BlockId::Tag(BlockTag::Pending), class_hash).await.is_ok()); + + let ctor_args = vec![Felt::ONE, Felt::TWO]; + let calldata = [ + vec![ + res.class_hash, // class hash + Felt::ZERO, // salt + Felt::ZERO, // unique + Felt::from(ctor_args.len()), // constructor calldata len + ], + ctor_args.clone(), + ] + .concat(); + + // pre-compute the contract address of the would-be deployed contract + let address = get_contract_address(Felt::ZERO, res.class_hash, &ctor_args, Felt::ZERO); + + let res = account + .execute_v1(vec![Call { + calldata, + to: DEFAULT_UDC_ADDRESS.into(), + selector: selector!("deployContract"), + }]) + .send() + .await?; // wait for the tx to be mined - tokio::time::sleep(Duration::from_millis(WAIT_TX_DELAY_MILLIS)).await; + dojo_utils::TransactionWaiter::new(res.transaction_hash, &provider).await?; - let receipt = account.provider().get_transaction_receipt(res.transaction_hash).await.unwrap(); + // make sure the contract is deployed + let res = provider.get_class_hash_at(BlockId::Tag(BlockTag::Pending), address).await?; + assert_eq!(res, class_hash); - match receipt.block { - ReceiptBlock::Block { .. } => { - let TransactionReceipt::Declare(DeclareTransactionReceipt { finality_status, .. }) = - receipt.receipt - else { - panic!("invalid tx receipt") - }; + Ok(()) +} - assert_eq!(finality_status, TransactionFinalityStatus::AcceptedOnL2); - } +#[tokio::test] +async fn declare_and_deploy_legacy_contract() -> Result<()> { + let sequencer = + TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; - _ => panic!("invalid tx receipt"), - } + let account = sequencer.account(); + let provider = sequencer.provider(); - assert!(account.provider().get_class(BlockId::Tag(BlockTag::Latest), class_hash).await.is_ok()); + let path = PathBuf::from("tests/test_data/cairo0_contract.json"); + let contract: LegacyContractClass = serde_json::from_reader(fs::File::open(path)?)?; - let constructor_calldata = vec![Felt::from(1_u32), Felt::from(2_u32)]; + let class_hash = contract.class_hash()?; + let res = account.declare_legacy(contract.into()).send().await?; + let receipt = dojo_utils::TransactionWaiter::new(res.transaction_hash, &provider).await?; + assert_matches!(receipt.receipt, TransactionReceipt::Declare(DeclareTransactionReceipt { .. 
})); + + // check that the class is actually declared + assert!(provider.get_class(BlockId::Tag(BlockTag::Pending), class_hash).await.is_ok()); + + let ctor_args = vec![Felt::ONE]; let calldata = [ vec![ - res.class_hash, // class hash - Felt::ZERO, // salt - Felt::ZERO, // unique - Felt::from(constructor_calldata.len()), // constructor calldata len + res.class_hash, // class hash + Felt::ZERO, // salt + Felt::ZERO, // unique + Felt::from(ctor_args.len()), // constructor calldata len ], - constructor_calldata.clone(), + ctor_args.clone(), ] .concat(); - let contract_address = - get_contract_address(Felt::ZERO, res.class_hash, &constructor_calldata, Felt::ZERO); + // pre-compute the contract address of the would-be deployed contract + let address = get_contract_address(Felt::ZERO, res.class_hash, &ctor_args.clone(), Felt::ZERO); - account + let res = account .execute_v1(vec![Call { calldata, - // devnet UDC address - to: Felt::from_hex("0x41a78e741e5af2fec34b695679bc6891742439f7afb8484ecd7766661ad02bf") - .unwrap(), - selector: get_selector_from_name("deployContract").unwrap(), + to: DEFAULT_UDC_ADDRESS.into(), + selector: selector!("deployContract"), }]) .send() - .await - .unwrap(); + .await?; // wait for the tx to be mined - tokio::time::sleep(Duration::from_millis(WAIT_TX_DELAY_MILLIS)).await; - - assert_eq!( - account - .provider() - .get_class_hash_at(BlockId::Tag(BlockTag::Latest), contract_address) - .await - .unwrap(), - class_hash + dojo_utils::TransactionWaiter::new(res.transaction_hash, &provider).await?; + + // make sure the contract is deployed + let res = provider.get_class_hash_at(BlockId::Tag(BlockTag::Pending), address).await?; + assert_eq!(res, class_hash); + + Ok(()) +} + +#[rstest::rstest] +#[tokio::test] +async fn deploy_account( + #[values(true, false)] disable_fee: bool, + #[values(None, Some(1000))] block_time: Option, +) -> Result<()> { + // setup test sequencer with the given configuration + let mut starknet_config = get_default_test_starknet_config(); + starknet_config.disable_fee = disable_fee; + let sequencer_config = SequencerConfig { block_time, ..Default::default() }; + + let sequencer = TestSequencer::start(sequencer_config, starknet_config).await; + + let provider = sequencer.provider(); + let funding_account = sequencer.account(); + let chain_id = provider.chain_id().await?; + + // Precompute the contract address of the new account with the given parameters: + let signer = LocalWallet::from(SigningKey::from_random()); + let class_hash = DEFAULT_OZ_ACCOUNT_CONTRACT_CLASS_HASH; + let salt = felt!("0x123"); + let ctor_args = [signer.get_public_key().await?.scalar()]; + let computed_address = get_contract_address(salt, class_hash, &ctor_args, Felt::ZERO); + + // Fund the new account + abigen_legacy!(FeeToken, "crates/katana/rpc/rpc/tests/test_data/erc20.json"); + let contract = FeeToken::new(DEFAULT_FEE_TOKEN_ADDRESS.into(), &funding_account); + + // send enough tokens to the new_account's address just to send the deploy account tx + let amount = Uint256 { low: felt!("0x100000000000"), high: Felt::ZERO }; + let recipient = computed_address; + let res = contract.transfer(&recipient, &amount).send().await?; + dojo_utils::TransactionWaiter::new(res.transaction_hash, &provider).await?; + + // starknet-rs's utility for deploying an OpenZeppelin account + let factory = OpenZeppelinAccountFactory::new(class_hash, chain_id, &signer, &provider).await?; + let res = factory.deploy_v1(salt).send().await?; + // the contract address in the send tx result must be the same 
as the computed one + assert_eq!(res.contract_address, computed_address); + + let receipt = dojo_utils::TransactionWaiter::new(res.transaction_hash, &provider).await?; + assert_matches!( + receipt.receipt, + TransactionReceipt::DeployAccount(DeployAccountTransactionReceipt { contract_address, .. }) => { + // the contract address in the receipt must be the same as the computed one + assert_eq!(contract_address, computed_address) + } ); - sequencer.stop().expect("failed to stop sequencer"); + // Verify the `getClassHashAt` returns the same class hash that we use for the account + // deployment + let res = provider.get_class_hash_at(BlockId::Tag(BlockTag::Pending), computed_address).await?; + assert_eq!(res, class_hash); + + Ok(()) } -#[tokio::test(flavor = "multi_thread")] -async fn test_send_declare_and_deploy_legacy_contract() { +abigen_legacy!(Erc20Contract, "crates/katana/rpc/rpc/tests/test_data/erc20.json"); + +#[tokio::test] +async fn estimate_fee() -> Result<()> { let sequencer = TestSequencer::start(SequencerConfig::default(), get_default_test_starknet_config()).await; + + let provider = sequencer.provider(); let account = sequencer.account(); - let path = PathBuf::from("tests/test_data/cairo0_contract.json"); + // setup contract to interact with (can be any existing contract that can be interacted with) + let contract = Erc20Contract::new(DEFAULT_FEE_TOKEN_ADDRESS.into(), &account); + + // setup contract function params + let recipient = felt!("0x1"); + let amount = Uint256 { low: felt!("0x1"), high: Felt::ZERO }; + + // send a valid transaction first to increment the nonce (so that we can test nonce < current + // nonce later) + let res = contract.transfer(&recipient, &amount).send().await?; + dojo_utils::TransactionWaiter::new(res.transaction_hash, &provider).await?; + + // estimate fee with current nonce (the expected nonce) + let nonce = provider.get_nonce(BlockId::Tag(BlockTag::Pending), account.address()).await?; + let result = contract.transfer(&recipient, &amount).nonce(nonce).estimate_fee().await; + assert!(result.is_ok(), "estimate should succeed with nonce == current nonce"); + + // estimate fee with arbitrary nonce < current nonce + // + // here we're essentially estimating a transaction with a nonce that has already been + // used, so it should fail. 
+ let nonce = nonce - 1; + let result = contract.transfer(&recipient, &amount).nonce(nonce).estimate_fee().await; + assert!(result.is_err(), "estimate should fail with nonce < current nonce"); + + // estimate fee with arbitrary nonce >= current nonce + let nonce = felt!("0x1337"); + let result = contract.transfer(&recipient, &amount).nonce(nonce).estimate_fee().await; + assert!(result.is_ok(), "estimate should succeed with nonce >= current nonce"); + + Ok(()) +} - let legacy_contract: LegacyContractClass = - serde_json::from_reader(fs::File::open(path).unwrap()).unwrap(); - let contract_class = Arc::new(legacy_contract); +#[rstest::rstest] +#[tokio::test(flavor = "multi_thread")] +async fn concurrent_transactions_submissions( + #[values(None, Some(1000))] block_time: Option, +) -> Result<()> { + // setup test sequencer with the given configuration + let starknet_config = get_default_test_starknet_config(); + let sequencer_config = SequencerConfig { block_time, ..Default::default() }; + + let sequencer = TestSequencer::start(sequencer_config, starknet_config).await; + let provider = sequencer.provider(); + let account = Arc::new(sequencer.account()); + + // function call params + let recipient = Felt::ONE; + let amount = Uint256 { low: Felt::ONE, high: Felt::ZERO }; + + let initial_nonce = + provider.get_nonce(BlockId::Tag(BlockTag::Pending), sequencer.account().address()).await?; + + const N: usize = 100; + let nonce = Arc::new(Mutex::new(initial_nonce)); + let txs = Arc::new(Mutex::new(IndexSet::with_capacity(N))); + + let mut handles = Vec::with_capacity(N); + + for _ in 0..N { + let txs = txs.clone(); + let nonce = nonce.clone(); + let amount = amount.clone(); + let account = account.clone(); + + let handle = tokio::spawn(async move { + let mut nonce = nonce.lock().await; + let contract = Erc20Contract::new(DEFAULT_FEE_TOKEN_ADDRESS.into(), account); + let res = contract.transfer(&recipient, &amount).nonce(*nonce).send().await.unwrap(); + txs.lock().await.insert(res.transaction_hash); + *nonce += Felt::ONE; + }); + + handles.push(handle); + } - let class_hash = contract_class.class_hash().unwrap(); - let res = account.declare_legacy(contract_class).send().await.unwrap(); - // wait for the tx to be mined - tokio::time::sleep(Duration::from_millis(WAIT_TX_DELAY_MILLIS)).await; + // wait for all txs to be submitted + for handle in handles { + handle.await?; + } - let receipt = account.provider().get_transaction_receipt(res.transaction_hash).await.unwrap(); + // Wait only for the last transaction to be accepted + let txs = txs.lock().await; + let last_tx = txs.last().unwrap(); + dojo_utils::TransactionWaiter::new(*last_tx, &provider).await?; - match receipt.block { - ReceiptBlock::Block { .. } => { - let TransactionReceipt::Declare(DeclareTransactionReceipt { finality_status, .. 
}) = - receipt.receipt - else { - panic!("invalid tx receipt") - }; + // we should've submitted `N` transactions + assert_eq!(txs.len(), N); + + // check the status of each tx + for hash in txs.iter() { + let receipt = provider.get_transaction_receipt(hash).await?; + assert_eq!(receipt.receipt.execution_result(), &ExecutionResult::Succeeded); + assert_eq!(receipt.receipt.finality_status(), &TransactionFinalityStatus::AcceptedOnL2); + } + + let nonce = account.get_nonce().await?; + assert_eq!(nonce, Felt::from(N), "Nonce should be incremented by {N} times"); + + Ok(()) +} + +/// Macro used to assert that the given error is a Starknet error. +macro_rules! assert_starknet_err { + ($err:expr, $api_err:pat) => { + assert_matches!($err, AccountError::Provider(ProviderError::StarknetError($api_err))) + }; +} + +#[rstest::rstest] +#[tokio::test] +async fn ensure_validator_have_valid_state( + #[values(None, Some(1000))] block_time: Option<u64>, +) -> Result<()> { + let mut starknet_config = get_default_test_starknet_config(); + starknet_config.disable_fee = false; + let sequencer_config = SequencerConfig { block_time, ..Default::default() }; + let sequencer = TestSequencer::start(sequencer_config, starknet_config).await; + let account = sequencer.account(); + + // setup test contract to interact with. + let contract = Erc20Contract::new(DEFAULT_FEE_TOKEN_ADDRESS.into(), &account); + + // reduce account balance + let recipient = felt!("0x1337"); + let (low, high) = split_felt(Felt::from(DEFAULT_PREFUNDED_ACCOUNT_BALANCE / 2)); + let amount = Uint256 { low, high }; + + let res = contract.transfer(&recipient, &amount).send().await?; + dojo_utils::TransactionWaiter::new(res.transaction_hash, &sequencer.provider()).await?; + + // this should fail validation due to insufficient balance because we specify max fee > the + // actual balance that we have now. + let fee = Felt::from(DEFAULT_PREFUNDED_ACCOUNT_BALANCE); + let err = contract.transfer(&recipient, &amount).max_fee(fee).send().await.unwrap_err(); + assert_starknet_err!(err, StarknetError::InsufficientAccountBalance); + + Ok(()) +} + +#[rstest::rstest] +#[tokio::test] +async fn send_txs_with_insufficient_fee( + #[values(true, false)] disable_fee: bool, + #[values(None, Some(1000))] block_time: Option<u64>, +) -> Result<()> { + // setup test sequencer with the given configuration + let mut starknet_config = get_default_test_starknet_config(); + starknet_config.disable_fee = disable_fee; + let sequencer_config = SequencerConfig { block_time, ..Default::default() }; + + let sequencer = TestSequencer::start(sequencer_config, starknet_config).await; + + // setup test contract to interact with. + let contract = Erc20Contract::new(DEFAULT_FEE_TOKEN_ADDRESS.into(), sequencer.account()); + + // function call params + let recipient = Felt::ONE; + let amount = Uint256 { low: Felt::ONE, high: Felt::ZERO }; + + // initial sender's account nonce, used to assert how the txs' validity changes the account nonce. + let initial_nonce = sequencer.account().get_nonce().await?; + + // ----------------------------------------------------------------------- + // transaction with low max fee (underpriced). + + let res = contract.transfer(&recipient, &amount).max_fee(Felt::TWO).send().await; + + if disable_fee { + // in no fee mode, setting the max fee (which translates to the tx run resources) lower + // than the amount required would result in a validation failure
+
+#[rstest::rstest]
+#[tokio::test]
+async fn ensure_validator_have_valid_state(
+    #[values(None, Some(1000))] block_time: Option<u64>,
+) -> Result<()> {
+    let mut starknet_config = get_default_test_starknet_config();
+    starknet_config.disable_fee = false;
+    let sequencer_config = SequencerConfig { block_time, ..Default::default() };
-        _ => panic!("invalid tx receipt"),
+    let sequencer = TestSequencer::start(sequencer_config, starknet_config).await;
+    let account = sequencer.account();
+
+    // setup test contract to interact with.
+    let contract = Erc20Contract::new(DEFAULT_FEE_TOKEN_ADDRESS.into(), &account);
+
+    // reduce account balance
+    let recipient = felt!("0x1337");
+    let (low, high) = split_felt(Felt::from(DEFAULT_PREFUNDED_ACCOUNT_BALANCE / 2));
+    let amount = Uint256 { low, high };
+
+    let res = contract.transfer(&recipient, &amount).send().await?;
+    dojo_utils::TransactionWaiter::new(res.transaction_hash, &sequencer.provider()).await?;
+
+    // this should fail validation due to insufficient balance because we specify max fee > the
+    // actual balance that we have now.
+    let fee = Felt::from(DEFAULT_PREFUNDED_ACCOUNT_BALANCE);
+    let err = contract.transfer(&recipient, &amount).max_fee(fee).send().await.unwrap_err();
+    assert_starknet_err!(err, StarknetError::InsufficientAccountBalance);
+
+    Ok(())
+}
+
+#[rstest::rstest]
+#[tokio::test]
+async fn send_txs_with_insufficient_fee(
+    #[values(true, false)] disable_fee: bool,
+    #[values(None, Some(1000))] block_time: Option<u64>,
+) -> Result<()> {
+    // setup test sequencer with the given configuration
+    let mut starknet_config = get_default_test_starknet_config();
+    starknet_config.disable_fee = disable_fee;
+    let sequencer_config = SequencerConfig { block_time, ..Default::default() };
+
+    let sequencer = TestSequencer::start(sequencer_config, starknet_config).await;
+
+    // setup test contract to interact with.
+    let contract = Erc20Contract::new(DEFAULT_FEE_TOKEN_ADDRESS.into(), sequencer.account());
+
+    // function call params
+    let recipient = Felt::ONE;
+    let amount = Uint256 { low: Felt::ONE, high: Felt::ZERO };
+
+    // initial sender's account nonce. used to assert how a tx's validity changes the account nonce.
+    let initial_nonce = sequencer.account().get_nonce().await?;
+
+    // -----------------------------------------------------------------------
+    // transaction with low max fee (underpriced).
+
+    let res = contract.transfer(&recipient, &amount).max_fee(Felt::TWO).send().await;
+
+    if disable_fee {
+        // in no fee mode, setting the max fee (which translates to the tx run resources) lower
+        // than the amount required would result in a validation failure due to insufficient
+        // resources.
+        assert_starknet_err!(res.unwrap_err(), StarknetError::ValidationFailure(_));
+    } else {
+        assert_starknet_err!(res.unwrap_err(), StarknetError::InsufficientMaxFee);
+    }
-    assert!(account.provider().get_class(BlockId::Tag(BlockTag::Latest), class_hash).await.is_ok());
+    let nonce = sequencer.account().get_nonce().await?;
+    assert_eq!(initial_nonce, nonce, "Nonce shouldn't change after invalid tx");
-    let constructor_calldata = vec![Felt::ONE];
+    // -----------------------------------------------------------------------
+    // transaction with insufficient balance.
-    let calldata = [
-        vec![
-            res.class_hash, // class hash
-            Felt::ZERO, // salt
-            Felt::ZERO, // unique
-            Felt::from(constructor_calldata.len()), // constructor calldata len
-        ],
-        constructor_calldata.clone(),
-    ]
-    .concat();
+    let fee = Felt::from(DEFAULT_PREFUNDED_ACCOUNT_BALANCE + 1);
+    let res = contract.transfer(&recipient, &amount).max_fee(fee).send().await;
-    let contract_address =
-        get_contract_address(Felt::ZERO, res.class_hash, &constructor_calldata.clone(), Felt::ZERO);
+    if disable_fee {
+        // in no fee mode, account balance is ignored. as long as the max fee (aka resources) is
+        // enough to at least run the account validation, the tx should be accepted.
+        // Wait for the transaction to be accepted
+        dojo_utils::TransactionWaiter::new(res?.transaction_hash, &sequencer.provider()).await?;
-    account
-        .execute_v1(vec![Call {
-            calldata,
-            // devnet UDC address
-            to: Felt::from_hex("0x41a78e741e5af2fec34b695679bc6891742439f7afb8484ecd7766661ad02bf")
-                .unwrap(),
-            selector: get_selector_from_name("deployContract").unwrap(),
-        }])
-        .send()
-        .await
-        .unwrap();
+        // nonce should be incremented by 1 after a valid tx.
+        let nonce = sequencer.account().get_nonce().await?;
+        assert_eq!(initial_nonce + 1, nonce);
+    } else {
+        assert_starknet_err!(res.unwrap_err(), StarknetError::InsufficientAccountBalance);
-    // wait for the tx to be mined
-    tokio::time::sleep(Duration::from_millis(WAIT_TX_DELAY_MILLIS)).await;
-
-    assert_eq!(
-        account
-            .provider()
-            .get_class_hash_at(BlockId::Tag(BlockTag::Latest), contract_address)
-            .await
-            .unwrap(),
-        class_hash
+        // nonce shouldn't change for an invalid tx.
+        let nonce = sequencer.account().get_nonce().await?;
+        assert_eq!(initial_nonce, nonce);
+    }
+
+    Ok(())
+}
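A deliberately simplified sketch — not katana's actual validator — of the decision table these fee cases exercise, using plain u128 values for fees and balances:

    /// Illustrative only: the error cases asserted by the fee tests above.
    #[derive(Debug, PartialEq)]
    enum ValidationError {
        InsufficientMaxFee,         // fee mode: max fee below the required minimum
        InsufficientAccountBalance, // fee mode: balance cannot cover the max fee
        ValidationFailure,          // no-fee mode: resources too low to even run validation
    }

    fn pre_validate(
        max_fee: u128,
        min_required_fee: u128,
        balance: u128,
        fee_disabled: bool,
    ) -> Result<(), ValidationError> {
        if fee_disabled {
            // balance is ignored; the max fee (i.e. resources) only needs to
            // cover running the account validation itself.
            if max_fee < min_required_fee {
                return Err(ValidationError::ValidationFailure);
            }
            return Ok(());
        }
        if max_fee < min_required_fee {
            return Err(ValidationError::InsufficientMaxFee);
        }
        if balance < max_fee {
            return Err(ValidationError::InsufficientAccountBalance);
        }
        Ok(())
    }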
+
+#[rstest::rstest]
+#[tokio::test]
+async fn send_txs_with_invalid_signature(
+    #[values(true, false)] disable_validate: bool,
+    #[values(None, Some(1000))] block_time: Option<u64>,
+) -> Result<()> {
+    // setup test sequencer with the given configuration
+    let mut starknet_config = get_default_test_starknet_config();
+    starknet_config.disable_validate = disable_validate;
+    let sequencer_config = SequencerConfig { block_time, ..Default::default() };
+
+    let sequencer = TestSequencer::start(sequencer_config, starknet_config).await;
+
+    // starknet-rs doesn't provide a way to manually set the signatures so instead we create an
+    // account with a random signer to simulate invalid signatures.
+
+    let account = SingleOwnerAccount::new(
+        sequencer.provider(),
+        LocalWallet::from(SigningKey::from_random()),
+        sequencer.account().address(),
+        sequencer.provider().chain_id().await?,
+        ExecutionEncoding::New,
+    );
-    sequencer.stop().expect("failed to stop sequencer");
+    // setup test contract to interact with.
+    let contract = Erc20Contract::new(DEFAULT_FEE_TOKEN_ADDRESS.into(), &account);
+
+    // function call params
+    let recipient = Felt::ONE;
+    let amount = Uint256 { low: Felt::ONE, high: Felt::ZERO };
+
+    // initial sender's account nonce. used to assert how a tx's validity changes the account nonce.
+    let initial_nonce = account.get_nonce().await?;
+
+    // -----------------------------------------------------------------------
+    // transaction with invalid signatures.
+
+    // we set the max fee manually here to skip fee estimation. we want to test the pool validator.
+    let res = contract.transfer(&recipient, &amount).max_fee(felt!("0x1111111111")).send().await;
+
+    if disable_validate {
+        // Wait for the transaction to be accepted
+        dojo_utils::TransactionWaiter::new(res?.transaction_hash, &sequencer.provider()).await?;
+
+        // nonce should be incremented by 1 after a valid tx.
+        let nonce = sequencer.account().get_nonce().await?;
+        assert_eq!(initial_nonce + 1, nonce);
+    } else {
+        assert_starknet_err!(res.unwrap_err(), StarknetError::ValidationFailure(_));
+
+        // nonce shouldn't change for an invalid tx.
+        let nonce = sequencer.account().get_nonce().await?;
+        assert_eq!(initial_nonce, nonce);
+    }
+
+    Ok(())
+}
+
+#[rstest::rstest]
+#[tokio::test]
+async fn send_txs_with_invalid_nonces(
+    #[values(None, Some(1000))] block_time: Option<u64>,
+) -> Result<()> {
+    // setup test sequencer with the given configuration
+    let starknet_config = get_default_test_starknet_config();
+    let sequencer_config = SequencerConfig { block_time, ..Default::default() };
+
+    let sequencer = TestSequencer::start(sequencer_config, starknet_config).await;
+    let provider = sequencer.provider();
+    let account = sequencer.account();
+
+    // setup test contract to interact with.
+    let contract = Erc20Contract::new(DEFAULT_FEE_TOKEN_ADDRESS.into(), &account);
+
+    // function call params
+    let recipient = Felt::ONE;
+    let amount = Uint256 { low: Felt::ONE, high: Felt::ZERO };
+
+    // set the fee manually here to skip fee estimation. we want to test the pool validator.
+    let fee = felt!("0x11111111111");
+
+    // send a valid transaction first to increment the nonce (so that we can test nonce < current
+    // nonce later)
+    let res = contract.transfer(&recipient, &amount).send().await?;
+    dojo_utils::TransactionWaiter::new(res.transaction_hash, &provider).await?;
+
+    // initial sender's account nonce. used to assert how a tx's validity changes the account nonce.
+    let initial_nonce = account.get_nonce().await?;
+    assert_eq!(initial_nonce, Felt::ONE, "Initial nonce after sending 1st tx should be 1.");
+
+    // -----------------------------------------------------------------------
+    // transaction with nonce < account nonce.
+
+    let old_nonce = initial_nonce - Felt::ONE;
+    let res = contract.transfer(&recipient, &amount).nonce(old_nonce).max_fee(fee).send().await;
+    assert_starknet_err!(res.unwrap_err(), StarknetError::InvalidTransactionNonce);
+
+    let nonce = account.get_nonce().await?;
+    assert_eq!(nonce, initial_nonce, "Nonce shouldn't change on invalid tx.");
+
+    // -----------------------------------------------------------------------
+    // transaction with nonce = account nonce.
+
+    let curr_nonce = initial_nonce;
+    let res = contract.transfer(&recipient, &amount).nonce(curr_nonce).max_fee(fee).send().await?;
+    dojo_utils::TransactionWaiter::new(res.transaction_hash, &provider).await?;
+
+    let nonce = account.get_nonce().await?;
+    assert_eq!(nonce, Felt::TWO, "Nonce should be 2 after sending two valid txs.");
+
+    // -----------------------------------------------------------------------
+    // transaction with nonce >= account nonce.
+    //
+    // ideally, a tx with nonce >= account nonce should be considered valid BUT not executed
+    // immediately and should be kept around in the pool until the nonce is reached. however,
+    // katana doesn't support this feature yet so the current behaviour is to treat the tx as
+    // invalid with a nonce mismatch error.
+
+    let new_nonce = felt!("0x100");
+    let res = contract.transfer(&recipient, &amount).nonce(new_nonce).max_fee(fee).send().await;
+    assert_starknet_err!(res.unwrap_err(), StarknetError::InvalidTransactionNonce);
+
+    let nonce = account.get_nonce().await?;
+    assert_eq!(nonce, Felt::TWO, "Nonce shouldn't change because the tx is still invalid.");
+
+    Ok(())
+}
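The pool behaviour described in the comment above — parking future-nonce txs until the account nonce catches up — would look roughly like the following. This is a hypothetical sketch, not katana's implementation, using plain integers for nonces and strings for tx hashes:

    use std::collections::{BTreeMap, VecDeque};

    /// Hypothetical nonce-gated queue: txs that arrive with a future nonce are
    /// parked and only released once the account nonce has caught up.
    #[derive(Default)]
    struct PendingQueue {
        parked: BTreeMap<u64, String>, // nonce -> tx hash
    }

    impl PendingQueue {
        /// Park a tx that arrived with `nonce > account_nonce`.
        fn park(&mut self, nonce: u64, tx_hash: String) {
            self.parked.insert(nonce, tx_hash);
        }

        /// Release every consecutively-nonced tx starting at `account_nonce`.
        fn release_ready(&mut self, mut account_nonce: u64) -> VecDeque<String> {
            let mut ready = VecDeque::new();
            while let Some(tx) = self.parked.remove(&account_nonce) {
                ready.push_back(tx);
                account_nonce += 1;
            }
            ready
        }
    }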
+
+// TODO: write more elaborate tests for get events.
+#[tokio::test]
+async fn get_events_no_pending() -> Result<()> {
+    // setup test sequencer with the given configuration
+    let starknet_config = get_default_test_starknet_config();
+    let sequencer_config = SequencerConfig { no_mining: true, ..Default::default() };
+    let sequencer = TestSequencer::start(sequencer_config, starknet_config).await;
+
+    // create a json rpc client to interact with the dev api.
+    let client = HttpClientBuilder::default().build(sequencer.url()).unwrap();
+
+    let provider = sequencer.provider();
+    let account = sequencer.account();
+
+    // setup test contract to interact with.
+    let contract = Erc20Contract::new(DEFAULT_FEE_TOKEN_ADDRESS.into(), &account);
+    // tx that emits 1 event
+    let tx = || contract.transfer(&Felt::ONE, &Uint256 { low: Felt::ONE, high: Felt::ZERO });
+
+    const BLOCK_1_TX_COUNT: usize = 5;
+    const EVENT_COUNT_PER_TX: usize = 1;
+    const TOTAL_EVENT_COUNT: usize = BLOCK_1_TX_COUNT * EVENT_COUNT_PER_TX;
+
+    for _ in 0..BLOCK_1_TX_COUNT {
+        let res = tx().send().await?;
+        dojo_utils::TransactionWaiter::new(res.transaction_hash, &provider).await?;
+    }
+
+    // generate a block to mine pending transactions.
+    client.generate_block().await?;
+
+    let filter = EventFilter {
+        keys: None,
+        address: None,
+        to_block: Some(BlockId::Number(1)),
+        from_block: Some(BlockId::Number(0)),
+    };
+
+    // -----------------------------------------------------------------------
+    // case 1 (chunk size = 0)
+
+    let chunk_size = 0;
+    let EventsPage { events, continuation_token } =
+        provider.get_events(filter.clone(), None, chunk_size).await?;
+
+    assert_eq!(events.len(), 0);
+    assert_matches!(continuation_token, Some(token) => {
+        let token = ContinuationToken::parse(&token)?;
+        assert_eq!(token.block_n, 1);
+        assert_eq!(token.txn_n, 0);
+        assert_eq!(token.event_n, 0);
+    });
+
+    // -----------------------------------------------------------------------
+    // case 2 (chunk size < total events)
+
+    let chunk_size = 3;
+    let EventsPage { events, continuation_token } =
+        provider.get_events(filter.clone(), None, chunk_size).await?;
+
+    assert_eq!(events.len(), 3, "Total events should be limited by chunk size ({chunk_size})");
+    assert_matches!(continuation_token, Some(ref token) => {
+        let token = ContinuationToken::parse(token)?;
+        assert_eq!(token.block_n, 1);
+        assert_eq!(token.txn_n, 3);
+        assert_eq!(token.event_n, 0);
+    });
+
+    let EventsPage { events, continuation_token } =
+        provider.get_events(filter.clone(), continuation_token, chunk_size).await?;
+
+    assert_eq!(events.len(), 2, "Remaining should be 2");
+    assert_matches!(continuation_token, None);
+
+    // -----------------------------------------------------------------------
+    // case 3 (max chunk is greater than total events in the requested range)
+
+    let chunk_size = 100;
+    let EventsPage { events, continuation_token } =
+        provider.get_events(filter.clone(), None, chunk_size).await?;
+
+    assert_eq!(events.len(), TOTAL_EVENT_COUNT);
+    assert_matches!(continuation_token, None);
+
+    Ok(())
+}
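These assertions walk katana's ContinuationToken cursor, which tracks (block_n, txn_n, event_n). A rough illustration of the cursor shape — the comma-separated string encoding here is an assumption for the sketch, not necessarily katana's actual wire format:

    /// Illustration of the three-field events cursor checked above.
    #[derive(Debug, Default, PartialEq)]
    struct Cursor {
        block_n: u64, // next block to scan
        txn_n: u64,   // next tx within that block
        event_n: u64, // next event within that tx
    }

    impl Cursor {
        fn parse(s: &str) -> Option<Self> {
            let mut parts = s.split(',').map(|p| p.trim().parse::<u64>().ok());
            Some(Self {
                block_n: parts.next()??,
                txn_n: parts.next()??,
                event_n: parts.next()??,
            })
        }
    }

    // e.g. a cursor pointing at block 1, tx 3, event 0:
    // assert_eq!(Cursor::parse("1,3,0"), Some(Cursor { block_n: 1, txn_n: 3, event_n: 0 }));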
+
+#[tokio::test]
+async fn get_events_with_pending() -> Result<()> {
+    // setup test sequencer with the given configuration
+    let starknet_config = get_default_test_starknet_config();
+    let sequencer_config = SequencerConfig { no_mining: true, ..Default::default() };
+    let sequencer = TestSequencer::start(sequencer_config, starknet_config).await;
+
+    // create a json rpc client to interact with the dev api.
+    let client = HttpClientBuilder::default().build(sequencer.url()).unwrap();
+
+    let provider = sequencer.provider();
+    let account = sequencer.account();
+
+    // setup test contract to interact with.
+    let contract = Erc20Contract::new(DEFAULT_FEE_TOKEN_ADDRESS.into(), &account);
+    // tx that emits 1 event
+    let tx = || contract.transfer(&Felt::ONE, &Uint256 { low: Felt::ONE, high: Felt::ZERO });
+
+    const BLOCK_1_TX_COUNT: usize = 5;
+    const PENDING_BLOCK_TX_COUNT: usize = 5;
+
+    for _ in 0..BLOCK_1_TX_COUNT {
+        let res = tx().send().await?;
+        dojo_utils::TransactionWaiter::new(res.transaction_hash, &provider).await?;
+    }
+
+    // generate block 1
+    client.generate_block().await?;
+
+    // events in pending block (2)
+    for _ in 0..PENDING_BLOCK_TX_COUNT {
+        let res = tx().send().await?;
+        dojo_utils::TransactionWaiter::new(res.transaction_hash, &provider).await?;
+    }
+
+    // because we didn't specifically set the `from` and `to` block, it will implicitly
+    // get events starting from the initial (0) block to the pending block (2)
+    let filter = EventFilter { keys: None, address: None, to_block: None, from_block: None };
+
+    let chunk_size = BLOCK_1_TX_COUNT;
+    let EventsPage { events, continuation_token } =
+        provider.get_events(filter.clone(), None, chunk_size as u64).await?;
+
+    assert_eq!(events.len(), chunk_size);
+    assert_matches!(continuation_token, Some(ref token) => {
+        // the continuation token should now point to block 2 (pending block) because:
+        // (1) the filter doesn't specify the exact 'to' block, so it will keep moving the cursor to point to the next block.
+        // (2) events in block 1 have been exhausted by the first query.
+        let token = ContinuationToken::parse(token)?;
+        assert_eq!(token.block_n, 2);
+        assert_eq!(token.txn_n, 0);
+        assert_eq!(token.event_n, 0);
+    });
+
+    // we split the pending events into two chunks to cover different cases.
+
+    let chunk_size = 3;
+    let EventsPage { events, continuation_token } =
+        provider.get_events(filter.clone(), continuation_token, chunk_size).await?;
+
+    assert_eq!(events.len() as u64, chunk_size);
+    assert_matches!(continuation_token, Some(ref token) => {
+        let token = ContinuationToken::parse(token)?;
+        assert_eq!(token.block_n, 2);
+        assert_eq!(token.txn_n, 3);
+        assert_eq!(token.event_n, 0);
+    });
+
+    // get the rest of events in the pending block
+    let EventsPage { events, continuation_token } =
+        provider.get_events(filter.clone(), continuation_token, chunk_size).await?;
+
+    assert_eq!(events.len(), PENDING_BLOCK_TX_COUNT - chunk_size as usize);
+    assert_matches!(continuation_token, Some(ref token) => {
+        let token = ContinuationToken::parse(token)?;
+        assert_eq!(token.block_n, 2);
+        assert_eq!(token.txn_n, 5);
+        assert_eq!(token.event_n, 0);
+    });
+
+    // fetching events with the continuation token should return an empty list and the
+    // token shouldn't change.
+    let EventsPage { events, continuation_token: new_token } =
+        provider.get_events(filter, continuation_token.clone(), chunk_size).await?;
+
+    assert_eq!(events.len(), 0);
+    assert_eq!(new_token, continuation_token);
+
+    Ok(())
+}
diff --git a/crates/katana/rpc/rpc/tests/test_data/erc20.json b/crates/katana/rpc/rpc/tests/test_data/erc20.json
new file mode 120000
index 0000000000..e8ed708b86
--- /dev/null
+++ b/crates/katana/rpc/rpc/tests/test_data/erc20.json
@@ -0,0 +1 @@
+../../../../contracts/compiled/erc20.json
\ No newline at end of file
diff --git a/crates/katana/runner/runner-macro/Cargo.toml b/crates/katana/runner/runner-macro/Cargo.toml
index b6affe1e4c..ff265987a8 100644
--- a/crates/katana/runner/runner-macro/Cargo.toml
+++ b/crates/katana/runner/runner-macro/Cargo.toml
@@ -1,7 +1,7 @@
 [package]
 edition = "2021"
 name = "runner-macro"
-version = "1.0.0-alpha.7"
+version = "1.0.0-alpha.11"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [lib]
diff --git a/crates/katana/runner/src/lib.rs b/crates/katana/runner/src/lib.rs
index 6004ddd203..45c3d38785 100644
--- a/crates/katana/runner/src/lib.rs
+++ b/crates/katana/runner/src/lib.rs
@@ -102,6 +102,7 @@ impl KatanaRunner {
             .accounts(n_accounts)
             .json_log(true)
             .max_connections(10000)
+            .dev(config.dev)
             .fee(!config.disable_fee);

         if let Some(block_time_ms) = config.block_time {
diff --git a/crates/katana/tasks/src/lib.rs b/crates/katana/tasks/src/lib.rs
index c329156ffc..459427c783 100644
--- a/crates/katana/tasks/src/lib.rs
+++ b/crates/katana/tasks/src/lib.rs
@@ -89,9 +89,11 @@ impl Future for BlockingTaskHandle {
     }
 }

-/// A thread-pool for spawning blocking tasks . This is a simple wrapper around *rayon*'s
-/// thread-pool. This is mainly for executing expensive CPU-bound tasks. For spawing blocking
-/// IO-bound tasks, use [TokioTaskSpawner::spawn_blocking] instead.
+/// A thread-pool for spawning blocking tasks.
+///
+/// This is a simple wrapper around *rayon*'s thread-pool. This is mainly for executing expensive
+/// CPU-bound tasks. For spawning blocking IO-bound tasks, use [TokioTaskSpawner::spawn_blocking]
+/// instead.
 ///
 /// Refer to the [CPU-bound tasks and blocking code] section of the *tokio* docs and this [blog
 /// post] for more information.
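A sketch of the rule of thumb in the doc comment above, using tokio and rayon primitives directly (the katana-tasks wrappers such as the rayon-backed pool and TokioTaskSpawner build ergonomics on top of these):

    use tokio::sync::oneshot;

    // IO-bound blocking work: tokio's own blocking pool is the right home.
    async fn read_file_blocking(path: std::path::PathBuf) -> std::io::Result<Vec<u8>> {
        tokio::task::spawn_blocking(move || std::fs::read(path))
            .await
            .expect("blocking task panicked")
    }

    // CPU-bound work: hand it to a rayon pool and await the result via a channel,
    // so the async runtime's worker threads are never tied up by the computation.
    async fn sum_of_squares(values: Vec<u64>) -> u64 {
        let (tx, rx) = oneshot::channel();
        rayon::spawn(move || {
            let total = values.iter().map(|v| v * v).sum::<u64>();
            let _ = tx.send(total);
        });
        rx.await.expect("rayon task dropped the sender")
    }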
diff --git a/crates/katana/tasks/src/manager.rs b/crates/katana/tasks/src/manager.rs
index 94a7e227ed..f79a7657e8 100644
--- a/crates/katana/tasks/src/manager.rs
+++ b/crates/katana/tasks/src/manager.rs
@@ -42,21 +42,24 @@ impl TaskManager {
         self.spawn_inner(fut)
     }

-    /// Wait until all spawned tasks are completed.
-    pub async fn wait(&self) {
-        // need to close the tracker first before waiting
-        let _ = self.tracker.close();
-        self.tracker.wait().await;
-        // reopen the tracker for spawning future tasks
-        let _ = self.tracker.reopen();
+    /// Waits for the shutdown signal to be received.
+    pub async fn wait_for_shutdown(&self) {
+        self.on_cancel.cancelled().await;
     }

-    /// Consumes the manager and wait until all tasks are finished, either due to completion or
-    /// cancellation.
-    pub async fn wait_shutdown(self) {
+    /// Shuts down the manager and waits until all currently running tasks are finished, either due
+    /// to completion or cancellation.
+    ///
+    /// No task can be spawned on the manager after this method is called.
+    pub async fn shutdown(self) {
+        if !self.on_cancel.is_cancelled() {
+            self.on_cancel.cancel();
+        }
+
+        self.wait_for_shutdown().await;
+
+        // need to close the tracker first before waiting
         let _ = self.tracker.close();
-        let _ = self.on_cancel.cancelled().await;
         self.tracker.wait().await;
     }

@@ -70,6 +73,16 @@ impl TaskManager {
         TaskBuilder::new(self)
     }

+    /// Wait until all spawned tasks are completed.
+    #[cfg(test)]
+    async fn wait(&self) {
+        // need to close the tracker first before waiting
+        let _ = self.tracker.close();
+        self.tracker.wait().await;
+        // reopen the tracker for spawning future tasks
+        let _ = self.tracker.reopen();
+    }
+
     fn spawn_inner<F>(&self, task: F) -> TaskHandle<F::Output>
     where
         F: Future + Send + 'static,
@@ -156,20 +169,20 @@ mod tests {
         manager.build_task().graceful_shutdown().spawn(future::ready(()));

         // wait until all task spawned to the manager have been completed
-        manager.wait_shutdown().await;
+        manager.shutdown().await;
     }

     #[tokio::test]
     async fn critical_task_implicit_graceful_shutdown() {
         let manager = TaskManager::current();
         manager.build_task().critical().spawn(future::ready(()));
-        manager.wait_shutdown().await;
+        manager.shutdown().await;
     }

     #[tokio::test]
     async fn critical_task_graceful_shudown_on_panicked() {
         let manager = TaskManager::current();
         manager.build_task().critical().spawn(async { panic!("panicking") });
-        manager.wait_shutdown().await;
+        manager.shutdown().await;
     }
 }
diff --git a/crates/katana/tasks/src/task.rs b/crates/katana/tasks/src/task.rs
index 47e7a421b8..1e5cdb813a 100644
--- a/crates/katana/tasks/src/task.rs
+++ b/crates/katana/tasks/src/task.rs
@@ -79,15 +79,15 @@ impl<'a> TaskBuilder<'a> {
         let Self { manager, instrument, graceful_shutdown, .. } = self;

         // creates a future that will send a cancellation signal to the manager when the future is
-        // completed.
-        let fut = if graceful_shutdown {
+        // completed, regardless of success or error.
+ let fut = { let ct = manager.on_cancel.clone(); - Either::Left(fut.map(move |a| { - ct.cancel(); - a - })) - } else { - Either::Right(fut) + fut.map(move |res| { + if graceful_shutdown { + ct.cancel(); + } + res + }) }; let fut = if instrument { @@ -131,9 +131,9 @@ impl<'a> CriticalTaskBuilder<'a> { let fut = AssertUnwindSafe(fut) .catch_unwind() .map_err(move |error| { - ct.cancel(); let error = PanickedTaskError { error }; error!(%error, task = task_name, "Critical task failed."); + ct.cancel(); error }) .map(drop); diff --git a/crates/sozo/ops/Cargo.toml b/crates/sozo/ops/Cargo.toml index dc61e39a58..a3ff511a4a 100644 --- a/crates/sozo/ops/Cargo.toml +++ b/crates/sozo/ops/Cargo.toml @@ -43,6 +43,7 @@ serde.workspace = true serde_json.workspace = true serde_with.workspace = true smol_str.workspace = true +sozo-walnut = { workspace = true, optional = true } starknet.workspace = true starknet-crypto.workspace = true thiserror.workspace = true @@ -63,3 +64,4 @@ tee = "0.1.0" [features] test-utils = [ "dep:dojo-test-utils", "dep:katana-runner" ] +walnut = [ "dep:sozo-walnut" ] diff --git a/crates/sozo/ops/src/account.rs b/crates/sozo/ops/src/account.rs index fbdb4f4907..9508dfc966 100644 --- a/crates/sozo/ops/src/account.rs +++ b/crates/sozo/ops/src/account.rs @@ -248,7 +248,7 @@ pub async fn deploy( }; match txn_action { - TxnAction::Send { wait, receipt, max_fee_raw, fee_estimate_multiplier } => { + TxnAction::Send { wait, receipt, max_fee_raw, fee_estimate_multiplier, walnut } => { let max_fee = if let Some(max_fee_raw) = max_fee_raw { MaxFeeType::Manual { max_fee: max_fee_raw } } else { @@ -277,7 +277,8 @@ pub async fn deploy( }; let account_deployment = account_deployment.max_fee(max_fee.max_fee()); - let txn_config = TxnConfig { fee_estimate_multiplier, wait, receipt, max_fee_raw }; + let txn_config = + TxnConfig { fee_estimate_multiplier, wait, receipt, max_fee_raw, walnut }; do_account_deploy( max_fee, txn_config, diff --git a/crates/sozo/ops/src/auth.rs b/crates/sozo/ops/src/auth.rs index e8bd676a59..899540aefa 100644 --- a/crates/sozo/ops/src/auth.rs +++ b/crates/sozo/ops/src/auth.rs @@ -9,6 +9,8 @@ use dojo_world::contracts::naming::{ use dojo_world::contracts::world::WorldContract; use dojo_world::contracts::WorldContractReader; use scarb_ui::Ui; +#[cfg(feature = "walnut")] +use sozo_walnut::WalnutDebugger; use starknet::accounts::{Account, ConnectedAccount}; use starknet::core::types::{BlockId, BlockTag, Felt}; @@ -108,8 +110,9 @@ pub async fn grant_writer<'a, A>( ui: &'a Ui, world: &WorldContract, new_writers: &[ResourceWriter], - txn_config: TxnConfig, + txn_config: &TxnConfig, default_namespace: &str, + #[cfg(feature = "walnut")] walnut_debugger: &Option, ) -> Result<()> where A: ConnectedAccount + Sync + Send, @@ -129,7 +132,7 @@ where let res = world .account .execute_v1(calls) - .send_with_cfg(&txn_config) + .send_with_cfg(txn_config) .await .with_context(|| "Failed to send transaction")?; @@ -141,6 +144,8 @@ where res, txn_config.wait, txn_config.receipt, + #[cfg(feature = "walnut")] + walnut_debugger, ) .await?; } @@ -152,8 +157,9 @@ pub async fn grant_owner( ui: &Ui, world: &WorldContract, new_owners: &[ResourceOwner], - txn_config: TxnConfig, + txn_config: &TxnConfig, default_namespace: &str, + #[cfg(feature = "walnut")] walnut_debugger: &Option, ) -> Result<()> where A: ConnectedAccount + Sync + Send + 'static, @@ -169,7 +175,7 @@ where let res = world .account .execute_v1(calls) - .send_with_cfg(&txn_config) + .send_with_cfg(txn_config) .await .with_context(|| 
"Failed to send transaction")?; @@ -181,6 +187,8 @@ where res, txn_config.wait, txn_config.receipt, + #[cfg(feature = "walnut")] + walnut_debugger, ) .await?; @@ -191,8 +199,9 @@ pub async fn revoke_writer( ui: &Ui, world: &WorldContract, new_writers: &[ResourceWriter], - txn_config: TxnConfig, + txn_config: &TxnConfig, default_namespace: &str, + #[cfg(feature = "walnut")] walnut_debugger: &Option, ) -> Result<()> where A: ConnectedAccount + Sync + Send + 'static, @@ -211,7 +220,7 @@ where let res = world .account .execute_v1(calls) - .send_with_cfg(&txn_config) + .send_with_cfg(txn_config) .await .with_context(|| "Failed to send transaction")?; @@ -223,6 +232,8 @@ where res, txn_config.wait, txn_config.receipt, + #[cfg(feature = "walnut")] + walnut_debugger, ) .await?; } @@ -234,8 +245,9 @@ pub async fn revoke_owner( ui: &Ui, world: &WorldContract, new_owners: &[ResourceOwner], - txn_config: TxnConfig, + txn_config: &TxnConfig, default_namespace: &str, + #[cfg(feature = "walnut")] walnut_debugger: &Option, ) -> Result<()> where A: ConnectedAccount + Sync + Send + 'static, @@ -251,7 +263,7 @@ where let res = world .account .execute_v1(calls) - .send_with_cfg(&txn_config) + .send_with_cfg(txn_config) .await .with_context(|| "Failed to send transaction")?; @@ -261,6 +273,8 @@ where res, txn_config.wait, txn_config.receipt, + #[cfg(feature = "walnut")] + walnut_debugger, ) .await?; diff --git a/crates/sozo/ops/src/call.rs b/crates/sozo/ops/src/call.rs index 251cc07819..12bc197ce6 100644 --- a/crates/sozo/ops/src/call.rs +++ b/crates/sozo/ops/src/call.rs @@ -1,12 +1,15 @@ -use anyhow::{Context, Result}; +use anyhow::Result; use dojo_world::contracts::WorldContractReader; +use scarb_ui::Ui; use starknet::core::types::{BlockId, BlockTag, Felt, FunctionCall}; use starknet::core::utils::get_selector_from_name; use starknet::providers::Provider; +use crate::migration::ui::MigrationUi; use crate::utils::{get_contract_address_from_reader, parse_block_id}; pub async fn call( + ui: &Ui, world_reader: WorldContractReader
<P>
, tag_or_address: String, entrypoint: String, @@ -20,7 +23,7 @@ pub async fn call( BlockId::Tag(BlockTag::Pending) }; - let output = world_reader + let res = world_reader .provider() .call( FunctionCall { @@ -30,10 +33,23 @@ pub async fn call( }, block_id, ) - .await - .with_context(|| format!("Failed to call {entrypoint}"))?; + .await; - println!("[ {} ]", output.iter().map(|o| format!("0x{:x}", o)).collect::>().join(" ")); + match res { + Ok(output) => { + println!( + "[ {} ]", + output.iter().map(|o| format!("0x{:x}", o)).collect::>().join(" ") + ); + } + Err(e) => { + ui.print_hidden_sub(format!("{:?}", e)); + anyhow::bail!(format!( + "Error calling entrypoint `{}` on address: {:#066x}", + entrypoint, contract_address + )); + } + } Ok(()) } diff --git a/crates/sozo/ops/src/execute.rs b/crates/sozo/ops/src/execute.rs index 13ceec55d9..7ec2aefebd 100644 --- a/crates/sozo/ops/src/execute.rs +++ b/crates/sozo/ops/src/execute.rs @@ -2,6 +2,8 @@ use anyhow::{Context, Result}; use dojo_utils::{TransactionExt, TxnConfig}; use dojo_world::contracts::world::WorldContract; use scarb_ui::Ui; +#[cfg(feature = "walnut")] +use sozo_walnut::WalnutDebugger; use starknet::accounts::ConnectedAccount; use starknet::core::types::{Call, Felt}; use starknet::core::utils::get_selector_from_name; @@ -15,6 +17,7 @@ pub async fn execute( calldata: Vec, world: &WorldContract, txn_config: &TxnConfig, + #[cfg(feature = "walnut")] walnut_debugger: &Option, ) -> Result<()> where A: ConnectedAccount + Sync + Send + 'static, @@ -37,6 +40,8 @@ where res, txn_config.wait, txn_config.receipt, + #[cfg(feature = "walnut")] + walnut_debugger, ) .await } diff --git a/crates/sozo/ops/src/migration/auto_auth.rs b/crates/sozo/ops/src/migration/auto_auth.rs index 18b2a9a861..7e06d76603 100644 --- a/crates/sozo/ops/src/migration/auto_auth.rs +++ b/crates/sozo/ops/src/migration/auto_auth.rs @@ -2,6 +2,8 @@ use anyhow::Result; use dojo_utils::TxnConfig; use dojo_world::contracts::WorldContract; use scarb::core::Workspace; +#[cfg(feature = "walnut")] +use sozo_walnut::WalnutDebugger; use starknet::accounts::ConnectedAccount; use crate::auth::{grant_writer, revoke_writer, ResourceWriter}; @@ -13,6 +15,7 @@ pub async fn auto_authorize( default_namespace: &str, grant: &[ResourceWriter], revoke: &[ResourceWriter], + #[cfg(feature = "walnut")] walnut_debugger: &Option, ) -> Result<()> where A: ConnectedAccount + Sync + Send + 'static, @@ -20,8 +23,26 @@ where { let ui = ws.config().ui(); - grant_writer(&ui, world, grant, *txn_config, default_namespace).await?; - revoke_writer(&ui, world, revoke, *txn_config, default_namespace).await?; + grant_writer( + &ui, + world, + grant, + txn_config, + default_namespace, + #[cfg(feature = "walnut")] + walnut_debugger, + ) + .await?; + revoke_writer( + &ui, + world, + revoke, + txn_config, + default_namespace, + #[cfg(feature = "walnut")] + walnut_debugger, + ) + .await?; Ok(()) } diff --git a/crates/sozo/ops/src/migration/migrate.rs b/crates/sozo/ops/src/migration/migrate.rs index 4c128bb5c0..c8c4b0fbfd 100644 --- a/crates/sozo/ops/src/migration/migrate.rs +++ b/crates/sozo/ops/src/migration/migrate.rs @@ -6,7 +6,7 @@ use anyhow::{anyhow, bail, Context, Result}; use cainome::cairo_serde::ByteArray; use camino::Utf8PathBuf; use dojo_utils::{TransactionExt, TransactionWaiter, TxnConfig}; -use dojo_world::contracts::abi::world; +use dojo_world::contracts::abi::world::{self, Resource}; use dojo_world::contracts::naming::{ self, compute_selector_from_tag, get_name_from_tag, get_namespace_from_tag, }; @@ 
-413,6 +413,11 @@ where let calls = resources.iter().map(|r| world.set_metadata_getcall(r)).collect::>(); + if calls.is_empty() { + ui.print_sub("No metadata to register"); + return Ok(()); + } + let InvokeTransactionResult { transaction_hash } = migrator.execute_v1(calls).send_with_cfg(&txn_config).await.map_err(|e| { ui.verbose(format!("{e:?}")); @@ -443,18 +448,34 @@ where A: ConnectedAccount + Send + Sync, ::Provider: Send, { - ui.print_header(format!("# Namespaces ({})", namespaces.len())); - let world = WorldContract::new(world_address, migrator); + // We need to check if the namespace is not already registered. + let mut registered_namespaces = vec![]; + + for namespace in namespaces { + let namespace_selector = naming::compute_bytearray_hash(namespace); + + if let Resource::Namespace = world.resource(&namespace_selector).call().await? { + registered_namespaces.push(namespace); + } + } + let calls = namespaces .iter() + .filter(|ns| !registered_namespaces.contains(ns)) .map(|ns| { ui.print(italic_message(&ns).to_string()); world.register_namespace_getcall(&ByteArray::from_string(ns).unwrap()) }) .collect::>(); + if calls.is_empty() { + return Ok(()); + } + + ui.print_header(format!("# Namespaces ({})", namespaces.len() - registered_namespaces.len())); + let InvokeTransactionResult { transaction_hash } = world.account.execute_v1(calls).send_with_cfg(txn_config).await.map_err(|e| { ui.verbose(format!("{e:?}")); @@ -489,13 +510,25 @@ where ui.print_header(format!("# Models ({})", models.len())); + let world = WorldContract::new(world_address, &migrator); + let mut declare_output = vec![]; + let mut models_to_register = vec![]; for (i, m) in models.iter().enumerate() { let tag = &m.diff.tag; ui.print(italic_message(tag).to_string()); + if let Resource::Unregistered = + world.resource(&compute_selector_from_tag(tag)).call().await? + { + models_to_register.push(tag.clone()); + } else { + ui.print_sub("Already registered"); + continue; + } + match m.declare(&migrator, txn_config).await { Ok(output) => { ui.print_sub(format!("Selector: {:#066x}", compute_selector_from_tag(tag))); @@ -519,18 +552,20 @@ where } } - let world = WorldContract::new(world_address, &migrator); - - let mut registered_models = vec![]; - let calls = models .iter() - .map(|c| { - registered_models.push(c.diff.tag.clone()); - world.register_model_getcall(&c.diff.local_class_hash.into()) - }) + .filter(|m| models_to_register.contains(&m.diff.tag)) + .map(|c| world.register_model_getcall(&c.diff.local_class_hash.into())) .collect::>(); + if calls.is_empty() { + return Ok(RegisterOutput { + transaction_hash: Felt::ZERO, + declare_output: vec![], + registered_models: vec![], + }); + } + let InvokeTransactionResult { transaction_hash } = world.account.execute_v1(calls).send_with_cfg(txn_config).await.map_err(|e| { ui.verbose(format!("{e:?}")); @@ -541,7 +576,7 @@ where ui.print(format!("All models are registered at: {transaction_hash:#x}\n")); - Ok(RegisterOutput { transaction_hash, declare_output, registered_models }) + Ok(RegisterOutput { transaction_hash, declare_output, registered_models: models_to_register }) } // For now duplicated because the migrator account is different from the declarers account type. 
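The checks added in this hunk make registration idempotent: on-chain state is queried first, and calls are built only for resources that are still unregistered, so re-running a migration never re-registers an existing namespace or model. A stripped-down sketch of the same filter-then-batch idea, with hypothetical stand-in types (the real code uses world.resource() and the Resource enum as shown above):

    /// Hypothetical stand-in for the on-chain resource lookup used above.
    #[derive(PartialEq)]
    enum ResourceState {
        Unregistered,
        Registered,
    }

    /// Illustration only: a batched call to be sent in one transaction.
    struct Call {
        selector: String,
    }

    /// Build register calls only for tags the world doesn't know about yet.
    fn register_calls(
        tags: &[String],
        state_of: impl Fn(&str) -> ResourceState,
    ) -> Vec<Call> {
        tags.iter()
            .filter(|tag| state_of(tag.as_str()) == ResourceState::Unregistered)
            .map(|tag| Call { selector: tag.clone() })
            .collect()
    }

As in the hunk above, when the resulting call list is empty the whole transaction is skipped rather than submitted.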
@@ -568,7 +603,7 @@ where ui.print_header(format!("# Models ({})", models.len())); let mut declare_output = vec![]; - let mut registered_models = vec![]; + let mut models_to_register = vec![]; let mut declarers_tasks = HashMap::new(); for (i, m) in models.iter().enumerate() { @@ -596,9 +631,21 @@ where let all_results = futures::future::join_all(futures).await; + let world = WorldContract::new(world_address, &migrator); + for results in all_results { for (index, tag, result) in results { ui.print(italic_message(&tag).to_string()); + + if let Resource::Unregistered = + world.resource(&compute_selector_from_tag(&tag)).call().await? + { + models_to_register.push(tag.clone()); + } else { + ui.print_sub("Already registered"); + continue; + } + match result { Ok(output) => { ui.print_sub(format!("Selector: {:#066x}", compute_selector_from_tag(&tag))); @@ -623,16 +670,20 @@ where } } - let world = WorldContract::new(world_address, &migrator); - let calls = models .iter() - .map(|c| { - registered_models.push(c.diff.tag.clone()); - world.register_model_getcall(&c.diff.local_class_hash.into()) - }) + .filter(|m| models_to_register.contains(&m.diff.tag)) + .map(|c| world.register_model_getcall(&c.diff.local_class_hash.into())) .collect::>(); + if calls.is_empty() { + return Ok(RegisterOutput { + transaction_hash: Felt::ZERO, + declare_output: vec![], + registered_models: vec![], + }); + } + let InvokeTransactionResult { transaction_hash } = world.account.execute_v1(calls).send_with_cfg(txn_config).await.map_err(|e| { ui.verbose(format!("{e:?}")); @@ -643,7 +694,7 @@ where ui.print(format!("All models are registered at: {transaction_hash:#x}\n")); - Ok(RegisterOutput { transaction_hash, declare_output, registered_models }) + Ok(RegisterOutput { transaction_hash, declare_output, registered_models: models_to_register }) } async fn register_dojo_contracts( @@ -731,6 +782,10 @@ where } } + if calls.is_empty() { + return Ok(deploy_outputs); + } + let InvokeTransactionResult { transaction_hash } = migrator.execute_v1(calls).send_with_cfg(txn_config).await.map_err(|e| { ui.verbose(format!("{e:?}")); @@ -858,6 +913,10 @@ where } } + if calls.is_empty() { + return Ok(deploy_outputs); + } + let InvokeTransactionResult { transaction_hash } = migrator.execute_v1(calls).send_with_cfg(txn_config).await.map_err(|e| { ui.verbose(format!("{e:?}")); diff --git a/crates/sozo/ops/src/migration/mod.rs b/crates/sozo/ops/src/migration/mod.rs index 4b60c0b345..a74db4799f 100644 --- a/crates/sozo/ops/src/migration/mod.rs +++ b/crates/sozo/ops/src/migration/mod.rs @@ -10,6 +10,8 @@ use dojo_world::metadata::get_default_namespace_from_ws; use dojo_world::migration::world::WorldDiff; use dojo_world::migration::{DeployOutput, UpgradeOutput}; use scarb::core::Workspace; +#[cfg(feature = "walnut")] +use sozo_walnut::WalnutDebugger; use starknet::accounts::{ConnectedAccount, ExecutionEncoding, SingleOwnerAccount}; use starknet::core::types::{BlockId, BlockTag, Call, Felt, InvokeTransactionResult}; use starknet::core::utils::{cairo_short_string_to_felt, get_contract_address}; @@ -64,7 +66,7 @@ async fn get_declarers_accounts( .post(rpc_url) .json(&serde_json::json!({ "jsonrpc": "2.0", - "method": "katana_predeployedAccounts", + "method": "dev_predeployedAccounts", "params": [], "id": 1 })) @@ -135,6 +137,10 @@ where { let ui = ws.config().ui(); + #[cfg(feature = "walnut")] + let walnut_debugger = + WalnutDebugger::new_from_flag(txn_config.walnut, Url::parse(&rpc_url).unwrap()); + // its path to a file so `parent` should never return 
`None` let root_dir = ws.manifest_path().parent().unwrap().to_path_buf(); @@ -156,7 +162,7 @@ where &account, world_address, &ui, - skip_manifests, + &skip_manifests, ) .await .map_err(|e| { @@ -216,6 +222,11 @@ where Ok(None) } else { + #[cfg(feature = "walnut")] + if txn_config.walnut { + WalnutDebugger::check_api_key()?; + } + let declarers = get_declarers_accounts(&account, &rpc_url).await?; let declarers_len = if declarers.is_empty() { 1 } else { declarers.len() }; @@ -269,7 +280,18 @@ where ) .await?; - match auto_authorize(ws, &world, &txn_config, &default_namespace, &grant, &revoke).await { + match auto_authorize( + ws, + &world, + &txn_config, + &default_namespace, + &grant, + &revoke, + #[cfg(feature = "walnut")] + &walnut_debugger, + ) + .await + { Ok(()) => { ui.print_sub("Auto authorize completed successfully"); } @@ -284,19 +306,33 @@ where // Run dojo inits now that everything is actually deployed and permissioned. let mut init_calls = vec![]; - for c in strategy.contracts { - let was_upgraded = migration_output - .contracts - .iter() - .flatten() - .find(|output| output.tag == c.diff.tag) - .map(|output| output.was_upgraded) - .unwrap_or(false); - - if was_upgraded { + for (i, c) in strategy.contracts.iter().enumerate() { + if let Some(contract_migration_output) = &migration_output.contracts[i] { + if contract_migration_output.was_upgraded { + ui.print_sub(format!( + "Contract {} was upgraded, skipping initialization", + c.diff.tag + )); + continue; + } + } else { + ui.print_sub(format!( + "Contract {} was not deployed at this run, skipping initialization", + c.diff.tag + )); continue; } + if let Some(skips) = &skip_manifests { + if skips.contains(&c.diff.tag) { + ui.print_sub(format!( + "Contract {} was skipped in config, skipping initialization", + c.diff.tag + )); + continue; + } + } + let contract_selector = compute_selector_from_tag(&c.diff.tag); let init_calldata: Vec = c .diff @@ -308,6 +344,16 @@ where let mut calldata = vec![contract_selector, Felt::from(init_calldata.len())]; calldata.extend(init_calldata); + ui.print_sub(format!( + "Initializing contract: {} ([{}])", + c.diff.tag, + calldata + .iter() + .map(|c| format!("{:#x}", c)) + .collect::>() + .join(", ") + )); + init_calls.push(Call { calldata, selector: selector!("init_contract"), @@ -332,12 +378,22 @@ where } } + #[cfg(feature = "walnut")] + if let Some(walnut_debugger) = &walnut_debugger { + walnut_debugger.verify_migration_strategy(ws, &strategy).await?; + } + if let Some(migration_output) = &migration_output { if !ws.config().offline() { upload_metadata(ws, &account, migration_output.clone(), txn_config).await?; } } + // We should print the block number at which the world was deployed by polling the + // transaction hash of the migration transaction here once everything is done as it + // has high chance to be into a mined block. If not, just wait for this inclusion? + // Should be pretty fast with BOLT. 
+ Ok(migration_output) } } diff --git a/crates/sozo/ops/src/migration/utils.rs b/crates/sozo/ops/src/migration/utils.rs index 4e2aedeff6..8d1b62e016 100644 --- a/crates/sozo/ops/src/migration/utils.rs +++ b/crates/sozo/ops/src/migration/utils.rs @@ -25,7 +25,7 @@ pub(super) async fn load_world_manifests( account: A, world_address: Option, ui: &Ui, - skip_migration: Option>, + skip_migration: &Option>, ) -> Result<(BaseManifest, Option)> where A: ConnectedAccount + Sync + Send, diff --git a/crates/sozo/ops/src/model.rs b/crates/sozo/ops/src/model.rs index 0eb710cc08..998ee145ee 100644 --- a/crates/sozo/ops/src/model.rs +++ b/crates/sozo/ops/src/model.rs @@ -668,9 +668,10 @@ pub fn deep_print_ty(root: &Ty) { } } -/// Checks if the tag is a valid tag, if not, return the default namespace. This allows -/// sozo model commands to be run even without a Scarb.toml file in the current directory -/// if a valid tag is provided. +/// Checks if the tag is a valid tag, if not, return the default namespace. +/// +/// This allows sozo model commands to be run even without a Scarb.toml file in the current +/// directory if a valid tag is provided. /// TODO: This may be removed in the future once SDKs are updated to use the new bindgen. pub fn check_tag_or_read_default_namespace(tag_or_name: &str, config: &Config) -> Result { if naming::is_valid_tag(tag_or_name) { diff --git a/crates/sozo/ops/src/register.rs b/crates/sozo/ops/src/register.rs index 00df4f2fe5..3897489bc5 100644 --- a/crates/sozo/ops/src/register.rs +++ b/crates/sozo/ops/src/register.rs @@ -6,6 +6,8 @@ use dojo_world::contracts::model::ModelReader; use dojo_world::contracts::{WorldContract, WorldContractReader}; use dojo_world::manifest::DeploymentManifest; use scarb::core::Config; +#[cfg(feature = "walnut")] +use sozo_walnut::WalnutDebugger; use starknet::accounts::ConnectedAccount; use starknet::core::types::Felt; use starknet::providers::Provider; @@ -15,10 +17,11 @@ use crate::utils::handle_transaction_result; pub async fn model_register( models: Vec, world: &WorldContract, - txn_config: TxnConfig, + txn_config: &TxnConfig, world_reader: WorldContractReader
<P>
, world_address: Felt, config: &Config, + #[cfg(feature = "walnut")] walnut_debugger: &Option, ) -> Result<()> where A: ConnectedAccount + Sync + Send + 'static, @@ -66,7 +69,7 @@ where let res = world .account .execute_v1(calls) - .send_with_cfg(&txn_config) + .send_with_cfg(txn_config) .await .with_context(|| "Failed to send transaction")?; @@ -76,6 +79,8 @@ where res, txn_config.wait, txn_config.receipt, + #[cfg(feature = "walnut")] + walnut_debugger, ) .await?; diff --git a/crates/sozo/ops/src/test_utils/setup.rs b/crates/sozo/ops/src/test_utils/setup.rs index d03bd5dbda..3ea1d53d0f 100644 --- a/crates/sozo/ops/src/test_utils/setup.rs +++ b/crates/sozo/ops/src/test_utils/setup.rs @@ -155,6 +155,8 @@ pub async fn setup( &default_namespace, &grant, &revoke, + #[cfg(feature = "walnut")] + &None, ) .await?; diff --git a/crates/sozo/ops/src/tests/auth.rs b/crates/sozo/ops/src/tests/auth.rs index 2e9d5a86b3..38aaf0ee4b 100644 --- a/crates/sozo/ops/src/tests/auth.rs +++ b/crates/sozo/ops/src/tests/auth.rs @@ -63,8 +63,10 @@ async fn auth_grant_writer_ok() { &Ui::new(Verbosity::Normal, OutputFormat::Text), &world, &get_resource_writers(), - TxnConfig { wait: true, ..Default::default() }, + &TxnConfig { wait: true, ..Default::default() }, DEFAULT_NAMESPACE, + #[cfg(feature = "walnut")] + &None, ) .await .unwrap(); @@ -87,8 +89,10 @@ async fn auth_revoke_writer_ok() { &Ui::new(Verbosity::Normal, OutputFormat::Text), &world, &get_resource_writers(), - TxnConfig { wait: true, ..Default::default() }, + &TxnConfig { wait: true, ..Default::default() }, DEFAULT_NAMESPACE, + #[cfg(feature = "walnut")] + &None, ) .await .unwrap(); @@ -99,8 +103,10 @@ async fn auth_revoke_writer_ok() { &Ui::new(Verbosity::Normal, OutputFormat::Text), &world, &get_resource_writers(), - TxnConfig { wait: true, ..Default::default() }, + &TxnConfig { wait: true, ..Default::default() }, DEFAULT_NAMESPACE, + #[cfg(feature = "walnut")] + &None, ) .await .unwrap(); @@ -136,8 +142,10 @@ async fn auth_grant_owner_ok() { &Ui::new(Verbosity::Normal, OutputFormat::Text), &world, &get_resource_owners(other_account), - TxnConfig { wait: true, ..Default::default() }, + &TxnConfig { wait: true, ..Default::default() }, DEFAULT_NAMESPACE, + #[cfg(feature = "walnut")] + &None, ) .await .unwrap(); @@ -169,8 +177,10 @@ async fn auth_revoke_owner_ok() { &Ui::new(Verbosity::Normal, OutputFormat::Text), &world, &get_resource_owners(default_account), - TxnConfig { wait: true, ..Default::default() }, + &TxnConfig { wait: true, ..Default::default() }, DEFAULT_NAMESPACE, + #[cfg(feature = "walnut")] + &None, ) .await .unwrap(); @@ -199,6 +209,8 @@ async fn execute_spawn( vec![], world, &TxnConfig::init_wait(), + #[cfg(feature = "walnut")] + &None, ) .await; diff --git a/crates/sozo/ops/src/tests/call.rs b/crates/sozo/ops/src/tests/call.rs index addb2b4e96..f477c64814 100644 --- a/crates/sozo/ops/src/tests/call.rs +++ b/crates/sozo/ops/src/tests/call.rs @@ -1,5 +1,6 @@ use dojo_world::contracts::WorldContractReader; use katana_runner::{KatanaRunner, KatanaRunnerConfig}; +use scarb_ui::Ui; use starknet::accounts::SingleOwnerAccount; use starknet::core::types::Felt; use starknet::providers::jsonrpc::HttpTransport; @@ -21,8 +22,11 @@ async fn call_with_bad_address() { let provider = sequencer.provider(); let world_reader = WorldContractReader::new(world.address, provider); + let ui = Ui::new(scarb_ui::Verbosity::Verbose, scarb_ui::OutputFormat::Text); + assert!( call::call( + &ui, world_reader, "0xBadCoffeeBadCode".to_string(), ENTRYPOINT.to_string(), 
@@ -43,8 +47,11 @@ async fn call_with_bad_name() { let provider = sequencer.provider(); let world_reader = WorldContractReader::new(world.address, provider); + let ui = Ui::new(scarb_ui::Verbosity::Verbose, scarb_ui::OutputFormat::Text); + assert!( call::call( + &ui, world_reader, "BadName".to_string(), ENTRYPOINT.to_string(), @@ -65,8 +72,11 @@ async fn call_with_bad_entrypoint() { let provider = sequencer.provider(); let world_reader = WorldContractReader::new(world.address, provider); + let ui = Ui::new(scarb_ui::Verbosity::Verbose, scarb_ui::OutputFormat::Text); + assert!( call::call( + &ui, world_reader, CONTRACT_TAG.to_string(), "BadEntryPoint".to_string(), @@ -87,8 +97,11 @@ async fn call_with_bad_calldata() { let provider = sequencer.provider(); let world_reader = WorldContractReader::new(world.address, provider); + let ui = Ui::new(scarb_ui::Verbosity::Verbose, scarb_ui::OutputFormat::Text); + assert!( call::call( + &ui, world_reader, CONTRACT_TAG.to_string(), ENTRYPOINT.to_string(), @@ -109,9 +122,17 @@ async fn call_with_contract_name() { let provider = sequencer.provider(); let world_reader = WorldContractReader::new(world.address, provider); - let r = - call::call(world_reader, CONTRACT_TAG.to_string(), ENTRYPOINT.to_string(), vec![], None) - .await; + let ui = Ui::new(scarb_ui::Verbosity::Verbose, scarb_ui::OutputFormat::Text); + + let r = call::call( + &ui, + world_reader, + CONTRACT_TAG.to_string(), + ENTRYPOINT.to_string(), + vec![], + None, + ) + .await; assert!(r.is_ok()); } @@ -121,6 +142,8 @@ async fn call_with_contract_address() { let config = KatanaRunnerConfig::default().with_db_dir("/tmp/spawn-and-move-db"); let sequencer = KatanaRunner::new_with_config(config).expect("Failed to start runner."); + let ui = Ui::new(scarb_ui::Verbosity::Verbose, scarb_ui::OutputFormat::Text); + let world = setup::setup_with_world(&sequencer).await.unwrap(); let provider = sequencer.provider(); let world_reader = WorldContractReader::new(world.address, provider); @@ -133,6 +156,7 @@ async fn call_with_contract_address() { assert!( call::call( + &ui, world_reader, format!("{:#x}", contract_address), ENTRYPOINT.to_string(), diff --git a/crates/sozo/ops/src/tests/migration.rs b/crates/sozo/ops/src/tests/migration.rs index 2a30c5bba2..8dac034b65 100644 --- a/crates/sozo/ops/src/tests/migration.rs +++ b/crates/sozo/ops/src/tests/migration.rs @@ -387,7 +387,17 @@ async fn migrate_with_auto_authorize() { .await .unwrap(); - let res = auto_authorize(&ws, &world, &txn_config, &default_namespace, &grant, &revoke).await; + let res = auto_authorize( + &ws, + &world, + &txn_config, + &default_namespace, + &grant, + &revoke, + #[cfg(feature = "walnut")] + &None, + ) + .await; assert!(res.is_ok()); let provider = sequencer.provider(); diff --git a/crates/sozo/ops/src/tests/model.rs b/crates/sozo/ops/src/tests/model.rs index 2edc2d4299..4e44973261 100644 --- a/crates/sozo/ops/src/tests/model.rs +++ b/crates/sozo/ops/src/tests/model.rs @@ -58,7 +58,7 @@ async fn test_model_ops() { ) .await .unwrap(), - Felt::from_hex("0x604735fb6510c558ba3ae21972fcbdb1b4234bedcbc990910bd7efd194e7db3") + Felt::from_hex("0x68e3a53988f20d84c6652f25d6add070633a5d05f8c4ac68285cacb228afa14") .unwrap() ); @@ -171,6 +171,8 @@ async fn test_model_ops() { vec![], &WorldContract::new(world.address, sequencer.account(0)), &TxnConfig::init_wait(), + #[cfg(feature = "walnut")] + &None, ) .await; diff --git a/crates/sozo/ops/src/utils.rs b/crates/sozo/ops/src/utils.rs index f67287b6b7..61d9c1d04e 100644 --- 
a/crates/sozo/ops/src/utils.rs
+++ b/crates/sozo/ops/src/utils.rs
@@ -5,6 +5,8 @@ use dojo_world::contracts::naming::get_name_from_tag;
 use dojo_world::contracts::world::{WorldContract, WorldContractReader};
 use dojo_world::migration::strategy::generate_salt;
 use scarb_ui::Ui;
+#[cfg(feature = "walnut")]
+use sozo_walnut::WalnutDebugger;
 use starknet::accounts::ConnectedAccount;
 use starknet::core::types::{BlockId, BlockTag, ExecutionResult, Felt, InvokeTransactionResult};
 use starknet::providers::Provider;
@@ -78,12 +80,14 @@ pub async fn get_contract_address_from_reader(
 /// * `transaction_result` - Result of the transaction to handle.
 /// * `wait_for_tx` - Wait for the transaction to be mined.
 /// * `show_receipt` - If the receipt of the transaction should be displayed on stdout.
+/// * `walnut_debugger` - Optionally a Walnut debugger to debug the transaction.
 pub async fn handle_transaction_result
<P>
( ui: &Ui, provider: P, transaction_result: InvokeTransactionResult, wait_for_tx: bool, show_receipt: bool, + #[cfg(feature = "walnut")] walnut_debugger: &Option, ) -> Result<()> where P: Provider + Send, @@ -106,6 +110,11 @@ where ui.print(format!("Reason:\n{}", reason)); } }; + + #[cfg(feature = "walnut")] + if let Some(walnut_debugger) = walnut_debugger { + walnut_debugger.debug_transaction(ui, &transaction_result.transaction_hash)?; + } } } diff --git a/crates/sozo/walnut/Cargo.toml b/crates/sozo/walnut/Cargo.toml new file mode 100644 index 0000000000..8fb55d68fe --- /dev/null +++ b/crates/sozo/walnut/Cargo.toml @@ -0,0 +1,25 @@ +[package] +edition.workspace = true +name = "sozo-walnut" +version.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +anyhow.workspace = true +console.workspace = true +dojo-world = { workspace = true, features = [ "contracts", "metadata", "migration" ] } +futures.workspace = true +reqwest.workspace = true +scarb.workspace = true +scarb-ui.workspace = true +serde.workspace = true +serde_json.workspace = true +starknet.workspace = true +thiserror.workspace = true +url.workspace = true +urlencoding = "2.1.3" +walkdir.workspace = true + +[dev-dependencies] +starknet.workspace = true diff --git a/crates/sozo/walnut/src/debugger.rs b/crates/sozo/walnut/src/debugger.rs new file mode 100644 index 0000000000..b7997b0071 --- /dev/null +++ b/crates/sozo/walnut/src/debugger.rs @@ -0,0 +1,50 @@ +use dojo_world::migration::strategy::MigrationStrategy; +use scarb::core::Workspace; +use scarb_ui::Ui; +use starknet::core::types::Felt; +use url::Url; + +use crate::transaction::walnut_debug_transaction; +use crate::verification::walnut_verify_migration_strategy; +use crate::{utils, Error}; + +/// A debugger for Starknet transactions embedding the walnut configuration. +#[derive(Debug)] +pub struct WalnutDebugger { + rpc_url: Url, +} + +impl WalnutDebugger { + /// Creates a new Walnut debugger. + pub fn new(rpc_url: Url) -> Self { + Self { rpc_url } + } + + /// Creates a new Walnut debugger if the `use_walnut` flag is set. + pub fn new_from_flag(use_walnut: bool, rpc_url: Url) -> Option { + if use_walnut { Some(Self::new(rpc_url)) } else { None } + } + + /// Debugs a transaction with Walnut by printing a link to the Walnut debugger page. + pub fn debug_transaction(&self, ui: &Ui, transaction_hash: &Felt) -> Result<(), Error> { + let url = walnut_debug_transaction(&self.rpc_url, transaction_hash)?; + ui.print(format!("Debug transaction with Walnut: {url}")); + Ok(()) + } + + /// Verifies a migration strategy with Walnut by uploading the source code of the contracts and + /// models in the strategy. + pub async fn verify_migration_strategy( + &self, + ws: &Workspace<'_>, + strategy: &MigrationStrategy, + ) -> anyhow::Result<()> { + walnut_verify_migration_strategy(ws, self.rpc_url.to_string(), strategy).await + } + + /// Checks if the Walnut API key is set. + pub fn check_api_key() -> Result<(), Error> { + let _ = utils::walnut_get_api_key()?; + Ok(()) + } +} diff --git a/crates/sozo/walnut/src/lib.rs b/crates/sozo/walnut/src/lib.rs new file mode 100644 index 0000000000..96feb2be92 --- /dev/null +++ b/crates/sozo/walnut/src/lib.rs @@ -0,0 +1,67 @@ +//! # Walnut Integration Module +//! +//! This module integrates Walnut, a debugger for Starknet transactions, with Dojo, +//! enhancing Dojo's capabilities by allowing users to debug transactions. +//! +//! 
The integration introduces a `--walnut` flag to the `sozo migrate apply` and `sozo execute` +//! commands. +//! +//! Using the --walnut flag with the `sozo migrate apply` command performs a verification process, +//! during which the source code of the Dojo project is uploaded and stored on Walnut. +//! The source code of each class will be linked with the respective class hash. +//! +//! When running the `sozo execute` command with the `--walnut` flag, a link to the Walnut debugger +//! page is printed to the terminal, allowing users to debug their transactions. +//! +//! At Sozo level, only the `WalnutDebugger` struct is exposed to handle the integration. +//! +//! Note: +//! - Classes should be verified with `sozo migrate apply --walnut` before debugging transactions. +//! - This feature is only supported on hosted networks. + +#![cfg_attr(not(test), warn(unused_crate_dependencies))] + +mod debugger; +mod transaction; +mod utils; +mod verification; + +pub use debugger::WalnutDebugger; + +pub const WALNUT_APP_URL: &str = "https://app.walnut.dev"; +pub const WALNUT_API_URL: &str = "https://api.walnut.dev"; +pub const WALNUT_API_KEY_ENV_VAR: &str = "WALNUT_API_KEY"; +pub const WALNUT_API_URL_ENV_VAR: &str = "WALNUT_API_URL"; + +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("Debugging transactions with Walnut is only supported on hosted networks")] + UnsupportedNetwork, + + #[error(transparent)] + UrlParseError(#[from] url::ParseError), + + #[error("Invalid file name")] + InvalidFileName, + + #[error("Namespace prefix not found in file name")] + NamespacePrefixNotFound, + + #[error("Failed to serialize payload: {0}")] + SerializationError(#[from] serde_json::Error), + + #[error(transparent)] + RequestError(#[from] reqwest::Error), + + #[error("Failed to verify contract: {0}")] + VerificationError(String), + + #[error(transparent)] + Io(#[from] std::io::Error), + + #[error( + "Environment variable '{WALNUT_API_KEY_ENV_VAR}' is not set. Please set it to your Walnut \ + API key." 
+ )] + MissingApiKey, +} diff --git a/crates/sozo/walnut/src/transaction.rs b/crates/sozo/walnut/src/transaction.rs new file mode 100644 index 0000000000..bed8910b9a --- /dev/null +++ b/crates/sozo/walnut/src/transaction.rs @@ -0,0 +1,58 @@ +use starknet::core::types::Felt; +use url::Url; +use urlencoding::encode; + +use crate::{Error, WALNUT_APP_URL}; + +pub fn walnut_debug_transaction(rpc_url: &Url, transaction_hash: &Felt) -> Result { + // Check if the RPC URL is not localhost + if rpc_url.host_str() != Some("localhost") && rpc_url.host_str() != Some("127.0.0.1") { + let mut url = Url::parse(WALNUT_APP_URL)?; + + url.path_segments_mut().unwrap().push("transactions"); + url.query_pairs_mut() + .append_pair("rpcUrl", &encode(rpc_url.as_str())) + .append_pair("txHash", &format!("{transaction_hash:#066x}")); + + Ok(url) + } else { + Err(Error::UnsupportedNetwork) + } +} + +#[cfg(test)] +mod tests { + + use starknet::macros::felt; + + use super::*; + + #[test] + fn test_walnut_debug_transaction_hosted() { + let rpc_url = Url::parse("https://example.com").unwrap(); + let transaction_hash = felt!("0x1234"); + + let result = walnut_debug_transaction(&rpc_url, &transaction_hash); + + assert!(result.is_ok()); + let debug_url = result.unwrap(); + assert!(debug_url.as_str().starts_with(WALNUT_APP_URL)); + assert!(debug_url.as_str().contains("rpcUrl=https%253A%252F%252Fexample.com")); + assert!( + debug_url.as_str().contains( + "txHash=0x0000000000000000000000000000000000000000000000000000000000001234" + ) + ); + } + + #[test] + fn test_walnut_debug_transaction_localhost() { + let rpc_url = Url::parse("http://localhost:5050").unwrap(); + let transaction_hash = felt!("0x1234"); + + let result = walnut_debug_transaction(&rpc_url, &transaction_hash); + + assert!(result.is_err()); + assert!(matches!(result.unwrap_err(), Error::UnsupportedNetwork)); + } +} diff --git a/crates/sozo/walnut/src/utils.rs b/crates/sozo/walnut/src/utils.rs new file mode 100644 index 0000000000..ca861249c1 --- /dev/null +++ b/crates/sozo/walnut/src/utils.rs @@ -0,0 +1,11 @@ +use std::env; + +use crate::{Error, WALNUT_API_KEY_ENV_VAR, WALNUT_API_URL, WALNUT_API_URL_ENV_VAR}; + +pub fn walnut_get_api_key() -> Result { + env::var(WALNUT_API_KEY_ENV_VAR).map_err(|_| Error::MissingApiKey) +} + +pub fn walnut_get_api_url() -> String { + env::var(WALNUT_API_URL_ENV_VAR).unwrap_or_else(|_| WALNUT_API_URL.to_string()) +} diff --git a/crates/sozo/walnut/src/verification.rs b/crates/sozo/walnut/src/verification.rs new file mode 100644 index 0000000000..9c987e9990 --- /dev/null +++ b/crates/sozo/walnut/src/verification.rs @@ -0,0 +1,199 @@ +use std::ffi::OsStr; +use std::io; +use std::path::Path; + +use console::{pad_str, Alignment, Style, StyledObject}; +use dojo_world::metadata::get_default_namespace_from_ws; +use dojo_world::migration::strategy::MigrationStrategy; +use futures::future::join_all; +use reqwest::StatusCode; +use scarb::core::Workspace; +use serde::Serialize; +use serde_json::Value; +use walkdir::WalkDir; + +use crate::utils::{walnut_get_api_key, walnut_get_api_url}; +use crate::Error; + +/// Verifies all classes declared during migration. +/// Only supported on hosted networks (non-localhost). +/// +/// This function verifies all contracts and models in the strategy. For every contract and model, +/// it sends a request to the Walnut backend with the class name, class hash, RPC URL, and source +/// code. 
diff --git a/crates/sozo/walnut/src/verification.rs b/crates/sozo/walnut/src/verification.rs new file mode 100644 index 0000000000..9c987e9990 --- /dev/null +++ b/crates/sozo/walnut/src/verification.rs @@ -0,0 +1,199 @@ +use std::ffi::OsStr; +use std::io; +use std::path::Path; + +use console::{pad_str, Alignment, Style, StyledObject}; +use dojo_world::metadata::get_default_namespace_from_ws; +use dojo_world::migration::strategy::MigrationStrategy; +use futures::future::join_all; +use reqwest::StatusCode; +use scarb::core::Workspace; +use serde::Serialize; +use serde_json::Value; +use walkdir::WalkDir; + +use crate::utils::{walnut_get_api_key, walnut_get_api_url}; +use crate::Error; + +/// Verifies all classes declared during migration. +/// Only supported on hosted networks (non-localhost). +/// +/// This function verifies all contracts and models in the strategy. For every contract and model, +/// it sends a request to the Walnut backend with the class name, class hash, RPC URL, and source +/// code. Walnut will then build the project with Sozo, compare the Sierra bytecode with the +/// bytecode on the network, and if they are equal, it will store the source code and associate it +/// with the class hash. +pub async fn walnut_verify_migration_strategy( + ws: &Workspace<'_>, + rpc_url: String, + migration_strategy: &MigrationStrategy, +) -> anyhow::Result<()> { + let ui = ws.config().ui(); + // Check if rpc_url is localhost + if rpc_url.contains("localhost") || rpc_url.contains("127.0.0.1") { + ui.print(" "); + ui.warn("Verifying classes with Walnut is only supported on hosted networks."); + ui.print(" "); + return Ok(()); + } + + // it's a path to a file, so `parent` should never return `None` + let root_dir: &Path = ws.manifest_path().parent().unwrap().as_std_path(); + let default_namespace = get_default_namespace_from_ws(ws)?; + + // Check if there are any contracts or models in the strategy + if migration_strategy.contracts.is_empty() && migration_strategy.models.is_empty() { + ui.print(" "); + ui.print("🌰 No contracts or models to verify."); + ui.print(" "); + return Ok(()); + } + + // Notify start of verification + ui.print(" "); + ui.print("🌰 Verifying classes with Walnut..."); + ui.print(" "); + + // Retrieve the API key and URL from environment variables + let api_key = walnut_get_api_key()?; + let api_url = walnut_get_api_url(); + + // Collect source code + let source_code = collect_source_code(root_dir)?; + + // Prepare verification payloads + let mut verification_tasks = Vec::new(); + let mut class_tags = Vec::new(); + + for contract_migration in &migration_strategy.contracts { + let class_name = get_class_name_from_artifact_path( + &contract_migration.artifact_path, + &default_namespace, + )?; + let verification_payload = VerificationPayload { + class_name: class_name.clone(), + class_hash: contract_migration.diff.local_class_hash.to_hex_string(), + rpc_url: rpc_url.clone(), + source_code: source_code.clone(), + }; + class_tags.push(contract_migration.diff.tag.clone()); + verification_tasks.push(verify_class(verification_payload, &api_url, &api_key)); + } + + for class_migration in &migration_strategy.models { + let class_name = + get_class_name_from_artifact_path(&class_migration.artifact_path, &default_namespace)?; + let verification_payload = VerificationPayload { + class_name: class_name.clone(), + class_hash: class_migration.diff.local_class_hash.to_hex_string(), + rpc_url: rpc_url.clone(), + source_code: source_code.clone(), + }; + class_tags.push(class_migration.diff.tag.clone()); + verification_tasks.push(verify_class(verification_payload, &api_url, &api_key)); + } + + // Run all verification tasks + let results = join_all(verification_tasks).await; + + for (i, result) in results.into_iter().enumerate() { + match result { + Ok(message) => { + ui.print(subtitle(format!("{}: {}", class_tags[i], message))); + } + Err(e) => { + ui.print(subtitle(format!("{}: {}", class_tags[i], e))); + } + } + } + + Ok(()) +} + +fn get_class_name_from_artifact_path(path: &Path, namespace: &str) -> Result<String, Error> { + let file_name = path.file_stem().and_then(OsStr::to_str).ok_or(Error::InvalidFileName)?; + let class_name = file_name.strip_prefix(namespace).ok_or(Error::NamespacePrefixNotFound)?; + Ok(class_name.to_string()) +} + +#[derive(Debug, Serialize)] +struct VerificationPayload { + /// The name of the class we want to verify together with the selector. + pub class_name: String, + /// The hash of the Sierra class.
+ pub class_hash: String, + /// The RPC URL of the network where this class is declared (can only be a hosted network). + pub rpc_url: String, + /// JSON that contains a map where the key is the path to the file and the value is the content + /// of the file. It should contain all files required to build the Dojo project with Sozo. + pub source_code: Value, +} + +async fn verify_class( + payload: VerificationPayload, + api_url: &str, + api_key: &str, +) -> Result<String, Error> { + let res = reqwest::Client::new() + .post(format!("{api_url}/v1/verify")) + .header("x-api-key", api_key) + .json(&payload) + .send() + .await?; + + if res.status() == StatusCode::OK { + Ok(res.text().await?) + } else { + Err(Error::VerificationError(res.text().await?)) + } +} + +fn collect_source_code(root_dir: &Path) -> Result<Value, Error> { + fn collect_files( + root_dir: &Path, + search_dir: &Path, + extension: &str, + max_depth: Option<usize>, + file_data: &mut serde_json::Map<String, Value>, + ) -> Result<(), Error> { + // Set max_depth to usize::MAX if None is provided, matching the default value set by + // WalkDir::new() + let max_depth = max_depth.unwrap_or(usize::MAX); + for entry in WalkDir::new(search_dir).max_depth(max_depth).follow_links(true) { + let entry = entry.map_err(io::Error::from)?; + let path = entry.path(); + if path.is_file() { + if let Some(file_extension) = path.extension() { + if file_extension == OsStr::new(extension) { + // Safe to unwrap here because we're iterating over files within root_dir, + // so path will always have root_dir as a prefix + let relative_path = path.strip_prefix(root_dir).unwrap(); + let file_content = std::fs::read_to_string(path)?; + file_data.insert( + relative_path.to_string_lossy().into_owned(), + serde_json::Value::String(file_content), + ); + } + } + } + } + Ok(()) + } + + let mut file_data = serde_json::Map::new(); + // Read `.toml` files in the root folder + collect_files(root_dir, root_dir, "toml", Some(1), &mut file_data)?; + // Read `.cairo` files in the root/src folder + collect_files(root_dir, &root_dir.join("src"), "cairo", None, &mut file_data)?; + + Ok(serde_json::Value::Object(file_data)) +} + +fn subtitle<D: AsRef<str>>(message: D) -> String { + dimmed_message(format!("{} {}", pad_str(">", 3, Alignment::Right, None), message.as_ref())) + .to_string() +} + +fn dimmed_message<D>(message: D) -> StyledObject<D> { + Style::new().dim().apply_to(message) +} diff --git a/crates/torii/core/Cargo.toml b/crates/torii/core/Cargo.toml index fcf862b82b..a22ccfcc9c 100644 --- a/crates/torii/core/Cargo.toml +++ b/crates/torii/core/Cargo.toml @@ -19,6 +19,7 @@ dojo-types = { path = "../../dojo-types" } dojo-world = { path = "../../dojo-world", features = [ "contracts", "manifest" ] } futures-channel = "0.3.0" futures-util.workspace = true +hashlink.workspace = true hex.workspace = true lazy_static.workspace = true log.workspace = true @@ -31,8 +32,8 @@ serde_json.workspace = true slab = "0.4.2" sozo-ops.workspace = true sqlx.workspace = true -starknet.workspace = true starknet-crypto.workspace = true +starknet.workspace = true thiserror.workspace = true tokio = { version = "1.32.0", features = [ "sync" ], default-features = true } tokio-stream = "0.1.11"
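For intuition about what `verify_class` actually posts to `{api_url}/v1/verify`, here is a hedged sketch of the payload shape. The field names come from `VerificationPayload` above; every value is invented for illustration:

```rust
use serde_json::{json, Value};

// Illustrative only: the JSON body produced by serializing VerificationPayload.
// `source_code` maps relative file paths to file contents, exactly as built by
// collect_source_code (Scarb.toml files at the root, .cairo files under src/).
fn example_payload() -> Value {
    json!({
        "class_name": "actions",            // hypothetical class name
        "class_hash": "0x123abc",           // hypothetical Sierra class hash
        "rpc_url": "https://example.com/rpc",
        "source_code": {
            "Scarb.toml": "[package]\nname = \"example\"\n",
            "src/lib.cairo": "// project sources"
        }
    })
}
```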
diff --git a/crates/torii/core/src/engine.rs b/crates/torii/core/src/engine.rs index 153269bc6e..a12420d5a0 100644 --- a/crates/torii/core/src/engine.rs +++ b/crates/torii/core/src/engine.rs @@ -1,38 +1,46 @@ -use std::collections::BTreeMap; +use std::collections::{BTreeMap, HashMap}; use std::fmt::Debug; use std::time::Duration; use anyhow::Result; use dojo_world::contracts::world::WorldContractReader; +use hashlink::LinkedHashMap; use starknet::core::types::{ - BlockId, BlockTag, Event, EventFilter, Felt, MaybePendingBlockWithTxHashes, - MaybePendingBlockWithTxs, ReceiptBlock, Transaction, TransactionReceipt, - TransactionReceiptWithBlockInfo, + BlockId, BlockTag, EmittedEvent, Event, EventFilter, Felt, MaybePendingBlockWithReceipts, + MaybePendingBlockWithTxHashes, PendingBlockWithReceipts, ReceiptBlock, TransactionReceipt, + TransactionReceiptWithBlockInfo, TransactionWithReceipt, }; -use starknet::core::utils::get_selector_from_name; use starknet::providers::Provider; use tokio::sync::broadcast::Sender; use tokio::sync::mpsc::Sender as BoundedSender; use tokio::time::sleep; -use tracing::{error, info, trace, warn}; +use tracing::{debug, error, info, trace, warn}; +use crate::processors::event_message::EventMessageProcessor; use crate::processors::{BlockProcessor, EventProcessor, TransactionProcessor}; use crate::sql::Sql; #[allow(missing_debug_implementations)] -pub struct Processors<P: Provider + Sync> { +pub struct Processors<P: Provider + Send + Sync> { pub block: Vec<Box<dyn BlockProcessor<P>>>, pub transaction: Vec<Box<dyn TransactionProcessor<P>>>, - pub event: Vec<Box<dyn EventProcessor<P>>>, + pub event: HashMap<Felt, Box<dyn EventProcessor<P>>>, + pub catch_all_event: Box<dyn EventProcessor<P>>, } -impl<P: Provider + Sync> Default for Processors<P> { +impl<P: Provider + Send + Sync> Default for Processors<P> { fn default() -> Self { - Self { block: vec![], event: vec![], transaction: vec![] } + Self { + block: vec![], + event: HashMap::new(), + transaction: vec![], + catch_all_event: Box::new(EventMessageProcessor) as Box<dyn EventProcessor<P>>, + } } } -pub(crate) const LOG_TARGET: &str = "tori_core::engine"; +pub(crate) const LOG_TARGET: &str = "torii_core::engine"; +pub const QUERY_QUEUE_BATCH_SIZE: usize = 1000; #[derive(Debug)] pub struct EngineConfig { @@ -53,8 +61,30 @@ impl Default for EngineConfig { } } +#[derive(Debug)] +pub enum FetchDataResult { + Range(FetchRangeResult), + Pending(FetchPendingResult), + None, +} + +#[derive(Debug)] +pub struct FetchRangeResult { + // (block_number, transaction_hash) -> events + pub transactions: LinkedHashMap<(u64, Felt), Vec<EmittedEvent>>, + pub blocks: BTreeMap<u64, u64>, + pub latest_block_number: u64, +} + +#[derive(Debug)] +pub struct FetchPendingResult { + pub pending_block: Box<PendingBlockWithReceipts>, + pub last_pending_block_tx: Option<Felt>, + pub block_number: u64, +} +
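The two transaction cursors introduced in this refactor are easy to conflate, so here is a hedged paraphrase of how they advance (names from the structs above and from `process_pending` below; this helper is illustrative, not engine code):

```rust
use starknet::core::types::Felt;

// - `last_pending_block_tx` advances on *every* processed pending transaction.
// - `last_pending_block_world_tx` advances only when the transaction actually
//   emitted a world event (the processor returned Ok(true)).
fn advance_cursors(
    had_world_event: bool,
    tx_hash: Felt,
    last_pending_block_tx: &mut Option<Felt>,
    last_pending_block_world_tx: &mut Option<Felt>,
) {
    *last_pending_block_tx = Some(tx_hash);
    if had_world_event {
        *last_pending_block_world_tx = Some(tx_hash);
    }
}
```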

#[allow(missing_debug_implementations)] -pub struct Engine<P: Provider + Sync> { +pub struct Engine<P: Provider + Send + Sync> { world: WorldContractReader<P>, db: Sql, provider: Box<P>, @@ -69,7 +99,7 @@ struct UnprocessedEvent { data: Vec<String>, } -impl<P: Provider + Sync> Engine<P> { +impl<P: Provider + Send + Sync> Engine<P> { pub fn new( world: WorldContractReader<P>, db: Sql, @@ -83,9 +113,14 @@ impl<P: Provider + Sync> Engine<P> { } pub async fn start(&mut self) -> Result<()> { - let (mut head, mut pending_block_tx) = self.db.head().await?; + // use the start block provided by the user if head is 0 + let (head, last_pending_block_world_tx, last_pending_block_tx) = self.db.head().await?; if head == 0 { - head = self.config.start_block; + self.db.set_head( + self.config.start_block, + last_pending_block_world_tx, + last_pending_block_tx, + ); } else if self.config.start_block != 0 { warn!(target: LOG_TARGET, "Start block ignored, stored head exists and will be used instead."); } @@ -97,25 +132,35 @@ impl<P: Provider + Sync> Engine<P> { let mut erroring_out = false; loop { + let (head, last_pending_block_world_tx, last_pending_block_tx) = self.db.head().await?; tokio::select! { _ = shutdown_rx.recv() => { break Ok(()); } - _ = async { - match self.sync_to_head(head, pending_block_tx).await { - Ok((latest_block_number, latest_pending_tx)) => { + res = self.fetch_data(head, last_pending_block_world_tx, last_pending_block_tx) => { + match res { + Ok(fetch_result) => { if erroring_out { erroring_out = false; backoff_delay = Duration::from_secs(1); - info!(target: LOG_TARGET, latest_block_number = latest_block_number, "Syncing reestablished."); + info!(target: LOG_TARGET, "Syncing reestablished."); } - pending_block_tx = latest_pending_tx; - head = latest_block_number; + match self.process(fetch_result).await { + Ok(()) => {} + Err(e) => { + error!(target: LOG_TARGET, error = %e, "Processing fetched data."); + erroring_out = true; + sleep(backoff_delay).await; + if backoff_delay < max_backoff_delay { + backoff_delay *= 2; + } + } + } } Err(e) => { erroring_out = true; - error!(target: LOG_TARGET, error = %e, "Syncing to head."); + error!(target: LOG_TARGET, error = %e, "Fetching data."); sleep(backoff_delay).await; if backoff_delay < max_backoff_delay { backoff_delay *= 2;
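The retry policy in the loop above is worth stating on its own. A standalone sketch, with values mirroring the surrounding code (the function itself is illustrative):

```rust
use std::time::Duration;

// The delay doubles after every failed fetch/process round, is capped at
// `max`, and resets to one second as soon as syncing succeeds again.
fn next_backoff(current: Duration, max: Duration, succeeded: bool) -> Duration {
    if succeeded {
        Duration::from_secs(1)
    } else if current < max {
        current * 2
    } else {
        current
    }
}
```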

@@ -123,104 +168,45 @@ impl<P: Provider + Sync> Engine<P> { } }; sleep(self.config.polling_interval).await; - } => {} + } } } } - pub async fn sync_to_head( + pub async fn fetch_data( &mut self, from: u64, - mut pending_block_tx: Option<Felt>, - ) -> Result<(u64, Option<Felt>)> { + last_pending_block_world_tx: Option<Felt>, + last_pending_block_tx: Option<Felt>, + ) -> Result<FetchDataResult> { let latest_block_number = self.provider.block_hash_and_number().await?.block_number; - if from < latest_block_number { - // if `from` == 0, then the block may or may not be processed yet. + let result = if from < latest_block_number { let from = if from == 0 { from } else { from + 1 }; - pending_block_tx = self.sync_range(from, latest_block_number, pending_block_tx).await?; + debug!(target: LOG_TARGET, from = %from, to = %latest_block_number, "Fetching data for range."); + let data = + self.fetch_range(from, latest_block_number, last_pending_block_world_tx).await?; + FetchDataResult::Range(data) } else if self.config.index_pending { - pending_block_tx = self.sync_pending(latest_block_number + 1, pending_block_tx).await?; - } - - Ok((latest_block_number, pending_block_tx)) - } - - pub async fn sync_pending( - &mut self, - block_number: u64, - mut pending_block_tx: Option<Felt>, - ) -> Result<Option<Felt>> { - let block = if let MaybePendingBlockWithTxs::PendingBlock(pending) = - self.provider.get_block_with_txs(BlockId::Tag(BlockTag::Pending)).await? - { - pending + let data = self.fetch_pending(latest_block_number + 1, last_pending_block_tx).await?; + if let Some(data) = data { + FetchDataResult::Pending(data) + } else { + FetchDataResult::None + } } else { - return Ok(None); + FetchDataResult::None }; - // Skip transactions that have been processed already - // Our cursor is the last processed transaction - let mut pending_block_tx_cursor = pending_block_tx; - for transaction in block.transactions { - if let Some(tx) = pending_block_tx_cursor { - if transaction.transaction_hash() != &tx { - continue; - } - - pending_block_tx_cursor = None; - continue; - } - - match self - .process_transaction_and_receipt( - *transaction.transaction_hash(), - &transaction, - block_number, - block.timestamp, - ) - .await - { - Err(e) => { - match e.to_string().as_str() { - "TransactionHashNotFound" => { - // We failed to fetch the transaction, which is because - // the transaction might not have been processed fast enough by the - // provider. So we can fail silently and try - // again in the next iteration. - warn!(target: LOG_TARGET, transaction_hash = %format!("{:#x}", transaction.transaction_hash()), "Retrieving pending transaction receipt."); - return Ok(pending_block_tx); - } - _ => { - error!(target: LOG_TARGET, error = %e, transaction_hash = %format!("{:#x}", transaction.transaction_hash()), "Processing pending transaction."); - return Err(e); - } - } - } - Ok(true) => { - pending_block_tx = Some(*transaction.transaction_hash()); - info!(target: LOG_TARGET, transaction_hash = %format!("{:#x}", transaction.transaction_hash()), "Processed pending world transaction."); - } - Ok(_) => { - info!(target: LOG_TARGET, transaction_hash = %format!("{:#x}", transaction.transaction_hash()), "Processed pending transaction.") - } - } - } - - // Set the head to the last processed pending transaction - // Head block number should still be latest block number - self.db.set_head(block_number - 1, pending_block_tx); - - self.db.execute().await?; - Ok(pending_block_tx) + Ok(result) } - pub async fn sync_range( + pub async fn fetch_range( &mut self, from: u64, to: u64, - pending_block_tx: Option<Felt>, - ) -> Result<Option<Felt>> { + last_pending_block_world_tx: Option<Felt>, + ) -> Result<FetchRangeResult> { // Process all blocks from current to latest. let get_events = |token: Option<String>| { self.provider.get_events( @@ -239,19 +225,22 @@ impl<P: Provider + Sync> Engine<P> { let mut events_pages = vec![get_events(None).await?]; while let Some(token) = &events_pages.last().unwrap().continuation_token { + debug!(target: LOG_TARGET, "Fetching events page with continuation token: {}", &token); events_pages.push(get_events(Some(token.clone())).await?); } + debug!(target: LOG_TARGET, "Total events pages fetched: {}", &events_pages.len()); // Transactions & blocks to process let mut last_block = 0_u64; let mut blocks = BTreeMap::new(); // Flatten events pages and events according to the pending block cursor // to array of (block_number, transaction_hash) - let mut pending_block_tx_cursor = pending_block_tx; - let mut transactions = vec![]; - for events_page in &events_pages { - for event in &events_page.events { + let mut last_pending_block_world_tx_cursor = last_pending_block_world_tx; + let mut transactions = LinkedHashMap::new(); + for events_page in events_pages { + debug!("Processing events page with events: {}", &events_page.events.len()); + for event in events_page.events { let block_number = match event.block_number { Some(block_number) => block_number, // If the block number is not present, try to fetch it from the transaction @@ -287,44 +276,148 @@ impl<P: Provider + Sync> Engine<P> { // Then we skip all transactions until we reach the last pending processed // transaction (if any) - if let Some(tx) = pending_block_tx_cursor { + if let Some(tx) = last_pending_block_world_tx_cursor { if event.transaction_hash != tx { continue; } - pending_block_tx_cursor = None; + last_pending_block_world_tx_cursor = None; } // Skip the latest pending block transaction events // * as we might have multiple events for the same transaction - if let Some(tx) = pending_block_tx { + if let Some(tx) = last_pending_block_world_tx { if event.transaction_hash == tx { continue; } } - if let Some((_, last_tx_hash)) = transactions.last() { - // Dedup transactions - // As me might have multiple events for the same transaction - if *last_tx_hash == event.transaction_hash { - continue; + transactions + .entry((block_number, event.transaction_hash)) + .or_insert(vec![]) + .push(event); + } + } + + debug!("Transactions: {}", &transactions.len()); + debug!("Blocks: {}", &blocks.len()); + + Ok(FetchRangeResult { transactions, blocks, latest_block_number: to }) + } + + async fn fetch_pending( + &self, + block_number: u64, + last_pending_block_tx: Option<Felt>, + ) -> Result<Option<FetchPendingResult>> { + let block = if let MaybePendingBlockWithReceipts::PendingBlock(pending) = + self.provider.get_block_with_receipts(BlockId::Tag(BlockTag::Pending)).await? + { + pending + } else { + // TODO: change this to unreachable once katana is updated to return PendingBlockWithTxs + // when BlockTag is Pending unreachable!("We requested pending block, so it + // must be pending"); + return Ok(None); + }; + + Ok(Some(FetchPendingResult { + pending_block: Box::new(block), + block_number, + last_pending_block_tx, + })) + } + + pub async fn process(&mut self, fetch_result: FetchDataResult) -> Result<()> { + match fetch_result { + FetchDataResult::Range(data) => { + self.process_range(data).await?; + } + FetchDataResult::Pending(data) => { + self.process_pending(data).await?; + } + FetchDataResult::None => {} + } + + Ok(()) + }
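The grouping that `fetch_range` performs above is the core of the refactor: events are bucketed per transaction in a `LinkedHashMap` so that insertion order — and therefore on-chain order — is preserved when `process_range` later iterates the map. A self-contained sketch of the same idea:

```rust
use hashlink::LinkedHashMap;
use starknet::core::types::{EmittedEvent, Felt};

// Illustrative only: bucket events by (block_number, transaction_hash)
// while keeping first-seen order, mirroring fetch_range.
fn group_by_transaction(
    events: Vec<(u64, Felt, EmittedEvent)>,
) -> LinkedHashMap<(u64, Felt), Vec<EmittedEvent>> {
    let mut transactions = LinkedHashMap::new();
    for (block_number, tx_hash, event) in events {
        transactions.entry((block_number, tx_hash)).or_insert_with(Vec::new).push(event);
    }
    transactions
}
```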
+ pub async fn process_pending(&mut self, data: FetchPendingResult) -> Result<()> { + // Skip transactions that have been processed already + // Our cursor is the last processed transaction + + let mut last_pending_block_tx_cursor = data.last_pending_block_tx; + let mut last_pending_block_tx = data.last_pending_block_tx; + let mut last_pending_block_world_tx = None; + + let timestamp = data.pending_block.timestamp; + + for t in data.pending_block.transactions { + let transaction_hash = t.transaction.transaction_hash(); + if let Some(tx) = last_pending_block_tx_cursor { + if transaction_hash != &tx { + continue; + } + + last_pending_block_tx_cursor = None; + continue; + } + + match self.process_transaction_with_receipt(&t, data.block_number, timestamp).await { + Err(e) => { + match e.to_string().as_str() { + "TransactionHashNotFound" => { + // We failed to fetch the transaction, which is because + // the transaction might not have been processed fast enough by the + // provider. So we can fail silently and try + // again in the next iteration. + warn!(target: LOG_TARGET, transaction_hash = %format!("{:#x}", transaction_hash), "Retrieving pending transaction receipt."); + self.db.set_head( + data.block_number - 1, + last_pending_block_world_tx, + last_pending_block_tx, + ); + return Ok(()); + } + _ => { + error!(target: LOG_TARGET, error = %e, transaction_hash = %format!("{:#x}", transaction_hash), "Processing pending transaction."); + return Err(e); + } } } - transactions.push((block_number, event.transaction_hash)); + Ok(true) => { + last_pending_block_world_tx = Some(*transaction_hash); + last_pending_block_tx = Some(*transaction_hash); + info!(target: LOG_TARGET, transaction_hash = %format!("{:#x}", transaction_hash), "Processed pending world transaction."); + } + Ok(_) => { + last_pending_block_tx = Some(*transaction_hash); + debug!(target: LOG_TARGET, transaction_hash = %format!("{:#x}", transaction_hash), "Processed pending transaction.") + } } } + // Set the head to the last processed pending transaction + // Head block number should still be latest block number + self.db.set_head(data.block_number - 1, last_pending_block_world_tx, last_pending_block_tx); + + self.db.execute().await?; + Ok(()) + } + + pub async fn process_range(&mut self, data: FetchRangeResult) -> Result<()> { // Process all transactions let mut last_block = 0; - for (block_number, transaction_hash) in transactions { + for ((block_number, transaction_hash), events) in data.transactions { + debug!("Processing transaction hash: {:#x}", transaction_hash); // Process transaction - let transaction = self.provider.get_transaction_by_hash(transaction_hash).await?; + // let transaction = self.provider.get_transaction_by_hash(transaction_hash).await?; - self.process_transaction_and_receipt( + self.process_transaction_with_events( transaction_hash, - &transaction, + events.as_slice(), block_number, - blocks[&block_number], + data.blocks[&block_number], ) .await?; @@ -334,22 +427,25 @@ impl<P: Provider + Sync> Engine<P> { block_tx.send(block_number).await?; } - self.process_block(block_number, blocks[&block_number]).await?; + self.process_block(block_number, data.blocks[&block_number]).await?; last_block = block_number; } + + if self.db.query_queue.queue.len() >= QUERY_QUEUE_BATCH_SIZE { + self.db.execute().await?; + } } - // We return None for the pending_block_tx because our sync_range - // retrieves only specific events from the world. so some transactions + // We return None for the pending_block_tx because our process_range + // gets only specific events from the world. so some transactions // might get ignored and wont update the cursor. // so once the sync range is done, we assume all of the tx of the block // have been processed. - self.db.set_head(to, None); - + self.db.set_head(data.latest_block_number, None, None); self.db.execute().await?; - Ok(None) + Ok(()) } async fn get_block_timestamp(&self, block_number: u64) -> Result<u64> { @@ -359,17 +455,56 @@ impl<P: Provider + Sync> Engine<P> { } } - // Process a transaction and its receipt. - // Returns whether the transaction has a world event. - async fn process_transaction_and_receipt( + async fn process_transaction_with_events( &mut self, transaction_hash: Felt, - transaction: &Transaction, + events: &[EmittedEvent], + block_number: u64, + block_timestamp: u64, + ) -> Result<()> { + for (event_idx, event) in events.iter().enumerate() { + let event_id = + format!("{:#064x}:{:#x}:{:#04x}", block_number, transaction_hash, event_idx); + + let event = Event { + from_address: event.from_address, + keys: event.keys.clone(), + data: event.data.clone(), + }; + Self::process_event( + self, + block_number, + block_timestamp, + &event_id, + &event, + transaction_hash, + ) + .await?; + } + + // Commented out this transaction processor because it requires an RPC call for each + // transaction which is slowing down the sync process by a lot. + // Self::process_transaction( + // self, + // block_number, + // block_timestamp, + // transaction_hash, + // transaction, + // ) + // .await?; + + Ok(()) + } + // Process a transaction and events from its receipt. + // Returns whether the transaction has a world event. + async fn process_transaction_with_receipt( + &mut self, + transaction_with_receipt: &TransactionWithReceipt, block_number: u64, block_timestamp: u64, ) -> Result<bool> { - let receipt = self.provider.get_transaction_receipt(transaction_hash).await?; - let events = match &receipt.receipt { + let transaction_hash = transaction_with_receipt.transaction.transaction_hash(); + let events = match &transaction_with_receipt.receipt { TransactionReceipt::Invoke(receipt) => Some(&receipt.events), TransactionReceipt::L1Handler(receipt) => Some(&receipt.events), _ => None, @@ -384,30 +519,29 @@ impl<P: Provider + Sync> Engine<P> { world_event = true; let event_id = - format!("{:#064x}:{:#x}:{:#04x}", block_number, transaction_hash, event_idx); + format!("{:#064x}:{:#x}:{:#04x}", block_number, *transaction_hash, event_idx); Self::process_event( self, block_number, block_timestamp, - &receipt, &event_id, event, + *transaction_hash, ) .await?; } - if world_event { - Self::process_transaction( - self, - block_number, - block_timestamp, - &receipt, - transaction_hash, - transaction, - ) - .await?; - } + // if world_event { + // Self::process_transaction( + // self, + // block_number, + // block_timestamp, + // transaction_hash, + // transaction, + // ) + // .await?; + // } } Ok(world_event) @@ -424,65 +558,57 @@ impl<P: Provider + Sync> Engine<P> { Ok(()) } - async fn process_transaction( - &mut self, - block_number: u64, - block_timestamp: u64, - transaction_receipt: &TransactionReceiptWithBlockInfo, - transaction_hash: Felt, - transaction: &Transaction, - ) -> Result<()> { - for processor in &self.processors.transaction { - processor - .process( - &mut self.db, - self.provider.as_ref(), - block_number, - block_timestamp, - transaction_receipt, - transaction_hash, - transaction, - ) - .await? - } - - Ok(()) - } + // async fn process_transaction( + // &mut self, + // block_number: u64, + // block_timestamp: u64, + // transaction_hash: Felt, + // transaction: &Transaction, + // ) -> Result<()> { + // for processor in &self.processors.transaction { + // processor + // .process( + // &mut self.db, + // self.provider.as_ref(), + // block_number, + // block_timestamp, + // transaction_hash, + // transaction, + // ) + // .await? + // } + + // Ok(()) + // } async fn process_event( &mut self, block_number: u64, block_timestamp: u64, - transaction_receipt: &TransactionReceiptWithBlockInfo, event_id: &str, event: &Event, + transaction_hash: Felt, ) -> Result<()> { - self.db.store_event( - event_id, - event, - *transaction_receipt.receipt.transaction_hash(), - block_timestamp, - ); - for processor in &self.processors.event { - // If the processor has no event_key, means it's a catch-all processor. - // We also validate the event - if (processor.event_key().is_empty() - || get_selector_from_name(&processor.event_key())? == event.keys[0]) - && processor.validate(event) - { - if let Err(e) = processor + self.db.store_event(event_id, event, transaction_hash, block_timestamp); + let event_key = event.keys[0]; + + let Some(processor) = self.processors.event.get(&event_key) else { + // if we don't have a processor for this event, we try the catch-all processor + if self.processors.catch_all_event.validate(event) { + if let Err(e) = self + .processors + .catch_all_event .process( &self.world, &mut self.db, block_number, block_timestamp, - transaction_receipt, event_id, event, ) .await { - error!(target: LOG_TARGET, event_name = processor.event_key(), error = %e, "Processing event."); + error!(target: LOG_TARGET, error = %e, "Processing catch all event processor."); } } else { let unprocessed_event = UnprocessedEvent { @@ -497,7 +623,19 @@ impl<P: Provider + Sync> Engine<P> { "Unprocessed event.", ); } + + return Ok(()); + }; + + // if processor.validate(event) { + if let Err(e) = processor + .process(&self.world, &mut self.db, block_number, block_timestamp, event_id, event) + .await + { + error!(target: LOG_TARGET, event_name = processor.event_key(), error = %e, "Processing event."); } + // } + Ok(()) } } diff --git a/crates/torii/core/src/lib.rs b/crates/torii/core/src/lib.rs index 86f55f26d7..df6e8b3adc 100644 --- a/crates/torii/core/src/lib.rs +++ b/crates/torii/core/src/lib.rs @@ -1,8 +1,3 @@ -use serde::Deserialize; -use sqlx::FromRow; - -use crate::types::SQLFelt; - pub mod cache; pub mod engine; pub mod error; @@ -13,10 +8,3 @@ pub mod simple_broker; pub mod sql; pub mod types; pub mod utils; - -#[allow(dead_code)] -#[derive(FromRow, Deserialize, Debug)] -pub struct World { - #[sqlx(try_from = "String")] - world_address: SQLFelt, -} diff --git a/crates/torii/core/src/processors/event_message.rs b/crates/torii/core/src/processors/event_message.rs index dcbe66aa58..e2044cbe1a 100644 --- a/crates/torii/core/src/processors/event_message.rs +++ b/crates/torii/core/src/processors/event_message.rs @@ -1,7 +1,7 @@ use anyhow::{Error, Result}; use async_trait::async_trait; use dojo_world::contracts::world::WorldContractReader; -use starknet::core::types::{Event, TransactionReceiptWithBlockInfo}; +use starknet::core::types::Event; use starknet::providers::Provider; use tracing::info; @@ -41,7 +41,6 @@ where db: &mut Sql, _block_number: u64, block_timestamp: u64, - _transaction_receipt: &TransactionReceiptWithBlockInfo, event_id: &str, event: &Event, ) -> Result<(), Error> { diff --git a/crates/torii/core/src/processors/metadata_update.rs b/crates/torii/core/src/processors/metadata_update.rs index 129b5ec6ac..594a32898a 100644 --- a/crates/torii/core/src/processors/metadata_update.rs +++ b/crates/torii/core/src/processors/metadata_update.rs @@ -9,7 +9,7 @@ use dojo_world::contracts::world::WorldContractReader; use dojo_world::metadata::WorldMetadata; use dojo_world::uri::Uri; use reqwest::Client; -use starknet::core::types::{Event, Felt, TransactionReceiptWithBlockInfo}; +use starknet::core::types::{Event, Felt}; use starknet::providers::Provider; use tokio_util::bytes::Bytes; use tracing::{error, info}; @@ -53,7 +53,6 @@ where db: &mut Sql, _block_number: u64, block_timestamp: u64, - _transaction_receipt: &TransactionReceiptWithBlockInfo, _event_id: &str, event: &Event, ) -> Result<(), Error> { diff --git a/crates/torii/core/src/processors/mod.rs b/crates/torii/core/src/processors/mod.rs index e2b22e4d75..c4a02da631 100644 --- a/crates/torii/core/src/processors/mod.rs +++ b/crates/torii/core/src/processors/mod.rs @@ -1,7 +1,10 @@ +use std::collections::HashMap; + use anyhow::{Error, Result}; use async_trait::async_trait; use dojo_world::contracts::world::WorldContractReader; -use starknet::core::types::{Event, Felt, Transaction, TransactionReceiptWithBlockInfo}; +use starknet::core::types::{Event, Felt, Transaction}; +use starknet::core::utils::get_selector_from_name; use starknet::providers::Provider; use crate::sql::Sql; @@ -16,8 +19,8 @@ pub mod store_update_member; pub mod store_update_record; const MODEL_INDEX: usize = 0; -const NUM_KEYS_INDEX: usize = 1; const ENTITY_ID_INDEX: usize = 1; +const NUM_KEYS_INDEX: usize = 2; #[async_trait] pub trait EventProcessor<P>

@@ -39,7 +42,6 @@ where db: &mut Sql, block_number: u64, block_timestamp: u64, - transaction_receipt: &TransactionReceiptWithBlockInfo, event_id: &str, event: &Event, ) -> Result<(), Error>; @@ -66,8 +68,21 @@ pub trait TransactionProcessor<P: Provider + Sync> { provider: &P, block_number: u64, block_timestamp: u64, - transaction_receipt: &TransactionReceiptWithBlockInfo, transaction_hash: Felt, transaction: &Transaction, ) -> Result<(), Error>; } + +/// Given a list of event processors, generate a map of event keys to the event processor +pub fn generate_event_processors_map<P: Provider + Sync + Send>( + event_processor: Vec<Box<dyn EventProcessor<P>>>, +) -> Result<HashMap<Felt, Box<dyn EventProcessor<P>>>> { + let mut event_processors = HashMap::new(); + + for processor in event_processor { + let key = get_selector_from_name(processor.event_key().as_str())?; + event_processors.insert(key, processor); + } + + Ok(event_processors) +} diff --git a/crates/torii/core/src/processors/register_model.rs b/crates/torii/core/src/processors/register_model.rs index c2b2f47e3f..369357a243 100644 --- a/crates/torii/core/src/processors/register_model.rs +++ b/crates/torii/core/src/processors/register_model.rs @@ -3,7 +3,7 @@ use async_trait::async_trait; use cainome::cairo_serde::{ByteArray, CairoSerde}; use dojo_world::contracts::model::ModelReader; use dojo_world::contracts::world::WorldContractReader; -use starknet::core::types::{Event, TransactionReceiptWithBlockInfo}; +use starknet::core::types::Event; use starknet::providers::Provider; use tracing::{debug, info}; @@ -43,7 +43,6 @@ where db: &mut Sql, _block_number: u64, block_timestamp: u64, - _transaction_receipt: &TransactionReceiptWithBlockInfo, _event_id: &str, event: &Event, ) -> Result<(), Error> { diff --git a/crates/torii/core/src/processors/store_del_record.rs b/crates/torii/core/src/processors/store_del_record.rs index fcc901bd20..46e43c4f1e 100644 --- a/crates/torii/core/src/processors/store_del_record.rs +++ b/crates/torii/core/src/processors/store_del_record.rs @@ -1,7 +1,7 @@ use anyhow::{Error, Ok, Result}; use async_trait::async_trait; use dojo_world::contracts::world::WorldContractReader; -use starknet::core::types::{Event, TransactionReceiptWithBlockInfo}; +use starknet::core::types::Event; use starknet::providers::Provider; use tracing::info; @@ -42,7 +42,6 @@ where db: &mut Sql, _block_number: u64, block_timestamp: u64, - _transaction_receipt: &TransactionReceiptWithBlockInfo, event_id: &str, event: &Event, ) -> Result<(), Error> { diff --git a/crates/torii/core/src/processors/store_set_record.rs b/crates/torii/core/src/processors/store_set_record.rs index 6fa9fc5394..8a2530292d 100644 --- a/crates/torii/core/src/processors/store_set_record.rs +++ b/crates/torii/core/src/processors/store_set_record.rs @@ -2,13 +2,13 @@ use anyhow::{Context, Error, Ok, Result}; use async_trait::async_trait; use dojo_world::contracts::world::WorldContractReader; use num_traits::ToPrimitive; -use starknet::core::types::{Event, TransactionReceiptWithBlockInfo}; +use starknet::core::types::Event; use starknet::providers::Provider; use tracing::info; use super::EventProcessor; -use crate::processors::{MODEL_INDEX, NUM_KEYS_INDEX}; -use crate::sql::Sql; +use crate::processors::{ENTITY_ID_INDEX, MODEL_INDEX, NUM_KEYS_INDEX}; +use crate::sql::{felts_sql_string, Sql}; pub(crate) const LOG_TARGET: &str = "torii_core::processors::store_set_record"; @@ -43,13 +43,12 @@ where db: &mut Sql, _block_number: u64, block_timestamp: u64, - _transaction_receipt: &TransactionReceiptWithBlockInfo, event_id: &str, event: &Event, ) -> Result<(), Error> { - let selector = event.data[MODEL_INDEX]; + let model_id = event.data[MODEL_INDEX]; - let model = db.model(selector).await?; + let model = db.model(model_id).await?; info!( target: LOG_TARGET, @@ -61,6 +60,7 @@ where let keys_end: usize = keys_start + event.data[NUM_KEYS_INDEX].to_usize().context("invalid usize")?; let keys = event.data[keys_start..keys_end].to_vec(); + let keys_str = felts_sql_string(&keys); // keys_end is already the length of the values array. @@ -69,12 +69,14 @@ where values_start + event.data[keys_end].to_usize().context("invalid usize")?; let values = event.data[values_start..values_end].to_vec(); + let entity_id = event.data[ENTITY_ID_INDEX]; + let mut keys_and_unpacked = [keys, values].concat(); let mut entity = model.schema; entity.deserialize(&mut keys_and_unpacked)?; - db.set_entity(entity, event_id, block_timestamp).await?; + db.set_entity(entity, event_id, block_timestamp, entity_id, model_id, &keys_str).await?; Ok(()) } } diff --git a/crates/torii/core/src/processors/store_transaction.rs b/crates/torii/core/src/processors/store_transaction.rs index d058b7ed31..2e7056e401 100644 --- a/crates/torii/core/src/processors/store_transaction.rs +++ b/crates/torii/core/src/processors/store_transaction.rs @@ -1,6 +1,6 @@ use anyhow::{Error, Ok, Result}; use async_trait::async_trait; -use starknet::core::types::{Felt, Transaction, TransactionReceiptWithBlockInfo}; +use starknet::core::types::{Felt, Transaction}; use starknet::providers::Provider; use super::TransactionProcessor; @@ -17,7 +17,6 @@ impl<P: Provider + Sync> TransactionProcessor<P> for StoreTran _provider: &P, block_number: u64, block_timestamp: u64, - _receipt: &TransactionReceiptWithBlockInfo, transaction_hash: Felt, transaction: &Transaction, ) -> Result<(), Error> { diff --git a/crates/torii/core/src/processors/store_update_member.rs b/crates/torii/core/src/processors/store_update_member.rs index ef17b6f321..01f1c92c95 100644 --- a/crates/torii/core/src/processors/store_update_member.rs +++ b/crates/torii/core/src/processors/store_update_member.rs @@ -3,7 +3,7 @@ use async_trait::async_trait; use dojo_world::contracts::naming; use dojo_world::contracts::world::WorldContractReader; use num_traits::ToPrimitive; -use starknet::core::types::{Event, TransactionReceiptWithBlockInfo}; +use starknet::core::types::Event; use starknet::core::utils::get_selector_from_name; use starknet::providers::Provider; use tracing::{info, warn}; @@ -47,7 +47,6 @@ where db: &mut Sql, _block_number: u64, block_timestamp: u64, - _transaction_receipt: &TransactionReceiptWithBlockInfo, event_id: &str, event: &Event, ) -> Result<(), Error> {
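To show how the pieces above fit together, here is a hedged sketch of wiring the selector-keyed map into `Processors`, mirroring the test setup that appears later in this diff (the generic bound follows the reconstructed signatures above and is an assumption):

```rust
use torii_core::engine::Processors;
use torii_core::processors::generate_event_processors_map;
use torii_core::processors::register_model::RegisterModelProcessor;
use torii_core::processors::store_del_record::StoreDelRecordProcessor;
use torii_core::processors::store_set_record::StoreSetRecordProcessor;

// Events with no map entry fall through to the default catch_all_event
// (EventMessageProcessor).
fn build_processors<P>() -> anyhow::Result<Processors<P>>
where
    P: starknet::providers::Provider + Send + Sync,
{
    Ok(Processors {
        event: generate_event_processors_map(vec![
            Box::new(RegisterModelProcessor),
            Box::new(StoreSetRecordProcessor),
            Box::new(StoreDelRecordProcessor),
        ])?,
        ..Processors::default()
    })
}
```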
diff --git a/crates/torii/core/src/processors/store_update_record.rs b/crates/torii/core/src/processors/store_update_record.rs index 5dd2309c14..feab5765ec 100644 --- a/crates/torii/core/src/processors/store_update_record.rs +++ b/crates/torii/core/src/processors/store_update_record.rs @@ -3,13 +3,13 @@ use async_trait::async_trait; use dojo_world::contracts::naming; use dojo_world::contracts::world::WorldContractReader; use num_traits::ToPrimitive; -use starknet::core::types::{Event, TransactionReceiptWithBlockInfo}; +use starknet::core::types::Event; use starknet::providers::Provider; use tracing::info; use super::EventProcessor; use crate::processors::{ENTITY_ID_INDEX, MODEL_INDEX}; -use crate::sql::Sql; +use crate::sql::{felts_sql_string, Sql}; pub(crate) const LOG_TARGET: &str = "torii_core::processors::store_update_record"; @@ -44,14 +44,13 @@ where db: &mut Sql, _block_number: u64, block_timestamp: u64, - _transaction_receipt: &TransactionReceiptWithBlockInfo, event_id: &str, event: &Event, ) -> Result<(), Error> { - let selector = event.data[MODEL_INDEX]; + let model_id = event.data[MODEL_INDEX]; let entity_id = event.data[ENTITY_ID_INDEX]; - let model = db.model(selector).await?; + let model = db.model(model_id).await?; info!( target: LOG_TARGET, @@ -72,12 +71,14 @@ where // Keys are read from the db, since we don't have access to them when only // the entity id is passed. let keys = db.get_entity_keys(entity_id, &tag).await?; + + let keys_str = felts_sql_string(&keys); let mut keys_and_unpacked = [keys, values].concat(); let mut entity = model.schema; entity.deserialize(&mut keys_and_unpacked)?; - db.set_entity(entity, event_id, block_timestamp).await?; + db.set_entity(entity, event_id, block_timestamp, entity_id, model_id, &keys_str).await?; Ok(()) } } diff --git a/crates/torii/core/src/query_queue.rs b/crates/torii/core/src/query_queue.rs index c623bb218d..d42fdb94b3 100644 --- a/crates/torii/core/src/query_queue.rs +++ b/crates/torii/core/src/query_queue.rs @@ -3,6 +3,12 @@ use std::collections::VecDeque; use sqlx::{Executor, Pool, Sqlite}; use starknet::core::types::Felt; +use crate::simple_broker::SimpleBroker; +use crate::types::{ + Entity as EntityUpdated, Event as EventEmitted, EventMessage as EventMessageUpdated, + Model as ModelRegistered, +}; + #[derive(Debug, Clone)] pub enum Argument { Null, @@ -12,15 +18,26 @@ pub enum Argument { FieldElement(Felt), } +#[derive(Debug, Clone)] +pub enum BrokerMessage { + ModelRegistered(ModelRegistered), + EntityUpdated(EntityUpdated), + EventMessageUpdated(EventMessageUpdated), + EventEmitted(EventEmitted), +} + #[derive(Debug, Clone)] pub struct QueryQueue { pool: Pool<Sqlite>, - queue: VecDeque<(String, Vec<Argument>)>, + pub queue: VecDeque<(String, Vec<Argument>)>, + // publishes related to queries in the queue; they should be sent + // after the queries are executed + pub publish_queue: VecDeque<BrokerMessage>, } impl QueryQueue { pub fn new(pool: Pool<Sqlite>) -> Self { - QueryQueue { pool, queue: VecDeque::new() } + QueryQueue { pool, queue: VecDeque::new(), publish_queue: VecDeque::new() } } pub fn enqueue<S: Into<String>>(&mut self, statement: S, arguments: Vec<Argument>) { @@ -31,6 +48,10 @@ impl QueryQueue { self.queue.push_front((statement.into(), arguments)); } + pub fn push_publish(&mut self, value: BrokerMessage) { + self.publish_queue.push_back(value); + } + pub async fn execute_all(&mut self) -> sqlx::Result<u64> { let mut total_affected = 0_u64; let mut tx = self.pool.begin().await?; @@ -53,6 +74,15 @@ impl QueryQueue { tx.commit().await?; + while let Some(message) = self.publish_queue.pop_front() { + match message { + BrokerMessage::ModelRegistered(model) => SimpleBroker::publish(model), + BrokerMessage::EntityUpdated(entity) => SimpleBroker::publish(entity), + BrokerMessage::EventMessageUpdated(event) => SimpleBroker::publish(event), + BrokerMessage::EventEmitted(event) => SimpleBroker::publish(event), + } + } + Ok(total_affected) } } diff --git a/crates/torii/core/src/sql.rs b/crates/torii/core/src/sql.rs index 49284a52e3..f3b6f887d0 100644 --- a/crates/torii/core/src/sql.rs +++ b/crates/torii/core/src/sql.rs @@ -13,20 +13,20 @@ use sqlx::pool::PoolConnection; use sqlx::{Pool, Row, Sqlite}; use starknet::core::types::{Event, Felt, InvokeTransaction, Transaction}; use starknet_crypto::poseidon_hash_many; +use tracing::debug; use crate::cache::{Model, ModelCache}; -use crate::query_queue::{Argument, QueryQueue}; -use crate::simple_broker::SimpleBroker; +use crate::query_queue::{Argument, BrokerMessage, QueryQueue}; use crate::types::{ Entity as EntityUpdated, Event as EventEmitted, EventMessage as EventMessageUpdated, Model as ModelRegistered, }; use crate::utils::{must_utc_datetime_from_timestamp, utc_dt_string_from_timestamp}; -use crate::World; type IsEventMessage = bool; type IsStoreUpdateMember = bool; +pub const WORLD_CONTRACT_TYPE: &str = "WORLD"; pub const FELT_DELIMITER: &str = "/"; #[cfg(test)] @@ -37,7 +37,7 @@ mod test; pub struct Sql { world_address: Felt, pub pool: Pool<Sqlite>, - query_queue: QueryQueue, + pub query_queue: QueryQueue, model_cache: Arc<ModelCache>, } @@ -46,12 +46,13 @@ impl Sql { let mut query_queue = QueryQueue::new(pool.clone()); query_queue.enqueue( - "INSERT OR IGNORE INTO indexers (id, head) VALUES (?, ?)", - vec![Argument::FieldElement(world_address), Argument::Int(0)], - ); - query_queue.enqueue( - "INSERT OR IGNORE INTO worlds (id, world_address) VALUES (?, ?)", - vec![Argument::FieldElement(world_address), Argument::FieldElement(world_address)], + "INSERT OR IGNORE INTO contracts (id, contract_address, contract_type) VALUES (?, ?, \ + ?)", + vec![ + Argument::FieldElement(world_address), + Argument::FieldElement(world_address), + Argument::String(WORLD_CONTRACT_TYPE.to_string()), + ], ); query_queue.execute_all().await?; @@ -64,45 +65,50 @@ impl Sql { }) } - pub async fn head(&self) -> Result<(u64, Option<Felt>)> { + pub async fn head(&self) -> Result<(u64, Option<Felt>, Option<Felt>)> { let mut conn: PoolConnection<Sqlite> = self.pool.acquire().await?; - let indexer_query = sqlx::query_as::<_, (i64, Option<String>)>( - "SELECT head, pending_block_tx FROM indexers WHERE id = ?", - ) - .bind(format!("{:#x}", self.world_address)); - - let indexer: (i64, Option<String>) = indexer_query.fetch_one(&mut *conn).await?; + let indexer_query = + sqlx::query_as::<_, (Option<i64>, Option<String>, Option<String>, String)>( + "SELECT head, last_pending_block_world_tx, last_pending_block_tx, contract_type \ + FROM contracts WHERE id = ?", + ) + .bind(format!("{:#x}", self.world_address)); + + let indexer: (Option<i64>, Option<String>, Option<String>, String) = + indexer_query.fetch_one(&mut *conn).await?; Ok(( - indexer.0.try_into().expect("doesn't fit in u64"), + indexer.0.map(|h| h.try_into().expect("doesn't fit in u64")).unwrap_or(0), indexer.1.map(|f| Felt::from_str(&f)).transpose()?, + indexer.2.map(|f| Felt::from_str(&f)).transpose()?, )) } - pub fn set_head(&mut self, head: u64, pending_block_tx: Option<Felt>) { + pub fn set_head( + &mut self, + head: u64, + last_pending_block_world_tx: Option<Felt>, + last_pending_block_tx: Option<Felt>, + ) { let head = Argument::Int(head.try_into().expect("doesn't fit in u64")); let id = Argument::FieldElement(self.world_address); - let pending_block_tx = if let Some(f) = pending_block_tx { + let last_pending_block_world_tx = if let Some(f) = last_pending_block_world_tx { + Argument::String(format!("{:#x}", f)) + } else { + Argument::Null + }; + let last_pending_block_tx = if let Some(f) = last_pending_block_tx { Argument::String(format!("{:#x}", f)) } else { Argument::Null }; self.query_queue.enqueue( - "UPDATE indexers SET head = ?, pending_block_tx = ? WHERE id = ?", - vec![head, pending_block_tx, id], + "UPDATE contracts SET head = ?, last_pending_block_world_tx = ?, \ + last_pending_block_tx = ? WHERE id = ?", + vec![head, last_pending_block_world_tx, last_pending_block_tx, id], ); } - pub async fn world(&self) -> Result<World> { - let mut conn: PoolConnection<Sqlite> = self.pool.acquire().await?; - let meta: World = sqlx::query_as("SELECT * FROM worlds WHERE id = ?") - .bind(format!("{:#x}", self.world_address)) - .fetch_one(&mut *conn) - .await?; - - Ok(meta) - } - #[allow(clippy::too_many_arguments)] pub async fn register_model( &mut self, @@ -148,9 +154,8 @@ impl Sql { &mut 0, &mut 0, ); - self.query_queue.execute_all().await?; - - SimpleBroker::publish(model_registered); + self.execute().await?; + self.query_queue.push_publish(BrokerMessage::ModelRegistered(model_registered)); Ok(()) } @@ -160,22 +165,14 @@ impl Sql { entity: Ty, event_id: &str, block_timestamp: u64, + entity_id: Felt, + model_id: Felt, + keys_str: &str, ) -> Result<()> { - let keys = if let Ty::Struct(s) = &entity { - let mut keys = Vec::new(); - for m in s.keys() { - keys.extend(m.serialize()?); - } - keys - } else { - return Err(anyhow!("Entity is not a struct")); - }; - let namespaced_name = entity.name(); - let (model_namespace, model_name) = namespaced_name.split_once('-').unwrap(); - let entity_id = format!("{:#x}", poseidon_hash_many(&keys)); - let model_id = format!("{:#x}", compute_selector_from_names(model_namespace, model_name)); + let entity_id = format!("{:#x}", entity_id); + let model_id = format!("{:#x}", model_id); self.query_queue.enqueue( "INSERT INTO entity_model (entity_id, model_id) VALUES (?, ?) ON CONFLICT(entity_id, \ @@ -183,14 +180,13 @@ impl Sql { vec![Argument::String(entity_id.clone()), Argument::String(model_id.clone())], ); - let keys_str = felts_sql_string(&keys); let insert_entities = "INSERT INTO entities (id, keys, event_id, executed_at) VALUES (?, \ ?, ?, ?) ON CONFLICT(id) DO UPDATE SET \ updated_at=CURRENT_TIMESTAMP, executed_at=EXCLUDED.executed_at, \ event_id=EXCLUDED.event_id RETURNING *"; let mut entity_updated: EntityUpdated = sqlx::query_as(insert_entities) .bind(&entity_id) - .bind(&keys_str) + .bind(keys_str) .bind(event_id) .bind(utc_dt_string_from_timestamp(block_timestamp)) .fetch_one(&self.pool) .await?; @@ -207,9 +203,8 @@ impl Sql { block_timestamp, &vec![], ); - self.query_queue.execute_all().await?; - SimpleBroker::publish(entity_updated); + self.query_queue.push_publish(BrokerMessage::EntityUpdated(entity_updated)); Ok(()) } @@ -266,9 +261,8 @@ impl Sql { block_timestamp, &vec![], ); - self.query_queue.execute_all().await?; - SimpleBroker::publish(event_message_updated); + self.query_queue.push_publish(BrokerMessage::EventMessageUpdated(event_message_updated)); Ok(()) } @@ -297,7 +291,7 @@ impl Sql { block_timestamp, &vec![], ); - self.query_queue.execute_all().await?; + self.execute().await?; let mut update_entity = sqlx::query_as::<_, EntityUpdated>( "UPDATE entities SET updated_at=CURRENT_TIMESTAMP, executed_at=?, event_id=? WHERE id \ = ? RETURNING *", ) .bind(utc_dt_string_from_timestamp(block_timestamp)) .bind(event_id) .bind(entity_id) .fetch_one(&self.pool) .await?; update_entity.updated_model = Some(wrapped_ty); - - SimpleBroker::publish(update_entity); + self.query_queue.push_publish(BrokerMessage::EntityUpdated(update_entity)); Ok(()) } @@ -327,7 +320,7 @@ impl Sql { let path = vec![entity.name()]; // delete entity models data self.build_delete_entity_queries_recursive(path, &entity_id, &entity); - self.query_queue.execute_all().await?; + self.execute().await?; let deleted_entity_model = sqlx::query("DELETE FROM entity_model WHERE entity_id = ? AND model_id = ?") @@ -369,7 +362,7 @@ impl Sql { update_entity.deleted = true; } - SimpleBroker::publish(update_entity); + self.query_queue.push_publish(BrokerMessage::EntityUpdated(update_entity)); Ok(()) } @@ -413,7 +406,6 @@ impl Sql { arguments.push(Argument::FieldElement(*resource)); self.query_queue.enqueue(statement, arguments); - self.query_queue.execute_all().await?; Ok(()) } @@ -550,14 +542,16 @@ impl Sql { vec![id, keys, data, hash, executed_at], ); - SimpleBroker::publish(EventEmitted { + let emitted = EventEmitted { id: event_id.to_string(), keys: felts_sql_string(&event.keys), data: felts_sql_string(&event.data), transaction_hash: format!("{:#x}", transaction_hash), created_at: Utc::now(), executed_at: must_utc_datetime_from_timestamp(block_timestamp), - }); + }; + + self.query_queue.push_publish(BrokerMessage::EventEmitted(emitted)); } #[allow(clippy::too_many_arguments)] @@ -1172,14 +1166,16 @@ impl Sql { }); } + /// Execute all queries in the queue pub async fn execute(&mut self) -> Result<()> { + debug!("Executing {} queries from the queue", self.query_queue.queue.len()); self.query_queue.execute_all().await?; Ok(()) } } -fn felts_sql_string(felts: &[Felt]) -> String { +pub fn felts_sql_string(felts: &[Felt]) -> String { felts.iter().map(|k| format!("{:#x}", k)).collect::<Vec<String>>().join(FELT_DELIMITER) + FELT_DELIMITER } diff --git a/crates/torii/core/src/sql_test.rs b/crates/torii/core/src/sql_test.rs index bb7707b31b..e59797d213 100644 --- a/crates/torii/core/src/sql_test.rs +++ b/crates/torii/core/src/sql_test.rs @@ -18,6 +18,7 @@ use starknet_crypto::poseidon_hash_many; use tokio::sync::broadcast; use crate::engine::{Engine, EngineConfig, Processors}; +use crate::processors::generate_event_processors_map; use crate::processors::register_model::RegisterModelProcessor; use crate::processors::store_del_record::StoreDelRecordProcessor; use crate::processors::store_set_record::StoreSetRecordProcessor; @@ -32,16 +33,17 @@ where P: Provider + Send + Sync + core::fmt::Debug, { let (shutdown_tx, _) = broadcast::channel(1); + let to = provider.block_hash_and_number().await?.block_number; let mut engine = Engine::new( world, db, provider, Processors { - event: vec![ + event: generate_event_processors_map(vec![ Box::new(RegisterModelProcessor), Box::new(StoreSetRecordProcessor), Box::new(StoreDelRecordProcessor), - ], + ])?, ..Processors::default() }, EngineConfig::default(), @@ -49,7 +51,8 @@ where None, ); - let _ = engine.sync_to_head(0, None).await?; + let data = engine.fetch_range(0, to, None).await.unwrap(); + engine.process_range(data).await.unwrap(); Ok(engine) } @@ -118,7 +121,7 @@ async fn test_load_from_remote() { let mut db = Sql::new(pool.clone(), world_reader.address).await.unwrap(); - let _ = bootstrap_engine(world_reader, db.clone(), account.provider()).await; + let _ = bootstrap_engine(world_reader, db.clone(), account.provider()).await.unwrap(); let _block_timestamp = 1710754478_u64; let models = sqlx::query("SELECT * FROM models").fetch_all(&pool).await.unwrap();
dojo-world.workspace = true katana-runner.workspace = true scarb.workspace = true serial_test = "2.0.0" -starknet.workspace = true starknet-crypto.workspace = true +starknet.workspace = true diff --git a/crates/torii/graphql/src/query/data.rs b/crates/torii/graphql/src/query/data.rs index 20d68debca..0b34b2af15 100644 --- a/crates/torii/graphql/src/query/data.rs +++ b/crates/torii/graphql/src/query/data.rs @@ -1,6 +1,7 @@ use async_graphql::connection::PageInfo; use sqlx::sqlite::SqliteRow; use sqlx::{Result, Row, SqliteConnection}; +use torii_core::sql::WORLD_CONTRACT_TYPE; use super::filter::{Filter, FilterValue}; use super::order::{CursorDirection, Direction, Order}; @@ -25,8 +26,9 @@ pub async fn count_rows( } pub async fn fetch_world_address(conn: &mut SqliteConnection) -> Result { - let query = "SELECT world_address FROM worlds".to_string(); - let res: (String,) = sqlx::query_as(&query).fetch_one(conn).await?; + let query = "SELECT contract_address FROM contracts where contract_type = ?".to_string(); + // for now we only have one world contract so this works + let res: (String,) = sqlx::query_as(&query).bind(WORLD_CONTRACT_TYPE).fetch_one(conn).await?; Ok(res.0) } diff --git a/crates/torii/graphql/src/tests/metadata_test.rs b/crates/torii/graphql/src/tests/metadata_test.rs index 7383a36b08..53ff0367ff 100644 --- a/crates/torii/graphql/src/tests/metadata_test.rs +++ b/crates/torii/graphql/src/tests/metadata_test.rs @@ -74,6 +74,7 @@ mod tests { db.update_metadata(&RESOURCE, URI, &world_metadata, &None, &Some(cover_img.to_string())) .await .unwrap(); + db.execute().await.unwrap(); let result = run_graphql_query(&schema, QUERY).await; let value = result.get("metadatas").ok_or("metadatas not found").unwrap().clone(); diff --git a/crates/torii/graphql/src/tests/mod.rs b/crates/torii/graphql/src/tests/mod.rs index 259587ddc1..26ff6870df 100644 --- a/crates/torii/graphql/src/tests/mod.rs +++ b/crates/torii/graphql/src/tests/mod.rs @@ -21,9 +21,11 @@ use sqlx::SqlitePool; use starknet::accounts::{Account, ConnectedAccount}; use starknet::core::types::{Call, Felt, InvokeTransactionResult}; use starknet::macros::selector; +use starknet::providers::Provider; use tokio::sync::broadcast; use tokio_stream::StreamExt; use torii_core::engine::{Engine, EngineConfig, Processors}; +use torii_core::processors::generate_event_processors_map; use torii_core::processors::register_model::RegisterModelProcessor; use torii_core::processors::store_del_record::StoreDelRecordProcessor; use torii_core::processors::store_set_record::StoreSetRecordProcessor; @@ -351,11 +353,12 @@ pub async fn spinup_types_test() -> Result { db, account.provider(), Processors { - event: vec![ + event: generate_event_processors_map(vec![ Box::new(RegisterModelProcessor), Box::new(StoreSetRecordProcessor), Box::new(StoreDelRecordProcessor), - ], + ]) + .unwrap(), ..Processors::default() }, EngineConfig::default(), @@ -363,7 +366,9 @@ pub async fn spinup_types_test() -> Result { None, ); - let _ = engine.sync_to_head(0, None).await?; + let to = account.provider().block_hash_and_number().await?.block_number; + let data = engine.fetch_range(0, to, None).await.unwrap(); + engine.process_range(data).await.unwrap(); Ok(pool) } diff --git a/crates/torii/graphql/src/tests/subscription_test.rs b/crates/torii/graphql/src/tests/subscription_test.rs index 015df8789c..f7dc04d1f1 100644 --- a/crates/torii/graphql/src/tests/subscription_test.rs +++ b/crates/torii/graphql/src/tests/subscription_test.rs @@ -7,13 +7,13 @@ mod tests { use 
dojo_types::primitive::Primitive; use dojo_types::schema::{Enum, EnumOption, Member, Struct, Ty}; use dojo_world::contracts::abi::model::Layout; - use dojo_world::contracts::naming::compute_selector_from_names; + use dojo_world::contracts::naming::{compute_selector_from_names, compute_selector_from_tag}; use serial_test::serial; use sqlx::SqlitePool; - use starknet::core::types::{Event, Felt}; - use starknet_crypto::poseidon_hash_many; + use starknet::core::types::Event; + use starknet_crypto::{poseidon_hash_many, Felt}; use tokio::sync::mpsc; - use torii_core::sql::Sql; + use torii_core::sql::{felts_sql_string, Sql}; use crate::tests::{model_fixtures, run_graphql_subscription}; use crate::utils; @@ -54,63 +54,72 @@ mod tests { tokio::spawn(async move { // 1. Open process and sleep.Go to execute subscription tokio::time::sleep(Duration::from_secs(1)).await; + let ty = Ty::Struct(Struct { + name: utils::struct_name_from_names(&namespace, &model_name), + children: vec![ + Member { + name: "depth".to_string(), + key: false, + ty: Ty::Enum(Enum { + name: "Depth".to_string(), + option: Some(0), + options: vec![ + EnumOption { name: "Zero".to_string(), ty: Ty::Tuple(vec![]) }, + EnumOption { name: "One".to_string(), ty: Ty::Tuple(vec![]) }, + EnumOption { name: "Two".to_string(), ty: Ty::Tuple(vec![]) }, + EnumOption { name: "Three".to_string(), ty: Ty::Tuple(vec![]) }, + ], + }), + }, + Member { + name: "record_id".to_string(), + key: false, + ty: Ty::Primitive(Primitive::U8(Some(0))), + }, + Member { + name: "typeU16".to_string(), + key: false, + ty: Ty::Primitive(Primitive::U16(Some(1))), + }, + Member { + name: "type_u64".to_string(), + key: false, + ty: Ty::Primitive(Primitive::U64(Some(1))), + }, + Member { + name: "typeBool".to_string(), + key: false, + ty: Ty::Primitive(Primitive::Bool(Some(true))), + }, + Member { + name: "type_felt".to_string(), + key: false, + ty: Ty::Primitive(Primitive::Felt252(Some(Felt::from(1u128)))), + }, + Member { + name: "typeContractAddress".to_string(), + key: true, + ty: Ty::Primitive(Primitive::ContractAddress(Some(Felt::ONE))), + }, + ], + }); + let keys = keys_from_ty(&ty).unwrap(); + let keys_str = felts_sql_string(&keys); + let entity_id = poseidon_hash_many(&keys); + let model_id = model_id_from_ty(&ty); // Set entity with one Record model db.set_entity( - Ty::Struct(Struct { - name: utils::struct_name_from_names(&namespace, &model_name), - children: vec![ - Member { - name: "depth".to_string(), - key: false, - ty: Ty::Enum(Enum { - name: "Depth".to_string(), - option: Some(0), - options: vec![ - EnumOption { name: "Zero".to_string(), ty: Ty::Tuple(vec![]) }, - EnumOption { name: "One".to_string(), ty: Ty::Tuple(vec![]) }, - EnumOption { name: "Two".to_string(), ty: Ty::Tuple(vec![]) }, - EnumOption { name: "Three".to_string(), ty: Ty::Tuple(vec![]) }, - ], - }), - }, - Member { - name: "record_id".to_string(), - key: false, - ty: Ty::Primitive(Primitive::U8(Some(0))), - }, - Member { - name: "typeU16".to_string(), - key: false, - ty: Ty::Primitive(Primitive::U16(Some(1))), - }, - Member { - name: "type_u64".to_string(), - key: false, - ty: Ty::Primitive(Primitive::U64(Some(1))), - }, - Member { - name: "typeBool".to_string(), - key: false, - ty: Ty::Primitive(Primitive::Bool(Some(true))), - }, - Member { - name: "type_felt".to_string(), - key: false, - ty: Ty::Primitive(Primitive::Felt252(Some(Felt::from(1u128)))), - }, - Member { - name: "typeContractAddress".to_string(), - key: true, - ty: 
Ty::Primitive(Primitive::ContractAddress(Some(Felt::ONE))), - }, - ], - }), + ty, &format!("0x{:064x}:0x{:04x}:0x{:04x}", 0, 0, 0), block_timestamp, + entity_id, + model_id, + &keys_str, ) .await .unwrap(); + db.execute().await.unwrap(); tx.send(()).await.unwrap(); }); @@ -177,48 +186,58 @@ mod tests { tokio::spawn(async move { // 1. Open process and sleep.Go to execute subscription tokio::time::sleep(Duration::from_secs(1)).await; + let ty = Ty::Struct(Struct { + name: utils::struct_name_from_names(&namespace, &model_name), + children: vec![ + Member { + name: "depth".to_string(), + key: false, + ty: Ty::Enum(Enum { + name: "Depth".to_string(), + option: Some(0), + options: vec![ + EnumOption { name: "Zero".to_string(), ty: Ty::Tuple(vec![]) }, + EnumOption { name: "One".to_string(), ty: Ty::Tuple(vec![]) }, + EnumOption { name: "Two".to_string(), ty: Ty::Tuple(vec![]) }, + EnumOption { name: "Three".to_string(), ty: Ty::Tuple(vec![]) }, + ], + }), + }, + Member { + name: "record_id".to_string(), + key: false, + ty: Ty::Primitive(Primitive::U32(Some(0))), + }, + Member { + name: "type_felt".to_string(), + key: false, + ty: Ty::Primitive(Primitive::Felt252(Some(Felt::from(1u128)))), + }, + Member { + name: "typeContractAddress".to_string(), + key: true, + ty: Ty::Primitive(Primitive::ContractAddress(Some(Felt::ONE))), + }, + ], + }); + + let keys = keys_from_ty(&ty).unwrap(); + let keys_str = felts_sql_string(&keys); + let entity_id = poseidon_hash_many(&keys); + let model_id = model_id_from_ty(&ty); // Set entity with one Record model db.set_entity( - Ty::Struct(Struct { - name: utils::struct_name_from_names(&namespace, &model_name), - children: vec![ - Member { - name: "depth".to_string(), - key: false, - ty: Ty::Enum(Enum { - name: "Depth".to_string(), - option: Some(0), - options: vec![ - EnumOption { name: "Zero".to_string(), ty: Ty::Tuple(vec![]) }, - EnumOption { name: "One".to_string(), ty: Ty::Tuple(vec![]) }, - EnumOption { name: "Two".to_string(), ty: Ty::Tuple(vec![]) }, - EnumOption { name: "Three".to_string(), ty: Ty::Tuple(vec![]) }, - ], - }), - }, - Member { - name: "record_id".to_string(), - key: false, - ty: Ty::Primitive(Primitive::U32(Some(0))), - }, - Member { - name: "type_felt".to_string(), - key: false, - ty: Ty::Primitive(Primitive::Felt252(Some(Felt::from(1u128)))), - }, - Member { - name: "typeContractAddress".to_string(), - key: true, - ty: Ty::Primitive(Primitive::ContractAddress(Some(Felt::ONE))), - }, - ], - }), + ty, &format!("0x{:064x}:0x{:04x}:0x{:04x}", 0, 0, 0), block_timestamp, + entity_id, + model_id, + &keys_str, ) .await .unwrap(); + db.execute().await.unwrap(); tx.send(()).await.unwrap(); }); @@ -290,6 +309,7 @@ mod tests { ) .await .unwrap(); + db.execute().await.unwrap(); // 3. fn publish() is called from state.set_entity() @@ -353,6 +373,7 @@ mod tests { ) .await .unwrap(); + db.execute().await.unwrap(); // 3. 
fn publish() is called from state.set_entity() tx.send(()).await.unwrap(); @@ -403,6 +424,7 @@ mod tests { Felt::ZERO, block_timestamp, ); + db.execute().await.unwrap(); tx.send(()).await.unwrap(); }); @@ -437,4 +459,24 @@ mod tests { assert_eq!(response_value, expected_value); rx.recv().await.unwrap(); } + + fn keys_from_ty(ty: &Ty) -> anyhow::Result<Vec<Felt>> { + if let Ty::Struct(s) = &ty { + let mut keys = Vec::new(); + for m in s.keys() { + keys.extend( + m.serialize().map_err(|_| anyhow::anyhow!("Failed to serialize model key"))?, + ); + } + Ok(keys) + } else { + anyhow::bail!("Entity is not a struct") + } + } + + fn model_id_from_ty(ty: &Ty) -> Felt { + let namespaced_name = ty.name(); + + compute_selector_from_tag(&namespaced_name) + } } diff --git a/crates/torii/grpc/Cargo.toml b/crates/torii/grpc/Cargo.toml index c6edb36d45..64b0a90ecb 100644 --- a/crates/torii/grpc/Cargo.toml +++ b/crates/torii/grpc/Cargo.toml @@ -8,13 +8,13 @@ [dependencies] bytes.workspace = true dojo-types = { path = "../../dojo-types" } -futures.workspace = true futures-util.workspace = true +futures.workspace = true num-traits.workspace = true parking_lot.workspace = true rayon.workspace = true -starknet.workspace = true starknet-crypto.workspace = true +starknet.workspace = true thiserror.workspace = true torii-core = { path = "../core", optional = true } @@ -49,11 +49,11 @@ wasm-tonic.workspace = true [target.'cfg(not(target_arch = "wasm32"))'.dependencies] prost.workspace = true sqlx.workspace = true -tokio.workspace = true tokio-stream = "0.1.14" -tonic.workspace = true +tokio.workspace = true tonic-reflection.workspace = true tonic-web.workspace = true +tonic.workspace = true url.workspace = true [build-dependencies] diff --git a/crates/torii/grpc/src/server/mod.rs b/crates/torii/grpc/src/server/mod.rs index f833881939..d6f6894d67 100644 --- a/crates/torii/grpc/src/server/mod.rs +++ b/crates/torii/grpc/src/server/mod.rs @@ -132,7 +132,7 @@ impl DojoWorld { impl DojoWorld { pub async fn metadata(&self) -> Result<proto::types::WorldMetadata, Error> { let world_address = sqlx::query_scalar(&format!( - "SELECT world_address FROM worlds WHERE id = '{:#x}'", + "SELECT contract_address FROM contracts WHERE id = '{:#x}'", self.world_address )) .fetch_one(&self.pool) diff --git a/crates/torii/grpc/src/server/tests/entities_test.rs b/crates/torii/grpc/src/server/tests/entities_test.rs index 9ffc855f28..30eaebbd47 100644 --- a/crates/torii/grpc/src/server/tests/entities_test.rs +++ b/crates/torii/grpc/src/server/tests/entities_test.rs @@ -16,10 +16,11 @@ use starknet::accounts::Account; use starknet::core::types::{BlockId, BlockTag, Call}; use starknet::core::utils::{get_contract_address, get_selector_from_name}; use starknet::providers::jsonrpc::HttpTransport; -use starknet::providers::JsonRpcClient; +use starknet::providers::{JsonRpcClient, Provider}; use starknet_crypto::poseidon_hash_many; use tokio::sync::broadcast; use torii_core::engine::{Engine, EngineConfig, Processors}; +use torii_core::processors::generate_event_processors_map; use torii_core::processors::register_model::RegisterModelProcessor; use torii_core::processors::store_set_record::StoreSetRecordProcessor; use torii_core::sql::Sql; @@ -64,7 +65,7 @@ async fn test_entities_queries() { let provider = Arc::new(JsonRpcClient::new(HttpTransport::new(sequencer.url()))); let world = WorldContract::new(strat.world_address, &account); - let world_reader = WorldContractReader::new(strat.world_address, &provider); + let world_reader =
WorldContractReader::new(strat.world_address, Arc::clone(&provider)); let actions = strat.contracts.first().unwrap(); let actions_address = get_contract_address( @@ -100,9 +101,13 @@ async fn test_entities_queries() { let mut engine = Engine::new( world_reader, db.clone(), - &provider, + Arc::clone(&provider), Processors { - event: vec![Box::new(RegisterModelProcessor), Box::new(StoreSetRecordProcessor)], + event: generate_event_processors_map(vec![ + Box::new(RegisterModelProcessor), + Box::new(StoreSetRecordProcessor), + ]) + .unwrap(), ..Processors::default() }, EngineConfig::default(), @@ -110,7 +115,9 @@ async fn test_entities_queries() { None, ); - let _ = engine.sync_to_head(0, None).await.unwrap(); + let to = provider.block_hash_and_number().await.unwrap().block_number; + let data = engine.fetch_range(0, to, None).await.unwrap(); + engine.process_range(data).await.unwrap(); let (_, receiver) = tokio::sync::mpsc::channel(1); let grpc = DojoWorld::new(db.pool, receiver, strat.world_address, provider.clone()); diff --git a/crates/torii/libp2p/Cargo.toml b/crates/torii/libp2p/Cargo.toml index daa8b10510..7846b880d3 100644 --- a/crates/torii/libp2p/Cargo.toml +++ b/crates/torii/libp2p/Cargo.toml @@ -34,15 +34,17 @@ katana-runner.workspace = true tempfile.workspace = true [target.'cfg(not(target_arch = "wasm32"))'.dependencies] -libp2p = { git = "https://github.com/libp2p/rust-libp2p", features = [ "ed25519", "gossipsub", "identify", "macros", "noise", "ping", "quic", "relay", "tcp", "tokio", "yamux" ], rev = "451bcb60bb472262f96071006b19e5d236b1dd54" } -libp2p-webrtc = { git = "https://github.com/libp2p/rust-libp2p", features = [ "pem", "tokio" ], rev = "451bcb60bb472262f96071006b19e5d236b1dd54" } +rcgen = "0.13.1" +libp2p = { git = "https://github.com/libp2p/rust-libp2p", features = [ "ed25519", "gossipsub", "identify", "macros", "noise", "ping", "quic", "relay", "tcp", "tokio", "yamux", "websocket", "dns" ], rev = "f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" } +libp2p-webrtc = { git = "https://github.com/libp2p/rust-libp2p", features = [ "pem", "tokio" ], rev = "f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" } sqlx.workspace = true tokio.workspace = true torii-core.workspace = true [target.'cfg(target_arch = "wasm32")'.dependencies] -libp2p = { git = "https://github.com/libp2p/rust-libp2p", features = [ "ed25519", "gossipsub", "identify", "macros", "ping", "tcp", "wasm-bindgen" ], rev = "451bcb60bb472262f96071006b19e5d236b1dd54" } -libp2p-webrtc-websys = { git = "https://github.com/libp2p/rust-libp2p", rev = "451bcb60bb472262f96071006b19e5d236b1dd54" } +libp2p = { git = "https://github.com/libp2p/rust-libp2p", features = [ "ed25519", "gossipsub", "identify", "macros", "ping", "tcp", "wasm-bindgen", "noise", "yamux" ], rev = "f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" } +libp2p-webrtc-websys = { git = "https://github.com/libp2p/rust-libp2p", rev = "f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" } +libp2p-websocket-websys = { git = "https://github.com/libp2p/rust-libp2p", rev = "f0cbd4fb0cef8d1ae2298901eab95acb5f104ea3" } tracing-wasm = "0.2.1" wasm-bindgen-futures = "0.4.40" wasm-bindgen-test = "0.3.40" diff --git a/crates/torii/libp2p/src/client/mod.rs b/crates/torii/libp2p/src/client/mod.rs index c3aad69c12..f438d23072 100644 --- a/crates/torii/libp2p/src/client/mod.rs +++ b/crates/torii/libp2p/src/client/mod.rs @@ -5,11 +5,13 @@ use futures::channel::mpsc::{UnboundedReceiver, UnboundedSender}; use futures::channel::oneshot; use futures::lock::Mutex; use futures::{select, StreamExt}; 
+#[cfg(target_arch = "wasm32")] +use libp2p::core::{upgrade::Version, Transport}; use libp2p::gossipsub::{self, IdentTopic, MessageId}; use libp2p::swarm::{NetworkBehaviour, Swarm, SwarmEvent}; -use libp2p::{identify, identity, ping, Multiaddr, PeerId}; #[cfg(not(target_arch = "wasm32"))] -use libp2p::{noise, tcp, yamux}; +use libp2p::tcp; +use libp2p::{identify, identity, noise, ping, yamux, Multiaddr, PeerId}; use tracing::info; pub mod events; @@ -72,7 +74,7 @@ impl RelayClient { ) .expect("Gossipsub behaviour is invalid"), identify: identify::Behaviour::new(identify::Config::new( - "/torii-client/0.0.1".to_string(), + format!("/torii-client/{}", env!("CARGO_PKG_VERSION")), key.public(), )), ping: ping::Behaviour::new(ping::Config::default()), @@ -108,6 +110,14 @@ impl RelayClient { libp2p_webrtc_websys::Transport::new(libp2p_webrtc_websys::Config::new(&key)) }) .expect("Failed to create WebRTC transport") + .with_other_transport(|key| { + libp2p_websocket_websys::Transport::default() + .upgrade(Version::V1) + .authenticate(noise::Config::new(&key).unwrap()) + .multiplex(yamux::Config::default()) + .boxed() + }) + .expect("Failed to create WebSocket transport") .with_behaviour(|key| { let gossipsub_config: gossipsub::Config = gossipsub::ConfigBuilder::default() .heartbeat_interval(Duration::from_secs( @@ -123,7 +133,7 @@ impl RelayClient { ) .expect("Gossipsub behaviour is invalid"), identify: identify::Behaviour::new(identify::Config::new( - "/torii-client/0.0.1".to_string(), + format!("/torii-client/{}", env!("CARGO_PKG_VERSION")), key.public(), )), ping: ping::Behaviour::new(ping::Config::default()), diff --git a/crates/torii/libp2p/src/server/mod.rs b/crates/torii/libp2p/src/server/mod.rs index 43b6d4a8e0..fec2b9cbd9 100644 --- a/crates/torii/libp2p/src/server/mod.rs +++ b/crates/torii/libp2p/src/server/mod.rs @@ -8,22 +8,24 @@ use std::{fs, io}; use chrono::Utc; use dojo_types::schema::Ty; -use dojo_world::contracts::naming::compute_selector_from_names; +use dojo_world::contracts::naming::compute_selector_from_tag; use futures::StreamExt; -use indexmap::IndexMap; use libp2p::core::multiaddr::Protocol; use libp2p::core::muxing::StreamMuxerBox; +use libp2p::core::upgrade::Version; use libp2p::core::Multiaddr; use libp2p::gossipsub::{self, IdentTopic}; use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; -use libp2p::{identify, identity, noise, ping, relay, tcp, yamux, PeerId, Swarm, Transport}; +use libp2p::{ + dns, identify, identity, noise, ping, relay, tcp, websocket, yamux, PeerId, Swarm, Transport, +}; use libp2p_webrtc as webrtc; use rand::thread_rng; use starknet::core::types::{BlockId, BlockTag, Felt, FunctionCall}; use starknet::core::utils::get_selector_from_name; use starknet::providers::Provider; use starknet_crypto::poseidon_hash_many; -use torii_core::sql::Sql; +use torii_core::sql::{felts_sql_string, Sql}; use tracing::{info, warn}; use webrtc::tokio::Certificate; @@ -33,7 +35,7 @@ use crate::errors::Error; mod events; use crate::server::events::ServerEvent; -use crate::typed_data::{parse_value_to_ty, PrimitiveType}; +use crate::typed_data::{parse_value_to_ty, PrimitiveType, TypedData}; use crate::types::Message; pub(crate) const LOG_TARGET: &str = "torii::relay::server"; @@ -61,6 +63,7 @@ impl Relay
<P: Provider + Sync>
{ provider: P, port: u16, port_webrtc: u16, + port_websocket: u16, local_key_path: Option<String>, cert_path: Option<String>, ) -> Result<Self, Error> { @@ -85,10 +88,24 @@ impl Relay
<P: Provider + Sync>
{ .with_tcp(tcp::Config::default(), noise::Config::new, yamux::Config::default)? .with_quic() .with_other_transport(|key| { - Ok(webrtc::tokio::Transport::new(key.clone(), cert) - .map(|(peer_id, conn), _| (peer_id, StreamMuxerBox::new(conn)))) + webrtc::tokio::Transport::new(key.clone(), cert) + .map(|(peer_id, conn), _| (peer_id, StreamMuxerBox::new(conn))) }) .expect("Failed to create WebRTC transport") + .with_other_transport(|key| { + let transport = websocket::WsConfig::new( + dns::tokio::Transport::system(tcp::tokio::Transport::new( + tcp::Config::default(), + )) + .unwrap(), + ); + + transport + .upgrade(Version::V1) + .authenticate(noise::Config::new(key).unwrap()) + .multiplex(yamux::Config::default()) + }) + .expect("Failed to create WebSocket transport") .with_behaviour(|key| { // Hash messages by their content. No two messages of the same content will be // propagated. @@ -109,7 +126,7 @@ impl Relay
<P: Provider + Sync>
{ relay: relay::Behaviour::new(key.public().to_peer_id(), Default::default()), ping: ping::Behaviour::new(ping::Config::new()), identify: identify::Behaviour::new(identify::Config::new( - "/torii-relay/0.0.1".to_string(), + format!("/torii-relay/{}", env!("CARGO_PKG_VERSION")), key.public(), )), gossipsub: gossipsub::Behaviour::new( @@ -141,6 +158,12 @@ impl Relay
<P: Provider + Sync>
{ .with(Protocol::WebRTCDirect); swarm.listen_on(listen_addr_webrtc.clone())?; + // WS + let listen_addr_wss = Multiaddr::from(Ipv4Addr::UNSPECIFIED) + .with(Protocol::Tcp(port_websocket)) + .with(Protocol::Ws("/".to_string().into())); + swarm.listen_on(listen_addr_wss.clone())?; + // Clients will send their messages to the "message" topic // with a room name as the message data. // and we will forward those messages to a specific room - in this case the topic @@ -178,7 +201,7 @@ impl Relay
<P: Provider + Sync>
{ } }; - let ty = match validate_message(&self.db, &data.message.message).await { + let ty = match validate_message(&self.db, &data.message).await { Ok(parsed_message) => parsed_message, Err(e) => { info!( @@ -222,6 +245,9 @@ impl Relay
<P: Provider + Sync>
{ continue; } }; + let keys_str = felts_sql_string(&keys); + let entity_id = poseidon_hash_many(&keys); + let model_id = ty_model_id(&ty).unwrap(); // select only identity field, if doesn't exist, empty string let query = format!( @@ -229,7 +255,7 @@ impl Relay
<P: Provider + Sync>
{ ty.name() ); let entity_identity: Option<String> = match sqlx::query_scalar(&query) - .bind(format!("{:#x}", poseidon_hash_many(&keys))) + .bind(format!("{:#x}", entity_id)) .fetch_optional(&mut *pool) .await { @@ -244,44 +270,29 @@ impl Relay
<P: Provider + Sync>
{ } }; - if entity_identity.is_none() { - // we can set the entity without checking identity - if let Err(e) = self - .db - .set_entity( - ty, - &message_id.to_string(), - Utc::now().timestamp() as u64, - ) - .await - { - info!( - target: LOG_TARGET, - error = %e, - "Setting message." - ); - continue; - } else { - info!( - target: LOG_TARGET, - message_id = %message_id, - peer_id = %peer_id, - "Message set." - ); - continue; - } - } - - let entity_identity = match Felt::from_str(&entity_identity.unwrap()) { - Ok(identity) => identity, - Err(e) => { - warn!( - target: LOG_TARGET, - error = %e, - "Parsing identity." - ); - continue; - } + let entity_identity = match entity_identity { + Some(identity) => match Felt::from_str(&identity) { + Ok(identity) => identity, + Err(e) => { + warn!( + target: LOG_TARGET, + error = %e, + "Parsing identity." + ); + continue; + } + }, + None => match get_identity_from_ty(&ty) { + Ok(identity) => identity, + Err(e) => { + warn!( + target: LOG_TARGET, + error = %e, + "Getting identity from message." + ); + continue; + } + }, }; // TODO: have a nonce in model to check @@ -301,6 +312,8 @@ impl Relay
<P: Provider + Sync>
{ }; let mut calldata = vec![message_hash]; + calldata.push(Felt::from(data.signature.len())); + calldata.extend(data.signature); if !match self .provider @@ -343,6 +356,9 @@ impl Relay
<P: Provider + Sync>
{ ty, &message_id.to_string(), Utc::now().timestamp() as u64, + entity_id, + model_id, + &keys_str ) .await { @@ -380,11 +396,13 @@ impl Relay
<P: Provider + Sync>
{ ); } ServerEvent::Identify(identify::Event::Received { + connection_id, info: identify::Info { observed_addr, .. }, peer_id, }) => { info!( target: LOG_TARGET, + connection_id = %connection_id, peer_id = %peer_id, observed_addr = %observed_addr, "Received identify event." @@ -427,39 +445,27 @@ fn ty_keys(ty: &Ty) -> Result<Vec<Felt>, Error> { } } +fn ty_model_id(ty: &Ty) -> Result<Felt, Error> { + let namespaced_name = ty.name(); + + let selector = compute_selector_from_tag(&namespaced_name); + Ok(selector) +} + // Validates the message model // and returns the identity and signature -async fn validate_message( - db: &Sql, - message: &IndexMap<String, PrimitiveType>, -) -> Result<Ty, Error> { - let (selector, model) = if let Some(model_name) = message.get("model") { - if let PrimitiveType::String(model_name) = model_name { - let (namespace, name) = model_name.split_once('-').ok_or_else(|| { - Error::InvalidMessageError( - "Model name is not in the format namespace-model".to_string(), - ) - })?; - - (compute_selector_from_names(namespace, name), model_name) - } else { - return Err(Error::InvalidMessageError("Model name is not a string".to_string())); - } - } else { - return Err(Error::InvalidMessageError("Model name is missing".to_string())); - }; +async fn validate_message(db: &Sql, message: &TypedData) -> Result<Ty, Error> { + let selector = compute_selector_from_tag(&message.primary_type); let mut ty = db .model(selector) .await - .map_err(|e| Error::InvalidMessageError(format!("Model {} not found: {}", model, e)))? + .map_err(|e| { + Error::InvalidMessageError(format!("Model {} not found: {}", message.primary_type, e)) + })? .schema; - if let Some(object) = message.get(model) { - parse_value_to_ty(object, &mut ty)?; - } else { - return Err(Error::InvalidMessageError("Model is missing".to_string())); - }; + parse_value_to_ty(&PrimitiveType::Object(message.message.clone()), &mut ty)?; Ok(ty) } @@ -499,6 +505,21 @@ fn read_or_create_certificate(path: &Path) -> anyhow::Result<Certificate> { Ok(cert) } +fn get_identity_from_ty(ty: &Ty) -> Result<Felt, Error> { + let identity = ty + .as_struct() + .ok_or_else(|| Error::InvalidMessageError("Message is not a struct".to_string()))? + .get("identity") + .ok_or_else(|| Error::InvalidMessageError("No field identity".to_string()))? + .as_primitive() + .ok_or_else(|| Error::InvalidMessageError("Identity is not a primitive".to_string()))?
+ .as_contract_address() + .ok_or_else(|| { + Error::InvalidMessageError("Identity is not a contract address".to_string()) + })?; + Ok(identity) +} + #[cfg(test)] mod tests { use tempfile::tempdir; diff --git a/crates/torii/libp2p/src/tests.rs b/crates/torii/libp2p/src/tests.rs index eeaca36005..552b240590 100644 --- a/crates/torii/libp2p/src/tests.rs +++ b/crates/torii/libp2p/src/tests.rs @@ -590,7 +590,7 @@ mod test { .unwrap(); // Initialize the relay server - let mut relay_server = Relay::new(db, provider, 9900, 9901, None, None)?; + let mut relay_server = Relay::new(db, provider, 9900, 9901, 9902, None, None)?; tokio::spawn(async move { relay_server.run().await; }); @@ -604,20 +604,7 @@ mod test { let mut typed_data = TypedData::new( IndexMap::from_iter(vec![ ( - "OffchainMessage".to_string(), - vec![ - Field::SimpleType(SimpleField { - name: "model".to_string(), - r#type: "shortstring".to_string(), - }), - Field::SimpleType(SimpleField { - name: "types_test-Message".to_string(), - r#type: "Model".to_string(), - }), - ], - ), - ( - "Model".to_string(), + "types_test-Message".to_string(), vec![ Field::SimpleType(SimpleField { name: "identity".to_string(), @@ -651,31 +638,18 @@ mod test { ], ), ]), - "OffchainMessage", + "types_test-Message", Domain::new("types_test-Message", "1", "0x0", Some("1")), IndexMap::new(), ); - typed_data.message.insert( - "model".to_string(), - crate::typed_data::PrimitiveType::String("types_test-Message".to_string()), + "identity".to_string(), + crate::typed_data::PrimitiveType::String(account.address.to_string()), ); + typed_data.message.insert( - "types_test-Message".to_string(), - crate::typed_data::PrimitiveType::Object( - vec![ - ( - "identity".to_string(), - crate::typed_data::PrimitiveType::String(account.address.to_string()), - ), - ( - "message".to_string(), - crate::typed_data::PrimitiveType::String("mimi".to_string()), - ), - ] - .into_iter() - .collect(), - ), + "message".to_string(), + crate::typed_data::PrimitiveType::String("mimi".to_string()), ); let message_hash = typed_data.encode(account.address).unwrap(); diff --git a/crates/torii/migrations/20240829114436_merge_worlds_and_indexer.sql b/crates/torii/migrations/20240829114436_merge_worlds_and_indexer.sql new file mode 100644 index 0000000000..59dcff889b --- /dev/null +++ b/crates/torii/migrations/20240829114436_merge_worlds_and_indexer.sql @@ -0,0 +1,25 @@ +CREATE TABLE contracts ( + -- contract_address + id TEXT NOT NULL PRIMARY KEY, + contract_address TEXT NOT NULL, + -- "WORLD", "ERC20", etc... 
+ contract_type TEXT NOT NULL, + head BIGINT, + pending_block_tx TEXT, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +-- Copy data from world and indexer tables into contracts table +INSERT INTO contracts (id, contract_address, contract_type, head, pending_block_tx) +SELECT + w.id, + w.world_address, + 'WORLD', + i.head, + i.pending_block_tx +FROM worlds w +LEFT JOIN indexers i ON w.id = i.id; + +-- remove unused tables +DROP TABLE worlds; +DROP TABLE indexers; \ No newline at end of file diff --git a/crates/torii/migrations/20240903110847_add_column_for_non_world_transaction.sql b/crates/torii/migrations/20240903110847_add_column_for_non_world_transaction.sql new file mode 100644 index 0000000000..924d3aff0c --- /dev/null +++ b/crates/torii/migrations/20240903110847_add_column_for_non_world_transaction.sql @@ -0,0 +1,5 @@ +-- Rename pending_block_tx to last_pending_block_world_tx +ALTER TABLE contracts RENAME COLUMN pending_block_tx TO last_pending_block_world_tx; + +-- Add new column last_pending_block_tx +ALTER TABLE contracts ADD COLUMN last_pending_block_tx TEXT; diff --git a/examples/spawn-and-move/Scarb.lock b/examples/spawn-and-move/Scarb.lock index 7a80ac1c79..8bcf917cfe 100644 --- a/examples/spawn-and-move/Scarb.lock +++ b/examples/spawn-and-move/Scarb.lock @@ -24,7 +24,7 @@ dependencies = [ [[package]] name = "dojo_examples" -version = "1.0.0-alpha.4" +version = "1.0.0-alpha.8" dependencies = [ "armory", "bestiary", diff --git a/examples/spawn-and-move/Scarb.toml b/examples/spawn-and-move/Scarb.toml index e1e536520c..275c6c1c8d 100644 --- a/examples/spawn-and-move/Scarb.toml +++ b/examples/spawn-and-move/Scarb.toml @@ -1,7 +1,7 @@ [package] cairo-version = "=2.7.0" name = "dojo_examples" -version = "1.0.0-alpha.4" +version = "1.0.0-alpha.8" # Use the prelude with the less imports as possible # from corelib.
edition = "2024_07" @@ -20,3 +20,5 @@ build-external-contracts = [ "armory::Flatbow", "bestiary::RiverSkale" ] [features] default = [ "dungeon" ] dungeon = [ ] + +[profile.saya] diff --git a/examples/spawn-and-move/dojo_dev.toml b/examples/spawn-and-move/dojo_dev.toml index 249785ce4e..deb7909dcf 100644 --- a/examples/spawn-and-move/dojo_dev.toml +++ b/examples/spawn-and-move/dojo_dev.toml @@ -16,4 +16,4 @@ rpc_url = "https://api.cartridge.gg/x/kari-test/katana" # Default account for katana with seed = 0 account_address = "0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03" private_key = "0x1800000000300000180000000000030000000000003006001800006600" -world_address = "0x75f37b9d81cd262f3ba32ef89596e4e6eae99b345cf11fc1a85521c6be87c06" +world_address = "0x5fedbace16902d9ca4cdc1522f9fe156cd8c69a5d25e1436ee4b7b9933ad997" diff --git a/examples/spawn-and-move/manifests/dev/base/abis/contracts/dojo_examples-actions-40b6994c.json b/examples/spawn-and-move/manifests/dev/base/abis/contracts/dojo_examples-actions-40b6994c.json index 821650785e..fdba39296f 100644 --- a/examples/spawn-and-move/manifests/dev/base/abis/contracts/dojo_examples-actions-40b6994c.json +++ b/examples/spawn-and-move/manifests/dev/base/abis/contracts/dojo_examples-actions-40b6994c.json @@ -296,6 +296,22 @@ "outputs": [], "state_mutability": "external" }, + { + "type": "function", + "name": "set_models", + "inputs": [ + { + "name": "seed", + "type": "core::felt252" + }, + { + "name": "n_models", + "type": "core::integer::u32" + } + ], + "outputs": [], + "state_mutability": "external" + }, { "type": "function", "name": "enter_dungeon", diff --git a/examples/spawn-and-move/manifests/dev/base/abis/dojo-world.json b/examples/spawn-and-move/manifests/dev/base/abis/dojo-world.json index 897b4d959c..8553809311 100644 --- a/examples/spawn-and-move/manifests/dev/base/abis/dojo-world.json +++ b/examples/spawn-and-move/manifests/dev/base/abis/dojo-world.json @@ -939,6 +939,11 @@ "type": "core::felt252", "kind": "data" }, + { + "name": "entity_id", + "type": "core::felt252", + "kind": "data" + }, { "name": "keys", "type": "core::array::Span::<core::felt252>", diff --git a/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples-actions-40b6994c.toml b/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples-actions-40b6994c.toml index 0a34841a2d..0ae312f5c4 100644 --- a/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples-actions-40b6994c.toml +++ b/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples-actions-40b6994c.toml @@ -1,6 +1,6 @@ kind = "DojoContract" -class_hash = "0x6bb4a7b55dc9c9b8193beca79526b6b8f8f2c33f5be0662f5215e10883c653f" -original_class_hash = "0x6bb4a7b55dc9c9b8193beca79526b6b8f8f2c33f5be0662f5215e10883c653f" +class_hash = "0x67a20ea91a4b9bb9cdb46cefc41dd6ca4c07c22d4d413205720963944fd817d" +original_class_hash = "0x67a20ea91a4b9bb9cdb46cefc41dd6ca4c07c22d4d413205720963944fd817d" base_class_hash = "0x0" abi = "manifests/dev/base/abis/contracts/dojo_examples-actions-40b6994c.json" reads = [] @@ -8,6 +8,7 @@ writes = [] init_calldata = [] tag = "dojo_examples-actions" systems = [ + "set_models", "spawn", "move", "set_player_config", diff --git a/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples-dungeon-6620e0e6.toml b/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples-dungeon-6620e0e6.toml index 74d6bb4a20..75d087c886 100644 --- a/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples-dungeon-6620e0e6.toml +++
b/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples-dungeon-6620e0e6.toml @@ -1,6 +1,6 @@ kind = "DojoContract" -class_hash = "0x5d2892f0389e921a051daaad07efb49af7a13213ba309a901fc386acef15c3c" -original_class_hash = "0x5d2892f0389e921a051daaad07efb49af7a13213ba309a901fc386acef15c3c" +class_hash = "0x4590a27e4ec7366358ba5f60323777f301435ebbdd113ab02c54b947717530d" +original_class_hash = "0x4590a27e4ec7366358ba5f60323777f301435ebbdd113ab02c54b947717530d" base_class_hash = "0x0" abi = "manifests/dev/base/abis/contracts/dojo_examples-dungeon-6620e0e6.json" reads = [] diff --git a/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples-mock_token-31599eb2.toml b/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples-mock_token-31599eb2.toml index dae6e41296..6385a30c6c 100644 --- a/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples-mock_token-31599eb2.toml +++ b/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples-mock_token-31599eb2.toml @@ -1,6 +1,6 @@ kind = "DojoContract" -class_hash = "0x71fdf374f04ab0a918b1e8a0578f38ad2d7d0d61da131b8d3e7b0b41a3d2282" -original_class_hash = "0x71fdf374f04ab0a918b1e8a0578f38ad2d7d0d61da131b8d3e7b0b41a3d2282" +class_hash = "0x67edb33671cd2f5b766d073e3dec53b03400761a20f349ea9628cf4c883b393" +original_class_hash = "0x67edb33671cd2f5b766d073e3dec53b03400761a20f349ea9628cf4c883b393" base_class_hash = "0x0" abi = "manifests/dev/base/abis/contracts/dojo_examples-mock_token-31599eb2.json" reads = [] diff --git a/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples-others-61de2c18.toml b/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples-others-61de2c18.toml index 9db0728b63..5a43d3fad8 100644 --- a/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples-others-61de2c18.toml +++ b/examples/spawn-and-move/manifests/dev/base/contracts/dojo_examples-others-61de2c18.toml @@ -1,6 +1,6 @@ kind = "DojoContract" -class_hash = "0x647fc1b2d2e902e6304e127b36995d8f57fe45c38e38e15d8860db508dbf24a" -original_class_hash = "0x647fc1b2d2e902e6304e127b36995d8f57fe45c38e38e15d8860db508dbf24a" +class_hash = "0x40e824b8814bafef18cce2cf68f5765e9c9a1c86f55a8491b0c2a4faebdcc87" +original_class_hash = "0x40e824b8814bafef18cce2cf68f5765e9c9a1c86f55a8491b0c2a4faebdcc87" base_class_hash = "0x0" abi = "manifests/dev/base/abis/contracts/dojo_examples-others-61de2c18.json" reads = [] diff --git a/examples/spawn-and-move/manifests/dev/base/dojo-world.toml b/examples/spawn-and-move/manifests/dev/base/dojo-world.toml index 38a401a6d8..ff32465d06 100644 --- a/examples/spawn-and-move/manifests/dev/base/dojo-world.toml +++ b/examples/spawn-and-move/manifests/dev/base/dojo-world.toml @@ -1,6 +1,6 @@ kind = "Class" -class_hash = "0x3715f072aa1c07be724249fcda8b0322687f6c5c585eebc4402d162649c707c" -original_class_hash = "0x3715f072aa1c07be724249fcda8b0322687f6c5c585eebc4402d162649c707c" +class_hash = "0x6f4515274ee23404789c3351a77107d0ec07508530119822046600ca6948d6e" +original_class_hash = "0x6f4515274ee23404789c3351a77107d0ec07508530119822046600ca6948d6e" abi = "manifests/dev/base/abis/dojo-world.json" tag = "dojo-world" manifest_name = "dojo-world" diff --git a/examples/spawn-and-move/manifests/dev/deployment/abis/contracts/dojo_examples-actions-40b6994c.json b/examples/spawn-and-move/manifests/dev/deployment/abis/contracts/dojo_examples-actions-40b6994c.json index 821650785e..fdba39296f 100644 --- 
a/examples/spawn-and-move/manifests/dev/deployment/abis/contracts/dojo_examples-actions-40b6994c.json +++ b/examples/spawn-and-move/manifests/dev/deployment/abis/contracts/dojo_examples-actions-40b6994c.json @@ -296,6 +296,22 @@ "outputs": [], "state_mutability": "external" }, + { + "type": "function", + "name": "set_models", + "inputs": [ + { + "name": "seed", + "type": "core::felt252" + }, + { + "name": "n_models", + "type": "core::integer::u32" + } + ], + "outputs": [], + "state_mutability": "external" + }, { "type": "function", "name": "enter_dungeon", diff --git a/examples/spawn-and-move/manifests/dev/deployment/abis/dojo-world.json b/examples/spawn-and-move/manifests/dev/deployment/abis/dojo-world.json index 897b4d959c..8553809311 100644 --- a/examples/spawn-and-move/manifests/dev/deployment/abis/dojo-world.json +++ b/examples/spawn-and-move/manifests/dev/deployment/abis/dojo-world.json @@ -939,6 +939,11 @@ "type": "core::felt252", "kind": "data" }, + { + "name": "entity_id", + "type": "core::felt252", + "kind": "data" + }, { "name": "keys", "type": "core::array::Span::<core::felt252>", diff --git a/examples/spawn-and-move/manifests/dev/deployment/manifest.json b/examples/spawn-and-move/manifests/dev/deployment/manifest.json index 20dffef001..a2f8346a26 100644 --- a/examples/spawn-and-move/manifests/dev/deployment/manifest.json +++ b/examples/spawn-and-move/manifests/dev/deployment/manifest.json @@ -1,8 +1,8 @@ { "world": { "kind": "WorldContract", - "class_hash": "0x3715f072aa1c07be724249fcda8b0322687f6c5c585eebc4402d162649c707c", - "original_class_hash": "0x3715f072aa1c07be724249fcda8b0322687f6c5c585eebc4402d162649c707c", + "class_hash": "0x6f4515274ee23404789c3351a77107d0ec07508530119822046600ca6948d6e", + "original_class_hash": "0x6f4515274ee23404789c3351a77107d0ec07508530119822046600ca6948d6e", "abi": [ { "type": "impl", @@ -944,6 +944,11 @@ "type": "core::felt252", "kind": "data" }, + { + "name": "entity_id", + "type": "core::felt252", + "kind": "data" + }, { "name": "keys", "type": "core::array::Span::<core::felt252>", @@ -1229,8 +1234,8 @@ ] } ], - "address": "0x75f37b9d81cd262f3ba32ef89596e4e6eae99b345cf11fc1a85521c6be87c06", - "transaction_hash": "0x4caecaee04b1af0f999a4f5fdb7102e8e1821aac296b260458d422ae0a41a1f", + "address": "0x5fedbace16902d9ca4cdc1522f9fe156cd8c69a5d25e1436ee4b7b9933ad997", + "transaction_hash": "0x506e4efa5fa9ce5808a482b7076db8ca707a013bafaebf089206f28cd5f6bb6", "block_number": 3, "seed": "dojo_examples", "metadata": { @@ -1250,9 +1255,9 @@ "contracts": [ { "kind": "DojoContract", - "address": "0x2cd5e4ee3b898ff0578e0eec1ef5947a0a6b2c0d08ab95a14010c54d26265dc", - "class_hash": "0x6bb4a7b55dc9c9b8193beca79526b6b8f8f2c33f5be0662f5215e10883c653f", - "original_class_hash": "0x6bb4a7b55dc9c9b8193beca79526b6b8f8f2c33f5be0662f5215e10883c653f", + "address": "0x3287947f8080cdf20c0a6e88d50a8d824e04f035bd34550316e6768d87d35de", + "class_hash": "0x67a20ea91a4b9bb9cdb46cefc41dd6ca4c07c22d4d413205720963944fd817d", + "original_class_hash": "0x67a20ea91a4b9bb9cdb46cefc41dd6ca4c07c22d4d413205720963944fd817d", "base_class_hash": "0x2427dd10a58850ac9a5ca6ce04b7771b05330fd18f2e481831ad903b969e6b2", "abi": [ { @@ -1552,6 +1557,22 @@ "outputs": [], "state_mutability": "external" }, + { + "type": "function", + "name": "set_models", + "inputs": [ + { + "name": "seed", + "type": "core::felt252" + }, + { + "name": "n_models", + "type": "core::integer::u32" + } + ], + "outputs": [], + "state_mutability": "external" + }, { "type": "function", "name": "enter_dungeon", @@ -1651,6 +1672,7 @@ "init_calldata": [],
"tag": "dojo_examples-actions", "systems": [ + "set_models", "spawn", "move", "set_player_config", @@ -1664,9 +1686,9 @@ }, { "kind": "DojoContract", - "address": "0x1470b0304ed67a22acdca949eb99d50ff7a6def6de539b19254e121904c12c", - "class_hash": "0x5d2892f0389e921a051daaad07efb49af7a13213ba309a901fc386acef15c3c", - "original_class_hash": "0x5d2892f0389e921a051daaad07efb49af7a13213ba309a901fc386acef15c3c", + "address": "0x6ee438f6082f930c1b874cfefa2e380b1bd8eb8d77374bf18e8224c5dd1819", + "class_hash": "0x4590a27e4ec7366358ba5f60323777f301435ebbdd113ab02c54b947717530d", + "original_class_hash": "0x4590a27e4ec7366358ba5f60323777f301435ebbdd113ab02c54b947717530d", "base_class_hash": "0x2427dd10a58850ac9a5ca6ce04b7771b05330fd18f2e481831ad903b969e6b2", "abi": [ { @@ -1903,9 +1925,9 @@ }, { "kind": "DojoContract", - "address": "0x5a651aad0672c139bce3db0770ad128dafae568dde6550dbc5679f819b634fd", - "class_hash": "0x71fdf374f04ab0a918b1e8a0578f38ad2d7d0d61da131b8d3e7b0b41a3d2282", - "original_class_hash": "0x71fdf374f04ab0a918b1e8a0578f38ad2d7d0d61da131b8d3e7b0b41a3d2282", + "address": "0xd9e080358f8bcb8ca52182623e63b4777dbf54dedd3742bd86fabb3d1991ba", + "class_hash": "0x67edb33671cd2f5b766d073e3dec53b03400761a20f349ea9628cf4c883b393", + "original_class_hash": "0x67edb33671cd2f5b766d073e3dec53b03400761a20f349ea9628cf4c883b393", "base_class_hash": "0x2427dd10a58850ac9a5ca6ce04b7771b05330fd18f2e481831ad903b969e6b2", "abi": [ { @@ -2124,9 +2146,9 @@ }, { "kind": "DojoContract", - "address": "0x5114288a545fd35096cacb0b4a1869aadba49b6403bea303f603c23f6455375", - "class_hash": "0x647fc1b2d2e902e6304e127b36995d8f57fe45c38e38e15d8860db508dbf24a", - "original_class_hash": "0x647fc1b2d2e902e6304e127b36995d8f57fe45c38e38e15d8860db508dbf24a", + "address": "0x3fec924b42052f14a9c4bb48abae2068d66034e3cc6e063353b87a5659f5040", + "class_hash": "0x40e824b8814bafef18cce2cf68f5765e9c9a1c86f55a8491b0c2a4faebdcc87", + "original_class_hash": "0x40e824b8814bafef18cce2cf68f5765e9c9a1c86f55a8491b0c2a4faebdcc87", "base_class_hash": "0x2427dd10a58850ac9a5ca6ce04b7771b05330fd18f2e481831ad903b969e6b2", "abi": [ { diff --git a/examples/spawn-and-move/manifests/dev/deployment/manifest.toml b/examples/spawn-and-move/manifests/dev/deployment/manifest.toml index bc54628946..57d890115e 100644 --- a/examples/spawn-and-move/manifests/dev/deployment/manifest.toml +++ b/examples/spawn-and-move/manifests/dev/deployment/manifest.toml @@ -1,10 +1,10 @@ [world] kind = "WorldContract" -class_hash = "0x3715f072aa1c07be724249fcda8b0322687f6c5c585eebc4402d162649c707c" -original_class_hash = "0x3715f072aa1c07be724249fcda8b0322687f6c5c585eebc4402d162649c707c" +class_hash = "0x6f4515274ee23404789c3351a77107d0ec07508530119822046600ca6948d6e" +original_class_hash = "0x6f4515274ee23404789c3351a77107d0ec07508530119822046600ca6948d6e" abi = "manifests/dev/deployment/abis/dojo-world.json" -address = "0x75f37b9d81cd262f3ba32ef89596e4e6eae99b345cf11fc1a85521c6be87c06" -transaction_hash = "0x4caecaee04b1af0f999a4f5fdb7102e8e1821aac296b260458d422ae0a41a1f" +address = "0x5fedbace16902d9ca4cdc1522f9fe156cd8c69a5d25e1436ee4b7b9933ad997" +transaction_hash = "0x506e4efa5fa9ce5808a482b7076db8ca707a013bafaebf089206f28cd5f6bb6" block_number = 3 seed = "dojo_examples" manifest_name = "dojo-world" @@ -23,9 +23,9 @@ manifest_name = "dojo-base" [[contracts]] kind = "DojoContract" -address = "0x2cd5e4ee3b898ff0578e0eec1ef5947a0a6b2c0d08ab95a14010c54d26265dc" -class_hash = "0x6bb4a7b55dc9c9b8193beca79526b6b8f8f2c33f5be0662f5215e10883c653f" -original_class_hash = 
"0x6bb4a7b55dc9c9b8193beca79526b6b8f8f2c33f5be0662f5215e10883c653f" +address = "0x3287947f8080cdf20c0a6e88d50a8d824e04f035bd34550316e6768d87d35de" +class_hash = "0x67a20ea91a4b9bb9cdb46cefc41dd6ca4c07c22d4d413205720963944fd817d" +original_class_hash = "0x67a20ea91a4b9bb9cdb46cefc41dd6ca4c07c22d4d413205720963944fd817d" base_class_hash = "0x2427dd10a58850ac9a5ca6ce04b7771b05330fd18f2e481831ad903b969e6b2" abi = "manifests/dev/deployment/abis/contracts/dojo_examples-actions-40b6994c.json" reads = [] @@ -33,6 +33,7 @@ writes = ["ns:dojo_examples"] init_calldata = [] tag = "dojo_examples-actions" systems = [ + "set_models", "spawn", "move", "set_player_config", @@ -46,9 +47,9 @@ manifest_name = "dojo_examples-actions-40b6994c" [[contracts]] kind = "DojoContract" -address = "0x1470b0304ed67a22acdca949eb99d50ff7a6def6de539b19254e121904c12c" -class_hash = "0x5d2892f0389e921a051daaad07efb49af7a13213ba309a901fc386acef15c3c" -original_class_hash = "0x5d2892f0389e921a051daaad07efb49af7a13213ba309a901fc386acef15c3c" +address = "0x6ee438f6082f930c1b874cfefa2e380b1bd8eb8d77374bf18e8224c5dd1819" +class_hash = "0x4590a27e4ec7366358ba5f60323777f301435ebbdd113ab02c54b947717530d" +original_class_hash = "0x4590a27e4ec7366358ba5f60323777f301435ebbdd113ab02c54b947717530d" base_class_hash = "0x2427dd10a58850ac9a5ca6ce04b7771b05330fd18f2e481831ad903b969e6b2" abi = "manifests/dev/deployment/abis/contracts/dojo_examples-dungeon-6620e0e6.json" reads = [] @@ -60,9 +61,9 @@ manifest_name = "dojo_examples-dungeon-6620e0e6" [[contracts]] kind = "DojoContract" -address = "0x5a651aad0672c139bce3db0770ad128dafae568dde6550dbc5679f819b634fd" -class_hash = "0x71fdf374f04ab0a918b1e8a0578f38ad2d7d0d61da131b8d3e7b0b41a3d2282" -original_class_hash = "0x71fdf374f04ab0a918b1e8a0578f38ad2d7d0d61da131b8d3e7b0b41a3d2282" +address = "0xd9e080358f8bcb8ca52182623e63b4777dbf54dedd3742bd86fabb3d1991ba" +class_hash = "0x67edb33671cd2f5b766d073e3dec53b03400761a20f349ea9628cf4c883b393" +original_class_hash = "0x67edb33671cd2f5b766d073e3dec53b03400761a20f349ea9628cf4c883b393" base_class_hash = "0x2427dd10a58850ac9a5ca6ce04b7771b05330fd18f2e481831ad903b969e6b2" abi = "manifests/dev/deployment/abis/contracts/dojo_examples-mock_token-31599eb2.json" reads = [] @@ -74,9 +75,9 @@ manifest_name = "dojo_examples-mock_token-31599eb2" [[contracts]] kind = "DojoContract" -address = "0x5114288a545fd35096cacb0b4a1869aadba49b6403bea303f603c23f6455375" -class_hash = "0x647fc1b2d2e902e6304e127b36995d8f57fe45c38e38e15d8860db508dbf24a" -original_class_hash = "0x647fc1b2d2e902e6304e127b36995d8f57fe45c38e38e15d8860db508dbf24a" +address = "0x3fec924b42052f14a9c4bb48abae2068d66034e3cc6e063353b87a5659f5040" +class_hash = "0x40e824b8814bafef18cce2cf68f5765e9c9a1c86f55a8491b0c2a4faebdcc87" +original_class_hash = "0x40e824b8814bafef18cce2cf68f5765e9c9a1c86f55a8491b0c2a4faebdcc87" base_class_hash = "0x2427dd10a58850ac9a5ca6ce04b7771b05330fd18f2e481831ad903b969e6b2" abi = "manifests/dev/deployment/abis/contracts/dojo_examples-others-61de2c18.json" reads = [] diff --git a/examples/spawn-and-move/manifests/release/base/abis/contracts/dojo_examples-actions-40b6994c.json b/examples/spawn-and-move/manifests/release/base/abis/contracts/dojo_examples-actions-40b6994c.json index 821650785e..fdba39296f 100644 --- a/examples/spawn-and-move/manifests/release/base/abis/contracts/dojo_examples-actions-40b6994c.json +++ b/examples/spawn-and-move/manifests/release/base/abis/contracts/dojo_examples-actions-40b6994c.json @@ -296,6 +296,22 @@ "outputs": [], "state_mutability": "external" }, 
+ { "type": "function", "name": "set_models", "inputs": [ + { + "name": "seed", + "type": "core::felt252" + }, + { + "name": "n_models", + "type": "core::integer::u32" + } + ], + "outputs": [], + "state_mutability": "external" + }, { "type": "function", "name": "enter_dungeon", diff --git a/examples/spawn-and-move/manifests/release/base/abis/dojo-world.json b/examples/spawn-and-move/manifests/release/base/abis/dojo-world.json index 897b4d959c..8553809311 100644 --- a/examples/spawn-and-move/manifests/release/base/abis/dojo-world.json +++ b/examples/spawn-and-move/manifests/release/base/abis/dojo-world.json @@ -939,6 +939,11 @@ "type": "core::felt252", "kind": "data" }, + { + "name": "entity_id", + "type": "core::felt252", + "kind": "data" + }, { "name": "keys", "type": "core::array::Span::<core::felt252>", diff --git a/examples/spawn-and-move/manifests/release/base/contracts/dojo_examples-actions-40b6994c.toml b/examples/spawn-and-move/manifests/release/base/contracts/dojo_examples-actions-40b6994c.toml index 78d5c93026..eaf518064d 100644 --- a/examples/spawn-and-move/manifests/release/base/contracts/dojo_examples-actions-40b6994c.toml +++ b/examples/spawn-and-move/manifests/release/base/contracts/dojo_examples-actions-40b6994c.toml @@ -1,6 +1,6 @@ kind = "DojoContract" -class_hash = "0x6bb4a7b55dc9c9b8193beca79526b6b8f8f2c33f5be0662f5215e10883c653f" -original_class_hash = "0x6bb4a7b55dc9c9b8193beca79526b6b8f8f2c33f5be0662f5215e10883c653f" +class_hash = "0x67a20ea91a4b9bb9cdb46cefc41dd6ca4c07c22d4d413205720963944fd817d" +original_class_hash = "0x67a20ea91a4b9bb9cdb46cefc41dd6ca4c07c22d4d413205720963944fd817d" base_class_hash = "0x0" abi = "manifests/release/base/abis/contracts/dojo_examples-actions-40b6994c.json" reads = [] @@ -8,6 +8,7 @@ writes = [] init_calldata = [] tag = "dojo_examples-actions" systems = [ + "set_models", "spawn", "move", "set_player_config", diff --git a/examples/spawn-and-move/manifests/release/base/contracts/dojo_examples-dungeon-6620e0e6.toml b/examples/spawn-and-move/manifests/release/base/contracts/dojo_examples-dungeon-6620e0e6.toml index 16b9830374..a3a45a7634 100644 --- a/examples/spawn-and-move/manifests/release/base/contracts/dojo_examples-dungeon-6620e0e6.toml +++ b/examples/spawn-and-move/manifests/release/base/contracts/dojo_examples-dungeon-6620e0e6.toml @@ -1,6 +1,6 @@ kind = "DojoContract" -class_hash = "0x5d2892f0389e921a051daaad07efb49af7a13213ba309a901fc386acef15c3c" -original_class_hash = "0x5d2892f0389e921a051daaad07efb49af7a13213ba309a901fc386acef15c3c" +class_hash = "0x4590a27e4ec7366358ba5f60323777f301435ebbdd113ab02c54b947717530d" +original_class_hash = "0x4590a27e4ec7366358ba5f60323777f301435ebbdd113ab02c54b947717530d" base_class_hash = "0x0" abi = "manifests/release/base/abis/contracts/dojo_examples-dungeon-6620e0e6.json" reads = [] diff --git a/examples/spawn-and-move/manifests/release/base/contracts/dojo_examples-mock_token-31599eb2.toml b/examples/spawn-and-move/manifests/release/base/contracts/dojo_examples-mock_token-31599eb2.toml index 16e42f275e..a91d6e646d 100644 --- a/examples/spawn-and-move/manifests/release/base/contracts/dojo_examples-mock_token-31599eb2.toml +++ b/examples/spawn-and-move/manifests/release/base/contracts/dojo_examples-mock_token-31599eb2.toml @@ -1,6 +1,6 @@ kind = "DojoContract" -class_hash = "0x71fdf374f04ab0a918b1e8a0578f38ad2d7d0d61da131b8d3e7b0b41a3d2282" -original_class_hash = "0x71fdf374f04ab0a918b1e8a0578f38ad2d7d0d61da131b8d3e7b0b41a3d2282" +class_hash =
"0x67edb33671cd2f5b766d073e3dec53b03400761a20f349ea9628cf4c883b393" +original_class_hash = "0x67edb33671cd2f5b766d073e3dec53b03400761a20f349ea9628cf4c883b393" base_class_hash = "0x0" abi = "manifests/release/base/abis/contracts/dojo_examples-mock_token-31599eb2.json" reads = [] diff --git a/examples/spawn-and-move/manifests/release/base/contracts/dojo_examples-others-61de2c18.toml b/examples/spawn-and-move/manifests/release/base/contracts/dojo_examples-others-61de2c18.toml index 06f17468c4..0e3f247385 100644 --- a/examples/spawn-and-move/manifests/release/base/contracts/dojo_examples-others-61de2c18.toml +++ b/examples/spawn-and-move/manifests/release/base/contracts/dojo_examples-others-61de2c18.toml @@ -1,6 +1,6 @@ kind = "DojoContract" -class_hash = "0x647fc1b2d2e902e6304e127b36995d8f57fe45c38e38e15d8860db508dbf24a" -original_class_hash = "0x647fc1b2d2e902e6304e127b36995d8f57fe45c38e38e15d8860db508dbf24a" +class_hash = "0x40e824b8814bafef18cce2cf68f5765e9c9a1c86f55a8491b0c2a4faebdcc87" +original_class_hash = "0x40e824b8814bafef18cce2cf68f5765e9c9a1c86f55a8491b0c2a4faebdcc87" base_class_hash = "0x0" abi = "manifests/release/base/abis/contracts/dojo_examples-others-61de2c18.json" reads = [] diff --git a/examples/spawn-and-move/manifests/release/base/dojo-world.toml b/examples/spawn-and-move/manifests/release/base/dojo-world.toml index ecb6fdb514..796442c34f 100644 --- a/examples/spawn-and-move/manifests/release/base/dojo-world.toml +++ b/examples/spawn-and-move/manifests/release/base/dojo-world.toml @@ -1,6 +1,6 @@ kind = "Class" -class_hash = "0x3715f072aa1c07be724249fcda8b0322687f6c5c585eebc4402d162649c707c" -original_class_hash = "0x3715f072aa1c07be724249fcda8b0322687f6c5c585eebc4402d162649c707c" +class_hash = "0x6f4515274ee23404789c3351a77107d0ec07508530119822046600ca6948d6e" +original_class_hash = "0x6f4515274ee23404789c3351a77107d0ec07508530119822046600ca6948d6e" abi = "manifests/release/base/abis/dojo-world.json" tag = "dojo-world" manifest_name = "dojo-world" diff --git a/examples/spawn-and-move/src/actions.cairo b/examples/spawn-and-move/src/actions.cairo index 8c1d1901e5..70c6c5c16e 100644 --- a/examples/spawn-and-move/src/actions.cairo +++ b/examples/spawn-and-move/src/actions.cairo @@ -10,6 +10,7 @@ pub trait IActions { fn update_player_items(ref world: IWorldDispatcher, items: Array); fn reset_player_config(ref world: IWorldDispatcher); fn set_player_server_profile(ref world: IWorldDispatcher, server_id: u32, name: ByteArray); + fn set_models(ref world: IWorldDispatcher, seed: felt252, n_models: u32); #[cfg(feature: 'dungeon')] fn enter_dungeon(ref world: IWorldDispatcher, dungeon_address: starknet::ContractAddress); } @@ -46,6 +47,39 @@ pub mod actions { // impl: implement functions specified in trait #[abi(embed_v0)] impl ActionsImpl of IActions { + // Set some models randomly. 
+ fn set_models(ref world: IWorldDispatcher, seed: felt252, n_models: u32) { + let uint: u256 = seed.into(); + let prng: u32 = (uint % 4_294_967_000).try_into().unwrap(); + let byte: u8 = (uint % 255).try_into().unwrap(); + + let moves = Moves { + player: seed.try_into().unwrap(), remaining: byte, last_direction: Direction::None + }; + let position = Position { + player: seed.try_into().unwrap(), vec: Vec2 { x: prng, y: prng } + }; + let server_profile = ServerProfile { + player: seed.try_into().unwrap(), server_id: prng, name: "hello" + }; + let player_config = PlayerConfig { + player: seed.try_into().unwrap(), + name: "hello", + items: array![], + favorite_item: Option::None + }; + + if n_models == 4 { + set!(world, (moves, position, server_profile, player_config)); + } else if n_models == 3 { + set!(world, (moves, position, server_profile)); + } else if n_models == 2 { + set!(world, (moves, position)); + } else { + set!(world, (moves)); + } + } + // ContractState is defined by system decorator expansion fn spawn(ref world: IWorldDispatcher) { let player = get_caller_address(); @@ -189,7 +223,7 @@ mod tests { use dojo::model::{Model, ModelTest, ModelIndex, ModelEntityTest}; use dojo::world::{IWorldDispatcher, IWorldDispatcherTrait}; - use dojo::utils::test::{spawn_test_world, deploy_contract}; + use dojo::utils::test::deploy_contract; use super::{actions, IActionsDispatcher, IActionsDispatcherTrait}; use armory::flatbow; @@ -201,13 +235,7 @@ mod tests { fn test_world_test_set() { let caller = starknet::contract_address_const::<0x0>(); - let mut models = array![ - position::TEST_CLASS_HASH, moves::TEST_CLASS_HASH, flatbow::TEST_CLASS_HASH - ]; - - let world = spawn_test_world( - ["dojo_examples", "dojo_examples_weapons"].span(), models.span() - ); + let world = spawn_test_world!(); // Without having the permission, we can set data into the dojo database for the given // models. @@ -240,14 +268,8 @@ mod tests { fn test_move() { let caller = starknet::contract_address_const::<0x0>(); - // models - let mut models = array![ - position::TEST_CLASS_HASH, moves::TEST_CLASS_HASH, flatbow::TEST_CLASS_HASH - ]; - // deploy world with models - let world = spawn_test_world( - ["dojo_examples", "dojo_examples_weapons"].span(), models.span() - ); + // deploy world with only the models for the given namespaces. 
+ let world = spawn_test_world!(["dojo_examples", "dojo_examples_weapons"]); // deploy systems contract let contract_address = world diff --git a/scripts/clippy.sh b/scripts/clippy.sh index 9fe1fb49d2..43f9b5f208 100755 --- a/scripts/clippy.sh +++ b/scripts/clippy.sh @@ -8,12 +8,7 @@ set -x set -o pipefail run_clippy() { - cargo +nightly clippy --all-targets "$@" -- -D warnings -D future-incompatible -D nonstandard-style -D rust-2018-idioms -D unused -D missing-debug-implementations + cargo +nightly-2024-08-28 clippy --all-targets "$@" -- -D warnings -D future-incompatible -D nonstandard-style -D rust-2018-idioms -D unused -D missing-debug-implementations } -run_clippy --all-features --workspace --exclude katana --exclude katana-executor - -run_clippy -p katana-executor --all -run_clippy -p katana -# TODO(kariy): uncomment this line when the `sir` support Cairo 2.6.3 -# run_clippy -p katana --no-default-features --features sir +run_clippy --all-features --workspace diff --git a/scripts/rust_fmt.sh b/scripts/rust_fmt.sh index 62a418693a..db5636de2e 100755 --- a/scripts/rust_fmt.sh +++ b/scripts/rust_fmt.sh @@ -1,3 +1,3 @@ #!/bin/bash -cargo +nightly fmt --check --all -- "$@" +cargo +nightly-2024-08-28 fmt --check --all -- "$@" diff --git a/scripts/spam_txs.sh b/scripts/spam_txs.sh new file mode 100644 index 0000000000..d42ac22884 --- /dev/null +++ b/scripts/spam_txs.sh @@ -0,0 +1,50 @@ +#!/usr/bin/env bash +# +# This script spams transactions to the spawn-and-move example by targeting +# the set_models function. +# +# Usage: +# ./spam_txs.sh 100 +# +# This will send 100 transactions to the spawn-and-move example. +# +# If working locally with Katana, use release and the `--dev` option to ensure faster setup: +# cargo run -r --bin katana -- --dev +# +# Uncomment to see the commands being executed. +# set -x +set -e + +# Check if an argument is provided to display usage. +if [ $# -eq 0 ]; then + echo "Usage: $0 <count> [rpc_url: http://0.0.0.0:5050]" + echo "Example to send on local Katana: $0 100" + exit 1 +fi + +# Number of transactions to send. +count="$1" + +# RPC URL to use, default to local. +RPC_URL="${2:-http://0.0.0.0:5050}" + +# Send transactions with random seeds to generate a bunch of entities. +for ((i=1; i<=count; i++)) do + # Generates a random 248-bit number (to be sure it fits in a felt252). + seed=$(od -An -tx1 -N31 /dev/urandom | tr -d ' \n' | sed 's/^/0x/') + # You can set the seed to `$i` for reproducibility. + seed=$i + #seed=$(($i + 100000)) + + # Number of models to spawn + n_models=$((seed % 4 + 1)) + # You can set the number of models for reproducibility.
+ n_models=1 + + sozo execute actions set_models -c "$seed","$n_models" \ + --manifest-path examples/spawn-and-move/Scarb.toml \ + --rpc-url "$RPC_URL" + + #sleep 1 +done diff --git a/spawn-and-move-db.tar.gz b/spawn-and-move-db.tar.gz index ab81ae231e..76124d426f 100644 Binary files a/spawn-and-move-db.tar.gz and b/spawn-and-move-db.tar.gz differ diff --git a/types-test-db.tar.gz b/types-test-db.tar.gz index 746fa04b9d..234e62017c 100644 Binary files a/types-test-db.tar.gz and b/types-test-db.tar.gz differ diff --git a/xtask/generate-test-db/src/main.rs b/xtask/generate-test-db/src/main.rs index 032277d632..bd36fc4144 100644 --- a/xtask/generate-test-db/src/main.rs +++ b/xtask/generate-test-db/src/main.rs @@ -14,6 +14,7 @@ async fn migrate_spawn_and_move(db_path: &Path) -> Result { let cfg = KatanaRunnerConfig { db_dir: Some(db_path.to_path_buf()), n_accounts: 10, + dev: true, ..Default::default() }; let runner = KatanaRunner::new_with_config(cfg)?; @@ -23,8 +24,6 @@ async fn migrate_spawn_and_move(db_path: &Path) -> Result { let cfg = setup.build_test_config("spawn-and-move", Profile::DEV); let ws = scarb::ops::read_workspace(cfg.manifest_path(), &cfg)?; - println!("account {:?}", runner.account(0)); - let output = sozo_ops::migration::migrate( &ws, None, @@ -45,6 +44,7 @@ async fn migrate_types_test(db_path: &Path) -> Result { let cfg = KatanaRunnerConfig { db_dir: Some(db_path.to_path_buf()), n_accounts: 10, + dev: true, ..Default::default() }; let runner = KatanaRunner::new_with_config(cfg)?;
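Note on the spam flow introduced above: scripts/spam_txs.sh derives `n_models` from a per-iteration seed, while the `set_models` system derives its pseudo-random field values from the same seed on-chain. A minimal, self-contained Rust sketch of that derivation (illustrative only: a plain `u128` stands in for Cairo's `felt252`/`u256` arithmetic, and the seed value is arbitrary):

    // Mirrors the arithmetic in `set_models` (actions.cairo) and spam_txs.sh.
    fn main() {
        let seed: u128 = 42; // arbitrary example; the script uses $i or /dev/urandom
        let prng = (seed % 4_294_967_000) as u32; // `uint % 4_294_967_000` in set_models
        let byte = (seed % 255) as u8; // `uint % 255` in set_models
        let n_models = (seed % 4 + 1) as u32; // `$((seed % 4 + 1))` in spam_txs.sh
        println!("prng={prng} byte={byte} n_models={n_models}");
    }

Each result stays within its target type's range (4_294_967_000 < 2^32, seed % 255 <= 254, n_models in 1..=4), which is why the `try_into().unwrap()` calls in the Cairo version cannot panic for any seed.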