diff --git a/.github/release-please/manifest.json b/.github/release-please/manifest.json index 7e3c7e01ae74..ddf856a98e26 100644 --- a/.github/release-please/manifest.json +++ b/.github/release-please/manifest.json @@ -1,5 +1,5 @@ { - "core": "25.2.0", - "prover": "17.1.0", + "core": "25.3.0", + "prover": "17.1.1", "zkstack_cli": "0.1.2" } diff --git a/.github/workflows/build-docker-from-tag.yml b/.github/workflows/build-docker-from-tag.yml index b3f442ff4662..e48539c90738 100644 --- a/.github/workflows/build-docker-from-tag.yml +++ b/.github/workflows/build-docker-from-tag.yml @@ -23,7 +23,7 @@ concurrency: docker-build jobs: setup: name: Setup - runs-on: [ ubuntu-latest ] + runs-on: [ubuntu-latest] outputs: image_tag_suffix: ${{ steps.set.outputs.image_tag_suffix }} prover_fri_gpu_key_id: ${{ steps.extract-prover-fri-setup-key-ids.outputs.gpu_short_commit_sha }} @@ -48,7 +48,7 @@ jobs: build-push-core-images: name: Build and push image - needs: [ setup ] + needs: [setup] uses: ./.github/workflows/new-build-core-template.yml if: contains(github.ref_name, 'core') secrets: @@ -61,7 +61,7 @@ jobs: build-push-tee-prover-images: name: Build and push images - needs: [ setup ] + needs: [setup] uses: ./.github/workflows/build-tee-prover-template.yml if: contains(github.ref_name, 'core') secrets: @@ -73,7 +73,7 @@ jobs: build-push-contract-verifier: name: Build and push image - needs: [ setup ] + needs: [setup] uses: ./.github/workflows/new-build-contract-verifier-template.yml if: contains(github.ref_name, 'contract_verifier') secrets: @@ -85,13 +85,12 @@ jobs: build-push-prover-images: name: Build and push image - needs: [ setup ] + needs: [setup] uses: ./.github/workflows/new-build-prover-template.yml if: contains(github.ref_name, 'prover') with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} - CUDA_ARCH: "60;70;75;80;89" action: "push" secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} @@ 
-99,13 +98,12 @@ jobs: build-push-witness-generator-image-avx512: name: Build and push image - needs: [ setup ] + needs: [setup] uses: ./.github/workflows/new-build-witness-generator-template.yml if: contains(github.ref_name, 'prover') with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}-avx512 ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} - CUDA_ARCH: "60;70;75;80;89" WITNESS_GENERATOR_RUST_FLAGS: "-Ctarget_feature=+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512vl" action: "push" secrets: @@ -114,7 +112,7 @@ jobs: build-gar-prover-fri-gpu-and-circuit-prover-gpu-gar: name: Build GAR prover FRI GPU - needs: [ setup, build-push-prover-images ] + needs: [setup, build-push-prover-images] uses: ./.github/workflows/build-prover-fri-gpu-gar-and-circuit-prover-gpu-gar.yml if: contains(github.ref_name, 'prover') with: diff --git a/.github/workflows/build-prover-template.yml b/.github/workflows/build-prover-template.yml index 91de5dd51ecf..762ec496943c 100644 --- a/.github/workflows/build-prover-template.yml +++ b/.github/workflows/build-prover-template.yml @@ -30,7 +30,7 @@ on: CUDA_ARCH: description: "CUDA Arch to build" type: string - default: "89" + default: "75;80;89" required: false outputs: protocol_version: diff --git a/.github/workflows/build-witness-generator-template.yml b/.github/workflows/build-witness-generator-template.yml index d9493f97cae1..95053b89d3d8 100644 --- a/.github/workflows/build-witness-generator-template.yml +++ b/.github/workflows/build-witness-generator-template.yml @@ -27,11 +27,6 @@ on: type: boolean default: false required: false - CUDA_ARCH: - description: "CUDA Arch to build" - type: string - default: "89" - required: false WITNESS_GENERATOR_RUST_FLAGS: description: "Rust flags for witness_generator compilation" type: string @@ -49,10 +44,9 @@ jobs: IMAGE_TAG_SUFFIX: ${{ inputs.image_tag_suffix }} RUNNER_COMPOSE_FILE: "docker-compose-runner-nightly.yml" ERA_BELLMAN_CUDA_RELEASE: ${{ 
inputs.ERA_BELLMAN_CUDA_RELEASE }} - CUDA_ARCH: ${{ inputs.CUDA_ARCH }} WITNESS_GENERATOR_RUST_FLAGS: ${{ inputs.WITNESS_GENERATOR_RUST_FLAGS }} ZKSYNC_USE_CUDA_STUBS: true - runs-on: [ matterlabs-ci-runner-c3d ] + runs-on: [matterlabs-ci-runner-c3d] strategy: matrix: component: @@ -91,7 +85,6 @@ jobs: run: | ci_run run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^24.key - - name: login to Docker registries if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) run: | @@ -162,11 +155,11 @@ jobs: DOCKER_ACTION: ${{ inputs.action }} COMPONENT: ${{ matrix.component }} run: | - PASSED_ENV_VARS="ERA_BELLMAN_CUDA_RELEASE,CUDA_ARCH,PROTOCOL_VERSION,RUST_FLAGS" \ + PASSED_ENV_VARS="ERA_BELLMAN_CUDA_RELEASE,PROTOCOL_VERSION,RUST_FLAGS" \ ci_run zk docker $DOCKER_ACTION $COMPONENT - name: Show sccache stats if: always() run: | ci_run sccache --show-stats || true - ci_run cat /tmp/sccache_log.txt || true \ No newline at end of file + ci_run cat /tmp/sccache_log.txt || true diff --git a/.github/workflows/new-build-prover-template.yml b/.github/workflows/new-build-prover-template.yml index cb254f602fc5..3a721e4425a8 100644 --- a/.github/workflows/new-build-prover-template.yml +++ b/.github/workflows/new-build-prover-template.yml @@ -30,8 +30,12 @@ on: CUDA_ARCH: description: "CUDA Arch to build" type: string - default: "89" + default: "75;80;89" required: false + # Details: https://arnon.dk/matching-sm-architectures-arch-and-gencode-for-various-nvidia-cards/ + # L4: 89 + # T4: 75 + # A100: 80 outputs: protocol_version: description: "Protocol version of the binary" @@ -210,7 +214,6 @@ jobs: --tag asia-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ inputs.image_tag_suffix }} \ us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ inputs.image_tag_suffix }} - - name: Login and push to Europe GAR run: | 
gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | docker login -u oauth2accesstoken --password-stdin https://europe-docker.pkg.dev diff --git a/.github/workflows/new-build-witness-generator-template.yml b/.github/workflows/new-build-witness-generator-template.yml index bbd6aee23ed1..a96d217da832 100644 --- a/.github/workflows/new-build-witness-generator-template.yml +++ b/.github/workflows/new-build-witness-generator-template.yml @@ -21,11 +21,6 @@ on: type: string default: non-push required: false - CUDA_ARCH: - description: "CUDA Arch to build" - type: string - default: "89" - required: false WITNESS_GENERATOR_RUST_FLAGS: description: "Rust flags for witness_generator compilation" type: string @@ -39,7 +34,7 @@ on: jobs: get-protocol-version: name: Get protocol version - runs-on: [ matterlabs-ci-runner-high-performance ] + runs-on: [matterlabs-ci-runner-high-performance] outputs: protocol_version: ${{ steps.protocolversion.outputs.protocol_version }} steps: @@ -85,7 +80,7 @@ jobs: needs: get-protocol-version env: PROTOCOL_VERSION: ${{ needs.get-protocol-version.outputs.protocol_version }} - runs-on: [ matterlabs-ci-runner-c3d ] + runs-on: [matterlabs-ci-runner-c3d] strategy: matrix: components: @@ -126,7 +121,6 @@ jobs: context: . 
push: ${{ inputs.action == 'push' }} build-args: | - CUDA_ARCH=${{ inputs.CUDA_ARCH }} SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com SCCACHE_GCS_RW_MODE=READ_WRITE diff --git a/.github/workflows/release-test-stage.yml b/.github/workflows/release-test-stage.yml index eb75ab179b8e..2e6c7882aa98 100644 --- a/.github/workflows/release-test-stage.yml +++ b/.github/workflows/release-test-stage.yml @@ -102,7 +102,6 @@ jobs: with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} - CUDA_ARCH: "60;70;75;80;89" action: "push" secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} @@ -116,7 +115,6 @@ jobs: with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}-avx512 ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} - CUDA_ARCH: "60;70;75;80;89" WITNESS_GENERATOR_RUST_FLAGS: "-Ctarget_feature=+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512vl " action: "push" secrets: diff --git a/.github/workflows/zk-environment-publish.yml b/.github/workflows/zk-environment-publish.yml index b9321c8f5d6c..e9dfac1c6bb2 100644 --- a/.github/workflows/zk-environment-publish.yml +++ b/.github/workflows/zk-environment-publish.yml @@ -128,8 +128,7 @@ jobs: push: ${{ (github.event_name == 'push' && github.ref == 'refs/heads/main') || (github.event_name == 'workflow_dispatch') }} zk_environment_multiarch_manifest: - # We'll update the 'latest' tag, only on environments generated from 'main'. 
- if: needs.changed_files.outputs.zk_environment == 'true' && github.event_name == 'push' && github.ref == 'refs/heads/main' + if: ${{ (needs.changed_files.outputs.zk_environment == 'true' && github.event_name == 'push' && github.ref == 'refs/heads/main') || (github.event_name == 'workflow_dispatch') }} # Needed to push to Gihub Package Registry permissions: packages: write diff --git a/.gitignore b/.gitignore index adf3b7799618..471a601cc34b 100644 --- a/.gitignore +++ b/.gitignore @@ -120,3 +120,6 @@ configs/* era-observability/ core/tests/ts-integration/deployments-zk transactions/ + +# foundry-zksync +install \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 3b8469340840..f96409516cb0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -97,15 +97,15 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.18" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "alloy-json-abi" -version = "0.8.11" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ded610181f3dad5810f6ff12d1a99994cf9b42d2fcb7709029352398a5da5ae6" +checksum = "c357da577dfb56998d01f574d81ad7a1958d248740a7981b205d69d65a7da404" dependencies = [ "alloy-primitives", "alloy-sol-type-parser", @@ -115,9 +115,9 @@ dependencies = [ [[package]] name = "alloy-primitives" -version = "0.8.11" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd58d377699e6cfeab52c4a9d28bdc4ef37e2bd235ff2db525071fe37a2e9af5" +checksum = "6259a506ab13e1d658796c31e6e39d2e2ee89243bcc505ddc613b35732e0a430" dependencies = [ "alloy-rlp", "bytes", @@ -126,9 +126,9 @@ dependencies = [ "derive_more 1.0.0", "foldhash", "getrandom", - "hashbrown 0.15.0", + "hashbrown 0.15.2", "hex-literal", - "indexmap 2.6.0", + 
"indexmap 2.7.0", "itoa", "k256 0.13.4", "keccak-asm", @@ -136,7 +136,7 @@ dependencies = [ "proptest", "rand 0.8.5", "ruint", - "rustc-hash 2.0.0", + "rustc-hash 2.1.0", "serde", "sha3 0.10.8", "tiny-keccak 2.0.2", @@ -144,9 +144,9 @@ dependencies = [ [[package]] name = "alloy-rlp" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0822426598f95e45dd1ea32a738dac057529a709ee645fcc516ffa4cbde08f" +checksum = "f542548a609dca89fcd72b3b9f355928cf844d4363c5eed9c5273a3dd225e097" dependencies = [ "arrayvec 0.7.6", "bytes", @@ -154,9 +154,9 @@ dependencies = [ [[package]] name = "alloy-sol-type-parser" -version = "0.8.11" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12c71028bfbfec210e24106a542aad3def7caf1a70e2c05710e92a98481980d3" +checksum = "ce13ff37285b0870d0a0746992a4ae48efaf34b766ae4c2640fa15e5305f8e73" dependencies = [ "serde", "winnow 0.6.20", @@ -194,9 +194,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.17" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23a1e53f0f5d86382dafe1cf314783b2044280f406e7e1506368220ad11b1338" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" dependencies = [ "anstyle", "anstyle-parse", @@ -209,9 +209,9 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.9" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8365de52b16c035ff4fcafe0092ba9390540e3e352870ac09933bebcaa2c8c56" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" [[package]] name = "anstyle-parse" @@ -243,15 +243,15 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.91" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8" +checksum = 
"c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" [[package]] name = "arbitrary" -version = "1.3.2" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" +checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" dependencies = [ "derive_arbitrary", ] @@ -340,7 +340,7 @@ checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" dependencies = [ "num-bigint 0.4.6", "num-traits", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "syn 1.0.109", ] @@ -494,9 +494,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.17" +version = "0.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cb8f1d480b0ea3783ab015936d2a55c87e219676f0c0b7dec61494043f21857" +checksum = "df895a515f70646414f4b45c0b79082783b80552b373a68283012928df56f522" dependencies = [ "futures-core", "memchr", @@ -547,9 +547,9 @@ dependencies = [ [[package]] name = "async-io" -version = "2.3.4" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "444b0228950ee6501b3568d3c93bf1176a1fdbc3b758dcd9475046d30f4dc7e8" +checksum = "43a2b323ccce0a1d90b449fd71f2a06ca7faa7c54c2751f06c9bd851fc061059" dependencies = [ "async-lock", "cfg-if", @@ -620,9 +620,9 @@ version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -688,9 +688,9 @@ version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ 
-705,9 +705,9 @@ version = "0.1.83" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -748,9 +748,9 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -761,21 +761,20 @@ checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "aws-lc-rs" -version = "1.10.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdd82dba44d209fddb11c190e0a94b78651f95299598e472215667417a03ff1d" +checksum = "f47bb8cc16b669d267eeccf585aea077d0882f4777b1c1f740217885d6e6e5a3" dependencies = [ "aws-lc-sys", - "mirai-annotations", "paste", "zeroize", ] [[package]] name = "aws-lc-sys" -version = "0.22.0" +version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df7a4168111d7eb622a31b214057b8509c0a7e1794f44c546d742330dc793972" +checksum = "a2101df3813227bbaaaa0b04cd61c534c7954b22bd68d399b440be937dc63ff7" dependencies = [ "bindgen 0.69.5", "cc", @@ -816,18 +815,18 @@ dependencies = [ [[package]] name = "axum" -version = "0.7.7" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "504e3947307ac8326a5437504c517c4b56716c9d98fac0028c2acc7ca47d70ae" +checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" dependencies = [ "async-trait", "axum-core 0.4.5", "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", - "hyper 1.5.0", + "hyper 1.5.1", "hyper-util", "itoa", "matchit", @@ -841,7 +840,7 @@ dependencies = [ 
"serde_json", "serde_path_to_error", "serde_urlencoded", - "sync_wrapper 1.0.1", + "sync_wrapper 1.0.2", "tokio", "tower 0.5.1", "tower-layer", @@ -875,13 +874,13 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "mime", "pin-project-lite", "rustversion", - "sync_wrapper 1.0.1", + "sync_wrapper 1.0.2", "tower-layer", "tower-service", "tracing", @@ -990,9 +989,9 @@ dependencies = [ [[package]] name = "bigdecimal" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f850665a0385e070b64c38d2354e6c104c8479c59868d1e48a0c13ee2c7a1c1" +checksum = "7f31f3af01c5c65a07985c804d3366560e6fa7883d640a122819b14ec327482c" dependencies = [ "autocfg", "libm", @@ -1023,12 +1022,12 @@ dependencies = [ "lazycell", "peeking_take_while", "prettyplease", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -1045,12 +1044,12 @@ dependencies = [ "lazycell", "log", "prettyplease", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.85", + "syn 2.0.90", "which", ] @@ -1237,7 +1236,7 @@ name = "block_reverter" version = "0.1.0" dependencies = [ "anyhow", - "clap 4.5.20", + "clap 4.5.23", "serde_json", "tokio", "zksync_block_reverter", @@ -1273,7 +1272,7 @@ dependencies = [ "cid", "dashmap 6.1.0", "multihash", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -1321,9 +1320,9 @@ dependencies = [ [[package]] name = "borsh" -version = "1.5.1" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6362ed55def622cddc70a4746a68554d7b687713770de539e59a739b249f8ed" +checksum = "2506947f73ad44e344215ccd6403ac2ae18cd8e046e581a441bf8d199f257f03" dependencies = [ "borsh-derive", "cfg_aliases", @@ -1331,16 +1330,15 @@ dependencies = [ [[package]] name = 
"borsh-derive" -version = "1.5.1" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3ef8005764f53cd4dca619f5bf64cafd4664dada50ece25e4d81de54c80cc0b" +checksum = "c2593a3b8b938bd68373196c9832f516be11fa487ef4ae745eb282e6a56a7244" dependencies = [ "once_cell", "proc-macro-crate 3.2.0", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", - "syn_derive", + "syn 2.0.90", ] [[package]] @@ -1393,7 +1391,7 @@ version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3db406d29fbcd95542e92559bed4d8ad92636d1ca8b3b72ede10b4bcc010e659" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "syn 1.0.109", ] @@ -1412,9 +1410,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" dependencies = [ "serde", ] @@ -1447,9 +1445,9 @@ dependencies = [ [[package]] name = "cargo-platform" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" +checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea" dependencies = [ "serde", ] @@ -1475,9 +1473,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.1.31" +version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2e7962b54006dcfcc61cb72735f4d89bb97061dd6a7ed882ec6b8ee53714c6f" +checksum = "27f657647bcff5394bf56c7317665bbf790a137a50eaaa5c6bfbb9e27a518f2d" dependencies = [ "jobserver", "libc", @@ -1570,7 +1568,7 @@ dependencies = [ "serde", 
"serde_repr", "sha2 0.10.8", - "thiserror", + "thiserror 1.0.69", "time", ] @@ -1627,9 +1625,9 @@ dependencies = [ [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" dependencies = [ "android-tzdata", "iana-time-zone", @@ -1846,9 +1844,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.20" +version = "4.5.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97f376d85a664d5837dbae44bf546e6477a679ff6610010f17276f686d867e8" +checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84" dependencies = [ "clap_builder", "clap_derive", @@ -1856,13 +1854,13 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.20" +version = "4.5.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19bc80abd44e4bed93ca373a0704ccbd1b710dc5749406201bb018272808dc54" +checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838" dependencies = [ "anstream", "anstyle", - "clap_lex 0.7.2", + "clap_lex 0.7.4", "strsim 0.11.1", "terminal_size", ] @@ -1874,9 +1872,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" dependencies = [ "heck 0.5.0", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -1890,15 +1888,15 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "cmake" -version = "0.1.51" +version = "0.1.52" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb1e43aa7fd152b1f968787f7dbcdeb306d1867ff373c69955211876c053f91a" +checksum = "c682c223677e0e5b6b7f63a64b9351844c3f1b1678a68b7ee617e30fb082620e" dependencies = [ "cc", ] @@ -1966,9 +1964,9 @@ dependencies = [ [[package]] name = "const-hex" -version = "1.13.1" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0121754e84117e65f9d90648ee6aa4882a6e63110307ab73967a4c5e7e69e586" +checksum = "4b0485bab839b018a8f1723fc5391819fea5f8f0f32288ef8a735fd096b6160c" dependencies = [ "cfg-if", "cpufeatures", @@ -1985,20 +1983,20 @@ checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" [[package]] name = "const_format" -version = "0.2.33" +version = "0.2.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50c655d81ff1114fb0dcdea9225ea9f0cc712a6f8d189378e82bdf62a473a64b" +checksum = "126f97965c8ad46d6d9163268ff28432e8f6a1196a55578867832e3049df63dd" dependencies = [ "const_format_proc_macros", ] [[package]] name = "const_format_proc_macros" -version = "0.2.33" +version = "0.2.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eff1a44b93f47b1bac19a27932f5c591e43d1ba357ee4f61526c8a25603f0eb1" +checksum = "1d57c2eccfb16dbac1f4e61e206105db5820c9d26c3c472bc17c774259ef7744" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "unicode-xid 0.2.6", ] @@ -2040,6 +2038,16 @@ dependencies = [ "libc", ] +[[package]] +name = "core-foundation" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" version = "0.8.7" @@ -2057,9 +2065,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.14" +version = "0.2.16" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" dependencies = [ "libc", ] @@ -2233,12 +2241,12 @@ dependencies = [ [[package]] name = "ctor" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f" +checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501" dependencies = [ "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -2282,9 +2290,9 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -2300,6 +2308,24 @@ dependencies = [ "zeroize", ] +[[package]] +name = "custom_genesis_export" +version = "0.1.0" +dependencies = [ + "anyhow", + "bincode", + "clap 4.5.23", + "futures 0.3.31", + "sqlx", + "tokio", + "zksync_contracts", + "zksync_core_leftovers", + "zksync_dal", + "zksync_node_genesis", + "zksync_protobuf_config", + "zksync_types", +] + [[package]] name = "darling" version = "0.13.4" @@ -2338,7 +2364,7 @@ checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "strsim 0.10.0", "syn 1.0.109", @@ -2352,7 +2378,7 @@ checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "strsim 0.10.0", "syn 1.0.109", @@ -2366,10 +2392,10 @@ checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.89", + 
"proc-macro2 1.0.92", "quote 1.0.37", "strsim 0.11.1", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -2402,7 +2428,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core 0.20.10", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -2505,20 +2531,20 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "syn 1.0.109", ] [[package]] name = "derive_arbitrary" -version = "1.3.2" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" +checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -2528,10 +2554,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" dependencies = [ "convert_case 0.4.0", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "rustc_version 0.4.1", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -2549,9 +2575,9 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", "unicode-xid 0.2.6", ] @@ -2630,9 +2656,9 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -2833,9 +2859,9 
@@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa18ce2bc66555b3218614519ac839ddb759a7d6720732f979ef8d13be147ecd" dependencies = [ "once_cell", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -2876,12 +2902,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -2917,7 +2943,7 @@ dependencies = [ "serde", "serde_json", "sha3 0.10.8", - "thiserror", + "thiserror 1.0.69", "uint", ] @@ -2977,9 +3003,9 @@ dependencies = [ [[package]] name = "event-listener-strategy" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" +checksum = "3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2" dependencies = [ "event-listener 5.3.1", "pin-project-lite", @@ -2997,9 +3023,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.1.1" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "fastrlp" @@ -3076,9 +3102,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.34" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0" +checksum = 
"c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" dependencies = [ "crc32fast", "miniz_oxide", @@ -3173,7 +3199,7 @@ dependencies = [ "svm-rs", "svm-rs-builds", "tempfile", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "walkdir", @@ -3207,7 +3233,7 @@ dependencies = [ "serde", "serde_json", "serde_repr", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "walkdir", @@ -3243,7 +3269,7 @@ dependencies = [ "semver 1.0.23", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "tracing", "walkdir", "yansi", @@ -3266,7 +3292,7 @@ dependencies = [ "serde_json", "svm-rs", "tempfile", - "thiserror", + "thiserror 1.0.69", "tokio", "walkdir", ] @@ -3443,9 +3469,9 @@ checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-lite" -version = "2.3.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5" +checksum = "cef40d21ae2c515b51041df9ed313ed21e572df340ea58a922a0aefe7e8891a1" dependencies = [ "fastrand", "futures-core", @@ -3460,9 +3486,9 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -3522,7 +3548,7 @@ name = "genesis_generator" version = "0.1.0" dependencies = [ "anyhow", - "clap 4.5.20", + "clap 4.5.23", "futures 0.3.31", "serde", "serde_json", @@ -3600,7 +3626,7 @@ dependencies = [ "pin-project", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -3658,7 +3684,7 @@ dependencies = [ "reqwest 0.12.9", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "time", "tokio", "tracing", @@ -3672,7 +3698,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "04f945a208886a13d07636f38fb978da371d0abc3e34bad338124b9f8c135a8f" dependencies = [ "reqwest 0.12.9", - "thiserror", + "thiserror 1.0.69", "tokio", ] @@ -3702,7 +3728,7 @@ dependencies = [ "serde", "serde_json", "sha2 0.10.8", - "thiserror", + "thiserror 1.0.69", "time", "tokio", "tracing", @@ -3769,7 +3795,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.6.0", + "indexmap 2.7.0", "slab", "tokio", "tokio-util", @@ -3778,17 +3804,17 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" +checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" dependencies = [ "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", - "http 1.1.0", - "indexmap 2.6.0", + "http 1.2.0", + "indexmap 2.7.0", "slab", "tokio", "tokio-util", @@ -3841,9 +3867,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.15.0" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" dependencies = [ "allocator-api2", "equivalent", @@ -3995,9 +4021,9 @@ dependencies = [ [[package]] name = "http" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" dependencies = [ "bytes", "fnv", @@ -4022,7 +4048,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.1.0", + "http 1.2.0", ] [[package]] @@ -4033,7 +4059,7 @@ checksum = 
"793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "pin-project-lite", ] @@ -4110,15 +4136,15 @@ dependencies = [ [[package]] name = "hyper" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbbff0a806a4728c99295b254c8838933b5b082d75e3cb70c8dab21fdfbcfa9a" +checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" dependencies = [ "bytes", "futures-channel", "futures-util", - "h2 0.4.6", - "http 1.1.0", + "h2 0.4.7", + "http 1.2.0", "http-body 1.0.1", "httparse", "httpdate", @@ -4152,15 +4178,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" dependencies = [ "futures-util", - "http 1.1.0", - "hyper 1.5.0", + "http 1.2.0", + "hyper 1.5.1", "hyper-util", "log", - "rustls 0.23.16", - "rustls-native-certs 0.8.0", + "rustls 0.23.19", + "rustls-native-certs 0.8.1", "rustls-pki-types", "tokio", - "tokio-rustls 0.26.0", + "tokio-rustls 0.26.1", "tower-service", "webpki-roots", ] @@ -4179,11 +4205,11 @@ dependencies = [ [[package]] name = "hyper-timeout" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3203a961e5c83b6f5498933e78b6b263e208c197b63e9c6c53cc82ffd3f63793" +checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" dependencies = [ - "hyper 1.5.0", + "hyper 1.5.1", "hyper-util", "pin-project-lite", "tokio", @@ -4211,7 +4237,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper 1.5.0", + "hyper 1.5.1", "hyper-util", "native-tls", "tokio", @@ -4228,9 +4254,9 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", - "hyper 1.5.0", + "hyper 1.5.1", 
"pin-project-lite", "socket2", "tokio", @@ -4261,6 +4287,124 @@ dependencies = [ "cc", ] +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", +] + [[package]] name = "ident_case" version = "1.0.1" @@ -4269,12 +4413,23 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.5.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" dependencies = [ - "unicode-bidi", - "unicode-normalization", + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", ] [[package]] @@ -4320,13 +4475,13 @@ dependencies = [ [[package]] name = "impl-trait-for-tuples" 
-version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 1.0.109", + "syn 2.0.90", ] [[package]] @@ -4347,12 +4502,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" +checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" dependencies = [ "equivalent", - "hashbrown 0.15.0", + "hashbrown 0.15.2", "serde", ] @@ -4373,9 +4528,9 @@ dependencies = [ [[package]] name = "insta" -version = "1.41.0" +version = "1.41.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1f72d3e19488cf7d8ea52d2fc0f8754fc933398b337cd3cbdb28aaeb35159ef" +checksum = "7e9ffc4d4892617c50a928c52b2961cb5174b6fc6ebf252b2fac9d21955c48b8" dependencies = [ "console", "lazy_static", @@ -4462,9 +4617,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "jni" @@ -4476,7 +4631,7 @@ dependencies = [ "combine", "jni-sys", "log", - "thiserror", + "thiserror 1.0.69", "walkdir", ] @@ -4497,10 +4652,11 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.72" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" +checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7" dependencies = [ 
+ "once_cell", "wasm-bindgen", ] @@ -4547,7 +4703,7 @@ dependencies = [ "rustls-native-certs 0.7.3", "rustls-pki-types", "soketto 0.7.1", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-rustls 0.25.0", "tokio-util", @@ -4565,16 +4721,16 @@ dependencies = [ "futures-channel", "futures-util", "gloo-net", - "http 1.1.0", + "http 1.2.0", "jsonrpsee-core 0.23.2", "pin-project", - "rustls 0.23.16", + "rustls 0.23.19", "rustls-pki-types", "rustls-platform-verifier", - "soketto 0.8.0", - "thiserror", + "soketto 0.8.1", + "thiserror 1.0.69", "tokio", - "tokio-rustls 0.26.0", + "tokio-rustls 0.26.1", "tokio-util", "tracing", "url", @@ -4598,7 +4754,7 @@ dependencies = [ "rustc-hash 1.1.0", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-stream", "tracing", @@ -4616,7 +4772,7 @@ dependencies = [ "bytes", "futures-timer", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "jsonrpsee-types 0.23.2", @@ -4626,7 +4782,7 @@ dependencies = [ "rustc-hash 1.1.0", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-stream", "tracing", @@ -4646,7 +4802,7 @@ dependencies = [ "jsonrpsee-types 0.21.0", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "tokio", "tower 0.4.13", "tracing", @@ -4662,16 +4818,16 @@ dependencies = [ "async-trait", "base64 0.22.1", "http-body 1.0.1", - "hyper 1.5.0", + "hyper 1.5.1", "hyper-rustls 0.27.3", "hyper-util", "jsonrpsee-core 0.23.2", "jsonrpsee-types 0.23.2", - "rustls 0.23.16", + "rustls 0.23.19", "rustls-platform-verifier", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "tokio", "tower 0.4.13", "tracing", @@ -4686,9 +4842,9 @@ checksum = "7895f186d5921065d96e16bd795e5ca89ac8356ec423fafc6e3d7cf8ec11aee4" dependencies = [ "heck 0.5.0", "proc-macro-crate 3.2.0", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -4699,10 +4855,10 @@ checksum = 
"654afab2e92e5d88ebd8a39d6074483f3f2bfdf91c5ac57fe285e7127cdd4f51" dependencies = [ "anyhow", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", - "hyper 1.5.0", + "hyper 1.5.1", "hyper-util", "jsonrpsee-core 0.23.2", "jsonrpsee-types 0.23.2", @@ -4710,8 +4866,8 @@ dependencies = [ "route-recognizer", "serde", "serde_json", - "soketto 0.8.0", - "thiserror", + "soketto 0.8.1", + "thiserror 1.0.69", "tokio", "tokio-stream", "tokio-util", @@ -4729,7 +4885,7 @@ dependencies = [ "beef", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -4739,10 +4895,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c465fbe385238e861fdc4d1c85e04ada6c1fd246161d26385c1b311724d2af" dependencies = [ "beef", - "http 1.1.0", + "http 1.2.0", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -4762,7 +4918,7 @@ version = "0.23.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c28759775f5cb2f1ea9667672d3fe2b0e701d1f4b7b67954e60afe7fd058b5e" dependencies = [ - "http 1.1.0", + "http 1.2.0", "jsonrpsee-client-transport 0.23.2", "jsonrpsee-core 0.23.2", "jsonrpsee-types 0.23.2", @@ -4866,7 +5022,7 @@ version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "507460a910eb7b32ee961886ff48539633b788a36b65692b95f225b844c82553" dependencies = [ - "regex-automata 0.4.8", + "regex-automata 0.4.9", ] [[package]] @@ -4898,7 +5054,7 @@ checksum = "ee58dbc414bd23885d7da915e0457618b36d1fc950a6169ef2cb29829d1b1a1d" dependencies = [ "bytes", "lazy_static", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -4909,15 +5065,15 @@ checksum = "db13adb97ab515a3691f56e4dbab09283d0b86cb45abd991d8634a9d6f501760" [[package]] name = "libc" -version = "0.2.161" +version = "0.2.168" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1" +checksum = 
"5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d" [[package]] name = "libloading" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" +checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ "cfg-if", "windows-targets 0.52.6", @@ -4931,16 +5087,16 @@ checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" [[package]] name = "libp2p-identity" -version = "0.2.9" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cca1eb2bc1fd29f099f3daaab7effd01e1a54b7c577d0ed082521034d912e8" +checksum = "257b5621d159b32282eac446bed6670c39c7dc68a200a992d8f056afa0066f6d" dependencies = [ "bs58", "hkdf", "multihash", "quick-protobuf", "sha2 0.10.8", - "thiserror", + "thiserror 1.0.69", "tracing", ] @@ -5052,6 +5208,12 @@ version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +[[package]] +name = "litemap" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" + [[package]] name = "loadnext" version = "0.1.0" @@ -5069,7 +5231,7 @@ dependencies = [ "serde", "serde_json", "static_assertions", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", @@ -5089,7 +5251,7 @@ name = "local_blobs_dump" version = "0.1.0" dependencies = [ "anyhow", - "clap 4.5.20", + "clap 4.5.23", "hex", "tokio", "tracing", @@ -5138,11 +5300,11 @@ dependencies = [ [[package]] name = "logos" -version = "0.14.2" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c6b6e02facda28ca5fb8dbe4b152496ba3b1bd5a4b40bb2b1b2d8ad74e0f39b" +checksum = 
"7251356ef8cb7aec833ddf598c6cb24d17b689d20b993f9d11a3d764e34e6458" dependencies = [ - "logos-derive 0.14.2", + "logos-derive 0.14.4", ] [[package]] @@ -5153,25 +5315,25 @@ checksum = "dc487311295e0002e452025d6b580b77bb17286de87b57138f3b5db711cded68" dependencies = [ "beef", "fnv", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "regex-syntax 0.6.29", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] name = "logos-codegen" -version = "0.14.2" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b32eb6b5f26efacd015b000bfc562186472cd9b34bdba3f6b264e2a052676d10" +checksum = "59f80069600c0d66734f5ff52cc42f2dabd6b29d205f333d61fd7832e9e9963f" dependencies = [ "beef", "fnv", "lazy_static", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "regex-syntax 0.8.5", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -5185,11 +5347,11 @@ dependencies = [ [[package]] name = "logos-derive" -version = "0.14.2" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e5d0c5463c911ef55624739fc353238b4e310f0144be1f875dc42fec6bfd5ec" +checksum = "24fb722b06a9dc12adb0963ed585f19fc61dc5413e6a9be9422ef92c091e731d" dependencies = [ - "logos-codegen 0.14.2", + "logos-codegen 0.14.4", ] [[package]] @@ -5198,7 +5360,7 @@ version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" dependencies = [ - "hashbrown 0.15.0", + "hashbrown 0.15.2", ] [[package]] @@ -5262,7 +5424,7 @@ name = "merkle_tree_consistency_checker" version = "0.1.0" dependencies = [ "anyhow", - "clap 4.5.20", + "clap 4.5.23", "tracing", "zksync_config", "zksync_env_config", @@ -5292,19 +5454,19 @@ checksum = "59bb584eaeeab6bd0226ccf3509a69d7936d148cf3d036ad350abe35e8c6856e" dependencies = [ "miette-derive 5.10.0", "once_cell", - "thiserror", + "thiserror 1.0.69", "unicode-width", ] [[package]] name = "miette" -version 
= "7.2.0" +version = "7.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4edc8853320c2a0dab800fbda86253c8938f6ea88510dc92c5f1ed20e794afc1" +checksum = "317f146e2eb7021892722af37cf1b971f0a70c8406f487e24952667616192c64" dependencies = [ "cfg-if", - "miette-derive 7.2.0", - "thiserror", + "miette-derive 7.4.0", + "thiserror 1.0.69", "unicode-width", ] @@ -5314,20 +5476,20 @@ version = "5.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49e7bc1560b95a3c4a25d03de42fe76ca718ab92d1a22a55b9b4cf67b3ae635c" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] name = "miette-derive" -version = "7.2.0" +version = "7.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf09caffaac8068c346b6df2a7fc27a177fd20b39421a39ce0a211bde679a6c" +checksum = "23c9b935fbe1d6cbd1dac857b54a688145e2d93f48db36010514d0f612d0ad67" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -5378,22 +5540,15 @@ dependencies = [ [[package]] name = "mio" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ - "hermit-abi 0.3.9", "libc", "wasi 0.11.0+wasi-snapshot-preview1", "windows-sys 0.52.0", ] -[[package]] -name = "mirai-annotations" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9be0862c1b3f26a88803c4a49de6889c10e608b3ee9344e6ef5b45fb37ad3d1" - [[package]] name = "multer" version = "3.1.0" @@ -5403,7 +5558,7 @@ dependencies = [ "bytes", "encoding_rs", "futures-util", - "http 1.1.0", + "http 1.2.0", "httparse", "memchr", "mime", @@ -5443,9 +5598,9 @@ dependencies = [ [[package]] name = 
"multihash" -version = "0.19.2" +version = "0.19.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc41f430805af9d1cf4adae4ed2149c759b877b01d909a1f40256188d09345d2" +checksum = "6b430e7953c29dd6a09afc29ff0bb69c6e306329ee6794700aee27b76a1aea8d" dependencies = [ "core2", "unsigned-varint", @@ -5469,7 +5624,7 @@ dependencies = [ "openssl-probe", "openssl-sys", "schannel", - "security-framework", + "security-framework 2.11.1", "security-framework-sys", "tempfile", ] @@ -5634,7 +5789,7 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "syn 1.0.109", ] @@ -5726,9 +5881,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" dependencies = [ "proc-macro-crate 1.3.1", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -5738,9 +5893,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" dependencies = [ "proc-macro-crate 3.2.0", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -5768,12 +5923,12 @@ dependencies = [ "futures 0.3.31", "futures-core", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", - "hyper 1.5.0", + "hyper 1.5.1", "hyper-rustls 0.27.3", - "hyper-timeout 0.5.1", + "hyper-timeout 0.5.2", "hyper-util", "jsonwebtoken", "once_cell", @@ -5787,7 +5942,7 @@ dependencies = [ "snafu", "tokio", "tower 0.5.1", - "tower-http 0.6.1", + "tower-http 0.6.2", "tracing", "url", ] @@ -5831,9 +5986,9 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -5865,7 +6020,7 @@ dependencies = [ "js-sys", "once_cell", "pin-project-lite", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -5888,7 +6043,7 @@ checksum = "ad31e9de44ee3538fb9d64fe3376c1362f406162434609e79aea2a41a0af78ab" dependencies = [ "async-trait", "bytes", - "http 1.1.0", + "http 1.2.0", "opentelemetry", "reqwest 0.12.9", ] @@ -5901,14 +6056,14 @@ checksum = "6b925a602ffb916fb7421276b86756027b37ee708f9dce2dbdcc51739f07e727" dependencies = [ "async-trait", "futures-core", - "http 1.1.0", + "http 1.2.0", "opentelemetry", "opentelemetry-http", "opentelemetry-proto", "opentelemetry_sdk", - "prost 0.13.3", + "prost 0.13.4", "reqwest 0.12.9", - "thiserror", + "thiserror 1.0.69", "tokio", "tonic 0.12.3", ] @@ -5921,7 +6076,7 @@ checksum = "30ee9f20bff9c984511a02f082dc8ede839e4a9bf15cc2487c8d6fea5ad850d9" dependencies = [ "opentelemetry", "opentelemetry_sdk", - "prost 0.13.3", + "prost 0.13.4", "tonic 0.12.3", ] @@ -5947,7 +6102,7 @@ dependencies = [ "percent-encoding", "rand 0.8.5", "serde_json", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-stream", ] @@ -5969,9 +6124,9 @@ dependencies = [ [[package]] name = "os_info" -version = "3.8.2" +version = "3.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae99c7fa6dd38c7cafe1ec085e804f8f555a2f8659b0dbe03f1f9963a9b51092" +checksum = "e5ca711d8b83edbb00b44d504503cd247c9c0bd8b0fa2694f2a1a3d8165379ce" dependencies = [ "log", "serde", @@ -6023,7 +6178,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" dependencies = [ "proc-macro-crate 3.2.0", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "syn 1.0.109", ] @@ -6148,20 +6303,20 @@ checksum = 
"e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.14" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879952a81a83930934cbf1786752d6dedc3b1f29e8f8fb2ad1d0a36f377cf442" +checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" dependencies = [ "memchr", - "thiserror", + "thiserror 2.0.6", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.7.14" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d214365f632b123a47fd913301e14c946c61d1c183ee245fa76eb752e59a02dd" +checksum = "816518421cfc6887a0d62bf441b6ffb4536fcc926395a69e1a85852d4363f57e" dependencies = [ "pest", "pest_generator", @@ -6169,22 +6324,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.14" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb55586734301717aea2ac313f50b2eb8f60d2fc3dc01d190eefa2e625f60c4e" +checksum = "7d1396fd3a870fc7838768d171b4616d5c91f6cc25e377b673d714567d99377b" dependencies = [ "pest", "pest_meta", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] name = "pest_meta" -version = "2.7.14" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b75da2a70cf4d9cb76833c990ac9cd3923c9a8905a8929789ce347c84564d03d" +checksum = "e1e58089ea25d717bfd31fb534e4f3afcc2cc569c70de3e239778991ea3b7dea" dependencies = [ "once_cell", "pest", @@ -6198,7 +6353,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.6.0", + "indexmap 2.7.0", ] [[package]] @@ -6229,9 +6384,9 @@ checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b" dependencies = [ "phf_generator", "phf_shared 0.11.2", - 
"proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -6273,9 +6428,9 @@ version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -6368,9 +6523,9 @@ dependencies = [ [[package]] name = "polling" -version = "3.7.3" +version = "3.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2790cd301dec6cd3b7a025e4815cf825724a51c98dccfe6a3e55f05ffb6511" +checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f" dependencies = [ "cfg-if", "concurrent-queue", @@ -6441,8 +6596,8 @@ version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" dependencies = [ - "proc-macro2 1.0.89", - "syn 2.0.85", + "proc-macro2 1.0.92", + "syn 2.0.90", ] [[package]] @@ -6494,7 +6649,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "syn 1.0.109", "version_check", @@ -6506,7 +6661,7 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "version_check", ] @@ -6528,9 +6683,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.89" +version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e" +checksum = 
"37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" dependencies = [ "unicode-ident", ] @@ -6553,9 +6708,9 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -6590,12 +6745,12 @@ dependencies = [ [[package]] name = "prost" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b0487d90e047de87f984913713b85c601c05609aad5b0df4b4573fbf69aa13f" +checksum = "2c0fef6c4230e4ccf618a35c59d7ede15dea37de8427500f50aff708806e42ec" dependencies = [ "bytes", - "prost-derive 0.13.3", + "prost-derive 0.13.4", ] [[package]] @@ -6615,7 +6770,7 @@ dependencies = [ "prost 0.12.6", "prost-types", "regex", - "syn 2.0.85", + "syn 2.0.90", "tempfile", ] @@ -6627,22 +6782,22 @@ checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" dependencies = [ "anyhow", "itertools 0.12.1", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] name = "prost-derive" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9552f850d5f0964a4e4d0bf306459ac29323ddfbae05e35a7c0d35cb0803cc5" +checksum = "157c5a9d7ea5c2ed2d9fb8f495b64759f7816c7eaea54ba3978f0d63000162e3" dependencies = [ "anyhow", "itertools 0.13.0", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -6667,8 +6822,8 @@ version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f5eec97d5d34bdd17ad2db2219aabf46b054c6c41bd5529767c9ce55be5898f" dependencies = [ - "logos 0.14.2", - "miette 7.2.0", + "logos 0.14.4", + "miette 7.4.0", "once_cell", "prost 0.12.6", "prost-types", @@ -6695,7 +6850,7 @@ 
dependencies = [ "prost-reflect 0.12.0", "prost-types", "protox-parse 0.5.0", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -6705,12 +6860,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac532509cee918d40f38c3e12f8ef9230f215f017d54de7dd975015538a42ce7" dependencies = [ "bytes", - "miette 7.2.0", + "miette 7.4.0", "prost 0.12.6", "prost-reflect 0.13.1", "prost-types", "protox-parse 0.6.1", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -6722,7 +6877,7 @@ dependencies = [ "logos 0.13.0", "miette 5.10.0", "prost-types", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -6731,10 +6886,10 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f6c33f43516fe397e2f930779d720ca12cd057f7da4cd6326a0ef78d69dee96" dependencies = [ - "logos 0.14.2", - "miette 7.2.0", + "logos 0.14.4", + "miette 7.4.0", "prost-types", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -6752,7 +6907,7 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "syn 1.0.109", ] @@ -6807,45 +6962,49 @@ dependencies = [ [[package]] name = "quinn" -version = "0.11.5" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c7c5fdde3cdae7203427dc4f0a68fe0ed09833edc525a03456b153b79828684" +checksum = "62e96808277ec6f97351a2380e6c25114bc9e67037775464979f3037c92d05ef" dependencies = [ "bytes", "pin-project-lite", "quinn-proto", "quinn-udp", - "rustc-hash 2.0.0", - "rustls 0.23.16", + "rustc-hash 2.1.0", + "rustls 0.23.19", "socket2", - "thiserror", + "thiserror 2.0.6", "tokio", "tracing", ] [[package]] name = "quinn-proto" -version = "0.11.8" +version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fadfaed2cd7f389d0161bb73eeb07b7b78f8691047a6f3e73caaeae55310a4a6" +checksum = "a2fe5ef3495d7d2e377ff17b1a8ce2ee2ec2a18cde8b6ad6619d65d0701c135d" dependencies = [ "bytes", + "getrandom", "rand 0.8.5", "ring", - "rustc-hash 2.0.0", - "rustls 0.23.16", + "rustc-hash 2.1.0", + "rustls 0.23.19", + "rustls-pki-types", "slab", - "thiserror", + "thiserror 2.0.6", "tinyvec", "tracing", + "web-time", ] [[package]] name = "quinn-udp" -version = "0.5.5" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fe68c2e9e1a1234e218683dbdf9f9dfcb094113c5ac2b938dfcb9bab4c4140b" +checksum = "52cd4b1eff68bf27940dd39811292c49e007f4d0b4c357358dc9b0197be6b527" dependencies = [ + "cfg_aliases", "libc", "once_cell", "socket2", @@ -6868,7 +7027,7 @@ version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", ] [[package]] @@ -7009,7 +7168,7 @@ checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ "getrandom", "libredox", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -7020,7 +7179,7 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.8", + "regex-automata 0.4.9", "regex-syntax 0.8.5", ] @@ -7035,9 +7194,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", @@ -7118,11 +7277,11 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2 0.4.6", - "http 1.1.0", + "h2 0.4.7", + "http 1.2.0", "http-body 1.0.1", "http-body-util", - 
"hyper 1.5.0", + "hyper 1.5.1", "hyper-rustls 0.27.3", "hyper-tls 0.6.0", "hyper-util", @@ -7136,18 +7295,18 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls 0.23.16", - "rustls-native-certs 0.8.0", + "rustls 0.23.19", + "rustls-native-certs 0.8.1", "rustls-pemfile 2.2.0", "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", - "sync_wrapper 1.0.1", + "sync_wrapper 1.0.2", "system-configuration 0.6.1", "tokio", "tokio-native-tls", - "tokio-rustls 0.26.0", + "tokio-rustls 0.26.1", "tokio-socks", "tokio-util", "tower-service", @@ -7168,10 +7327,10 @@ checksum = "562ceb5a604d3f7c885a792d42c199fd8af239d0a51b2fa6a78aafa092452b04" dependencies = [ "anyhow", "async-trait", - "http 1.1.0", + "http 1.2.0", "reqwest 0.12.9", "serde", - "thiserror", + "thiserror 1.0.69", "tower-service", ] @@ -7269,7 +7428,7 @@ version = "0.7.45" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "503d1d27590a2b0a3a4ca4c94755aa2875657196ecbf401a42eff41d7de532c0" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "syn 1.0.109", ] @@ -7302,9 +7461,9 @@ checksum = "afab94fb28594581f62d981211a9a4d53cc8130bbcbbb89a0440d9b8e81a7746" [[package]] name = "rsa" -version = "0.9.6" +version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" +checksum = "47c75d7c5c6b673e58bf54d8544a9f432e3a925b0e80f7cd3602ab5c50c55519" dependencies = [ "const-oid", "digest 0.10.7", @@ -7380,9 +7539,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc-hash" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" +checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" dependencies = [ "rand 0.8.5", ] @@ -7413,15 +7572,15 @@ 
dependencies = [ [[package]] name = "rustix" -version = "0.38.38" +version = "0.38.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa260229e6538e52293eeb577aabd09945a09d6d9cc0fc550ed7529056c2e32a" +checksum = "f93dc38ecbab2eb790ff964bb77fa94faf256fd3e73285fd7ba0903b76bedb85" dependencies = [ "bitflags 2.6.0", "errno", "libc", "linux-raw-sys", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -7452,9 +7611,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.16" +version = "0.23.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eee87ff5d9b36712a58574e12e9f0ea80f915a5b0ac518d322b24a465617925e" +checksum = "934b404430bb06b3fae2cba809eb45a1ab1aecd64491213d7c3301b88393f8d1" dependencies = [ "aws-lc-rs", "log", @@ -7475,7 +7634,7 @@ dependencies = [ "openssl-probe", "rustls-pemfile 1.0.4", "schannel", - "security-framework", + "security-framework 2.11.1", ] [[package]] @@ -7488,20 +7647,19 @@ dependencies = [ "rustls-pemfile 2.2.0", "rustls-pki-types", "schannel", - "security-framework", + "security-framework 2.11.1", ] [[package]] name = "rustls-native-certs" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcaf18a4f2be7326cd874a5fa579fae794320a0f388d365dca7e480e55f83f8a" +checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" dependencies = [ "openssl-probe", - "rustls-pemfile 2.2.0", "rustls-pki-types", "schannel", - "security-framework", + "security-framework 3.0.1", ] [[package]] @@ -7527,6 +7685,9 @@ name = "rustls-pki-types" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" +dependencies = [ + "web-time", +] [[package]] name = "rustls-platform-verifier" @@ -7534,16 +7695,16 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"afbb878bdfdf63a336a5e63561b1835e7a8c91524f51621db870169eac84b490" dependencies = [ - "core-foundation", + "core-foundation 0.9.4", "core-foundation-sys", "jni", "log", "once_cell", - "rustls 0.23.16", + "rustls 0.23.19", "rustls-native-certs 0.7.3", "rustls-platform-verifier-android", "rustls-webpki 0.102.8", - "security-framework", + "security-framework 2.11.1", "security-framework-sys", "webpki-roots", "winapi", @@ -7655,7 +7816,7 @@ checksum = "d3475108a1b62c7efd1b5c65974f30109a598b2f45f23c9ae030acb9686966db" dependencies = [ "darling 0.14.4", "proc-macro-crate 1.3.1", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "syn 1.0.109", ] @@ -7683,16 +7844,16 @@ checksum = "995491f110efdc6bea96d6a746140e32bfceb4ea47510750a5467295a4707a25" dependencies = [ "darling 0.14.4", "proc-macro-crate 1.3.1", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "syn 1.0.109", ] [[package]] name = "scale-info" -version = "2.11.5" +version = "2.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aa7ffc1c0ef49b0452c6e2986abf2b07743320641ffd5fc63d552458e3b779b" +checksum = "346a3b32eba2640d17a9cb5927056b08f3de90f65b72fe09402c2ad07d684d0b" dependencies = [ "bitvec", "cfg-if", @@ -7704,14 +7865,14 @@ dependencies = [ [[package]] name = "scale-info-derive" -version = "2.11.5" +version = "2.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46385cc24172cf615450267463f937c10072516359b3ff1cb24228a4a08bf951" +checksum = "c6630024bf739e2179b91fb424b28898baf819414262c5d376677dbff1fe7ebf" dependencies = [ "proc-macro-crate 3.2.0", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -7720,11 +7881,11 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00860983481ac590ac87972062909bef0d6a658013b592ccc0f2feb272feab11" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", 
"scale-info", - "syn 2.0.85", - "thiserror", + "syn 2.0.90", + "thiserror 1.0.69", ] [[package]] @@ -7749,9 +7910,9 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01227be5826fa0690321a2ba6c5cd57a19cf3f6a09e76973b58e61de6ab9d1c1" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" dependencies = [ "windows-sys 0.59.0", ] @@ -7869,18 +8030,31 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ "bitflags 2.6.0", - "core-foundation", + "core-foundation 0.9.4", "core-foundation-sys", "libc", "num-bigint 0.4.6", "security-framework-sys", ] +[[package]] +name = "security-framework" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1415a607e92bec364ea2cf9264646dcce0f91e6d65281bd6f2819cca3bf39c8" +dependencies = [ + "bitflags 2.6.0", + "core-foundation 0.10.0", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + [[package]] name = "security-framework-sys" -version = "2.12.0" +version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea4a292869320c0272d7bc55a5a6aafaff59b4f63404a003887b679a2e05b4b6" +checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2" dependencies = [ "core-foundation-sys", "libc", @@ -7891,7 +8065,7 @@ name = "selector_generator" version = "0.1.0" dependencies = [ "anyhow", - "clap 4.5.20", + "clap 4.5.23", "ethabi", "glob", "hex", @@ -7920,9 +8094,9 @@ dependencies = [ [[package]] name = "semver-parser" -version = "0.10.2" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" +checksum = "9900206b54a3527fdc7b8a938bffd94a568bac4f4aa8113b209df75a09c0dec2" dependencies = 
[ "pest", ] @@ -8035,7 +8209,7 @@ dependencies = [ "rand 0.8.5", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "time", "url", "uuid", @@ -8049,9 +8223,9 @@ checksum = "a3f0bf26fd526d2a95683cd0f87bf103b8539e2ca1ef48ce002d67aad59aa0b4" [[package]] name = "serde" -version = "1.0.214" +version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f55c3193aca71c12ad7890f1785d2b73e1b9f63a0bbc353c08ef26fe03fc56b5" +checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f" dependencies = [ "serde_derive", ] @@ -8077,20 +8251,20 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.214" +version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766" +checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] name = "serde_json" -version = "1.0.132" +version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" +checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" dependencies = [ "itoa", "memchr", @@ -8124,9 +8298,9 @@ version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -8169,7 +8343,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" dependencies = [ "darling 0.13.4", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "syn 1.0.109", ] @@ -8180,7 +8354,7 @@ version = 
"0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap 2.6.0", + "indexmap 2.7.0", "itoa", "ryu", "serde", @@ -8358,7 +8532,7 @@ checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" dependencies = [ "num-bigint 0.4.6", "num-traits", - "thiserror", + "thiserror 1.0.69", "time", ] @@ -8541,9 +8715,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "03c3c6b7927ffe7ecaa769ee0e3994da3b8cafc8f444578982c83ecb161af917" dependencies = [ "heck 0.5.0", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -8585,9 +8759,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ "libc", "windows-sys 0.52.0", @@ -8610,14 +8784,14 @@ dependencies = [ [[package]] name = "soketto" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37468c595637c10857701c990f93a40ce0e357cedb0953d1c26c8d8027f9bb53" +checksum = "2e859df029d160cb88608f5d7df7fb4753fd20fdfb4de5644f3d8b8440841721" dependencies = [ "base64 0.22.1", "bytes", "futures 0.3.31", - "http 1.1.0", + "http 1.2.0", "httparse", "log", "rand 0.8.5", @@ -8634,7 +8808,7 @@ dependencies = [ "lalrpop", "lalrpop-util", "phf", - "thiserror", + "thiserror 1.0.69", "unicode-xid 0.2.6", ] @@ -8733,7 +8907,7 @@ dependencies = [ "hashbrown 0.14.5", "hashlink", "hex", - "indexmap 2.6.0", + "indexmap 2.7.0", "ipnetwork", "log", "memchr", @@ -8747,7 +8921,7 @@ dependencies = [ "sha2 0.10.8", "smallvec", "sqlformat", - "thiserror", + "thiserror 1.0.69", "tokio", 
"tokio-stream", "tracing", @@ -8760,11 +8934,11 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cac0692bcc9de3b073e8d747391827297e075c7710ff6276d9f7a1f3d58c6657" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "sqlx-core", "sqlx-macros-core", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -8778,7 +8952,7 @@ dependencies = [ "heck 0.5.0", "hex", "once_cell", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "serde", "serde_json", @@ -8787,7 +8961,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 2.0.85", + "syn 2.0.90", "tempfile", "tokio", "url", @@ -8833,7 +9007,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror", + "thiserror 1.0.69", "tracing", "whoami", ] @@ -8876,7 +9050,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror", + "thiserror 1.0.69", "tracing", "whoami", ] @@ -8978,7 +9152,7 @@ checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" dependencies = [ "heck 0.3.3", "proc-macro-error", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "syn 1.0.109", ] @@ -8999,10 +9173,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" dependencies = [ "heck 0.5.0", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "rustversion", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -9056,7 +9230,7 @@ dependencies = [ "subxt-lightclient", "subxt-macro", "subxt-metadata", - "thiserror", + "thiserror 1.0.69", "tokio-util", "tracing", "url", @@ -9073,13 +9247,13 @@ dependencies = [ "hex", "jsonrpsee 0.21.0", "parity-scale-codec", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "scale-info", "scale-typegen", "subxt-metadata", - "syn 2.0.85", - "thiserror", + "syn 2.0.90", + "thiserror 1.0.69", "tokio", ] @@ -9094,7 +9268,7 @@ dependencies = [ 
"serde", "serde_json", "smoldot-light", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-stream", "tracing", @@ -9112,7 +9286,7 @@ dependencies = [ "quote 1.0.37", "scale-typegen", "subxt-codegen", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -9125,7 +9299,7 @@ dependencies = [ "parity-scale-codec", "scale-info", "sp-core-hashing", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -9145,7 +9319,7 @@ dependencies = [ "sha2 0.10.8", "sp-core-hashing", "subxt", - "thiserror", + "thiserror 1.0.69", "zeroize", ] @@ -9164,7 +9338,7 @@ dependencies = [ "serde_json", "sha2 0.10.8", "tempfile", - "thiserror", + "thiserror 1.0.69", "url", "zip", ] @@ -9199,34 +9373,22 @@ version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.85" +version = "2.0.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5023162dfcd14ef8f32034d8bcd4cc5ddc61ef7a247c024a33e24e1f24d21b56" +checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "unicode-ident", ] -[[package]] -name = "syn_derive" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b" -dependencies = [ - "proc-macro-error", - "proc-macro2 1.0.89", - "quote 1.0.37", - "syn 2.0.85", -] - [[package]] name = "sync_wrapper" version = "0.1.2" @@ -9235,13 +9397,24 @@ checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" [[package]] name = "sync_wrapper" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" dependencies = [ "futures-core", ] +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", +] + [[package]] name = "system-configuration" version = "0.5.1" @@ -9249,7 +9422,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" dependencies = [ "bitflags 1.3.2", - "core-foundation", + "core-foundation 0.9.4", "system-configuration-sys 0.5.0", ] @@ -9260,7 +9433,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" dependencies = [ "bitflags 2.6.0", - "core-foundation", + "core-foundation 0.9.4", "system-configuration-sys 0.6.0", ] @@ -9318,9 +9491,9 @@ checksum = "42a4d50cdb458045afc8131fd91b64904da29548bcb63c7236e0844936c13078" [[package]] name = "tempfile" -version = "3.13.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b" +checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" dependencies = [ "cfg-if", "fastrand", @@ -9351,9 +9524,9 @@ dependencies = [ [[package]] name = "terminal_size" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f599bd7ca042cfdf8f4512b277c02ba102247820f9d9d4a9f521f496751a6ef" +checksum = "5352447f921fda68cf61b4101566c0bdb5104eff6804d0678e5227580ab6a4e9" dependencies = [ "rustix", "windows-sys 0.59.0", @@ -9374,9 +9547,9 @@ version = "0.1.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "f9b53c7124dd88026d5d98a1eb1fd062a578b7d783017c9298825526c7fb6427" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -9396,9 +9569,9 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5999e24eaa32083191ba4e425deb75cdf25efefabe5aaccb7446dd0d4122a3f5" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -9418,22 +9591,42 @@ checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" [[package]] name = "thiserror" -version = "1.0.65" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec2a1820ebd077e2b90c4df007bebf344cd394098a13c563957d0afc83ea47" +dependencies = [ + "thiserror-impl 2.0.6", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d11abd9594d9b38965ef50805c5e469ca9cc6f197f883f717e0269a3057b3d5" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ - "thiserror-impl", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] name = "thiserror-impl" -version = "1.0.65" +version = "2.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae71770322cbd277e69d762a16c444af02aa0575ac0d174f0b9562d3b37f8602" +checksum = "d65750cab40f4ff1929fb1ba509e9914eb756131cef4210da8d5d700d26f6312" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -9477,9 
+9670,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.36" +version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" dependencies = [ "deranged", "itoa", @@ -9498,9 +9691,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" dependencies = [ "num-conv", "time-core", @@ -9524,6 +9717,16 @@ dependencies = [ "crunchy", ] +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tinytemplate" version = "1.2.1" @@ -9551,22 +9754,22 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tls-listener" -version = "0.10.1" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83a296135fdab7b3a1f708c338c50bab570bcd77d44080cde9341df45c0c6d73" +checksum = "0f1d8809f604e448c7bc53a5a0e4c2a0a20ba44cb1fb407314c8eeccb92127f9" dependencies = [ "futures-util", "pin-project-lite", - "thiserror", + "thiserror 1.0.69", "tokio", - "tokio-rustls 0.26.0", + "tokio-rustls 0.26.1", ] [[package]] name = "tokio" -version = "1.41.0" +version = "1.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "145f3413504347a2be84393cc8a7d2fb4d863b375909ea59f2158261aa258bbb" +checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551" dependencies = 
[ "backtrace", "bytes", @@ -9596,9 +9799,9 @@ version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -9634,12 +9837,11 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.26.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" dependencies = [ - "rustls 0.23.16", - "rustls-pki-types", + "rustls 0.23.19", "tokio", ] @@ -9651,15 +9853,15 @@ checksum = "0d4770b8024672c1101b3f6733eab95b18007dbe0847a8afe341fcf79e06043f" dependencies = [ "either", "futures-util", - "thiserror", + "thiserror 1.0.69", "tokio", ] [[package]] name = "tokio-stream" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" dependencies = [ "futures-core", "pin-project-lite", @@ -9669,9 +9871,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.12" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" +checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" dependencies = [ "bytes", "futures-core", @@ -9708,7 +9910,7 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.6.0", + "indexmap 2.7.0", "toml_datetime", "winnow 0.5.40", ] @@ -9719,7 +9921,7 @@ version = 
"0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ - "indexmap 2.6.0", + "indexmap 2.7.0", "serde", "serde_spanned", "toml_datetime", @@ -9765,19 +9967,19 @@ checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" dependencies = [ "async-stream", "async-trait", - "axum 0.7.7", + "axum 0.7.9", "base64 0.22.1", "bytes", - "h2 0.4.6", - "http 1.1.0", + "h2 0.4.7", + "http 1.2.0", "http-body 1.0.1", "http-body-util", - "hyper 1.5.0", - "hyper-timeout 0.5.1", + "hyper 1.5.1", + "hyper-timeout 0.5.2", "hyper-util", "percent-encoding", "pin-project", - "prost 0.13.3", + "prost 0.13.4", "socket2", "tokio", "tokio-stream", @@ -9834,7 +10036,7 @@ dependencies = [ "bitflags 2.6.0", "bytes", "futures-core", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "pin-project-lite", @@ -9846,14 +10048,14 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8437150ab6bbc8c5f0f519e3d5ed4aa883a83dd4cdd3d1b21f9482936046cb97" +checksum = "403fa3b783d4b626a8ad51d766ab03cb6d2dbfc46b1c5d4448395e6628dc9697" dependencies = [ "bitflags 2.6.0", "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "iri-string", "pin-project-lite", @@ -9877,9 +10079,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "log", "pin-project-lite", @@ -9889,20 +10091,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.28" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", @@ -9939,9 +10141,9 @@ dependencies = [ [[package]] name = "tracing-serde" -version = "0.1.3" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1" dependencies = [ "serde", "tracing-core", @@ -9949,9 +10151,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" dependencies = [ "matchers", "nu-ansi-term", @@ -10066,9 +10268,9 @@ checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" [[package]] name = "unicode-ident" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" [[package]] name = "unicode-normalization" @@ -10155,9 +10357,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = 
"ureq" -version = "2.10.1" +version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b74fc6b57825be3373f7054754755f03ac3a8f5d70015ccad699ba2029956f4a" +checksum = "02d1a66277ed75f640d608235660df48c8e3c19f3b4edb6a263315626cc3c01d" dependencies = [ "base64 0.22.1", "log", @@ -10168,9 +10370,9 @@ dependencies = [ [[package]] name = "url" -version = "2.5.2" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", @@ -10184,6 +10386,18 @@ version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" version = "0.2.2" @@ -10275,9 +10489,9 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a511871dc5de990a3b2a0e715facfbc5da848c0c0395597a1415029fb7c250a" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -10347,9 +10561,9 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" +checksum = 
"a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396" dependencies = [ "cfg-if", "once_cell", @@ -10358,36 +10572,36 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" +checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79" dependencies = [ "bumpalo", "log", - "once_cell", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.45" +version = "0.4.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b" +checksum = "38176d9b44ea84e9184eff0bc34cc167ed044f816accfe5922e54d84cf48eca2" dependencies = [ "cfg-if", "js-sys", + "once_cell", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" +checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe" dependencies = [ "quote 1.0.37", "wasm-bindgen-macro-support", @@ -10395,22 +10609,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" +checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.95" +version = "0.2.99" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" +checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6" [[package]] name = "wasm-streams" @@ -10467,9 +10681,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.72" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112" +checksum = "04dd7223427d52553d3702c004d3b2fe07c148165faa56313cb00211e31c12bc" dependencies = [ "js-sys", "wasm-bindgen", @@ -10487,9 +10701,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.26.6" +version = "0.26.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "841c67bff177718f1d4dfefde8d8f0e78f9b6589319ba88312f567fc5841a958" +checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" dependencies = [ "rustls-pki-types", ] @@ -10762,6 +10976,18 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + [[package]] name = "wyz" version = "0.5.1" @@ -10790,9 +11016,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b06cc62d4cec617d3c259537be0fcaa8a5bcf72ddf2983823d9528605f36ed3" dependencies = [ "anes", - "clap 4.5.20", + "clap 4.5.23", "num_cpus", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -10807,6 +11033,30 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff4524214bc4629eba08d78ceb1d6507070cc0bcbbed23af74e19e6e924a24cf" +[[package]] 
+name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", + "synstructure", +] + [[package]] name = "zerocopy" version = "0.7.35" @@ -10823,9 +11073,30 @@ version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", +] + +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", + "synstructure", ] [[package]] @@ -10843,25 +11114,47 @@ version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", 
+ "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] name = "zip" -version = "2.2.0" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc5e4288ea4057ae23afc69a4472434a87a2495cafce6632fd1c4ec9f5cf3494" +checksum = "99d52293fc86ea7cf13971b3bb81eb21683636e7ae24c729cdaf1b7c4157a352" dependencies = [ "arbitrary", "crc32fast", "crossbeam-utils", "displaydoc", "flate2", - "indexmap 2.6.0", + "indexmap 2.7.0", "memchr", - "thiserror", + "thiserror 2.0.6", "zopfli", ] @@ -11147,7 +11440,7 @@ dependencies = [ "serde_with", "sha2 0.10.8", "strum", - "thiserror", + "thiserror 1.0.69", "tiny-keccak 2.0.2", "url", ] @@ -11205,7 +11498,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", @@ -11248,16 +11541,16 @@ dependencies = [ [[package]] name = "zksync_concurrency" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "035269d811b3770debca372141ab64cad067dce8e58cb39a48cb7617d30c626b" +checksum = "e8312ab73d3caa55775bd531795b507fa8f76bd9dabfaeb0954fe43e8fc1323b" dependencies = [ "anyhow", "once_cell", "pin-project", "rand 0.8.5", "sha3 0.10.8", - "thiserror", + "thiserror 1.0.69", "time", "tokio", "tracing", @@ -11284,15 +11577,15 @@ dependencies = [ [[package]] name = "zksync_consensus_bft" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8001633dee671134cf572175a6c4f817904ce5f8d92e9b51f49891c5184a831" +checksum = "fb6b0944322f30f88cd7fb22f7875435b394a135fc1b479719a18c42d9fb724d" dependencies = [ "anyhow", "async-trait", "once_cell", "rand 0.8.5", - 
"thiserror", + "thiserror 1.0.69", "tracing", "vise", "zksync_concurrency", @@ -11306,9 +11599,9 @@ dependencies = [ [[package]] name = "zksync_consensus_crypto" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49e38d1b5ed28c66e785caff53ea4863375555d818aafa03290397192dd3e665" +checksum = "86b539960de98df3c3bd27d2d9b97de862027686bbb3bdfc5aaad5b74bb929a1" dependencies = [ "anyhow", "blst", @@ -11320,16 +11613,16 @@ dependencies = [ "num-traits", "rand 0.8.5", "sha3 0.10.8", - "thiserror", + "thiserror 1.0.69", "tracing", "zeroize", ] [[package]] name = "zksync_consensus_executor" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "061546668dd779ecb08302d2c84a6419e0093ad42aaa279bf20a8fa2ffda1be4" +checksum = "2a75d86368579d5aa59b1baebbdc1aebca7c9234f3e3cba734db7e9bbc4880b0" dependencies = [ "anyhow", "async-trait", @@ -11349,9 +11642,9 @@ dependencies = [ [[package]] name = "zksync_consensus_network" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e9789b5be26d20511bd7930bd9916d91122ff6cb09a28898563152a52f9f5eb" +checksum = "30f73993b7d677dfd4e4f2598dd20906e6a5f3a2168c6cab3a599c056dc5e39a" dependencies = [ "anyhow", "async-trait", @@ -11360,7 +11653,7 @@ dependencies = [ "bytesize", "http-body-util", "human-repr", - "hyper 1.5.0", + "hyper 1.5.1", "hyper-util", "im", "once_cell", @@ -11369,10 +11662,10 @@ dependencies = [ "rand 0.8.5", "semver 1.0.23", "snow", - "thiserror", + "thiserror 1.0.69", "tls-listener", "tokio", - "tokio-rustls 0.26.0", + "tokio-rustls 0.26.1", "tracing", "vise", "zksync_concurrency", @@ -11386,9 +11679,9 @@ dependencies = [ [[package]] name = "zksync_consensus_roles" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e49fbd4e69b276058f3dfc06cf6ada0e8caa6ed826e81289e4d596da95a0f17a" 
+checksum = "c49949546895a10431b9daec6ec4208ef0917ace006446d304b51f5b234ba462" dependencies = [ "anyhow", "bit-vec", @@ -11397,7 +11690,7 @@ dependencies = [ "prost 0.12.6", "rand 0.8.5", "serde", - "thiserror", + "thiserror 1.0.69", "tracing", "zksync_concurrency", "zksync_consensus_crypto", @@ -11408,15 +11701,15 @@ dependencies = [ [[package]] name = "zksync_consensus_storage" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2b2aab4ed18b13cd584f4edcc2546c8da82f89ac62e525063e12935ff28c9be" +checksum = "feb0d6a54e7d8d2adeee4ba38662161e9309180ad497299092e5641db9fb1c1e" dependencies = [ "anyhow", "async-trait", "prost 0.12.6", "rand 0.8.5", - "thiserror", + "thiserror 1.0.69", "tracing", "vise", "zksync_concurrency", @@ -11428,13 +11721,13 @@ dependencies = [ [[package]] name = "zksync_consensus_utils" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10bac8f471b182d4fa3d40cf158aac3624fe636a1ff0b4cf3fe26a0e20c68a42" +checksum = "723e2a4b056cc5af192a83163c89a6951ee75c098cc5c4a4cdc435f4232d88bd" dependencies = [ "anyhow", "rand 0.8.5", - "thiserror", + "thiserror 1.0.69", "zksync_concurrency", ] @@ -11448,7 +11741,7 @@ dependencies = [ "once_cell", "serde", "test-casing", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "zksync_config", @@ -11468,7 +11761,7 @@ name = "zksync_contract_verification_server" version = "0.1.0" dependencies = [ "anyhow", - "axum 0.7.7", + "axum 0.7.9", "http-body-util", "serde_json", "test-casing", @@ -11487,7 +11780,7 @@ name = "zksync_contract_verifier" version = "0.1.0" dependencies = [ "anyhow", - "clap 4.5.20", + "clap 4.5.23", "tokio", "tracing", "zksync_config", @@ -11512,13 +11805,13 @@ dependencies = [ "octocrab", "regex", "reqwest 0.12.9", - "rustls 0.23.16", + "rustls 0.23.19", "semver 1.0.23", "serde", "serde_json", "tempfile", "test-casing", - "thiserror", + "thiserror 1.0.69", "tokio", 
"tracing", "vise", @@ -11571,7 +11864,7 @@ dependencies = [ "serde", "serde_json", "sha2 0.10.8", - "thiserror", + "thiserror 1.0.69", "zksync_basic_types", ] @@ -11582,7 +11875,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5939e2df4288c263c706ff18ac718e984149223ad4289d6d957d767dcfc04c81" dependencies = [ "proc-macro-error", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "syn 1.0.109", ] @@ -11613,7 +11906,7 @@ dependencies = [ "flate2", "futures 0.3.31", "hex", - "http 1.1.0", + "http 1.2.0", "jsonrpsee 0.23.2", "parity-scale-codec", "pbjson-types", @@ -11672,7 +11965,7 @@ dependencies = [ "serde_json", "sqlx", "strum", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", @@ -11703,7 +11996,7 @@ dependencies = [ "serde_json", "sqlx", "test-casing", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", @@ -11733,7 +12026,7 @@ dependencies = [ "pretty_assertions", "rlp", "serde_json", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", @@ -11753,9 +12046,10 @@ dependencies = [ "async-trait", "chrono", "once_cell", + "serde", "test-casing", "test-log", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", @@ -11763,6 +12057,7 @@ dependencies = [ "zksync_contracts", "zksync_dal", "zksync_eth_client", + "zksync_health_check", "zksync_l1_contract_interface", "zksync_node_fee_model", "zksync_node_test_utils", @@ -11778,7 +12073,7 @@ version = "0.1.0" dependencies = [ "async-trait", "rlp", - "thiserror", + "thiserror 1.0.69", "zksync_basic_types", "zksync_crypto_primitives", ] @@ -11790,8 +12085,11 @@ dependencies = [ "anyhow", "async-recursion", "async-trait", + "bincode", + "hex", + "itertools 0.10.5", "test-log", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", @@ -11799,19 +12097,21 @@ dependencies = [ "zksync_contracts", "zksync_dal", "zksync_eth_client", + "zksync_mini_merkle_tree", "zksync_shared_metrics", "zksync_system_constants", "zksync_types", + 
"zksync_web3_decl", ] [[package]] name = "zksync_external_node" -version = "25.2.0" +version = "25.3.0" dependencies = [ "anyhow", "assert_matches", "async-trait", - "clap 4.5.20", + "clap 4.5.23", "envy", "futures 0.3.31", "rustc_version 0.4.1", @@ -11849,7 +12149,6 @@ dependencies = [ "zksync_object_store", "zksync_protobuf_config", "zksync_reorg_detector", - "zksync_shared_metrics", "zksync_snapshots_applier", "zksync_state", "zksync_state_keeper", @@ -11886,9 +12185,9 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", - "axum 0.7.7", + "axum 0.7.9", "bincode", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", @@ -11920,7 +12219,7 @@ dependencies = [ "num-bigint 0.4.6", "num-integer", "num-traits", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", "serde", "syn 1.0.109", @@ -11935,7 +12234,7 @@ dependencies = [ "futures 0.3.31", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", @@ -12003,10 +12302,10 @@ dependencies = [ "bincode", "blake2 0.10.6", "chrono", - "clap 4.5.20", + "clap 4.5.23", "ethabi", "hex", - "indexmap 2.6.0", + "indexmap 2.7.0", "rand 0.8.5", "reqwest 0.12.9", "rocksdb", @@ -12014,7 +12313,7 @@ dependencies = [ "serde_json", "tempfile", "test-log", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "tracing-subscriber", @@ -12064,7 +12363,7 @@ version = "0.1.0" dependencies = [ "anyhow", "assert_matches", - "clap 4.5.20", + "clap 4.5.23", "insta", "leb128", "once_cell", @@ -12075,7 +12374,7 @@ dependencies = [ "serde_with", "tempfile", "test-casing", - "thiserror", + "thiserror 1.0.69", "thread_local", "tracing", "tracing-subscriber", @@ -12094,7 +12393,7 @@ dependencies = [ "anyhow", "assert_matches", "async-trait", - "axum 0.7.7", + "axum 0.7.9", "futures 0.3.31", "itertools 0.10.5", "once_cell", @@ -12103,7 +12402,7 @@ dependencies = [ "serde_json", "tempfile", "test-casing", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", @@ -12149,7 
+12448,7 @@ dependencies = [ "pretty_assertions", "rand 0.8.5", "test-casing", - "thiserror", + "thiserror 1.0.69", "tracing", "vise", "zk_evm 0.131.0-rc.2", @@ -12174,12 +12473,12 @@ dependencies = [ "anyhow", "assert_matches", "async-trait", - "axum 0.7.7", + "axum 0.7.9", "chrono", "futures 0.3.31", "governor", "hex", - "http 1.1.0", + "http 1.2.0", "itertools 0.10.5", "lru", "once_cell", @@ -12189,7 +12488,7 @@ dependencies = [ "serde_json", "strum", "test-casing", - "thiserror", + "thiserror 1.0.69", "thread_local", "tokio", "tower 0.4.13", @@ -12200,6 +12499,7 @@ dependencies = [ "zksync_config", "zksync_consensus_roles", "zksync_contracts", + "zksync_crypto_primitives", "zksync_dal", "zksync_health_check", "zksync_metadata_calculator", @@ -12231,7 +12531,7 @@ dependencies = [ "semver 1.0.23", "tempfile", "test-casing", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", @@ -12314,7 +12614,8 @@ dependencies = [ "futures 0.3.31", "pin-project-lite", "semver 1.0.23", - "thiserror", + "serde", + "thiserror 1.0.69", "tokio", "tracing", "trybuild", @@ -12353,6 +12654,7 @@ dependencies = [ "zksync_proof_data_handler", "zksync_queued_job_processor", "zksync_reorg_detector", + "zksync_shared_metrics", "zksync_state", "zksync_state_keeper", "zksync_storage", @@ -12368,9 +12670,9 @@ dependencies = [ name = "zksync_node_framework_derive" version = "0.1.0" dependencies = [ - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -12378,8 +12680,9 @@ name = "zksync_node_genesis" version = "0.1.0" dependencies = [ "anyhow", + "bincode", "itertools 0.10.5", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", @@ -12430,7 +12733,7 @@ dependencies = [ "serde", "serde_json", "test-casing", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", @@ -12474,7 +12777,7 @@ dependencies = [ "flate2", "google-cloud-auth", "google-cloud-storage", - "http 1.1.0", + "http 1.2.0", "prost 0.12.6", 
"rand 0.8.5", "reqwest 0.12.9", @@ -12506,9 +12809,9 @@ name = "zksync_proof_data_handler" version = "0.1.0" dependencies = [ "anyhow", - "axum 0.7.7", + "axum 0.7.9", "chrono", - "hyper 1.5.0", + "hyper 1.5.1", "serde_json", "tokio", "tower 0.4.13", @@ -12527,9 +12830,9 @@ dependencies = [ [[package]] name = "zksync_protobuf" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abd55c64f54cb10967a435422f66ff5880ae14a232b245517c7ce38da32e0cab" +checksum = "e8986ad796f8e00d8999fee72effba1a21bce40f5f877d681ac9cd89a94834d8" dependencies = [ "anyhow", "bit-vec", @@ -12548,19 +12851,19 @@ dependencies = [ [[package]] name = "zksync_protobuf_build" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4121952bcaf711005dd554612fc6e2de9b30cb58088508df87f1d38046ce8ac8" +checksum = "8d870b31995e3acb8e47afeb68ebeeffcf6121e70020e65b3d5d31692115d236" dependencies = [ "anyhow", "heck 0.5.0", "prettyplease", - "proc-macro2 1.0.89", + "proc-macro2 1.0.92", "prost-build", "prost-reflect 0.12.0", "protox 0.5.1", "quote 1.0.37", - "syn 2.0.85", + "syn 2.0.90", ] [[package]] @@ -12576,6 +12879,7 @@ dependencies = [ "serde_yaml", "tracing", "zksync_basic_types", + "zksync_concurrency", "zksync_config", "zksync_protobuf", "zksync_protobuf_build", @@ -12620,7 +12924,7 @@ dependencies = [ "async-trait", "serde_json", "test-casing", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "zksync_dal", @@ -12637,7 +12941,7 @@ name = "zksync_server" version = "0.1.0" dependencies = [ "anyhow", - "clap 4.5.20", + "clap 4.5.23", "futures 0.3.31", "serde_json", "tikv-jemallocator", @@ -12668,6 +12972,7 @@ name = "zksync_shared_metrics" version = "0.1.0" dependencies = [ "rustc_version 0.4.1", + "serde", "tracing", "vise", "zksync_dal", @@ -12684,7 +12989,7 @@ dependencies = [ "futures 0.3.31", "serde", "test-casing", - "thiserror", + "thiserror 1.0.69", "tokio", 
"tracing", "vise", @@ -12750,9 +13055,10 @@ dependencies = [ "itertools 0.10.5", "once_cell", "rand 0.8.5", + "serde", "tempfile", "test-casing", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", @@ -12761,6 +13067,7 @@ dependencies = [ "zksync_contracts", "zksync_dal", "zksync_eth_client", + "zksync_health_check", "zksync_mempool", "zksync_multivm", "zksync_node_fee_model", @@ -12807,7 +13114,7 @@ dependencies = [ "reqwest 0.12.9", "secp256k1", "serde", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "url", @@ -12878,7 +13185,7 @@ dependencies = [ "serde_json", "serde_with", "strum", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "zksync_basic_types", @@ -12919,7 +13226,7 @@ dependencies = [ "sentry", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "time", "tokio", "tracing", @@ -12979,7 +13286,7 @@ dependencies = [ "pretty_assertions", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "tracing", "zksync_contracts", "zksync_system_constants", @@ -13032,11 +13339,11 @@ dependencies = [ "pin-project-lite", "rand 0.8.5", "rlp", - "rustls 0.23.16", + "rustls 0.23.19", "serde", "serde_json", "test-casing", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", diff --git a/Cargo.toml b/Cargo.toml index 6e16707704d6..126c7ac53723 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,6 +3,7 @@ members = [ # Binaries "core/bin/block_reverter", "core/bin/contract-verifier", + "core/bin/custom_genesis_export", "core/bin/external_node", "core/bin/merkle_tree_consistency_checker", "core/bin/snapshots_creator", @@ -249,16 +250,16 @@ zk_evm_1_5_0 = { package = "zk_evm", version = "=0.150.7" } zksync_vm2 = { git = "https://github.com/matter-labs/vm2.git", rev = "457d8a7eea9093af9440662e33e598c13ba41633" } # Consensus dependencies. 
-zksync_concurrency = "=0.5.0" -zksync_consensus_bft = "=0.5.0" -zksync_consensus_crypto = "=0.5.0" -zksync_consensus_executor = "=0.5.0" -zksync_consensus_network = "=0.5.0" -zksync_consensus_roles = "=0.5.0" -zksync_consensus_storage = "=0.5.0" -zksync_consensus_utils = "=0.5.0" -zksync_protobuf = "=0.5.0" -zksync_protobuf_build = "=0.5.0" +zksync_concurrency = "=0.7.0" +zksync_consensus_bft = "=0.7.0" +zksync_consensus_crypto = "=0.7.0" +zksync_consensus_executor = "=0.7.0" +zksync_consensus_network = "=0.7.0" +zksync_consensus_roles = "=0.7.0" +zksync_consensus_storage = "=0.7.0" +zksync_consensus_utils = "=0.7.0" +zksync_protobuf = "=0.7.0" +zksync_protobuf_build = "=0.7.0" # "Local" dependencies zksync_multivm = { version = "0.1.0", path = "core/lib/multivm" } @@ -281,6 +282,7 @@ zksync_health_check = { version = "0.1.0", path = "core/lib/health_check" } zksync_l1_contract_interface = { version = "0.1.0", path = "core/lib/l1_contract_interface" } zksync_mempool = { version = "0.1.0", path = "core/lib/mempool" } zksync_merkle_tree = { version = "0.1.0", path = "core/lib/merkle_tree" } +zksync_bin_metadata = { version = "0.1.0", path = "core/lib/bin_metadata" } zksync_mini_merkle_tree = { version = "0.1.0", path = "core/lib/mini_merkle_tree" } zksync_object_store = { version = "0.1.0", path = "core/lib/object_store" } zksync_protobuf_config = { version = "0.1.0", path = "core/lib/protobuf_config" } diff --git a/core/CHANGELOG.md b/core/CHANGELOG.md index 0ca0a3be025a..acdd2fefb1ab 100644 --- a/core/CHANGELOG.md +++ b/core/CHANGELOG.md @@ -1,5 +1,27 @@ # Changelog +## [25.3.0](https://github.com/matter-labs/zksync-era/compare/core-v25.2.0...core-v25.3.0) (2024-12-11) + + +### Features + +* change seal criteria for gateway ([#3320](https://github.com/matter-labs/zksync-era/issues/3320)) ([a0a74aa](https://github.com/matter-labs/zksync-era/commit/a0a74aaeb42f076d20c4ae8a32925eff2de11d0c)) +* **contract-verifier:** Download compilers from GH automatically 
([#3291](https://github.com/matter-labs/zksync-era/issues/3291)) ([a10c4ba](https://github.com/matter-labs/zksync-era/commit/a10c4baa312f26ebac2a10115fb7bd314d18b9c1)) +* integrate gateway changes for some components ([#3274](https://github.com/matter-labs/zksync-era/issues/3274)) ([cbc91e3](https://github.com/matter-labs/zksync-era/commit/cbc91e35f84d04f2e4c8e81028596db009e478d1)) +* **proof-data-handler:** exclude batches without object file in GCS ([#2980](https://github.com/matter-labs/zksync-era/issues/2980)) ([3e309e0](https://github.com/matter-labs/zksync-era/commit/3e309e06b24649c74bfe120e8ca45247cb2b5628)) +* **pruning:** Record L1 batch root hash in pruning logs ([#3266](https://github.com/matter-labs/zksync-era/issues/3266)) ([7b6e590](https://github.com/matter-labs/zksync-era/commit/7b6e59083cf0cafeaef5dd4b2dd39257ff91316d)) +* **state-keeper:** mempool io opens batch if there is protocol upgrade tx ([#3360](https://github.com/matter-labs/zksync-era/issues/3360)) ([f6422cd](https://github.com/matter-labs/zksync-era/commit/f6422cd59dab2c105bb7c125c172f2621fe39464)) +* **tee:** add error handling for unstable_getTeeProofs API endpoint ([#3321](https://github.com/matter-labs/zksync-era/issues/3321)) ([26f630c](https://github.com/matter-labs/zksync-era/commit/26f630cb75958c711d67d13bc77ddbb1117156c3)) +* **zksync_cli:** Health checkpoint improvements ([#3193](https://github.com/matter-labs/zksync-era/issues/3193)) ([440fe8d](https://github.com/matter-labs/zksync-era/commit/440fe8d8afdf0fc2768692a1b40b0910873e2faf)) + + +### Bug Fixes + +* **api:** batch fee input scaling for `debug_traceCall` ([#3344](https://github.com/matter-labs/zksync-era/issues/3344)) ([7ace594](https://github.com/matter-labs/zksync-era/commit/7ace594fb3140212bd94ffd6bffcac99805cf4b1)) +* **tee:** correct previous fix for race condition in batch locking ([#3358](https://github.com/matter-labs/zksync-era/issues/3358)) 
([b12da8d](https://github.com/matter-labs/zksync-era/commit/b12da8d1fddc7870bf17d5e08312d20773815269)) +* **tee:** fix race condition in batch locking ([#3342](https://github.com/matter-labs/zksync-era/issues/3342)) ([a7dc0ed](https://github.com/matter-labs/zksync-era/commit/a7dc0ed5007f6b2f789f4c61cb3d137843151860)) +* **tracer:** adds vm error to flatCallTracer error field if exists ([#3374](https://github.com/matter-labs/zksync-era/issues/3374)) ([5d77727](https://github.com/matter-labs/zksync-era/commit/5d77727cd3ba5f4d84643fee1873f03656310b4d)) + ## [25.2.0](https://github.com/matter-labs/zksync-era/compare/core-v25.1.0...core-v25.2.0) (2024-11-19) diff --git a/core/bin/custom_genesis_export/.gitignore b/core/bin/custom_genesis_export/.gitignore new file mode 100644 index 000000000000..a8a0dcec4472 --- /dev/null +++ b/core/bin/custom_genesis_export/.gitignore @@ -0,0 +1 @@ +*.bin diff --git a/core/bin/custom_genesis_export/Cargo.toml b/core/bin/custom_genesis_export/Cargo.toml new file mode 100644 index 000000000000..566f0a979297 --- /dev/null +++ b/core/bin/custom_genesis_export/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "custom_genesis_export" +version.workspace = true +edition.workspace = true +authors.workspace = true +homepage.workspace = true +repository.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true + +[dependencies] +clap = { workspace = true, features = ["derive"] } +futures.workspace = true +sqlx = { workspace = true, features = [ + "runtime-tokio", + "tls-native-tls", + "macros", + "postgres", +] } +tokio = { workspace = true, features = ["full"] } + +zksync_types.workspace = true +zksync_node_genesis.workspace = true +zksync_contracts.workspace = true +zksync_core_leftovers.workspace = true +zksync_protobuf_config.workspace = true +zksync_dal.workspace = true +anyhow.workspace = true +bincode.workspace = true diff --git a/core/bin/custom_genesis_export/README.md 
b/core/bin/custom_genesis_export/README.md new file mode 100644 index 000000000000..e95bcc814f2a --- /dev/null +++ b/core/bin/custom_genesis_export/README.md @@ -0,0 +1,60 @@ +# Custom Genesis Export + +The `custom_genesis_export` tool allows exporting a state from a zkSync PostgreSQL database in a format that can be +included as a custom genesis state for a new chain. + +This is particularly useful in data migration scenarios where a large existing state needs to be applied to a newly +created chain. + +A typical workflow could be: + +- Run a chain locally, not connected to the real L1, and add all required data to it. +- Export the data using the `custom_genesis_export` tool. +- Create a new chain connected to the real ecosystem using the exported data. + +## How it works + +The tool exports all entries from `storage_logs`, and `factory_deps`, except those related to the system context. The +data is then written to a binary file using the Rust standard library following a simple serialisation format. + +`custom_genesis_export` can be built using the following command: + +```shell +cargo build --release -p custom_genesis_export +``` + +And then executed using the following command, where: + +- `database-url` is the URL of the PostgreSQL database. +- `genesis-config-path` is the path to the `genesis.yaml` configuration file, used to set up a new chain (located in the + `file_based` directory). +- `output-path` is the path to the generated binary output file. + +```shell +custom_genesis_export --database-url=postgres://postgres:notsecurepassword@localhost:5432/zksync_server_localhost_validium --genesis-config-path=/Users/ischasny/Dev/zksync-era/etc/env/file_based/genesis.yaml --output-path=export.bin +``` + +> Please make sure that the database is not written into before running data export. 
+ +After the export is completed, the tool will make the following updates to the `genesis.yaml` file in-place: + +- Update `genesis_root_hash`, `rollup_last_leaf_index`, and `genesis_commitment` to match the contents of the export + file. +- Add a `custom_genesis_state_path` property pointing to the data export. + +The modified genesis file can be used to bootstrap an ecosystem or initialize new chains. The data export will be +automatically recognized by the server during the execution of `zkstack ecosystem init ...` and +`zkstack chain create ...` commands. + +### Running considerations + +- All chains within the same ecosystem must be bootstrapped from the same genesis state. This is enforced at the + protocol level. If two chains require different states, this can only be achieved by bringing the chain into the + ecosystem through governance voting. + - If a chain is added to the ecosystem via a vote, ensure no assets are minted on the old bridge, as this would create + discrepancies with the new one. One should set gas prices to zero when generating a state to account for that. +- To calculate genesis parameters, the tool must load all VM logs into RAM. This is due to implementation specifics. For + larger states, ensure the VM has sufficient RAM capacity. +- After the import, block numbers for all VM logs will be reset to zero - if the imported data has been indexed based on + block number, such indexes will break. +- External Nodes will have to be bootstrapped from data snapshot (i.e. genesis can't be generated locally). 
diff --git a/core/bin/custom_genesis_export/src/main.rs b/core/bin/custom_genesis_export/src/main.rs new file mode 100644 index 000000000000..9db715aad99f --- /dev/null +++ b/core/bin/custom_genesis_export/src/main.rs @@ -0,0 +1,141 @@ +extern crate core; + +use std::{fs, fs::File, io::BufWriter, path::PathBuf, str::FromStr}; + +use clap::Parser; +use zksync_contracts::BaseSystemContractsHashes; +use zksync_core_leftovers::temp_config_store::read_yaml_repr; +use zksync_dal::{custom_genesis_export_dal::GenesisState, ConnectionPool, Core, CoreDal}; +use zksync_node_genesis::{make_genesis_batch_params, utils::get_deduped_log_queries}; +use zksync_protobuf_config::encode_yaml_repr; +use zksync_types::{url::SensitiveUrl, StorageLog}; + +#[derive(Debug, Parser)] +#[command(name = "Custom genesis export tool", author = "Matter Labs")] +struct Args { + /// PostgreSQL connection string for the database to export. + #[arg(short, long)] + database_url: Option, + + /// Output file path. + #[arg(short, long, default_value = "genesis_export.bin")] + output_path: PathBuf, + + /// Path to the genesis.yaml + #[arg(short, long)] + genesis_config_path: PathBuf, +} + +/// The `custom_genesis_export` tool allows exporting storage logs and factory dependencies +/// from the ZKSync PostgreSQL database in a way that they can be used as a custom genesis state for a new chain. +/// +/// Inputs: +/// * `database_url` - URL to the PostgreSQL database. +/// * `output` - Path to the output file. +/// * `genesis_config_path` - Path to the `genesis.yaml` configuration file, which will be used to set up a new chain (located in the `file_based` directory). +/// +/// Given the inputs above, `custom_genesis_export` will perform the following: +/// * Read storage logs, and factory dependencies; filter out those related to the system context, +/// and save the remaining data to the output file. 
+/// * Calculate the new `genesis_root_hash`, `rollup_last_leaf_index`, and `genesis_commitment`, then update these +/// in-place in the provided `genesis.yaml`. Additionally, the tool will add a `custom_genesis_state_path` property +/// pointing to the genesis export. +/// +/// Note: To calculate the new genesis parameters, the current implementation requires loading all storage logs +/// into RAM. This is necessary due to the specific sorting and filtering that need to be applied. +/// For larger states, keep this in mind and ensure you have a machine with sufficient RAM. +#[tokio::main] +async fn main() -> anyhow::Result<()> { + let args = Args::parse(); + + let mut out = BufWriter::new(File::create(&args.output_path)?); + + println!( + "Export file: {}", + args.output_path.canonicalize()?.display(), + ); + + println!("Connecting to source database..."); + + let db_url = args.database_url.or_else(|| std::env::var("DATABASE_URL").ok()).expect("Specify the database connection string in either a CLI argument or in the DATABASE_URL environment variable."); + // we need only 1 DB connection at most for data export + let connection_pool_builder = + ConnectionPool::::builder(SensitiveUrl::from_str(db_url.as_str())?, 1); + let connection_pool = connection_pool_builder.build().await?; + + let mut storage = connection_pool.connection().await?; + let mut transaction = storage.start_transaction().await?; + + println!("Connected to source database."); + + let storage_logs = transaction + .custom_genesis_export_dal() + .get_storage_logs() + .await?; + let factory_deps = transaction + .custom_genesis_export_dal() + .get_factory_deps() + .await?; + + transaction.commit().await?; + + println!( + "Loaded {} storage logs {} factory deps from source database.", + storage_logs.len(), + factory_deps.len() + ); + + let storage_logs_for_genesis: Vec = + storage_logs.iter().map(StorageLog::from).collect(); + + bincode::serialize_into( + &mut out, + &GenesisState { + storage_logs, + 
factory_deps, + }, + )?; + + println!( + "Saved genesis state into the file {}.", + args.output_path.display() + ); + println!("Calculating new genesis parameters"); + + let mut genesis_config = read_yaml_repr::( + &args.genesis_config_path, + )?; + + let base_system_contract_hashes = BaseSystemContractsHashes { + bootloader: genesis_config + .bootloader_hash + .ok_or(anyhow::anyhow!("No bootloader_hash specified"))?, + default_aa: genesis_config + .default_aa_hash + .ok_or(anyhow::anyhow!("No default_aa_hash specified"))?, + evm_emulator: genesis_config.evm_emulator_hash, + }; + + let (genesis_batch_params, _) = make_genesis_batch_params( + get_deduped_log_queries(&storage_logs_for_genesis), + base_system_contract_hashes, + genesis_config + .protocol_version + .ok_or(anyhow::anyhow!("No protocol_version specified"))? + .minor, + ); + + genesis_config.genesis_root_hash = Some(genesis_batch_params.root_hash); + genesis_config.rollup_last_leaf_index = Some(genesis_batch_params.rollup_last_leaf_index); + genesis_config.genesis_commitment = Some(genesis_batch_params.commitment); + genesis_config.custom_genesis_state_path = + args.output_path.canonicalize()?.to_str().map(String::from); + + let bytes = + encode_yaml_repr::(&genesis_config)?; + fs::write(&args.genesis_config_path, &bytes)?; + + println!("Done."); + + Ok(()) +} diff --git a/core/bin/external_node/Cargo.toml b/core/bin/external_node/Cargo.toml index a69fdf263794..f56af827bc45 100644 --- a/core/bin/external_node/Cargo.toml +++ b/core/bin/external_node/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "zksync_external_node" description = "Non-validator ZKsync node" -version = "25.2.0" # x-release-please-version +version = "25.3.0" # x-release-please-version edition.workspace = true authors.workspace = true homepage.workspace = true @@ -29,7 +29,6 @@ zksync_health_check.workspace = true zksync_web3_decl.workspace = true zksync_types.workspace = true zksync_block_reverter.workspace = true 
-zksync_shared_metrics.workspace = true zksync_node_genesis.workspace = true zksync_node_fee_model.workspace = true zksync_node_db_pruner.workspace = true diff --git a/core/bin/external_node/src/config/mod.rs b/core/bin/external_node/src/config/mod.rs index b20cd13bea53..9eacb5208e80 100644 --- a/core/bin/external_node/src/config/mod.rs +++ b/core/bin/external_node/src/config/mod.rs @@ -102,11 +102,15 @@ impl ConfigurationSource for Environment { /// This part of the external node config is fetched directly from the main node. #[derive(Debug, Deserialize)] pub(crate) struct RemoteENConfig { - pub bridgehub_proxy_addr: Option
, - pub state_transition_proxy_addr: Option
, - pub transparent_proxy_admin_addr: Option
, - /// Should not be accessed directly. Use [`ExternalNodeConfig::diamond_proxy_address`] instead. - diamond_proxy_addr: Address, + #[serde(alias = "bridgehub_proxy_addr")] + pub l1_bridgehub_proxy_addr: Option
, + #[serde(alias = "state_transition_proxy_addr")] + pub l1_state_transition_proxy_addr: Option
, + #[serde(alias = "transparent_proxy_admin_addr")] + pub l1_transparent_proxy_admin_addr: Option
, + /// Should not be accessed directly. Use [`ExternalNodeConfig::l1_diamond_proxy_address`] instead. + #[serde(alias = "diamond_proxy_addr")] + l1_diamond_proxy_addr: Address, // While on L1 shared bridge and legacy bridge are different contracts with different addresses, // the `l2_erc20_bridge_addr` and `l2_shared_bridge_addr` are basically the same contract, but with // a different name, with names adapted only for consistency. @@ -144,7 +148,7 @@ impl RemoteENConfig { .rpc_context("ecosystem_contracts") .await .ok(); - let diamond_proxy_addr = client + let l1_diamond_proxy_addr = client .get_main_contract() .rpc_context("get_main_contract") .await?; @@ -180,14 +184,14 @@ impl RemoteENConfig { } Ok(Self { - bridgehub_proxy_addr: ecosystem_contracts.as_ref().map(|a| a.bridgehub_proxy_addr), - state_transition_proxy_addr: ecosystem_contracts + l1_bridgehub_proxy_addr: ecosystem_contracts.as_ref().map(|a| a.bridgehub_proxy_addr), + l1_state_transition_proxy_addr: ecosystem_contracts .as_ref() .map(|a| a.state_transition_proxy_addr), - transparent_proxy_admin_addr: ecosystem_contracts + l1_transparent_proxy_admin_addr: ecosystem_contracts .as_ref() .map(|a| a.transparent_proxy_admin_addr), - diamond_proxy_addr, + l1_diamond_proxy_addr, l2_testnet_paymaster_addr, l1_erc20_bridge_proxy_addr: bridges.l1_erc20_default_bridge, l2_erc20_bridge_addr: l2_erc20_default_bridge, @@ -212,10 +216,10 @@ impl RemoteENConfig { #[cfg(test)] fn mock() -> Self { Self { - bridgehub_proxy_addr: None, - state_transition_proxy_addr: None, - transparent_proxy_admin_addr: None, - diamond_proxy_addr: Address::repeat_byte(1), + l1_bridgehub_proxy_addr: None, + l1_state_transition_proxy_addr: None, + l1_transparent_proxy_admin_addr: None, + l1_diamond_proxy_addr: Address::repeat_byte(1), l1_erc20_bridge_proxy_addr: Some(Address::repeat_byte(2)), l2_erc20_bridge_addr: Some(Address::repeat_byte(3)), l2_weth_bridge_addr: None, @@ -480,7 +484,6 @@ pub(crate) struct OptionalENConfig { 
#[serde(default = "OptionalENConfig::default_pruning_data_retention_sec")] pruning_data_retention_sec: u64, /// Gateway RPC URL, needed for operating during migration. - #[allow(dead_code)] pub gateway_url: Option, /// Interval for bridge addresses refreshing in seconds. bridge_addresses_refresh_interval_sec: Option, @@ -490,7 +493,11 @@ pub(crate) struct OptionalENConfig { } impl OptionalENConfig { - fn from_configs(general_config: &GeneralConfig, enconfig: &ENConfig) -> anyhow::Result { + fn from_configs( + general_config: &GeneralConfig, + enconfig: &ENConfig, + secrets: &Secrets, + ) -> anyhow::Result { let api_namespaces = load_config!(general_config.api_config, web3_json_rpc.api_namespaces) .map(|a: Vec| a.iter().map(|a| a.parse()).collect::>()) .transpose()?; @@ -722,7 +729,10 @@ impl OptionalENConfig { .unwrap_or_else(Self::default_main_node_rate_limit_rps), api_namespaces, contracts_diamond_proxy_addr: None, - gateway_url: enconfig.gateway_url.clone(), + gateway_url: secrets + .l1 + .as_ref() + .and_then(|l1| l1.gateway_rpc_url.clone()), bridge_addresses_refresh_interval_sec: enconfig.bridge_addresses_refresh_interval_sec, timestamp_asserter_min_time_till_end_sec: general_config .timestamp_asserter_config @@ -1349,7 +1359,11 @@ impl ExternalNodeConfig<()> { &external_node_config, &secrets_config, )?; - let optional = OptionalENConfig::from_configs(&general_config, &external_node_config)?; + let optional = OptionalENConfig::from_configs( + &general_config, + &external_node_config, + &secrets_config, + )?; let postgres = PostgresConfig { database_url: secrets_config .database @@ -1392,7 +1406,7 @@ impl ExternalNodeConfig<()> { let remote = RemoteENConfig::fetch(main_node_client) .await .context("Unable to fetch required config values from the main node")?; - let remote_diamond_proxy_addr = remote.diamond_proxy_addr; + let remote_diamond_proxy_addr = remote.l1_diamond_proxy_addr; if let Some(local_diamond_proxy_addr) = 
self.optional.contracts_diamond_proxy_addr { anyhow::ensure!( local_diamond_proxy_addr == remote_diamond_proxy_addr, @@ -1439,14 +1453,14 @@ impl ExternalNodeConfig { } } - /// Returns a verified diamond proxy address. + /// Returns verified L1 diamond proxy address. /// If local configuration contains the address, it will be checked against the one returned by the main node. /// Otherwise, the remote value will be used. However, using remote value has trust implications for the main /// node so relying on it solely is not recommended. - pub fn diamond_proxy_address(&self) -> Address { + pub fn l1_diamond_proxy_address(&self) -> Address { self.optional .contracts_diamond_proxy_addr - .unwrap_or(self.remote.diamond_proxy_addr) + .unwrap_or(self.remote.l1_diamond_proxy_addr) } } @@ -1470,10 +1484,10 @@ impl From<&ExternalNodeConfig> for InternalApiConfig { l1_weth_bridge: config.remote.l1_weth_bridge_addr, l2_weth_bridge: config.remote.l2_weth_bridge_addr, }, - bridgehub_proxy_addr: config.remote.bridgehub_proxy_addr, - state_transition_proxy_addr: config.remote.state_transition_proxy_addr, - transparent_proxy_admin_addr: config.remote.transparent_proxy_admin_addr, - diamond_proxy_addr: config.remote.diamond_proxy_addr, + l1_bridgehub_proxy_addr: config.remote.l1_bridgehub_proxy_addr, + l1_state_transition_proxy_addr: config.remote.l1_state_transition_proxy_addr, + l1_transparent_proxy_admin_addr: config.remote.l1_transparent_proxy_admin_addr, + l1_diamond_proxy_addr: config.remote.l1_diamond_proxy_addr, l2_testnet_paymaster_addr: config.remote.l2_testnet_paymaster_addr, req_entities_limit: config.optional.req_entities_limit, fee_history_limit: config.optional.fee_history_limit, diff --git a/core/bin/external_node/src/metrics/framework.rs b/core/bin/external_node/src/metrics/framework.rs index fc9d4fe51345..81c9e57d9b9a 100644 --- a/core/bin/external_node/src/metrics/framework.rs +++ b/core/bin/external_node/src/metrics/framework.rs @@ -5,7 +5,6 @@ use 
zksync_node_framework::{ implementations::resources::pools::{MasterPool, PoolResource}, FromContext, IntoContext, StopReceiver, Task, TaskId, WiringError, WiringLayer, }; -use zksync_shared_metrics::rustc::RUST_METRICS; use zksync_types::{L1ChainId, L2ChainId, SLChainId}; use super::EN_METRICS; @@ -39,7 +38,6 @@ impl WiringLayer for ExternalNodeMetricsLayer { } async fn wire(self, input: Self::Input) -> Result { - RUST_METRICS.initialize(); EN_METRICS.observe_config( self.l1_chain_id, self.sl_chain_id, diff --git a/core/bin/external_node/src/node_builder.rs b/core/bin/external_node/src/node_builder.rs index c1646ca11c9f..a78b2c9bf138 100644 --- a/core/bin/external_node/src/node_builder.rs +++ b/core/bin/external_node/src/node_builder.rs @@ -34,7 +34,7 @@ use zksync_node_framework::{ NodeStorageInitializerLayer, }, pools_layer::PoolsLayerBuilder, - postgres_metrics::PostgresMetricsLayer, + postgres::PostgresLayer, prometheus_exporter::PrometheusExporterLayer, pruning::PruningLayer, query_eth_client::QueryEthClientLayer, @@ -58,7 +58,7 @@ use zksync_node_framework::{ service::{ZkStackService, ZkStackServiceBuilder}, }; use zksync_state::RocksdbStorageOptions; -use zksync_types::{L1ChainId, L2_NATIVE_TOKEN_VAULT_ADDRESS}; +use zksync_types::{L1ChainId, L2_ASSET_ROUTER_ADDRESS}; use crate::{config::ExternalNodeConfig, metrics::framework::ExternalNodeMetricsLayer, Component}; @@ -126,8 +126,8 @@ impl ExternalNodeBuilder { Ok(self) } - fn add_postgres_metrics_layer(mut self) -> anyhow::Result { - self.node.add_layer(PostgresMetricsLayer); + fn add_postgres_layer(mut self) -> anyhow::Result { + self.node.add_layer(PostgresLayer); Ok(self) } @@ -182,8 +182,7 @@ impl ExternalNodeBuilder { let query_eth_client_layer = QueryEthClientLayer::new( self.config.required.settlement_layer_id(), self.config.required.eth_client_url.clone(), - // TODO(EVM-676): add this config for external node - Default::default(), + self.config.optional.gateway_url.clone(), ); 
self.node.add_layer(query_eth_client_layer); Ok(self) @@ -201,12 +200,11 @@ impl ExternalNodeBuilder { .remote .l2_shared_bridge_addr .context("Missing `l2_shared_bridge_addr`")?; - let l2_legacy_shared_bridge_addr = if l2_shared_bridge_addr == L2_NATIVE_TOKEN_VAULT_ADDRESS - { - // System has migrated to `L2_NATIVE_TOKEN_VAULT_ADDRESS`, use legacy shared bridge address from main node. + let l2_legacy_shared_bridge_addr = if l2_shared_bridge_addr == L2_ASSET_ROUTER_ADDRESS { + // System has migrated to `L2_ASSET_ROUTER_ADDRESS`, use legacy shared bridge address from main node. self.config.remote.l2_legacy_shared_bridge_addr } else { - // System hasn't migrated on `L2_NATIVE_TOKEN_VAULT_ADDRESS`, we can safely use `l2_shared_bridge_addr`. + // System hasn't migrated on `L2_ASSET_ROUTER_ADDRESS`, we can safely use `l2_shared_bridge_addr`. Some(l2_shared_bridge_addr) }; @@ -279,7 +277,7 @@ impl ExternalNodeBuilder { fn add_l1_batch_commitment_mode_validation_layer(mut self) -> anyhow::Result { let layer = L1BatchCommitmentModeValidationLayer::new( - self.config.diamond_proxy_address(), + self.config.l1_diamond_proxy_address(), self.config.optional.l1_batch_commit_data_generator_mode, ); self.node.add_layer(layer); @@ -298,9 +296,10 @@ impl ExternalNodeBuilder { fn add_consistency_checker_layer(mut self) -> anyhow::Result { let max_batches_to_recheck = 10; // TODO (BFT-97): Make it a part of a proper EN config let layer = ConsistencyCheckerLayer::new( - self.config.diamond_proxy_address(), + self.config.l1_diamond_proxy_address(), max_batches_to_recheck, self.config.optional.l1_batch_commit_data_generator_mode, + self.config.required.l2_chain_id, ); self.node.add_layer(layer); Ok(self) @@ -325,7 +324,10 @@ impl ExternalNodeBuilder { } fn add_tree_data_fetcher_layer(mut self) -> anyhow::Result { - let layer = TreeDataFetcherLayer::new(self.config.diamond_proxy_address()); + let layer = TreeDataFetcherLayer::new( + self.config.l1_diamond_proxy_address(), + 
self.config.required.l2_chain_id, + ); self.node.add_layer(layer); Ok(self) } @@ -602,7 +604,7 @@ impl ExternalNodeBuilder { // so until we have a dedicated component for "auxiliary" tasks, // it's responsible for things like metrics. self = self - .add_postgres_metrics_layer()? + .add_postgres_layer()? .add_external_node_metrics_layer()?; // We assign the storage initialization to the core, as it's considered to be // the "main" component. diff --git a/core/bin/external_node/src/tests/mod.rs b/core/bin/external_node/src/tests/mod.rs index 59aceea819f1..fd616a467ee8 100644 --- a/core/bin/external_node/src/tests/mod.rs +++ b/core/bin/external_node/src/tests/mod.rs @@ -35,7 +35,7 @@ async fn external_node_basics(components_str: &'static str) { } let l2_client = utils::mock_l2_client(&env); - let eth_client = utils::mock_eth_client(env.config.diamond_proxy_address()); + let eth_client = utils::mock_eth_client(env.config.l1_diamond_proxy_address()); let node_handle = tokio::task::spawn_blocking(move || { std::thread::spawn(move || { @@ -104,7 +104,7 @@ async fn node_reacts_to_stop_signal_during_initial_reorg_detection() { let (env, env_handles) = utils::TestEnvironment::with_genesis_block("core").await; let l2_client = utils::mock_l2_client_hanging(); - let eth_client = utils::mock_eth_client(env.config.diamond_proxy_address()); + let eth_client = utils::mock_eth_client(env.config.l1_diamond_proxy_address()); let mut node_handle = tokio::task::spawn_blocking(move || { std::thread::spawn(move || { @@ -140,7 +140,7 @@ async fn running_tree_without_core_is_not_allowed() { let (env, _env_handles) = utils::TestEnvironment::with_genesis_block("tree").await; let l2_client = utils::mock_l2_client(&env); - let eth_client = utils::mock_eth_client(env.config.diamond_proxy_address()); + let eth_client = utils::mock_eth_client(env.config.l1_diamond_proxy_address()); let node_handle = tokio::task::spawn_blocking(move || { std::thread::spawn(move || { diff --git 
a/core/bin/external_node/src/tests/utils.rs b/core/bin/external_node/src/tests/utils.rs index b26fa80d1a95..58e2a88e5fb7 100644 --- a/core/bin/external_node/src/tests/utils.rs +++ b/core/bin/external_node/src/tests/utils.rs @@ -23,10 +23,13 @@ pub(super) fn block_details_base(hash: H256) -> api::BlockDetailsBase { status: api::BlockStatus::Sealed, commit_tx_hash: None, committed_at: None, + commit_chain_id: None, prove_tx_hash: None, proven_at: None, + prove_chain_id: None, execute_tx_hash: None, executed_at: None, + execute_chain_id: None, l1_gas_price: 0, l2_fair_gas_price: 0, fair_pubdata_price: None, diff --git a/core/bin/zksync_server/src/node_builder.rs b/core/bin/zksync_server/src/node_builder.rs index 1ad518d2fec8..8147ab8e0633 100644 --- a/core/bin/zksync_server/src/node_builder.rs +++ b/core/bin/zksync_server/src/node_builder.rs @@ -49,7 +49,7 @@ use zksync_node_framework::{ object_store::ObjectStoreLayer, pk_signing_eth_client::PKSigningEthClientLayer, pools_layer::PoolsLayerBuilder, - postgres_metrics::PostgresMetricsLayer, + postgres::PostgresLayer, prometheus_exporter::PrometheusExporterLayer, proof_data_handler::ProofDataHandlerLayer, query_eth_client::QueryEthClientLayer, @@ -140,8 +140,8 @@ impl MainNodeBuilder { Ok(self) } - fn add_postgres_metrics_layer(mut self) -> anyhow::Result { - self.node.add_layer(PostgresMetricsLayer); + fn add_postgres_layer(mut self) -> anyhow::Result { + self.node.add_layer(PostgresLayer); Ok(self) } @@ -163,11 +163,7 @@ impl MainNodeBuilder { let query_eth_client_layer = QueryEthClientLayer::new( genesis.settlement_layer_id(), eth_config.l1_rpc_url, - self.configs - .eth - .as_ref() - .and_then(|x| Some(x.gas_adjuster?.settlement_mode)) - .unwrap_or(SettlementMode::SettlesToL1), + eth_config.gateway_rpc_url, ); self.node.add_layer(query_eth_client_layer); Ok(self) @@ -299,6 +295,7 @@ impl MainNodeBuilder { self.node.add_layer(EthWatchLayer::new( try_load_config!(eth_config.watcher), self.contracts_config.clone(), + 
self.genesis_config.l2_chain_id, )); Ok(self) } @@ -800,9 +797,7 @@ impl MainNodeBuilder { self = self.add_eth_tx_manager_layer()?; } Component::Housekeeper => { - self = self - .add_house_keeper_layer()? - .add_postgres_metrics_layer()?; + self = self.add_house_keeper_layer()?.add_postgres_layer()?; } Component::ProofDataHandler => { self = self.add_proof_data_handler_layer()?; diff --git a/core/lib/basic_types/src/lib.rs b/core/lib/basic_types/src/lib.rs index d79bc57cc5e1..8b8e24af4339 100644 --- a/core/lib/basic_types/src/lib.rs +++ b/core/lib/basic_types/src/lib.rs @@ -149,7 +149,7 @@ impl<'de> Deserialize<'de> for L2ChainId { } impl L2ChainId { - fn new(number: u64) -> Result { + pub fn new(number: u64) -> Result { if number > L2ChainId::max().0 { return Err(format!( "Cannot convert given value {} into L2ChainId. It's greater than MAX: {}", diff --git a/core/lib/basic_types/src/web3/mod.rs b/core/lib/basic_types/src/web3/mod.rs index e6d3cab37273..98625831c991 100644 --- a/core/lib/basic_types/src/web3/mod.rs +++ b/core/lib/basic_types/src/web3/mod.rs @@ -198,7 +198,7 @@ pub struct Filter { } #[derive(Default, Debug, PartialEq, Clone)] -pub struct ValueOrArray(Vec); +pub struct ValueOrArray(pub Vec); impl ValueOrArray { pub fn flatten(self) -> Vec { @@ -206,6 +206,12 @@ impl ValueOrArray { } } +impl From for ValueOrArray { + fn from(value: T) -> Self { + Self(vec![value]) + } +} + impl Serialize for ValueOrArray where T: Serialize, diff --git a/core/lib/config/src/configs/api.rs b/core/lib/config/src/configs/api.rs index ce0d96129584..1321f25e7604 100644 --- a/core/lib/config/src/configs/api.rs +++ b/core/lib/config/src/configs/api.rs @@ -243,7 +243,7 @@ impl Web3JsonRpcConfig { pubsub_polling_interval: Some(200), max_nonce_ahead: 50, gas_price_scale_factor: 1.2, - estimate_gas_scale_factor: 1.2, + estimate_gas_scale_factor: 1.5, estimate_gas_acceptable_overestimation: 1000, estimate_gas_optimize_search: false, max_tx_size: 1000000, diff --git 
a/core/lib/config/src/configs/consensus.rs b/core/lib/config/src/configs/consensus.rs index 7f5a0f56aa17..17359f188a70 100644 --- a/core/lib/config/src/configs/consensus.rs +++ b/core/lib/config/src/configs/consensus.rs @@ -126,6 +126,9 @@ pub struct ConsensusConfig { /// Maximal allowed size of the payload in bytes. pub max_payload_size: usize, + /// View timeout duration in milliseconds. + pub view_timeout: Option, + /// Maximal allowed size of the sync-batch payloads in bytes. /// /// The batch consists of block payloads and a Merkle proof of inclusion on L1 (~1kB), @@ -155,6 +158,10 @@ pub struct ConsensusConfig { } impl ConsensusConfig { + pub fn view_timeout(&self) -> time::Duration { + self.view_timeout.unwrap_or(time::Duration::seconds(2)) + } + pub fn rpc(&self) -> RpcConfig { self.rpc.clone().unwrap_or_default() } diff --git a/core/lib/config/src/configs/en_config.rs b/core/lib/config/src/configs/en_config.rs index 4cab47b0779e..13a0e1f2c99d 100644 --- a/core/lib/config/src/configs/en_config.rs +++ b/core/lib/config/src/configs/en_config.rs @@ -18,6 +18,5 @@ pub struct ENConfig { pub main_node_url: SensitiveUrl, pub main_node_rate_limit_rps: Option, - pub gateway_url: Option, pub bridge_addresses_refresh_interval_sec: Option, } diff --git a/core/lib/config/src/configs/eth_sender.rs b/core/lib/config/src/configs/eth_sender.rs index 7b67f015238d..be2f5b532a3e 100644 --- a/core/lib/config/src/configs/eth_sender.rs +++ b/core/lib/config/src/configs/eth_sender.rs @@ -23,7 +23,6 @@ impl EthConfig { pub fn for_tests() -> Self { Self { sender: Some(SenderConfig { - aggregated_proof_sizes: vec![1], wait_confirmations: Some(10), tx_poll_period: 1, aggregate_tx_poll_period: 1, @@ -82,7 +81,6 @@ pub enum ProofLoadingMode { #[derive(Debug, Deserialize, Clone, PartialEq)] pub struct SenderConfig { - pub aggregated_proof_sizes: Vec, /// Amount of confirmations required to consider L1 transaction committed. 
/// If not specified L1 transaction will be considered finalized once its block is finalized. pub wait_confirmations: Option, diff --git a/core/lib/config/src/configs/genesis.rs b/core/lib/config/src/configs/genesis.rs index 9e1ffbd87cb5..5d5e39309bb8 100644 --- a/core/lib/config/src/configs/genesis.rs +++ b/core/lib/config/src/configs/genesis.rs @@ -32,6 +32,7 @@ pub struct GenesisConfig { pub fee_account: Address, pub dummy_verifier: bool, pub l1_batch_commit_data_generator_mode: L1BatchCommitmentMode, + pub custom_genesis_state_path: Option, } impl GenesisConfig { @@ -60,6 +61,7 @@ impl GenesisConfig { l2_chain_id: L2ChainId::default(), dummy_verifier: false, l1_batch_commit_data_generator_mode: L1BatchCommitmentMode::Rollup, + custom_genesis_state_path: None, } } } diff --git a/core/lib/config/src/configs/secrets.rs b/core/lib/config/src/configs/secrets.rs index 75ff067c2473..8285d81e4bd2 100644 --- a/core/lib/config/src/configs/secrets.rs +++ b/core/lib/config/src/configs/secrets.rs @@ -16,6 +16,7 @@ pub struct DatabaseSecrets { #[derive(Debug, Clone, PartialEq)] pub struct L1Secrets { pub l1_rpc_url: SensitiveUrl, + pub gateway_rpc_url: Option, } #[derive(Debug, Clone, PartialEq)] diff --git a/core/lib/config/src/testonly.rs b/core/lib/config/src/testonly.rs index 7900d1bcc010..e783de1e09f8 100644 --- a/core/lib/config/src/testonly.rs +++ b/core/lib/config/src/testonly.rs @@ -399,7 +399,6 @@ impl Distribution for EncodeDist { impl Distribution for EncodeDist { fn sample(&self, rng: &mut R) -> configs::eth_sender::SenderConfig { configs::eth_sender::SenderConfig { - aggregated_proof_sizes: self.sample_collect(rng), wait_confirmations: self.sample(rng), tx_poll_period: self.sample(rng), aggregate_tx_poll_period: self.sample(rng), @@ -747,6 +746,7 @@ impl Distribution for EncodeDist { 0 => L1BatchCommitmentMode::Rollup, _ => L1BatchCommitmentMode::Validium, }, + custom_genesis_state_path: None, } } } @@ -809,6 +809,7 @@ impl Distribution for EncodeDist { 
server_addr: self.sample(rng), public_addr: Host(self.sample(rng)), max_payload_size: self.sample(rng), + view_timeout: self.sample(rng), max_batch_size: self.sample(rng), gossip_dynamic_inbound_limit: self.sample(rng), gossip_static_inbound: self @@ -852,6 +853,7 @@ impl Distribution for EncodeDist { use configs::secrets::L1Secrets; L1Secrets { l1_rpc_url: format!("localhost:{}", rng.gen::()).parse().unwrap(), + gateway_rpc_url: Some(format!("localhost:{}", rng.gen::()).parse().unwrap()), } } } @@ -938,8 +940,6 @@ impl Distribution for EncodeDist { _ => L1BatchCommitmentMode::Validium, }, main_node_rate_limit_rps: self.sample_opt(|| rng.gen()), - gateway_url: self - .sample_opt(|| format!("localhost:{}", rng.gen::()).parse().unwrap()), bridge_addresses_refresh_interval_sec: self.sample_opt(|| rng.gen()), } } diff --git a/core/lib/constants/src/contracts.rs b/core/lib/constants/src/contracts.rs index 4f0f362d9149..6e402c117bfe 100644 --- a/core/lib/constants/src/contracts.rs +++ b/core/lib/constants/src/contracts.rs @@ -135,6 +135,8 @@ pub const EVM_GAS_MANAGER_ADDRESS: Address = H160([ 0x00, 0x00, 0x80, 0x13, ]); +/// Note, that the `Create2Factory` and higher are explicitly deployed on a non-system-contract address +/// as they don't require any kernel space features. pub const CREATE2_FACTORY_ADDRESS: Address = H160([ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, diff --git a/core/lib/constants/src/message_root.rs b/core/lib/constants/src/message_root.rs index a8f4a034fb99..9bb8764cd667 100644 --- a/core/lib/constants/src/message_root.rs +++ b/core/lib/constants/src/message_root.rs @@ -1,5 +1,14 @@ -// Position of `FullTree::_height` in `MessageRoot`'s storage layout. +/// Position of `chainCount` in `MessageRoot`'s storage layout. +pub const CHAIN_COUNT_KEY: usize = 0; + +/// Position of `chainIndexToId` in `MessageRoot`'s storage layout. 
+pub const CHAIN_INDEX_TO_ID_KEY: usize = 2; + +/// Position of `FullTree::_height` in `MessageRoot`'s storage layout. pub const AGG_TREE_HEIGHT_KEY: usize = 3; -// Position of `FullTree::nodes` in `MessageRoot`'s storage layout. +/// Position of `FullTree::nodes` in `MessageRoot`'s storage layout. pub const AGG_TREE_NODES_KEY: usize = 5; + +/// Position of `chainTree` in `MessageRoot`'s storage layout. +pub const CHAIN_TREE_KEY: usize = 7; diff --git a/core/lib/contract_verifier/src/resolver/github/gh_api.rs b/core/lib/contract_verifier/src/resolver/github/gh_api.rs index 8c9ac6723249..e19bae72b477 100644 --- a/core/lib/contract_verifier/src/resolver/github/gh_api.rs +++ b/core/lib/contract_verifier/src/resolver/github/gh_api.rs @@ -86,6 +86,7 @@ impl CompilerGitHubRelease { match self { Self::Solc | Self::Vyper => { // Solidity and Vyper releases are tagged with version numbers in form of `vX.Y.Z`. + // Our API does not require the `v` prefix for solc/vyper, so we strip it. tag_name .strip_prefix('v') .filter(|v| semver::Version::parse(v).is_ok()) @@ -94,6 +95,7 @@ impl CompilerGitHubRelease { Self::ZkVmSolc => { // ZkVmSolc releases are tagged with version numbers in form of `X.Y.Z-A.B.C`, where // `X.Y.Z` is the version of the Solidity compiler, and `A.B.C` is the version of the ZkSync fork. + // `v` prefix is not required. if let Some((main, fork)) = tag_name.split_once('-') { if semver::Version::parse(main).is_ok() && semver::Version::parse(fork).is_ok() { @@ -105,8 +107,9 @@ impl CompilerGitHubRelease { } Self::ZkSolc | Self::ZkVyper => { // zksolc and zkvyper releases are tagged with version numbers in form of `X.Y.Z` (without 'v'). + // Our API expects versions to be prefixed with `v` for zksolc/zkvyper, so we add it. 
if semver::Version::parse(tag_name).is_ok() { - Some(tag_name.to_string()) + Some(format!("v{tag_name}")) } else { None } diff --git a/core/lib/contracts/src/lib.rs b/core/lib/contracts/src/lib.rs index 74efa72793aa..e60fdb0e59f3 100644 --- a/core/lib/contracts/src/lib.rs +++ b/core/lib/contracts/src/lib.rs @@ -256,7 +256,9 @@ impl SystemContractsRepo { "artifacts-zk/contracts-preprocessed/{0}{1}.sol/{1}.json", directory, name ))) - .expect("One of the outputs should exists") + .unwrap_or_else(|| { + panic!("One of the outputs should exists for {directory}{name}"); + }) } } ContractLanguage::Yul => { @@ -975,3 +977,461 @@ pub static DIAMOND_CUT: Lazy = Lazy::new(|| { }"#; serde_json::from_str(abi).unwrap() }); + +pub static POST_BOOJUM_COMMIT_FUNCTION: Lazy = Lazy::new(|| { + let abi = r#" + { + "inputs": [ + { + "components": [ + { + "internalType": "uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "batchHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBatchInfo", + "name": "_lastCommittedBatchData", + "type": "tuple" + }, + { + "components": [ + { + "internalType": "uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "uint64", + "name": "timestamp", + "type": "uint64" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "newStateRoot", + "type": 
"bytes32" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "bootloaderHeapInitialContentsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "eventsQueueStateHash", + "type": "bytes32" + }, + { + "internalType": "bytes", + "name": "systemLogs", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "pubdataCommitments", + "type": "bytes" + } + ], + "internalType": "struct IExecutor.CommitBatchInfo[]", + "name": "_newBatchesData", + "type": "tuple[]" + } + ], + "name": "commitBatches", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }"#; + serde_json::from_str(abi).unwrap() +}); + +pub static POST_SHARED_BRIDGE_COMMIT_FUNCTION: Lazy = Lazy::new(|| { + let abi = r#" + { + "inputs": [ + { + "internalType": "uint256", + "name": "_chainId", + "type": "uint256" + }, + { + "components": [ + { + "internalType": "uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "batchHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBatchInfo", + "name": "_lastCommittedBatchData", + "type": "tuple" + }, + { + "components": [ + { + "internalType": "uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "uint64", + "name": 
"timestamp", + "type": "uint64" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "newStateRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "bootloaderHeapInitialContentsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "eventsQueueStateHash", + "type": "bytes32" + }, + { + "internalType": "bytes", + "name": "systemLogs", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "pubdataCommitments", + "type": "bytes" + } + ], + "internalType": "struct IExecutor.CommitBatchInfo[]", + "name": "_newBatchesData", + "type": "tuple[]" + } + ], + "name": "commitBatchesSharedBridge", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }"#; + serde_json::from_str(abi).unwrap() +}); + +pub static POST_SHARED_BRIDGE_PROVE_FUNCTION: Lazy = Lazy::new(|| { + let abi = r#" + { + "inputs": [ + { + "internalType": "uint256", + "name": "_chainId", + "type": "uint256" + }, + { + "components": [ + { + "internalType": "uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "batchHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBatchInfo", + 
"name": "_prevBatch", + "type": "tuple" + }, + { + "components": [ + { + "internalType": "uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "batchHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBatchInfo[]", + "name": "_committedBatches", + "type": "tuple[]" + }, + { + "components": [ + { + "internalType": "uint256[]", + "name": "recursiveAggregationInput", + "type": "uint256[]" + }, + { + "internalType": "uint256[]", + "name": "serializedProof", + "type": "uint256[]" + } + ], + "internalType": "struct IExecutor.ProofInput", + "name": "_proof", + "type": "tuple" + } + ], + "name": "proveBatchesSharedBridge", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }"#; + serde_json::from_str(abi).unwrap() +}); + +pub static POST_SHARED_BRIDGE_EXECUTE_FUNCTION: Lazy = Lazy::new(|| { + let abi = r#" + { + "inputs": [ + { + "internalType": "uint256", + "name": "_chainId", + "type": "uint256" + }, + { + "components": [ + { + "internalType": "uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "batchHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, 
+ { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBatchInfo[]", + "name": "_batchesData", + "type": "tuple[]" + } + ], + "name": "executeBatchesSharedBridge", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }"#; + serde_json::from_str(abi).unwrap() +}); + +// Temporary thing, should be removed when new contracts are merged. +pub static MESSAGE_ROOT_CONTRACT: Lazy = Lazy::new(|| { + let abi = r#" + [{ + "inputs": [ + { + "internalType": "uint256", + "name": "_chainId", + "type": "uint256" + } + ], + "name": "getChainRoot", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }]"#; + serde_json::from_str(abi).unwrap() +}); diff --git a/core/lib/dal/.sqlx/query-10b8981f7aa47ce5d3507571af45f7cef0d50c4938105684971e8adc86bb6366.json b/core/lib/dal/.sqlx/query-10b8981f7aa47ce5d3507571af45f7cef0d50c4938105684971e8adc86bb6366.json deleted file mode 100644 index d59f237ed3c3..000000000000 --- a/core/lib/dal/.sqlx/query-10b8981f7aa47ce5d3507571af45f7cef0d50c4938105684971e8adc86bb6366.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT COALESCE(SUM(predicted_execute_gas_cost), 0) AS \"sum!\" FROM l1_batches WHERE number BETWEEN $1 AND $2", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "sum!", - "type_info": "Numeric" - } - ], - "parameters": { - "Left": [ - "Int8", - "Int8" - ] - }, - "nullable": [ - null - ] - }, - "hash": "10b8981f7aa47ce5d3507571af45f7cef0d50c4938105684971e8adc86bb6366" -} diff --git a/core/lib/dal/.sqlx/query-2234d7728d91cefaee792c900448aafe4b1aa2250fc535bfcdff39172551d42b.json 
b/core/lib/dal/.sqlx/query-2234d7728d91cefaee792c900448aafe4b1aa2250fc535bfcdff39172551d42b.json index 48fa673b2c91..df60f114f5ef 100644 --- a/core/lib/dal/.sqlx/query-2234d7728d91cefaee792c900448aafe4b1aa2250fc535bfcdff39172551d42b.json +++ b/core/lib/dal/.sqlx/query-2234d7728d91cefaee792c900448aafe4b1aa2250fc535bfcdff39172551d42b.json @@ -103,7 +103,7 @@ false, true, true, - false, + true, true, true, false, diff --git a/core/lib/dal/.sqlx/query-228aa5ec4c4eb56143823b96a8190ded732839b9f5bf16042205a730fac07c3a.json b/core/lib/dal/.sqlx/query-228aa5ec4c4eb56143823b96a8190ded732839b9f5bf16042205a730fac07c3a.json index b8d6482ea744..32a2212dfdf6 100644 --- a/core/lib/dal/.sqlx/query-228aa5ec4c4eb56143823b96a8190ded732839b9f5bf16042205a730fac07c3a.json +++ b/core/lib/dal/.sqlx/query-228aa5ec4c4eb56143823b96a8190ded732839b9f5bf16042205a730fac07c3a.json @@ -11,7 +11,8 @@ "kind": { "Enum": [ "ProtocolUpgrades", - "PriorityTransactions" + "PriorityTransactions", + "ChainBatchRoot" ] } } diff --git a/core/lib/dal/.sqlx/query-2d0a4e9281e53b0e410b9be0ebd53b2126b52d568196f333973a345f984ea7c4.json b/core/lib/dal/.sqlx/query-2d0a4e9281e53b0e410b9be0ebd53b2126b52d568196f333973a345f984ea7c4.json new file mode 100644 index 000000000000..adbd2c0931ec --- /dev/null +++ b/core/lib/dal/.sqlx/query-2d0a4e9281e53b0e410b9be0ebd53b2126b52d568196f333973a345f984ea7c4.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n batch_chain_merkle_path\n FROM\n l1_batches\n WHERE\n number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "batch_chain_merkle_path", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + true + ] + }, + "hash": "2d0a4e9281e53b0e410b9be0ebd53b2126b52d568196f333973a345f984ea7c4" +} diff --git a/core/lib/dal/.sqlx/query-2e3107b0c5e8466598066ceca9844469e431e35c4419fd710050d51eeefd6b8b.json b/core/lib/dal/.sqlx/query-2e3107b0c5e8466598066ceca9844469e431e35c4419fd710050d51eeefd6b8b.json new 
file mode 100644 index 000000000000..69dd87a6c355 --- /dev/null +++ b/core/lib/dal/.sqlx/query-2e3107b0c5e8466598066ceca9844469e431e35c4419fd710050d51eeefd6b8b.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n local_root\n FROM\n l1_batches\n WHERE\n number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "local_root", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + true + ] + }, + "hash": "2e3107b0c5e8466598066ceca9844469e431e35c4419fd710050d51eeefd6b8b" +} diff --git a/core/lib/dal/.sqlx/query-3566423188a5d6bed7150f327d83613cd34138b59ef3b9271fd0bfdaddd086f8.json b/core/lib/dal/.sqlx/query-3566423188a5d6bed7150f327d83613cd34138b59ef3b9271fd0bfdaddd086f8.json new file mode 100644 index 000000000000..123afc6060a6 --- /dev/null +++ b/core/lib/dal/.sqlx/query-3566423188a5d6bed7150f327d83613cd34138b59ef3b9271fd0bfdaddd086f8.json @@ -0,0 +1,50 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT *\n FROM _sqlx_migrations\n ORDER BY _sqlx_migrations.version DESC\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "version", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "description", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "installed_on", + "type_info": "Timestamptz" + }, + { + "ordinal": 3, + "name": "success", + "type_info": "Bool" + }, + { + "ordinal": 4, + "name": "checksum", + "type_info": "Bytea" + }, + { + "ordinal": 5, + "name": "execution_time", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + false, + false, + false, + false + ] + }, + "hash": "3566423188a5d6bed7150f327d83613cd34138b59ef3b9271fd0bfdaddd086f8" +} diff --git a/core/lib/dal/.sqlx/query-3785c01a8eb1eaeaf7baf0a8ba7f30cbc385e6fb6c8b8ae2c2d06b871a4cae72.json b/core/lib/dal/.sqlx/query-3785c01a8eb1eaeaf7baf0a8ba7f30cbc385e6fb6c8b8ae2c2d06b871a4cae72.json new file mode 100644 index 
000000000000..dfaccfbc99d3 --- /dev/null +++ b/core/lib/dal/.sqlx/query-3785c01a8eb1eaeaf7baf0a8ba7f30cbc385e6fb6c8b8ae2c2d06b871a4cae72.json @@ -0,0 +1,27 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n pruning_log (\n pruned_l1_batch,\n pruned_miniblock,\n pruned_l1_batch_root_hash,\n type,\n created_at,\n updated_at\n )\n VALUES\n ($1, $2, $3, $4, NOW(), NOW())\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int8", + "Bytea", + { + "Custom": { + "name": "prune_type", + "kind": { + "Enum": [ + "Soft", + "Hard" + ] + } + } + } + ] + }, + "nullable": [] + }, + "hash": "3785c01a8eb1eaeaf7baf0a8ba7f30cbc385e6fb6c8b8ae2c2d06b871a4cae72" +} diff --git a/core/lib/dal/.sqlx/query-3ee6c2a87c65eaece7048da53c9f98ded0ad3e59e6de69c2b13d92d8ab1a07dd.json b/core/lib/dal/.sqlx/query-3ee6c2a87c65eaece7048da53c9f98ded0ad3e59e6de69c2b13d92d8ab1a07dd.json index e2a808d41f89..8bab74d20f5c 100644 --- a/core/lib/dal/.sqlx/query-3ee6c2a87c65eaece7048da53c9f98ded0ad3e59e6de69c2b13d92d8ab1a07dd.json +++ b/core/lib/dal/.sqlx/query-3ee6c2a87c65eaece7048da53c9f98ded0ad3e59e6de69c2b13d92d8ab1a07dd.json @@ -17,7 +17,8 @@ "kind": { "Enum": [ "ProtocolUpgrades", - "PriorityTransactions" + "PriorityTransactions", + "ChainBatchRoot" ] } } diff --git a/core/lib/dal/.sqlx/query-400457c97449097837d5e31d4476c32170ea4b47051a100f71a41dd6ed9bb1a9.json b/core/lib/dal/.sqlx/query-400457c97449097837d5e31d4476c32170ea4b47051a100f71a41dd6ed9bb1a9.json index 3e64cc5f5b2f..c6c189d45fcb 100644 --- a/core/lib/dal/.sqlx/query-400457c97449097837d5e31d4476c32170ea4b47051a100f71a41dd6ed9bb1a9.json +++ b/core/lib/dal/.sqlx/query-400457c97449097837d5e31d4476c32170ea4b47051a100f71a41dd6ed9bb1a9.json @@ -108,7 +108,7 @@ false, true, true, - false, + true, true, true, false, diff --git a/core/lib/dal/.sqlx/query-42f15afb71632bdfab7befb651eaa3061382dfe4142c7fc46df1dfebec34ec92.json b/core/lib/dal/.sqlx/query-42f15afb71632bdfab7befb651eaa3061382dfe4142c7fc46df1dfebec34ec92.json 
new file mode 100644 index 000000000000..aeff27a88300 --- /dev/null +++ b/core/lib/dal/.sqlx/query-42f15afb71632bdfab7befb651eaa3061382dfe4142c7fc46df1dfebec34ec92.json @@ -0,0 +1,32 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH latest_storage_logs AS (\n SELECT DISTINCT ON (hashed_key)\n hashed_key,\n address,\n key,\n value\n FROM storage_logs\n ORDER BY hashed_key, miniblock_number DESC, operation_number DESC\n )\n \n SELECT\n lsl.address,\n lsl.key,\n lsl.value\n FROM\n initial_writes iw\n JOIN\n latest_storage_logs lsl ON iw.hashed_key = lsl.hashed_key\n WHERE\n lsl.value\n <> '\\x0000000000000000000000000000000000000000000000000000000000000000'::bytea\n AND (\n lsl.address <> '\\x000000000000000000000000000000000000800b'::bytea OR\n lsl.key IN (\n '\\x0000000000000000000000000000000000000000000000000000000000000000'::bytea,\n '\\x0000000000000000000000000000000000000000000000000000000000000003'::bytea,\n '\\x0000000000000000000000000000000000000000000000000000000000000004'::bytea,\n '\\x0000000000000000000000000000000000000000000000000000000000000005'::bytea\n )\n );\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "address", + "type_info": "Bytea" + }, + { + "ordinal": 1, + "name": "key", + "type_info": "Bytea" + }, + { + "ordinal": 2, + "name": "value", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + true, + true, + false + ] + }, + "hash": "42f15afb71632bdfab7befb651eaa3061382dfe4142c7fc46df1dfebec34ec92" +} diff --git a/core/lib/dal/.sqlx/query-4777de5d3f313f1eb8c3b6a4c1782b0fa233913582fe9091cc1e8954dfd0eb1b.json b/core/lib/dal/.sqlx/query-4777de5d3f313f1eb8c3b6a4c1782b0fa233913582fe9091cc1e8954dfd0eb1b.json new file mode 100644 index 000000000000..37adf92582e7 --- /dev/null +++ b/core/lib/dal/.sqlx/query-4777de5d3f313f1eb8c3b6a4c1782b0fa233913582fe9091cc1e8954dfd0eb1b.json @@ -0,0 +1,30 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n tee_proof_generation_details (\n 
l1_batch_number, tee_type, status, created_at, updated_at, prover_taken_at\n )\n VALUES\n (\n $1,\n $2,\n $3,\n NOW(),\n NOW(),\n NOW()\n )\n ON CONFLICT (l1_batch_number, tee_type) DO\n UPDATE\n SET\n status = $3,\n updated_at = NOW(),\n prover_taken_at = NOW()\n RETURNING\n l1_batch_number,\n created_at\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "created_at", + "type_info": "Timestamp" + } + ], + "parameters": { + "Left": [ + "Int8", + "Text", + "Text" + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "4777de5d3f313f1eb8c3b6a4c1782b0fa233913582fe9091cc1e8954dfd0eb1b" +} diff --git a/core/lib/dal/.sqlx/query-52f1ab74661845b61a371d01c5d5f86051e80928c5530cacc02530a93812be8c.json b/core/lib/dal/.sqlx/query-52f1ab74661845b61a371d01c5d5f86051e80928c5530cacc02530a93812be8c.json deleted file mode 100644 index 19c03a62c02e..000000000000 --- a/core/lib/dal/.sqlx/query-52f1ab74661845b61a371d01c5d5f86051e80928c5530cacc02530a93812be8c.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "INSERT INTO eth_txs (raw_tx, nonce, tx_type, contract_address, predicted_gas_cost, created_at, updated_at) VALUES ('\\x00', 0, $1, $2, 0, now(), now()) RETURNING id", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - } - ], - "parameters": { - "Left": [ - "Text", - "Text" - ] - }, - "nullable": [ - false - ] - }, - "hash": "52f1ab74661845b61a371d01c5d5f86051e80928c5530cacc02530a93812be8c" -} diff --git a/core/lib/dal/.sqlx/query-6069d168d5c4b5131b50500302cdde79388b62926ff83d954b4d93dedfe2503a.json b/core/lib/dal/.sqlx/query-6069d168d5c4b5131b50500302cdde79388b62926ff83d954b4d93dedfe2503a.json deleted file mode 100644 index 98d228726d48..000000000000 --- a/core/lib/dal/.sqlx/query-6069d168d5c4b5131b50500302cdde79388b62926ff83d954b4d93dedfe2503a.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": 
"\n UPDATE l1_batches\n SET\n l1_tx_count = $2,\n l2_tx_count = $3,\n l2_to_l1_messages = $4,\n bloom = $5,\n priority_ops_onchain_data = $6,\n predicted_commit_gas_cost = $7,\n predicted_prove_gas_cost = $8,\n predicted_execute_gas_cost = $9,\n initial_bootloader_heap_content = $10,\n used_contract_hashes = $11,\n bootloader_code_hash = $12,\n default_aa_code_hash = $13,\n evm_emulator_code_hash = $14,\n protocol_version = $15,\n system_logs = $16,\n storage_refunds = $17,\n pubdata_costs = $18,\n pubdata_input = $19,\n predicted_circuits_by_type = $20,\n updated_at = NOW(),\n sealed_at = NOW(),\n is_sealed = TRUE\n WHERE\n number = $1\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Int4", - "Int4", - "ByteaArray", - "Bytea", - "ByteaArray", - "Int8", - "Int8", - "Int8", - "Jsonb", - "Jsonb", - "Bytea", - "Bytea", - "Bytea", - "Int4", - "ByteaArray", - "Int8Array", - "Int8Array", - "Bytea", - "Jsonb" - ] - }, - "nullable": [] - }, - "hash": "6069d168d5c4b5131b50500302cdde79388b62926ff83d954b4d93dedfe2503a" -} diff --git a/core/lib/dal/.sqlx/query-63f5f9bff4b2c15fa4230af2c73b5b5cc7e37dd6a607e9453e822e34ba77cdc3.json b/core/lib/dal/.sqlx/query-63f5f9bff4b2c15fa4230af2c73b5b5cc7e37dd6a607e9453e822e34ba77cdc3.json deleted file mode 100644 index ab2283fa60c3..000000000000 --- a/core/lib/dal/.sqlx/query-63f5f9bff4b2c15fa4230af2c73b5b5cc7e37dd6a607e9453e822e34ba77cdc3.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT COALESCE(SUM(predicted_prove_gas_cost), 0) AS \"sum!\" FROM l1_batches WHERE number BETWEEN $1 AND $2", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "sum!", - "type_info": "Numeric" - } - ], - "parameters": { - "Left": [ - "Int8", - "Int8" - ] - }, - "nullable": [ - null - ] - }, - "hash": "63f5f9bff4b2c15fa4230af2c73b5b5cc7e37dd6a607e9453e822e34ba77cdc3" -} diff --git a/core/lib/dal/.sqlx/query-6692ff6c0fbb2fc94f5cd2837a43ce80f9b2b27758651ccfc09df61a4ae8a363.json 
b/core/lib/dal/.sqlx/query-6692ff6c0fbb2fc94f5cd2837a43ce80f9b2b27758651ccfc09df61a4ae8a363.json index 1a3c160cee1b..a6ac67328b14 100644 --- a/core/lib/dal/.sqlx/query-6692ff6c0fbb2fc94f5cd2837a43ce80f9b2b27758651ccfc09df61a4ae8a363.json +++ b/core/lib/dal/.sqlx/query-6692ff6c0fbb2fc94f5cd2837a43ce80f9b2b27758651ccfc09df61a4ae8a363.json @@ -101,7 +101,7 @@ false, true, true, - false, + true, true, true, false, diff --git a/core/lib/dal/.sqlx/query-8065f779631edb7ba30229485ff5e419ffae1fece87427c6571713047d44f015.json b/core/lib/dal/.sqlx/query-8065f779631edb7ba30229485ff5e419ffae1fece87427c6571713047d44f015.json new file mode 100644 index 000000000000..7559630217be --- /dev/null +++ b/core/lib/dal/.sqlx/query-8065f779631edb7ba30229485ff5e419ffae1fece87427c6571713047d44f015.json @@ -0,0 +1,52 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n tp.pubkey,\n tp.signature,\n tp.proof,\n tp.updated_at,\n tp.status,\n ta.attestation\n FROM\n tee_proof_generation_details tp\n LEFT JOIN\n tee_attestations ta ON tp.pubkey = ta.pubkey\n WHERE\n tp.l1_batch_number = $1\n ORDER BY tp.l1_batch_number ASC, tp.tee_type ASC", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "pubkey", + "type_info": "Bytea" + }, + { + "ordinal": 1, + "name": "signature", + "type_info": "Bytea" + }, + { + "ordinal": 2, + "name": "proof", + "type_info": "Bytea" + }, + { + "ordinal": 3, + "name": "updated_at", + "type_info": "Timestamp" + }, + { + "ordinal": 4, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "attestation", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + true, + true, + true, + false, + false, + true + ] + }, + "hash": "8065f779631edb7ba30229485ff5e419ffae1fece87427c6571713047d44f015" +} diff --git a/core/lib/dal/.sqlx/query-843b299bbffbade83dd673ed4a488eefd4419e8a9eb05a9528da11163f52b2c7.json b/core/lib/dal/.sqlx/query-843b299bbffbade83dd673ed4a488eefd4419e8a9eb05a9528da11163f52b2c7.json new 
file mode 100644 index 000000000000..63f71c88c574 --- /dev/null +++ b/core/lib/dal/.sqlx/query-843b299bbffbade83dd673ed4a488eefd4419e8a9eb05a9528da11163f52b2c7.json @@ -0,0 +1,23 @@ +{ + "db_name": "PostgreSQL", + "query": "INSERT INTO eth_txs (raw_tx, nonce, tx_type, contract_address, predicted_gas_cost, chain_id, created_at, updated_at) VALUES ('\\x00', 0, $1, '', NULL, $2, now(), now()) RETURNING id", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Text", + "Int8" + ] + }, + "nullable": [ + false + ] + }, + "hash": "843b299bbffbade83dd673ed4a488eefd4419e8a9eb05a9528da11163f52b2c7" +} diff --git a/core/lib/dal/.sqlx/query-868bfdc5d8ee5eab395fa690891751dfd285628a75a35b152bccb3c73e9cc057.json b/core/lib/dal/.sqlx/query-868bfdc5d8ee5eab395fa690891751dfd285628a75a35b152bccb3c73e9cc057.json index c2e662ef376e..e47911f3d776 100644 --- a/core/lib/dal/.sqlx/query-868bfdc5d8ee5eab395fa690891751dfd285628a75a35b152bccb3c73e9cc057.json
+++ b/core/lib/dal/.sqlx/query-868bfdc5d8ee5eab395fa690891751dfd285628a75a35b152bccb3c73e9cc057.json @@ -102,7 +102,7 @@ false, true, true, - false, + true, true, true, false, diff --git a/core/lib/dal/.sqlx/query-8bab3c429fc5bbf4a91819e55cfb71ef6258e011f424c705d56c9827e8277c84.json b/core/lib/dal/.sqlx/query-8bab3c429fc5bbf4a91819e55cfb71ef6258e011f424c705d56c9827e8277c84.json new file mode 100644 index 000000000000..0132af8fbe6c --- /dev/null +++ b/core/lib/dal/.sqlx/query-8bab3c429fc5bbf4a91819e55cfb71ef6258e011f424c705d56c9827e8277c84.json @@ -0,0 +1,44 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH\n soft AS (\n SELECT\n pruned_l1_batch,\n pruned_miniblock\n FROM\n pruning_log\n WHERE\n type = 'Soft'\n ORDER BY\n pruned_l1_batch DESC\n LIMIT\n 1\n ),\n \n hard AS (\n SELECT\n pruned_l1_batch,\n pruned_miniblock,\n pruned_l1_batch_root_hash\n FROM\n pruning_log\n WHERE\n type = 'Hard'\n ORDER BY\n pruned_l1_batch DESC\n LIMIT\n 1\n )\n \n SELECT\n soft.pruned_l1_batch AS last_soft_pruned_l1_batch,\n soft.pruned_miniblock AS last_soft_pruned_l2_block,\n hard.pruned_l1_batch AS last_hard_pruned_l1_batch,\n hard.pruned_miniblock AS last_hard_pruned_l2_block,\n hard.pruned_l1_batch_root_hash AS last_hard_pruned_batch_root_hash\n FROM\n soft\n FULL JOIN hard ON TRUE\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "last_soft_pruned_l1_batch", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "last_soft_pruned_l2_block", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "last_hard_pruned_l1_batch", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "last_hard_pruned_l2_block", + "type_info": "Int8" + }, + { + "ordinal": 4, + "name": "last_hard_pruned_batch_root_hash", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + true, + true, + true, + true, + true + ] + }, + "hash": "8bab3c429fc5bbf4a91819e55cfb71ef6258e011f424c705d56c9827e8277c84" +} diff --git 
a/core/lib/dal/.sqlx/query-d3760406b7bf5d14a3fe6cbc9fb3926df634ebf0d8286181fa04884fb747cee8.json b/core/lib/dal/.sqlx/query-9b011c7afa158edd17fe0dc56dad5204831b7ede1a8e3b3c2d441d505c4ca58f.json similarity index 52% rename from core/lib/dal/.sqlx/query-d3760406b7bf5d14a3fe6cbc9fb3926df634ebf0d8286181fa04884fb747cee8.json rename to core/lib/dal/.sqlx/query-9b011c7afa158edd17fe0dc56dad5204831b7ede1a8e3b3c2d441d505c4ca58f.json index ed3270de573e..8de8a19da5f8 100644 --- a/core/lib/dal/.sqlx/query-d3760406b7bf5d14a3fe6cbc9fb3926df634ebf0d8286181fa04884fb747cee8.json +++ b/core/lib/dal/.sqlx/query-9b011c7afa158edd17fe0dc56dad5204831b7ede1a8e3b3c2d441d505c4ca58f.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n miniblocks.number,\n COALESCE(\n miniblocks.l1_batch_number,\n (\n SELECT\n (MAX(number) + 1)\n FROM\n l1_batches\n WHERE\n is_sealed\n )\n ) AS \"l1_batch_number!\",\n miniblocks.timestamp,\n miniblocks.l1_tx_count,\n miniblocks.l2_tx_count,\n miniblocks.hash AS \"root_hash?\",\n commit_tx.tx_hash AS \"commit_tx_hash?\",\n commit_tx.confirmed_at AS \"committed_at?\",\n prove_tx.tx_hash AS \"prove_tx_hash?\",\n prove_tx.confirmed_at AS \"proven_at?\",\n execute_tx.tx_hash AS \"execute_tx_hash?\",\n execute_tx.confirmed_at AS \"executed_at?\",\n miniblocks.l1_gas_price,\n miniblocks.l2_fair_gas_price,\n miniblocks.fair_pubdata_price,\n miniblocks.bootloader_code_hash,\n miniblocks.default_aa_code_hash,\n l1_batches.evm_emulator_code_hash,\n miniblocks.protocol_version,\n miniblocks.fee_account_address\n FROM\n miniblocks\n LEFT JOIN l1_batches ON miniblocks.l1_batch_number = l1_batches.number\n LEFT JOIN eth_txs_history AS commit_tx\n ON (\n l1_batches.eth_commit_tx_id = commit_tx.eth_tx_id\n AND commit_tx.confirmed_at IS NOT NULL\n )\n LEFT JOIN eth_txs_history AS prove_tx\n ON (\n l1_batches.eth_prove_tx_id = prove_tx.eth_tx_id\n AND prove_tx.confirmed_at IS NOT NULL\n )\n LEFT JOIN eth_txs_history AS execute_tx\n ON (\n 
l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id\n AND execute_tx.confirmed_at IS NOT NULL\n )\n WHERE\n miniblocks.number = $1\n ", + "query": "\n SELECT\n miniblocks.number,\n COALESCE(\n miniblocks.l1_batch_number,\n (\n SELECT\n (MAX(number) + 1)\n FROM\n l1_batches\n WHERE\n is_sealed\n )\n ) AS \"l1_batch_number!\",\n miniblocks.timestamp,\n miniblocks.l1_tx_count,\n miniblocks.l2_tx_count,\n miniblocks.hash AS \"root_hash?\",\n commit_tx.tx_hash AS \"commit_tx_hash?\",\n commit_tx.confirmed_at AS \"committed_at?\",\n commit_tx_data.chain_id AS \"commit_chain_id?\",\n prove_tx.tx_hash AS \"prove_tx_hash?\",\n prove_tx.confirmed_at AS \"proven_at?\",\n prove_tx_data.chain_id AS \"prove_chain_id?\",\n execute_tx.tx_hash AS \"execute_tx_hash?\",\n execute_tx.confirmed_at AS \"executed_at?\",\n execute_tx_data.chain_id AS \"execute_chain_id?\",\n miniblocks.l1_gas_price,\n miniblocks.l2_fair_gas_price,\n miniblocks.fair_pubdata_price,\n miniblocks.bootloader_code_hash,\n miniblocks.default_aa_code_hash,\n l1_batches.evm_emulator_code_hash,\n miniblocks.protocol_version,\n miniblocks.fee_account_address\n FROM\n miniblocks\n LEFT JOIN l1_batches ON miniblocks.l1_batch_number = l1_batches.number\n LEFT JOIN eth_txs_history AS commit_tx\n ON (\n l1_batches.eth_commit_tx_id = commit_tx.eth_tx_id\n AND commit_tx.confirmed_at IS NOT NULL\n )\n LEFT JOIN eth_txs_history AS prove_tx\n ON (\n l1_batches.eth_prove_tx_id = prove_tx.eth_tx_id\n AND prove_tx.confirmed_at IS NOT NULL\n )\n LEFT JOIN eth_txs_history AS execute_tx\n ON (\n l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id\n AND execute_tx.confirmed_at IS NOT NULL\n )\n LEFT JOIN eth_txs AS commit_tx_data\n ON (\n l1_batches.eth_commit_tx_id = commit_tx_data.id\n AND commit_tx_data.confirmed_eth_tx_history_id IS NOT NULL\n )\n LEFT JOIN eth_txs AS prove_tx_data\n ON (\n l1_batches.eth_prove_tx_id = prove_tx_data.id\n AND prove_tx_data.confirmed_eth_tx_history_id IS NOT NULL\n )\n LEFT JOIN eth_txs AS 
execute_tx_data\n ON (\n l1_batches.eth_execute_tx_id = execute_tx_data.id\n AND execute_tx_data.confirmed_eth_tx_history_id IS NOT NULL\n )\n WHERE\n miniblocks.number = $1\n ", "describe": { "columns": [ { @@ -45,61 +45,76 @@ }, { "ordinal": 8, + "name": "commit_chain_id?", + "type_info": "Int8" + }, + { + "ordinal": 9, "name": "prove_tx_hash?", "type_info": "Text" }, { - "ordinal": 9, + "ordinal": 10, "name": "proven_at?", "type_info": "Timestamp" }, { - "ordinal": 10, + "ordinal": 11, + "name": "prove_chain_id?", + "type_info": "Int8" + }, + { + "ordinal": 12, "name": "execute_tx_hash?", "type_info": "Text" }, { - "ordinal": 11, + "ordinal": 13, "name": "executed_at?", "type_info": "Timestamp" }, { - "ordinal": 12, + "ordinal": 14, + "name": "execute_chain_id?", + "type_info": "Int8" + }, + { + "ordinal": 15, "name": "l1_gas_price", "type_info": "Int8" }, { - "ordinal": 13, + "ordinal": 16, "name": "l2_fair_gas_price", "type_info": "Int8" }, { - "ordinal": 14, + "ordinal": 17, "name": "fair_pubdata_price", "type_info": "Int8" }, { - "ordinal": 15, + "ordinal": 18, "name": "bootloader_code_hash", "type_info": "Bytea" }, { - "ordinal": 16, + "ordinal": 19, "name": "default_aa_code_hash", "type_info": "Bytea" }, { - "ordinal": 17, + "ordinal": 20, "name": "evm_emulator_code_hash", "type_info": "Bytea" }, { - "ordinal": 18, + "ordinal": 21, "name": "protocol_version", "type_info": "Int4" }, { - "ordinal": 19, + "ordinal": 22, "name": "fee_account_address", "type_info": "Bytea" } @@ -118,10 +133,13 @@ false, false, true, + true, false, true, + true, false, true, + true, false, false, true, @@ -132,5 +150,5 @@ false ] }, - "hash": "d3760406b7bf5d14a3fe6cbc9fb3926df634ebf0d8286181fa04884fb747cee8" + "hash": "9b011c7afa158edd17fe0dc56dad5204831b7ede1a8e3b3c2d441d505c4ca58f" } diff --git a/core/lib/dal/.sqlx/query-a0ad14cd53afb6c360e70c47dbd79d66dd9fbd20941aec20e2e8c3874b15a953.json 
b/core/lib/dal/.sqlx/query-a0ad14cd53afb6c360e70c47dbd79d66dd9fbd20941aec20e2e8c3874b15a953.json new file mode 100644 index 000000000000..a64bdb403aef --- /dev/null +++ b/core/lib/dal/.sqlx/query-a0ad14cd53afb6c360e70c47dbd79d66dd9fbd20941aec20e2e8c3874b15a953.json @@ -0,0 +1,30 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE l1_batches\n SET\n l1_tx_count = $2,\n l2_tx_count = $3,\n l2_to_l1_messages = $4,\n bloom = $5,\n priority_ops_onchain_data = $6,\n initial_bootloader_heap_content = $7,\n used_contract_hashes = $8,\n bootloader_code_hash = $9,\n default_aa_code_hash = $10,\n evm_emulator_code_hash = $11,\n protocol_version = $12,\n system_logs = $13,\n storage_refunds = $14,\n pubdata_costs = $15,\n pubdata_input = $16,\n predicted_circuits_by_type = $17,\n updated_at = NOW(),\n sealed_at = NOW(),\n is_sealed = TRUE\n WHERE\n number = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int4", + "Int4", + "ByteaArray", + "Bytea", + "ByteaArray", + "Jsonb", + "Jsonb", + "Bytea", + "Bytea", + "Bytea", + "Int4", + "ByteaArray", + "Int8Array", + "Int8Array", + "Bytea", + "Jsonb" + ] + }, + "nullable": [] + }, + "hash": "a0ad14cd53afb6c360e70c47dbd79d66dd9fbd20941aec20e2e8c3874b15a953" +} diff --git a/core/lib/dal/.sqlx/query-b6961d273f833f8babaf16f256822a6e92698fcfcf5d0a9252d84b75459b2664.json b/core/lib/dal/.sqlx/query-b6961d273f833f8babaf16f256822a6e92698fcfcf5d0a9252d84b75459b2664.json new file mode 100644 index 000000000000..b206d337201b --- /dev/null +++ b/core/lib/dal/.sqlx/query-b6961d273f833f8babaf16f256822a6e92698fcfcf5d0a9252d84b75459b2664.json @@ -0,0 +1,26 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n p.l1_batch_number\n FROM\n proof_generation_details p\n LEFT JOIN\n tee_proof_generation_details tee\n ON\n p.l1_batch_number = tee.l1_batch_number\n AND tee.tee_type = $1\n WHERE\n (\n p.l1_batch_number >= $5\n AND p.vm_run_data_blob_url IS NOT NULL\n AND p.proof_gen_data_blob_url IS NOT NULL\n )\n AND 
(\n tee.l1_batch_number IS NULL\n OR (\n (tee.status = $2 OR tee.status = $3)\n AND tee.prover_taken_at < NOW() - $4::INTERVAL\n )\n )\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Text", + "Text", + "Text", + "Interval", + "Int8" + ] + }, + "nullable": [ + false + ] + }, + "hash": "b6961d273f833f8babaf16f256822a6e92698fcfcf5d0a9252d84b75459b2664" +} diff --git a/core/lib/dal/.sqlx/query-b7fa581d662640236d11143953e3a0aedbd2455b2ff728fe88cab94728925c0e.json b/core/lib/dal/.sqlx/query-b7fa581d662640236d11143953e3a0aedbd2455b2ff728fe88cab94728925c0e.json new file mode 100644 index 000000000000..89f0706843bd --- /dev/null +++ b/core/lib/dal/.sqlx/query-b7fa581d662640236d11143953e3a0aedbd2455b2ff728fe88cab94728925c0e.json @@ -0,0 +1,53 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n tp.pubkey,\n tp.signature,\n tp.proof,\n tp.updated_at,\n tp.status,\n ta.attestation\n FROM\n tee_proof_generation_details tp\n LEFT JOIN\n tee_attestations ta ON tp.pubkey = ta.pubkey\n WHERE\n tp.l1_batch_number = $1\n AND tp.tee_type = $2ORDER BY tp.l1_batch_number ASC, tp.tee_type ASC", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "pubkey", + "type_info": "Bytea" + }, + { + "ordinal": 1, + "name": "signature", + "type_info": "Bytea" + }, + { + "ordinal": 2, + "name": "proof", + "type_info": "Bytea" + }, + { + "ordinal": 3, + "name": "updated_at", + "type_info": "Timestamp" + }, + { + "ordinal": 4, + "name": "status", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "attestation", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int8", + "Text" + ] + }, + "nullable": [ + true, + true, + true, + false, + false, + true + ] + }, + "hash": "b7fa581d662640236d11143953e3a0aedbd2455b2ff728fe88cab94728925c0e" +} diff --git a/core/lib/dal/.sqlx/query-b9f77e6c15f9e635024b73f1fc985c5196c431363802b6b988939c99853b9c97.json 
b/core/lib/dal/.sqlx/query-b9f77e6c15f9e635024b73f1fc985c5196c431363802b6b988939c99853b9c97.json deleted file mode 100644 index 379c1f75d9b3..000000000000 --- a/core/lib/dal/.sqlx/query-b9f77e6c15f9e635024b73f1fc985c5196c431363802b6b988939c99853b9c97.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT COALESCE(SUM(predicted_commit_gas_cost), 0) AS \"sum!\" FROM l1_batches WHERE number BETWEEN $1 AND $2", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "sum!", - "type_info": "Numeric" - } - ], - "parameters": { - "Left": [ - "Int8", - "Int8" - ] - }, - "nullable": [ - null - ] - }, - "hash": "b9f77e6c15f9e635024b73f1fc985c5196c431363802b6b988939c99853b9c97" -} diff --git a/core/lib/dal/.sqlx/query-bd37db29c86a84ed09ed0633e8511c5e74988422abd052a33ff1ec5db41f7d52.json b/core/lib/dal/.sqlx/query-bd37db29c86a84ed09ed0633e8511c5e74988422abd052a33ff1ec5db41f7d52.json new file mode 100644 index 000000000000..f33238c1f0c8 --- /dev/null +++ b/core/lib/dal/.sqlx/query-bd37db29c86a84ed09ed0633e8511c5e74988422abd052a33ff1ec5db41f7d52.json @@ -0,0 +1,26 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n bytecode_hash AS \"bytecode_hash!\",\n bytecode AS \"bytecode!\"\n FROM factory_deps\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "bytecode_hash!", + "type_info": "Bytea" + }, + { + "ordinal": 1, + "name": "bytecode!", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false + ] + }, + "hash": "bd37db29c86a84ed09ed0633e8511c5e74988422abd052a33ff1ec5db41f7d52" +} diff --git a/core/lib/dal/.sqlx/query-c1f9ecf033d609457106189bc4d7928aa933616d2186c13a4e005297b0ad63a7.json b/core/lib/dal/.sqlx/query-c1f9ecf033d609457106189bc4d7928aa933616d2186c13a4e005297b0ad63a7.json new file mode 100644 index 000000000000..90623e77e985 --- /dev/null +++ b/core/lib/dal/.sqlx/query-c1f9ecf033d609457106189bc4d7928aa933616d2186c13a4e005297b0ad63a7.json @@ -0,0 +1,15 @@ +{ + "db_name": 
"PostgreSQL", + "query": "\n UPDATE\n l1_batches\n SET\n batch_chain_merkle_path = $2\n WHERE\n number = $1\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Bytea" + ] + }, + "nullable": [] + }, + "hash": "c1f9ecf033d609457106189bc4d7928aa933616d2186c13a4e005297b0ad63a7" +} diff --git a/core/lib/dal/.sqlx/query-c2c288d268d6b266acbfc1058bc55a360f8ae12b6378f8168c000d668d6489d0.json b/core/lib/dal/.sqlx/query-c2c288d268d6b266acbfc1058bc55a360f8ae12b6378f8168c000d668d6489d0.json new file mode 100644 index 000000000000..751d272b0b0e --- /dev/null +++ b/core/lib/dal/.sqlx/query-c2c288d268d6b266acbfc1058bc55a360f8ae12b6378f8168c000d668d6489d0.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n l2_l1_merkle_root\n FROM\n l1_batches\n WHERE\n number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l2_l1_merkle_root", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + true + ] + }, + "hash": "c2c288d268d6b266acbfc1058bc55a360f8ae12b6378f8168c000d668d6489d0" +} diff --git a/core/lib/dal/.sqlx/query-c2c40d5aae2e0276de453c78a39ce5a6cca1524adfe99b0cb35662746479dcc1.json b/core/lib/dal/.sqlx/query-c2c40d5aae2e0276de453c78a39ce5a6cca1524adfe99b0cb35662746479dcc1.json index 61832d25fd24..5e2ea45e0bc2 100644 --- a/core/lib/dal/.sqlx/query-c2c40d5aae2e0276de453c78a39ce5a6cca1524adfe99b0cb35662746479dcc1.json +++ b/core/lib/dal/.sqlx/query-c2c40d5aae2e0276de453c78a39ce5a6cca1524adfe99b0cb35662746479dcc1.json @@ -11,7 +11,8 @@ "kind": { "Enum": [ "ProtocolUpgrades", - "PriorityTransactions" + "PriorityTransactions", + "ChainBatchRoot" ] } } diff --git a/core/lib/dal/.sqlx/query-c9a842d04e8b225e43f07f76541dc766262b5bdc58be0444e164b1bd9feed02d.json b/core/lib/dal/.sqlx/query-c9a842d04e8b225e43f07f76541dc766262b5bdc58be0444e164b1bd9feed02d.json deleted file mode 100644 index fb28539ccdf6..000000000000 --- 
a/core/lib/dal/.sqlx/query-c9a842d04e8b225e43f07f76541dc766262b5bdc58be0444e164b1bd9feed02d.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n WITH\n soft AS (\n SELECT\n pruned_l1_batch,\n pruned_miniblock\n FROM\n pruning_log\n WHERE\n type = 'Soft'\n ORDER BY\n pruned_l1_batch DESC\n LIMIT\n 1\n ),\n \n hard AS (\n SELECT\n pruned_l1_batch,\n pruned_miniblock\n FROM\n pruning_log\n WHERE\n type = 'Hard'\n ORDER BY\n pruned_l1_batch DESC\n LIMIT\n 1\n )\n \n SELECT\n soft.pruned_l1_batch AS last_soft_pruned_l1_batch,\n soft.pruned_miniblock AS last_soft_pruned_miniblock,\n hard.pruned_l1_batch AS last_hard_pruned_l1_batch,\n hard.pruned_miniblock AS last_hard_pruned_miniblock\n FROM\n soft\n FULL JOIN hard ON TRUE\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "last_soft_pruned_l1_batch", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "last_soft_pruned_miniblock", - "type_info": "Int8" - }, - { - "ordinal": 2, - "name": "last_hard_pruned_l1_batch", - "type_info": "Int8" - }, - { - "ordinal": 3, - "name": "last_hard_pruned_miniblock", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - true, - true, - true, - true - ] - }, - "hash": "c9a842d04e8b225e43f07f76541dc766262b5bdc58be0444e164b1bd9feed02d" -} diff --git a/core/lib/dal/.sqlx/query-a88b113b5dc06ac990a66202b3c05e2c2f10d5cbdb03d02c3c541f7eaa1f58a6.json b/core/lib/dal/.sqlx/query-cd6ab0aea6e1f72c58c189e098be5d1cd01521f82e2962c3feebac395caef36f.json similarity index 51% rename from core/lib/dal/.sqlx/query-a88b113b5dc06ac990a66202b3c05e2c2f10d5cbdb03d02c3c541f7eaa1f58a6.json rename to core/lib/dal/.sqlx/query-cd6ab0aea6e1f72c58c189e098be5d1cd01521f82e2962c3feebac395caef36f.json index 28ffcc5ae468..1af3384a2d9f 100644 --- a/core/lib/dal/.sqlx/query-a88b113b5dc06ac990a66202b3c05e2c2f10d5cbdb03d02c3c541f7eaa1f58a6.json +++ b/core/lib/dal/.sqlx/query-cd6ab0aea6e1f72c58c189e098be5d1cd01521f82e2962c3feebac395caef36f.json @@ 
-1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n WITH\n mb AS (\n SELECT\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price\n FROM\n miniblocks\n WHERE\n l1_batch_number = $1\n LIMIT\n 1\n )\n \n SELECT\n l1_batches.number,\n l1_batches.timestamp,\n l1_batches.l1_tx_count,\n l1_batches.l2_tx_count,\n l1_batches.hash AS \"root_hash?\",\n commit_tx.tx_hash AS \"commit_tx_hash?\",\n commit_tx.confirmed_at AS \"committed_at?\",\n prove_tx.tx_hash AS \"prove_tx_hash?\",\n prove_tx.confirmed_at AS \"proven_at?\",\n execute_tx.tx_hash AS \"execute_tx_hash?\",\n execute_tx.confirmed_at AS \"executed_at?\",\n mb.l1_gas_price,\n mb.l2_fair_gas_price,\n mb.fair_pubdata_price,\n l1_batches.bootloader_code_hash,\n l1_batches.default_aa_code_hash,\n l1_batches.evm_emulator_code_hash\n FROM\n l1_batches\n INNER JOIN mb ON TRUE\n LEFT JOIN eth_txs_history AS commit_tx\n ON (\n l1_batches.eth_commit_tx_id = commit_tx.eth_tx_id\n AND commit_tx.confirmed_at IS NOT NULL\n )\n LEFT JOIN eth_txs_history AS prove_tx\n ON (\n l1_batches.eth_prove_tx_id = prove_tx.eth_tx_id\n AND prove_tx.confirmed_at IS NOT NULL\n )\n LEFT JOIN eth_txs_history AS execute_tx\n ON (\n l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id\n AND execute_tx.confirmed_at IS NOT NULL\n )\n WHERE\n l1_batches.number = $1\n ", + "query": "\n WITH\n mb AS (\n SELECT\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price\n FROM\n miniblocks\n WHERE\n l1_batch_number = $1\n LIMIT\n 1\n )\n \n SELECT\n l1_batches.number,\n l1_batches.timestamp,\n l1_batches.l1_tx_count,\n l1_batches.l2_tx_count,\n l1_batches.hash AS \"root_hash?\",\n commit_tx.tx_hash AS \"commit_tx_hash?\",\n commit_tx.confirmed_at AS \"committed_at?\",\n commit_tx_data.chain_id AS \"commit_chain_id?\",\n prove_tx.tx_hash AS \"prove_tx_hash?\",\n prove_tx.confirmed_at AS \"proven_at?\",\n prove_tx_data.chain_id AS \"prove_chain_id?\",\n execute_tx.tx_hash AS \"execute_tx_hash?\",\n execute_tx.confirmed_at AS \"executed_at?\",\n 
execute_tx_data.chain_id AS \"execute_chain_id?\",\n mb.l1_gas_price,\n mb.l2_fair_gas_price,\n mb.fair_pubdata_price,\n l1_batches.bootloader_code_hash,\n l1_batches.default_aa_code_hash,\n l1_batches.evm_emulator_code_hash\n FROM\n l1_batches\n INNER JOIN mb ON TRUE\n LEFT JOIN eth_txs_history AS commit_tx\n ON (\n l1_batches.eth_commit_tx_id = commit_tx.eth_tx_id\n AND commit_tx.confirmed_at IS NOT NULL\n )\n LEFT JOIN eth_txs_history AS prove_tx\n ON (\n l1_batches.eth_prove_tx_id = prove_tx.eth_tx_id\n AND prove_tx.confirmed_at IS NOT NULL\n )\n LEFT JOIN eth_txs_history AS execute_tx\n ON (\n l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id\n AND execute_tx.confirmed_at IS NOT NULL\n )\n LEFT JOIN eth_txs AS commit_tx_data\n ON (\n l1_batches.eth_commit_tx_id = commit_tx_data.id\n AND commit_tx_data.confirmed_eth_tx_history_id IS NOT NULL\n )\n LEFT JOIN eth_txs AS prove_tx_data\n ON (\n l1_batches.eth_prove_tx_id = prove_tx_data.id\n AND prove_tx_data.confirmed_eth_tx_history_id IS NOT NULL\n )\n LEFT JOIN eth_txs AS execute_tx_data\n ON (\n l1_batches.eth_execute_tx_id = execute_tx_data.id\n AND execute_tx_data.confirmed_eth_tx_history_id IS NOT NULL\n )\n WHERE\n l1_batches.number = $1\n ", "describe": { "columns": [ { @@ -40,51 +40,66 @@ }, { "ordinal": 7, + "name": "commit_chain_id?", + "type_info": "Int8" + }, + { + "ordinal": 8, "name": "prove_tx_hash?", "type_info": "Text" }, { - "ordinal": 8, + "ordinal": 9, "name": "proven_at?", "type_info": "Timestamp" }, { - "ordinal": 9, + "ordinal": 10, + "name": "prove_chain_id?", + "type_info": "Int8" + }, + { + "ordinal": 11, "name": "execute_tx_hash?", "type_info": "Text" }, { - "ordinal": 10, + "ordinal": 12, "name": "executed_at?", "type_info": "Timestamp" }, { - "ordinal": 11, + "ordinal": 13, + "name": "execute_chain_id?", + "type_info": "Int8" + }, + { + "ordinal": 14, "name": "l1_gas_price", "type_info": "Int8" }, { - "ordinal": 12, + "ordinal": 15, "name": "l2_fair_gas_price", "type_info": "Int8" 
}, { - "ordinal": 13, + "ordinal": 16, "name": "fair_pubdata_price", "type_info": "Int8" }, { - "ordinal": 14, + "ordinal": 17, "name": "bootloader_code_hash", "type_info": "Bytea" }, { - "ordinal": 15, + "ordinal": 18, "name": "default_aa_code_hash", "type_info": "Bytea" }, { - "ordinal": 16, + "ordinal": 19, "name": "evm_emulator_code_hash", "type_info": "Bytea" } @@ -102,10 +117,13 @@ true, false, true, + true, false, true, + true, false, true, + true, false, false, true, @@ -114,5 +132,5 @@ true ] }, - "hash": "a88b113b5dc06ac990a66202b3c05e2c2f10d5cbdb03d02c3c541f7eaa1f58a6" + "hash": "cd6ab0aea6e1f72c58c189e098be5d1cd01521f82e2962c3feebac395caef36f" } diff --git a/core/lib/dal/.sqlx/query-cf8aaa95e3e8c376b6083c7015753e30af54675ce58273cbb29312e6e88cbdf5.json b/core/lib/dal/.sqlx/query-cf8aaa95e3e8c376b6083c7015753e30af54675ce58273cbb29312e6e88cbdf5.json new file mode 100644 index 000000000000..b79441ab4d63 --- /dev/null +++ b/core/lib/dal/.sqlx/query-cf8aaa95e3e8c376b6083c7015753e30af54675ce58273cbb29312e6e88cbdf5.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT eth_txs.chain_id\n FROM l1_batches\n JOIN eth_txs ON eth_txs.id = l1_batches.eth_commit_tx_id\n WHERE\n number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "chain_id", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + true + ] + }, + "hash": "cf8aaa95e3e8c376b6083c7015753e30af54675ce58273cbb29312e6e88cbdf5" +} diff --git a/core/lib/dal/.sqlx/query-e46c99b23db91800b27c717100f8203a62629904bc4956249e690a8ad7a48983.json b/core/lib/dal/.sqlx/query-e46c99b23db91800b27c717100f8203a62629904bc4956249e690a8ad7a48983.json deleted file mode 100644 index 7ca2c9e7e9fa..000000000000 --- a/core/lib/dal/.sqlx/query-e46c99b23db91800b27c717100f8203a62629904bc4956249e690a8ad7a48983.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n WITH upsert AS (\n SELECT\n p.l1_batch_number\n FROM\n 
proof_generation_details p\n LEFT JOIN\n tee_proof_generation_details tee\n ON\n p.l1_batch_number = tee.l1_batch_number\n AND tee.tee_type = $1\n WHERE\n (\n p.l1_batch_number >= $5\n AND p.vm_run_data_blob_url IS NOT NULL\n AND p.proof_gen_data_blob_url IS NOT NULL\n )\n AND (\n tee.l1_batch_number IS NULL\n OR (\n (tee.status = $2 OR tee.status = $3)\n AND tee.prover_taken_at < NOW() - $4::INTERVAL\n )\n )\n FETCH FIRST ROW ONLY\n )\n \n INSERT INTO\n tee_proof_generation_details (\n l1_batch_number, tee_type, status, created_at, updated_at, prover_taken_at\n )\n SELECT\n l1_batch_number,\n $1,\n $2,\n NOW(),\n NOW(),\n NOW()\n FROM\n upsert\n ON CONFLICT (l1_batch_number, tee_type) DO\n UPDATE\n SET\n status = $2,\n updated_at = NOW(),\n prover_taken_at = NOW()\n RETURNING\n l1_batch_number,\n created_at\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "l1_batch_number", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "created_at", - "type_info": "Timestamp" - } - ], - "parameters": { - "Left": [ - "Text", - "Text", - "Text", - "Interval", - "Int8" - ] - }, - "nullable": [ - false, - false - ] - }, - "hash": "e46c99b23db91800b27c717100f8203a62629904bc4956249e690a8ad7a48983" -} diff --git a/core/lib/dal/.sqlx/query-f516657dd48332522a5580e26c509fb7e3baa5ae84bd5e010008f8972e1a7f98.json b/core/lib/dal/.sqlx/query-f516657dd48332522a5580e26c509fb7e3baa5ae84bd5e010008f8972e1a7f98.json new file mode 100644 index 000000000000..9f7de50539be --- /dev/null +++ b/core/lib/dal/.sqlx/query-f516657dd48332522a5580e26c509fb7e3baa5ae84bd5e010008f8972e1a7f98.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n number, l2_l1_merkle_root\n FROM\n l1_batches\n JOIN eth_txs ON eth_txs.id = l1_batches.eth_execute_tx_id\n WHERE\n batch_chain_merkle_path IS NOT NULL\n AND chain_id = $1\n ORDER BY number\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": 
"l2_l1_merkle_root", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + true + ] + }, + "hash": "f516657dd48332522a5580e26c509fb7e3baa5ae84bd5e010008f8972e1a7f98" +} diff --git a/core/lib/dal/migrations/20241011081834_batch_chain_merkle_path.down.sql b/core/lib/dal/migrations/20241011081834_batch_chain_merkle_path.down.sql new file mode 100644 index 000000000000..da7142b8f819 --- /dev/null +++ b/core/lib/dal/migrations/20241011081834_batch_chain_merkle_path.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE l1_batches + DROP COLUMN batch_chain_merkle_path BYTEA; diff --git a/core/lib/dal/migrations/20241011081834_batch_chain_merkle_path.up.sql b/core/lib/dal/migrations/20241011081834_batch_chain_merkle_path.up.sql new file mode 100644 index 000000000000..8b133f70904b --- /dev/null +++ b/core/lib/dal/migrations/20241011081834_batch_chain_merkle_path.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE l1_batches + ADD COLUMN batch_chain_merkle_path BYTEA; + +-- postgres doesn't allow dropping enum variant, so nothing is done in down.sql +ALTER TYPE event_type ADD VALUE 'ChainBatchRoot'; diff --git a/core/lib/dal/migrations/20241112120944_add_batch_root_hash_to_pruning_logs.down.sql b/core/lib/dal/migrations/20241112120944_add_batch_root_hash_to_pruning_logs.down.sql new file mode 100644 index 000000000000..d6beeb80e8a5 --- /dev/null +++ b/core/lib/dal/migrations/20241112120944_add_batch_root_hash_to_pruning_logs.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE pruning_log + DROP COLUMN pruned_l1_batch_root_hash; diff --git a/core/lib/dal/migrations/20241112120944_add_batch_root_hash_to_pruning_logs.up.sql b/core/lib/dal/migrations/20241112120944_add_batch_root_hash_to_pruning_logs.up.sql new file mode 100644 index 000000000000..6a990781f61c --- /dev/null +++ b/core/lib/dal/migrations/20241112120944_add_batch_root_hash_to_pruning_logs.up.sql @@ -0,0 +1,3 @@ +-- nullable for backward compatibility +ALTER TABLE pruning_log + ADD COLUMN 
pruned_l1_batch_root_hash BYTEA DEFAULT NULL; diff --git a/core/lib/dal/migrations/20241121142103_drop_predicted_gas_cost_not_null.down.sql b/core/lib/dal/migrations/20241121142103_drop_predicted_gas_cost_not_null.down.sql new file mode 100644 index 000000000000..6d0c32ea6c24 --- /dev/null +++ b/core/lib/dal/migrations/20241121142103_drop_predicted_gas_cost_not_null.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE eth_txs + ALTER COLUMN predicted_gas_cost SET NOT NULL; diff --git a/core/lib/dal/migrations/20241121142103_drop_predicted_gas_cost_not_null.up.sql b/core/lib/dal/migrations/20241121142103_drop_predicted_gas_cost_not_null.up.sql new file mode 100644 index 000000000000..cbb1e3c1a3f1 --- /dev/null +++ b/core/lib/dal/migrations/20241121142103_drop_predicted_gas_cost_not_null.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE eth_txs + ALTER COLUMN predicted_gas_cost DROP NOT NULL; diff --git a/core/lib/dal/src/blocks_dal.rs b/core/lib/dal/src/blocks_dal.rs index 0cb607a304e2..5002c5a8afbf 100644 --- a/core/lib/dal/src/blocks_dal.rs +++ b/core/lib/dal/src/blocks_dal.rs @@ -6,25 +6,24 @@ use std::{ }; use anyhow::Context as _; -use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive}; +use bigdecimal::{BigDecimal, FromPrimitive}; use sqlx::types::chrono::{DateTime, Utc}; use zksync_db_connection::{ connection::Connection, error::{DalResult, SqlxContext}, instrument::{InstrumentExt, Instrumented}, - interpolate_query, match_query_as, }; use zksync_types::{ aggregated_operations::AggregatedActionType, block::{ - BlockGasCount, L1BatchHeader, L1BatchStatistics, L1BatchTreeData, L2BlockHeader, - StorageOracleInfo, UnsealedL1BatchHeader, + L1BatchHeader, L1BatchStatistics, L1BatchTreeData, L2BlockHeader, StorageOracleInfo, + UnsealedL1BatchHeader, }, commitment::{L1BatchCommitmentArtifacts, L1BatchWithMetadata}, fee_model::BatchFeeInput, - l2_to_l1_log::UserL2ToL1Log, + l2_to_l1_log::{BatchAndChainMerklePath, UserL2ToL1Log}, writes::TreeWrite, - Address, Bloom, L1BatchNumber, L2BlockNumber, 
ProtocolVersionId, H256, U256, + Address, Bloom, L1BatchNumber, L2BlockNumber, ProtocolVersionId, SLChainId, H256, U256, }; use zksync_vm_interface::CircuitStatistic; @@ -688,7 +687,6 @@ impl BlocksDal<'_, '_> { &mut self, header: &L1BatchHeader, initial_bootloader_contents: &[(usize, U256)], - predicted_block_gas: BlockGasCount, storage_refunds: &[u32], pubdata_costs: &[i32], predicted_circuits_by_type: CircuitStatistic, // predicted number of circuits for each circuit type @@ -728,20 +726,17 @@ impl BlocksDal<'_, '_> { l2_to_l1_messages = $4, bloom = $5, priority_ops_onchain_data = $6, - predicted_commit_gas_cost = $7, - predicted_prove_gas_cost = $8, - predicted_execute_gas_cost = $9, - initial_bootloader_heap_content = $10, - used_contract_hashes = $11, - bootloader_code_hash = $12, - default_aa_code_hash = $13, - evm_emulator_code_hash = $14, - protocol_version = $15, - system_logs = $16, - storage_refunds = $17, - pubdata_costs = $18, - pubdata_input = $19, - predicted_circuits_by_type = $20, + initial_bootloader_heap_content = $7, + used_contract_hashes = $8, + bootloader_code_hash = $9, + default_aa_code_hash = $10, + evm_emulator_code_hash = $11, + protocol_version = $12, + system_logs = $13, + storage_refunds = $14, + pubdata_costs = $15, + pubdata_input = $16, + predicted_circuits_by_type = $17, updated_at = NOW(), sealed_at = NOW(), is_sealed = TRUE @@ -754,9 +749,6 @@ impl BlocksDal<'_, '_> { &header.l2_to_l1_messages, header.bloom.as_bytes(), &priority_onchain_data, - i64::from(predicted_block_gas.commit), - i64::from(predicted_block_gas.prove), - i64::from(predicted_block_gas.execute), initial_bootloader_contents, used_contract_hashes, header.base_system_contracts_hashes.bootloader.as_bytes(), @@ -1982,6 +1974,150 @@ impl BlocksDal<'_, '_> { Ok(Some((H256::from_slice(&hash), row.timestamp as u64))) } + pub async fn get_l1_batch_local_root( + &mut self, + number: L1BatchNumber, + ) -> DalResult> { + let Some(row) = sqlx::query!( + r#" + SELECT + 
local_root + FROM + l1_batches + WHERE + number = $1 + "#, + i64::from(number.0) + ) + .instrument("get_l1_batch_local_root") + .with_arg("number", &number) + .fetch_optional(self.storage) + .await? + else { + return Ok(None); + }; + let Some(local_root) = row.local_root else { + return Ok(None); + }; + Ok(Some(H256::from_slice(&local_root))) + } + + pub async fn get_l1_batch_l2_l1_merkle_root( + &mut self, + number: L1BatchNumber, + ) -> DalResult> { + let Some(row) = sqlx::query!( + r#" + SELECT + l2_l1_merkle_root + FROM + l1_batches + WHERE + number = $1 + "#, + i64::from(number.0) + ) + .instrument("get_l1_batch_l2_l1_merkle_root") + .with_arg("number", &number) + .fetch_optional(self.storage) + .await? + else { + return Ok(None); + }; + let Some(l2_l1_merkle_root) = row.l2_l1_merkle_root else { + return Ok(None); + }; + Ok(Some(H256::from_slice(&l2_l1_merkle_root))) + } + + pub async fn get_l1_batch_chain_merkle_path( + &mut self, + number: L1BatchNumber, + ) -> DalResult> { + let Some(row) = sqlx::query!( + r#" + SELECT + batch_chain_merkle_path + FROM + l1_batches + WHERE + number = $1 + "#, + i64::from(number.0) + ) + .instrument("get_l1_batch_chain_merkle_path") + .with_arg("number", &number) + .fetch_optional(self.storage) + .await? + else { + return Ok(None); + }; + let Some(batch_chain_merkle_path) = row.batch_chain_merkle_path else { + return Ok(None); + }; + Ok(Some( + bincode::deserialize(&batch_chain_merkle_path).unwrap(), + )) + } + + pub async fn get_executed_batch_roots_on_sl( + &mut self, + sl_chain_id: SLChainId, + ) -> DalResult> { + let result = sqlx::query!( + r#" + SELECT + number, l2_l1_merkle_root + FROM + l1_batches + JOIN eth_txs ON eth_txs.id = l1_batches.eth_execute_tx_id + WHERE + batch_chain_merkle_path IS NOT NULL + AND chain_id = $1 + ORDER BY number + "#, + sl_chain_id.0 as i64 + ) + .instrument("get_executed_batch_roots_on_sl") + .with_arg("sl_chain_id", &sl_chain_id) + .fetch_all(self.storage) + .await? 
+ .into_iter() + .map(|row| { + let number = L1BatchNumber(row.number as u32); + let root = H256::from_slice(&row.l2_l1_merkle_root.unwrap()); + (number, root) + }) + .collect(); + Ok(result) + } + + pub async fn set_batch_chain_merkle_path( + &mut self, + number: L1BatchNumber, + proof: BatchAndChainMerklePath, + ) -> DalResult<()> { + let proof_bin = bincode::serialize(&proof).unwrap(); + sqlx::query!( + r#" + UPDATE + l1_batches + SET + batch_chain_merkle_path = $2 + WHERE + number = $1 + "#, + i64::from(number.0), + &proof_bin + ) + .instrument("set_batch_chain_merkle_path") + .with_arg("number", &number) + .execute(self.storage) + .await?; + + Ok(()) + } + pub async fn get_l1_batch_metadata( &mut self, number: L1BatchNumber, @@ -2227,40 +2363,6 @@ impl BlocksDal<'_, '_> { Ok(()) } - /// Returns sum of predicted gas costs on the given L1 batch range. - /// Panics if the sum doesn't fit into `u32`. - pub async fn get_l1_batches_predicted_gas( - &mut self, - number_range: ops::RangeInclusive, - op_type: AggregatedActionType, - ) -> anyhow::Result { - #[derive(Debug)] - struct SumRow { - sum: BigDecimal, - } - - let start = i64::from(number_range.start().0); - let end = i64::from(number_range.end().0); - let query = match_query_as!( - SumRow, - [ - "SELECT COALESCE(SUM(", _, r#"), 0) AS "sum!" FROM l1_batches WHERE number BETWEEN $1 AND $2"# - ], - match (op_type) { - AggregatedActionType::Commit => ("predicted_commit_gas_cost"; start, end), - AggregatedActionType::PublishProofOnchain => ("predicted_prove_gas_cost"; start, end), - AggregatedActionType::Execute => ("predicted_execute_gas_cost"; start, end), - } - ); - - query - .fetch_one(self.storage.conn()) - .await? 
- .sum - .to_u32() - .context("Sum of predicted gas costs should fit into u32") - } - pub async fn get_l2_block_range_of_l1_batch( &mut self, l1_batch_number: L1BatchNumber, @@ -2788,15 +2890,8 @@ impl BlocksDal<'_, '_> { header.to_unsealed_header(BatchFeeInput::pubdata_independent(100, 100, 100)), ) .await?; - self.mark_l1_batch_as_sealed( - header, - &[], - Default::default(), - &[], - &[], - Default::default(), - ) - .await + self.mark_l1_batch_as_sealed(header, &[], &[], &[], Default::default()) + .await } /// Deletes all L2 blocks and L1 batches, including the genesis ones. Should only be used in tests. @@ -2862,8 +2957,7 @@ impl BlocksDal<'_, '_> { #[cfg(test)] mod tests { - use zksync_contracts::BaseSystemContractsHashes; - use zksync_types::{tx::IncludedTxLocation, Address, ProtocolVersion, ProtocolVersionId}; + use zksync_types::{tx::IncludedTxLocation, Address, ProtocolVersion}; use super::*; use crate::{ @@ -2878,7 +2972,7 @@ mod tests { vec![], action_type, Address::default(), - 1, + Some(1), None, None, false, @@ -3078,77 +3172,4 @@ mod tests { .unwrap() .is_none()); } - - #[tokio::test] - async fn getting_predicted_gas() { - let pool = ConnectionPool::::test_pool().await; - let mut conn = pool.connection().await.unwrap(); - conn.protocol_versions_dal() - .save_protocol_version_with_tx(&ProtocolVersion::default()) - .await - .unwrap(); - let mut header = L1BatchHeader::new( - L1BatchNumber(1), - 100, - BaseSystemContractsHashes::default(), - ProtocolVersionId::default(), - ); - let mut predicted_gas = BlockGasCount { - commit: 2, - prove: 3, - execute: 10, - }; - conn.blocks_dal() - .insert_l1_batch( - header.to_unsealed_header(BatchFeeInput::pubdata_independent(100, 100, 100)), - ) - .await - .unwrap(); - conn.blocks_dal() - .mark_l1_batch_as_sealed(&header, &[], predicted_gas, &[], &[], Default::default()) - .await - .unwrap(); - - header.number = L1BatchNumber(2); - header.timestamp += 100; - predicted_gas += predicted_gas; - conn.blocks_dal() - 
.insert_l1_batch( - header.to_unsealed_header(BatchFeeInput::pubdata_independent(100, 100, 100)), - ) - .await - .unwrap(); - conn.blocks_dal() - .mark_l1_batch_as_sealed(&header, &[], predicted_gas, &[], &[], Default::default()) - .await - .unwrap(); - - let action_types_and_predicted_gas = [ - (AggregatedActionType::Execute, 10), - (AggregatedActionType::Commit, 2), - (AggregatedActionType::PublishProofOnchain, 3), - ]; - for (action_type, expected_gas) in action_types_and_predicted_gas { - let gas = conn - .blocks_dal() - .get_l1_batches_predicted_gas(L1BatchNumber(1)..=L1BatchNumber(1), action_type) - .await - .unwrap(); - assert_eq!(gas, expected_gas); - - let gas = conn - .blocks_dal() - .get_l1_batches_predicted_gas(L1BatchNumber(2)..=L1BatchNumber(2), action_type) - .await - .unwrap(); - assert_eq!(gas, 2 * expected_gas); - - let gas = conn - .blocks_dal() - .get_l1_batches_predicted_gas(L1BatchNumber(1)..=L1BatchNumber(2), action_type) - .await - .unwrap(); - assert_eq!(gas, 3 * expected_gas); - } - } } diff --git a/core/lib/dal/src/blocks_web3_dal.rs b/core/lib/dal/src/blocks_web3_dal.rs index 4699eac4e5eb..229f49da6e37 100644 --- a/core/lib/dal/src/blocks_web3_dal.rs +++ b/core/lib/dal/src/blocks_web3_dal.rs @@ -679,10 +679,13 @@ impl BlocksWeb3Dal<'_, '_> { miniblocks.hash AS "root_hash?", commit_tx.tx_hash AS "commit_tx_hash?", commit_tx.confirmed_at AS "committed_at?", + commit_tx_data.chain_id AS "commit_chain_id?", prove_tx.tx_hash AS "prove_tx_hash?", prove_tx.confirmed_at AS "proven_at?", + prove_tx_data.chain_id AS "prove_chain_id?", execute_tx.tx_hash AS "execute_tx_hash?", execute_tx.confirmed_at AS "executed_at?", + execute_tx_data.chain_id AS "execute_chain_id?", miniblocks.l1_gas_price, miniblocks.l2_fair_gas_price, miniblocks.fair_pubdata_price, @@ -709,6 +712,21 @@ impl BlocksWeb3Dal<'_, '_> { l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id AND execute_tx.confirmed_at IS NOT NULL ) + LEFT JOIN eth_txs AS commit_tx_data + ON ( + 
l1_batches.eth_commit_tx_id = commit_tx_data.id + AND commit_tx_data.confirmed_eth_tx_history_id IS NOT NULL + ) + LEFT JOIN eth_txs AS prove_tx_data + ON ( + l1_batches.eth_prove_tx_id = prove_tx_data.id + AND prove_tx_data.confirmed_eth_tx_history_id IS NOT NULL + ) + LEFT JOIN eth_txs AS execute_tx_data + ON ( + l1_batches.eth_execute_tx_id = execute_tx_data.id + AND execute_tx_data.confirmed_eth_tx_history_id IS NOT NULL + ) WHERE miniblocks.number = $1 "#, @@ -752,10 +770,13 @@ impl BlocksWeb3Dal<'_, '_> { l1_batches.hash AS "root_hash?", commit_tx.tx_hash AS "commit_tx_hash?", commit_tx.confirmed_at AS "committed_at?", + commit_tx_data.chain_id AS "commit_chain_id?", prove_tx.tx_hash AS "prove_tx_hash?", prove_tx.confirmed_at AS "proven_at?", + prove_tx_data.chain_id AS "prove_chain_id?", execute_tx.tx_hash AS "execute_tx_hash?", execute_tx.confirmed_at AS "executed_at?", + execute_tx_data.chain_id AS "execute_chain_id?", mb.l1_gas_price, mb.l2_fair_gas_price, mb.fair_pubdata_price, @@ -780,6 +801,21 @@ impl BlocksWeb3Dal<'_, '_> { l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id AND execute_tx.confirmed_at IS NOT NULL ) + LEFT JOIN eth_txs AS commit_tx_data + ON ( + l1_batches.eth_commit_tx_id = commit_tx_data.id + AND commit_tx_data.confirmed_eth_tx_history_id IS NOT NULL + ) + LEFT JOIN eth_txs AS prove_tx_data + ON ( + l1_batches.eth_prove_tx_id = prove_tx_data.id + AND prove_tx_data.confirmed_eth_tx_history_id IS NOT NULL + ) + LEFT JOIN eth_txs AS execute_tx_data + ON ( + l1_batches.eth_execute_tx_id = execute_tx_data.id + AND execute_tx_data.confirmed_eth_tx_history_id IS NOT NULL + ) WHERE l1_batches.number = $1 "#, @@ -1008,7 +1044,7 @@ mod tests { vec![], AggregatedActionType::Commit, Address::default(), - 0, + None, None, None, false, diff --git a/core/lib/dal/src/consensus_dal/mod.rs b/core/lib/dal/src/consensus_dal/mod.rs index a091421d857c..7f3bcd1166ad 100644 --- a/core/lib/dal/src/consensus_dal/mod.rs +++ 
b/core/lib/dal/src/consensus_dal/mod.rs @@ -301,10 +301,10 @@ impl ConsensusDal<'_, '_> { .get_pruning_info() .await .context("get_pruning_info()")?; - Ok(match info.last_soft_pruned_l2_block { + Ok(match info.last_soft_pruned { // It is guaranteed that pruning info values are set for storage recovered from // snapshot, even if pruning was not enabled. - Some(last_pruned) => validator::BlockNumber(last_pruned.0.into()) + 1, + Some(last_pruned) => validator::BlockNumber(last_pruned.l2_block.0.into()) + 1, // No snapshot and no pruning: None => validator::BlockNumber(0), }) diff --git a/core/lib/dal/src/custom_genesis_export_dal.rs b/core/lib/dal/src/custom_genesis_export_dal.rs new file mode 100644 index 000000000000..da74061fb5a5 --- /dev/null +++ b/core/lib/dal/src/custom_genesis_export_dal.rs @@ -0,0 +1,124 @@ +use serde::{Deserialize, Serialize}; +use zksync_db_connection::{connection::Connection, error::DalResult, instrument::InstrumentExt}; +use zksync_types::{AccountTreeId, StorageKey, StorageLog, H160, H256}; + +use crate::Core; + +#[derive(Debug)] +pub struct CustomGenesisExportDal<'a, 'c> { + pub(crate) storage: &'a mut Connection<'c, Core>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct GenesisState { + pub storage_logs: Vec, + pub factory_deps: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct StorageLogRow { + pub address: [u8; 20], + pub key: [u8; 32], + pub value: [u8; 32], +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct FactoryDepRow { + pub bytecode_hash: [u8; 32], + pub bytecode: Vec, +} + +impl CustomGenesisExportDal<'_, '_> { + pub async fn get_storage_logs(&mut self) -> DalResult> { + // This method returns storage logs that are used for genesis export. 
+ // + // The where clause with addresses filters out SystemContext related records + // 0x0 -- chainId, + // 0x3 -- blockGasLimit, + // 0x4 -- coinbase, + // 0x5 -- difficulty + let rows = sqlx::query!( + r#" + WITH latest_storage_logs AS ( + SELECT DISTINCT ON (hashed_key) + hashed_key, + address, + key, + value + FROM storage_logs + ORDER BY hashed_key, miniblock_number DESC, operation_number DESC + ) + + SELECT + lsl.address, + lsl.key, + lsl.value + FROM + initial_writes iw + JOIN + latest_storage_logs lsl ON iw.hashed_key = lsl.hashed_key + WHERE + lsl.value + <> '\x0000000000000000000000000000000000000000000000000000000000000000'::bytea + AND ( + lsl.address <> '\x000000000000000000000000000000000000800b'::bytea OR + lsl.key IN ( + '\x0000000000000000000000000000000000000000000000000000000000000000'::bytea, + '\x0000000000000000000000000000000000000000000000000000000000000003'::bytea, + '\x0000000000000000000000000000000000000000000000000000000000000004'::bytea, + '\x0000000000000000000000000000000000000000000000000000000000000005'::bytea + ) + ); + "#, + ) + .instrument("get_storage_logs") + .fetch_all(self.storage) + .await?; + + let storage_logs: Vec = rows + .into_iter() + .map(|row| StorageLogRow { + address: row.address.unwrap().try_into().unwrap(), + key: row.key.unwrap().try_into().unwrap(), + value: row.value.try_into().unwrap(), + }) + .collect(); + + Ok(storage_logs) + } + + pub async fn get_factory_deps(&mut self) -> DalResult> { + // 1. Fetch the rows from the database + let rows = sqlx::query!( + r#" + SELECT + bytecode_hash AS "bytecode_hash!", + bytecode AS "bytecode!" + FROM factory_deps + "# + ) + .instrument("get_factory_deps") + .fetch_all(self.storage) + .await?; + + // 2. 
Map the rows to FactoryDepRow structs + let factory_deps: Vec = rows + .into_iter() + .map(|row| FactoryDepRow { + bytecode_hash: row.bytecode_hash.try_into().unwrap(), + bytecode: row.bytecode, + }) + .collect(); + + Ok(factory_deps) + } +} + +impl From<&StorageLogRow> for StorageLog { + fn from(value: &StorageLogRow) -> Self { + StorageLog::new_write_log( + StorageKey::new(AccountTreeId::new(H160(value.address)), H256(value.key)), + H256(value.value), + ) + } +} diff --git a/core/lib/dal/src/eth_sender_dal.rs b/core/lib/dal/src/eth_sender_dal.rs index 828fda2ca6a6..32d484c30b50 100644 --- a/core/lib/dal/src/eth_sender_dal.rs +++ b/core/lib/dal/src/eth_sender_dal.rs @@ -2,11 +2,14 @@ use std::{convert::TryFrom, str::FromStr}; use anyhow::Context as _; use sqlx::types::chrono::{DateTime, Utc}; -use zksync_db_connection::{connection::Connection, interpolate_query, match_query_as}; +use zksync_db_connection::{ + connection::Connection, error::DalResult, instrument::InstrumentExt, interpolate_query, + match_query_as, +}; use zksync_types::{ aggregated_operations::AggregatedActionType, eth_sender::{EthTx, EthTxBlobSidecar, TxHistory, TxHistoryToSend}, - Address, L1BatchNumber, H160, H256, U256, + Address, L1BatchNumber, SLChainId, H160, H256, U256, }; use crate::{ @@ -228,7 +231,7 @@ impl EthSenderDal<'_, '_> { raw_tx: Vec, tx_type: AggregatedActionType, contract_address: Address, - predicted_gas_cost: u32, + predicted_gas_cost: Option, from_address: Option
, blob_sidecar: Option, is_gateway: bool, @@ -259,7 +262,7 @@ impl EthSenderDal<'_, '_> { nonce as i64, tx_type.to_string(), address, - i64::from(predicted_gas_cost), + predicted_gas_cost.map(|c| i64::from(c)), from_address.as_ref().map(Address::as_bytes), blob_sidecar.map(|sidecar| bincode::serialize(&sidecar) .expect("can always bincode serialize EthTxBlobSidecar; qed")), @@ -421,6 +424,27 @@ impl EthSenderDal<'_, '_> { Ok(()) } + pub async fn get_batch_commit_chain_id( + &mut self, + batch_number: L1BatchNumber, + ) -> DalResult> { + let row = sqlx::query!( + r#" + SELECT eth_txs.chain_id + FROM l1_batches + JOIN eth_txs ON eth_txs.id = l1_batches.eth_commit_tx_id + WHERE + number = $1 + "#, + i64::from(batch_number.0), + ) + .instrument("get_batch_commit_chain_id") + .with_arg("batch_number", &batch_number) + .fetch_optional(self.storage) + .await?; + Ok(row.and_then(|r| r.chain_id).map(|id| SLChainId(id as u64))) + } + pub async fn get_confirmed_tx_hash_by_eth_tx_id( &mut self, eth_tx_id: u32, @@ -464,6 +488,7 @@ impl EthSenderDal<'_, '_> { tx_type: AggregatedActionType, tx_hash: H256, confirmed_at: DateTime, + sl_chain_id: Option, ) -> anyhow::Result<()> { let mut transaction = self .storage @@ -489,11 +514,12 @@ impl EthSenderDal<'_, '_> { // Insert general tx descriptor. 
let eth_tx_id = sqlx::query_scalar!( - "INSERT INTO eth_txs (raw_tx, nonce, tx_type, contract_address, predicted_gas_cost, created_at, updated_at) \ - VALUES ('\\x00', 0, $1, $2, 0, now(), now()) \ + "INSERT INTO eth_txs (raw_tx, nonce, tx_type, contract_address, predicted_gas_cost, chain_id, created_at, updated_at) \ + VALUES ('\\x00', 0, $1, $2, NULL, $3, now(), now()) \ RETURNING id", tx_type.to_string(), format!("{:#x}", H160::zero()), + sl_chain_id.map(|chain_id| chain_id.0 as i64) ) .fetch_one(transaction.conn()) .await?; @@ -670,7 +696,7 @@ impl EthSenderDal<'_, '_> { Ok(()) } - pub async fn get_number_of_failed_transactions(&mut self) -> anyhow::Result { + pub async fn get_number_of_failed_transactions(&mut self) -> anyhow::Result { sqlx::query!( r#" SELECT @@ -684,6 +710,7 @@ impl EthSenderDal<'_, '_> { .fetch_one(self.storage.conn()) .await? .count + .map(|c| c as u64) .context("count field is missing") } diff --git a/core/lib/dal/src/eth_watcher_dal.rs b/core/lib/dal/src/eth_watcher_dal.rs index 062ad47219d8..84061a03650d 100644 --- a/core/lib/dal/src/eth_watcher_dal.rs +++ b/core/lib/dal/src/eth_watcher_dal.rs @@ -12,6 +12,7 @@ pub struct EthWatcherDal<'a, 'c> { pub enum EventType { ProtocolUpgrades, PriorityTransactions, + ChainBatchRoot, } impl EthWatcherDal<'_, '_> { diff --git a/core/lib/dal/src/lib.rs b/core/lib/dal/src/lib.rs index 20b428adec44..212ce4b50036 100644 --- a/core/lib/dal/src/lib.rs +++ b/core/lib/dal/src/lib.rs @@ -14,10 +14,10 @@ pub use zksync_db_connection::{ use crate::{ base_token_dal::BaseTokenDal, blocks_dal::BlocksDal, blocks_web3_dal::BlocksWeb3Dal, consensus_dal::ConsensusDal, contract_verification_dal::ContractVerificationDal, - data_availability_dal::DataAvailabilityDal, eth_sender_dal::EthSenderDal, - eth_watcher_dal::EthWatcherDal, events_dal::EventsDal, events_web3_dal::EventsWeb3Dal, - factory_deps_dal::FactoryDepsDal, proof_generation_dal::ProofGenerationDal, - protocol_versions_dal::ProtocolVersionsDal, + 
custom_genesis_export_dal::CustomGenesisExportDal, data_availability_dal::DataAvailabilityDal, + eth_sender_dal::EthSenderDal, eth_watcher_dal::EthWatcherDal, events_dal::EventsDal, + events_web3_dal::EventsWeb3Dal, factory_deps_dal::FactoryDepsDal, + proof_generation_dal::ProofGenerationDal, protocol_versions_dal::ProtocolVersionsDal, protocol_versions_web3_dal::ProtocolVersionsWeb3Dal, pruning_dal::PruningDal, snapshot_recovery_dal::SnapshotRecoveryDal, snapshots_creator_dal::SnapshotsCreatorDal, snapshots_dal::SnapshotsDal, storage_logs_dal::StorageLogsDal, @@ -33,6 +33,7 @@ pub mod blocks_web3_dal; pub mod consensus; pub mod consensus_dal; pub mod contract_verification_dal; +pub mod custom_genesis_export_dal; mod data_availability_dal; pub mod eth_sender_dal; pub mod eth_watcher_dal; @@ -132,6 +133,8 @@ where fn base_token_dal(&mut self) -> BaseTokenDal<'_, 'a>; fn eth_watcher_dal(&mut self) -> EthWatcherDal<'_, 'a>; + + fn custom_genesis_export_dal(&mut self) -> CustomGenesisExportDal<'_, 'a>; } #[derive(Clone, Debug)] @@ -258,4 +261,8 @@ impl<'a> CoreDal<'a> for Connection<'a, Core> { fn eth_watcher_dal(&mut self) -> EthWatcherDal<'_, 'a> { EthWatcherDal { storage: self } } + + fn custom_genesis_export_dal(&mut self) -> CustomGenesisExportDal<'_, 'a> { + CustomGenesisExportDal { storage: self } + } } diff --git a/core/lib/dal/src/models/storage_block.rs b/core/lib/dal/src/models/storage_block.rs index 159ed71cc3e9..54635932a1af 100644 --- a/core/lib/dal/src/models/storage_block.rs +++ b/core/lib/dal/src/models/storage_block.rs @@ -10,7 +10,7 @@ use zksync_types::{ commitment::{L1BatchCommitmentMode, L1BatchMetaParameters, L1BatchMetadata, PubdataParams}, fee_model::{BatchFeeInput, L1PeggedBatchFeeModelInput, PubdataIndependentBatchFeeModelInput}, l2_to_l1_log::{L2ToL1Log, SystemL2ToL1Log, UserL2ToL1Log}, - Address, Bloom, L1BatchNumber, L2BlockNumber, ProtocolVersionId, H256, + Address, Bloom, L1BatchNumber, L2BlockNumber, ProtocolVersionId, SLChainId, H256, 
}; /// This is the gas limit that was used inside blocks before we started saving block gas limit into the database. @@ -317,10 +317,13 @@ pub(crate) struct StorageBlockDetails { pub root_hash: Option>, pub commit_tx_hash: Option, pub committed_at: Option, + pub commit_chain_id: Option, pub prove_tx_hash: Option, pub proven_at: Option, + pub prove_chain_id: Option, pub execute_tx_hash: Option, pub executed_at: Option, + pub execute_chain_id: Option, // L1 gas price assumed in the corresponding batch pub l1_gas_price: i64, // L2 gas price assumed in the corresponding batch @@ -355,6 +358,7 @@ impl From for api::BlockDetails { committed_at: details .committed_at .map(|committed_at| DateTime::from_naive_utc_and_offset(committed_at, Utc)), + commit_chain_id: details.commit_chain_id.map(|id| SLChainId(id as u64)), prove_tx_hash: details .prove_tx_hash .as_deref() @@ -362,6 +366,7 @@ impl From for api::BlockDetails { proven_at: details .proven_at .map(|proven_at| DateTime::::from_naive_utc_and_offset(proven_at, Utc)), + prove_chain_id: details.prove_chain_id.map(|id| SLChainId(id as u64)), execute_tx_hash: details .execute_tx_hash .as_deref() @@ -369,6 +374,7 @@ impl From for api::BlockDetails { executed_at: details .executed_at .map(|executed_at| DateTime::::from_naive_utc_and_offset(executed_at, Utc)), + execute_chain_id: details.execute_chain_id.map(|id| SLChainId(id as u64)), l1_gas_price: details.l1_gas_price as u64, l2_fair_gas_price: details.l2_fair_gas_price as u64, fair_pubdata_price: details.fair_pubdata_price.map(|x| x as u64), @@ -399,10 +405,13 @@ pub(crate) struct StorageL1BatchDetails { pub root_hash: Option>, pub commit_tx_hash: Option, pub committed_at: Option, + pub commit_chain_id: Option, pub prove_tx_hash: Option, pub proven_at: Option, + pub prove_chain_id: Option, pub execute_tx_hash: Option, pub executed_at: Option, + pub execute_chain_id: Option, pub l1_gas_price: i64, pub l2_fair_gas_price: i64, pub fair_pubdata_price: Option, @@ -432,6 +441,7 
@@ impl From for api::L1BatchDetails { committed_at: details .committed_at .map(|committed_at| DateTime::::from_naive_utc_and_offset(committed_at, Utc)), + commit_chain_id: details.commit_chain_id.map(|id| SLChainId(id as u64)), prove_tx_hash: details .prove_tx_hash .as_deref() @@ -439,6 +449,7 @@ impl From for api::L1BatchDetails { proven_at: details .proven_at .map(|proven_at| DateTime::::from_naive_utc_and_offset(proven_at, Utc)), + prove_chain_id: details.prove_chain_id.map(|id| SLChainId(id as u64)), execute_tx_hash: details .execute_tx_hash .as_deref() @@ -446,6 +457,7 @@ impl From for api::L1BatchDetails { executed_at: details .executed_at .map(|executed_at| DateTime::::from_naive_utc_and_offset(executed_at, Utc)), + execute_chain_id: details.execute_chain_id.map(|id| SLChainId(id as u64)), l1_gas_price: details.l1_gas_price as u64, l2_fair_gas_price: details.l2_fair_gas_price as u64, fair_pubdata_price: details.fair_pubdata_price.map(|x| x as u64), diff --git a/core/lib/dal/src/models/storage_eth_tx.rs b/core/lib/dal/src/models/storage_eth_tx.rs index a47f6acfff46..b4a124c5d455 100644 --- a/core/lib/dal/src/models/storage_eth_tx.rs +++ b/core/lib/dal/src/models/storage_eth_tx.rs @@ -17,7 +17,7 @@ pub struct StorageEthTx { pub has_failed: bool, pub confirmed_eth_tx_history_id: Option, pub gas_used: Option, - pub predicted_gas_cost: i64, + pub predicted_gas_cost: Option, pub created_at: NaiveDateTime, pub updated_at: NaiveDateTime, // TODO (SMA-1614): remove the field @@ -80,7 +80,7 @@ impl From for EthTx { raw_tx: tx.raw_tx.clone(), tx_type: AggregatedActionType::from_str(&tx.tx_type).expect("Wrong agg type"), created_at_timestamp: tx.created_at.and_utc().timestamp() as u64, - predicted_gas_cost: tx.predicted_gas_cost as u64, + predicted_gas_cost: tx.predicted_gas_cost.map(|c| c as u64), from_addr: tx.from_addr.map(|f| Address::from_slice(&f)), blob_sidecar: tx.blob_sidecar.map(|b| { bincode::deserialize(&b).expect("EthTxBlobSidecar is encoded correctly; 
qed") diff --git a/core/lib/dal/src/models/storage_tee_proof.rs b/core/lib/dal/src/models/storage_tee_proof.rs index 6f80c59511f9..6e031674b585 100644 --- a/core/lib/dal/src/models/storage_tee_proof.rs +++ b/core/lib/dal/src/models/storage_tee_proof.rs @@ -9,6 +9,7 @@ pub struct StorageTeeProof { pub signature: Option>, pub proof: Option>, pub updated_at: NaiveDateTime, + pub status: String, pub attestation: Option>, } diff --git a/core/lib/dal/src/pruning_dal/mod.rs b/core/lib/dal/src/pruning_dal/mod.rs index bcd9fdcfc3e1..85127ac7030b 100644 --- a/core/lib/dal/src/pruning_dal/mod.rs +++ b/core/lib/dal/src/pruning_dal/mod.rs @@ -1,25 +1,43 @@ use std::ops; use zksync_db_connection::{connection::Connection, error::DalResult, instrument::InstrumentExt}; -use zksync_types::{L1BatchNumber, L2BlockNumber}; +use zksync_types::{L1BatchNumber, L2BlockNumber, H256}; use crate::Core; #[cfg(test)] mod tests; -#[derive(Debug)] -pub struct PruningDal<'a, 'c> { - pub(crate) storage: &'a mut Connection<'c, Core>, +#[derive(Debug, Clone, Copy, PartialEq)] +pub struct SoftPruningInfo { + pub l1_batch: L1BatchNumber, + pub l2_block: L2BlockNumber, +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub struct HardPruningInfo { + pub l1_batch: L1BatchNumber, + pub l2_block: L2BlockNumber, + /// May be set to `None` for old pruning logs. + pub l1_batch_root_hash: Option, } /// Information about Postgres pruning. #[derive(Debug, Clone, Copy, Default, PartialEq)] pub struct PruningInfo { - pub last_soft_pruned_l1_batch: Option, - pub last_soft_pruned_l2_block: Option, - pub last_hard_pruned_l1_batch: Option, - pub last_hard_pruned_l2_block: Option, + /// Information about last soft pruning. Soft pruning is expected to be ahead or equal to hard pruning. + pub last_soft_pruned: Option, + /// Information about last hard pruning. + pub last_hard_pruned: Option, +} + +impl PruningInfo { + /// Returns `true` iff pruning is caught up, i.e., all soft-pruned data is hard-pruned. 
+ pub fn is_caught_up(&self) -> bool { + let soft_pruned_l1_batch = self.last_soft_pruned.map(|info| info.l1_batch); + let hard_pruned_l1_batch = self.last_hard_pruned.map(|info| info.l1_batch); + soft_pruned_l1_batch == hard_pruned_l1_batch + } } /// Statistics about a single hard pruning iteration. @@ -33,6 +51,44 @@ pub struct HardPruningStats { pub deleted_l2_to_l1_logs: u64, } +#[derive(Debug)] +struct StoragePruningInfo { + last_soft_pruned_l1_batch: Option, + last_soft_pruned_l2_block: Option, + last_hard_pruned_l1_batch: Option, + last_hard_pruned_l2_block: Option, + last_hard_pruned_batch_root_hash: Option>, +} + +impl StoragePruningInfo { + fn as_soft(&self) -> Option { + Some(SoftPruningInfo { + l1_batch: L1BatchNumber(self.last_soft_pruned_l1_batch? as u32), + l2_block: L2BlockNumber(self.last_soft_pruned_l2_block? as u32), + }) + } + + fn as_hard(&self) -> Option { + Some(HardPruningInfo { + l1_batch: L1BatchNumber(self.last_hard_pruned_l1_batch? as u32), + l2_block: L2BlockNumber(self.last_hard_pruned_l2_block? 
as u32), + l1_batch_root_hash: self + .last_hard_pruned_batch_root_hash + .as_deref() + .map(H256::from_slice), + }) + } +} + +impl From for PruningInfo { + fn from(row: StoragePruningInfo) -> Self { + Self { + last_soft_pruned: row.as_soft(), + last_hard_pruned: row.as_hard(), + } + } +} + #[derive(Debug, sqlx::Type)] #[sqlx(type_name = "prune_type")] enum PruneType { @@ -40,9 +96,15 @@ enum PruneType { Hard, } +#[derive(Debug)] +pub struct PruningDal<'a, 'c> { + pub(crate) storage: &'a mut Connection<'c, Core>, +} + impl PruningDal<'_, '_> { pub async fn get_pruning_info(&mut self) -> DalResult { - let pruning_info = sqlx::query!( + let row = sqlx::query_as!( + StoragePruningInfo, r#" WITH soft AS ( @@ -62,7 +124,8 @@ impl PruningDal<'_, '_> { hard AS ( SELECT pruned_l1_batch, - pruned_miniblock + pruned_miniblock, + pruned_l1_batch_root_hash FROM pruning_log WHERE @@ -75,36 +138,24 @@ impl PruningDal<'_, '_> { SELECT soft.pruned_l1_batch AS last_soft_pruned_l1_batch, - soft.pruned_miniblock AS last_soft_pruned_miniblock, + soft.pruned_miniblock AS last_soft_pruned_l2_block, hard.pruned_l1_batch AS last_hard_pruned_l1_batch, - hard.pruned_miniblock AS last_hard_pruned_miniblock + hard.pruned_miniblock AS last_hard_pruned_l2_block, + hard.pruned_l1_batch_root_hash AS last_hard_pruned_batch_root_hash FROM soft FULL JOIN hard ON TRUE "# ) - .map(|row| PruningInfo { - last_soft_pruned_l1_batch: row - .last_soft_pruned_l1_batch - .map(|num| L1BatchNumber(num as u32)), - last_soft_pruned_l2_block: row - .last_soft_pruned_miniblock - .map(|num| L2BlockNumber(num as u32)), - last_hard_pruned_l1_batch: row - .last_hard_pruned_l1_batch - .map(|num| L1BatchNumber(num as u32)), - last_hard_pruned_l2_block: row - .last_hard_pruned_miniblock - .map(|num| L2BlockNumber(num as u32)), - }) .instrument("get_last_soft_pruned_batch") .report_latency() .fetch_optional(self.storage) .await?; - Ok(pruning_info.unwrap_or_default()) + + Ok(row.map(PruningInfo::from).unwrap_or_default()) 
} - pub async fn soft_prune_batches_range( + pub async fn insert_soft_pruning_log( &mut self, last_l1_batch_to_prune: L1BatchNumber, last_l2_block_to_prune: L2BlockNumber, @@ -137,6 +188,7 @@ impl PruningDal<'_, '_> { Ok(()) } + /// Does not insert pruning logs; the caller is responsible to do this! pub async fn hard_prune_batches_range( &mut self, last_l1_batch_to_prune: L1BatchNumber, @@ -159,42 +211,38 @@ impl PruningDal<'_, '_> { .fetch_one(self.storage) .await?; - // We don't have any L2 blocks available when recovering from a snapshot - let stats = if let Some(first_l2_block_to_prune) = row.first_miniblock_to_prune { - let first_l2_block_to_prune = L2BlockNumber(first_l2_block_to_prune as u32); - - let deleted_events = self - .delete_events(first_l2_block_to_prune..=last_l2_block_to_prune) - .await?; - let deleted_l2_to_l1_logs = self - .delete_l2_to_l1_logs(first_l2_block_to_prune..=last_l2_block_to_prune) - .await?; - let deleted_call_traces = self - .delete_call_traces(first_l2_block_to_prune..=last_l2_block_to_prune) - .await?; - self.clear_transaction_fields(first_l2_block_to_prune..=last_l2_block_to_prune) - .await?; - - let deleted_storage_logs = self - .prune_storage_logs(first_l2_block_to_prune..=last_l2_block_to_prune) - .await?; - let deleted_l1_batches = self.delete_l1_batches(last_l1_batch_to_prune).await?; - let deleted_l2_blocks = self.delete_l2_blocks(last_l2_block_to_prune).await?; - - HardPruningStats { - deleted_l1_batches, - deleted_l2_blocks, - deleted_events, - deleted_l2_to_l1_logs, - deleted_call_traces, - deleted_storage_logs, - } - } else { - HardPruningStats::default() + let Some(first_l2_block_to_prune) = row.first_miniblock_to_prune else { + return Ok(HardPruningStats::default()); }; - self.insert_hard_pruning_log(last_l1_batch_to_prune, last_l2_block_to_prune) + let first_l2_block_to_prune = L2BlockNumber(first_l2_block_to_prune as u32); + + let deleted_events = self + 
.delete_events(first_l2_block_to_prune..=last_l2_block_to_prune) + .await?; + let deleted_l2_to_l1_logs = self + .delete_l2_to_l1_logs(first_l2_block_to_prune..=last_l2_block_to_prune) + .await?; + let deleted_call_traces = self + .delete_call_traces(first_l2_block_to_prune..=last_l2_block_to_prune) .await?; + self.clear_transaction_fields(first_l2_block_to_prune..=last_l2_block_to_prune) + .await?; + + let deleted_storage_logs = self + .prune_storage_logs(first_l2_block_to_prune..=last_l2_block_to_prune) + .await?; + let deleted_l1_batches = self.delete_l1_batches(last_l1_batch_to_prune).await?; + let deleted_l2_blocks = self.delete_l2_blocks(last_l2_block_to_prune).await?; + + let stats = HardPruningStats { + deleted_l1_batches, + deleted_l2_blocks, + deleted_events, + deleted_l2_to_l1_logs, + deleted_call_traces, + deleted_storage_logs, + }; Ok(stats) } @@ -389,10 +437,11 @@ impl PruningDal<'_, '_> { Ok(execution_result.rows_affected()) } - async fn insert_hard_pruning_log( + pub async fn insert_hard_pruning_log( &mut self, last_l1_batch_to_prune: L1BatchNumber, last_l2_block_to_prune: L2BlockNumber, + last_pruned_l1_batch_root_hash: H256, ) -> DalResult<()> { sqlx::query!( r#" @@ -400,15 +449,17 @@ impl PruningDal<'_, '_> { pruning_log ( pruned_l1_batch, pruned_miniblock, + pruned_l1_batch_root_hash, type, created_at, updated_at ) VALUES - ($1, $2, $3, NOW(), NOW()) + ($1, $2, $3, $4, NOW(), NOW()) "#, i64::from(last_l1_batch_to_prune.0), i64::from(last_l2_block_to_prune.0), + last_pruned_l1_batch_root_hash.as_bytes(), PruneType::Hard as PruneType ) .instrument("hard_prune_batches_range#insert_pruning_log") diff --git a/core/lib/dal/src/pruning_dal/tests.rs b/core/lib/dal/src/pruning_dal/tests.rs index 70dda48d8c82..14f664a401fe 100644 --- a/core/lib/dal/src/pruning_dal/tests.rs +++ b/core/lib/dal/src/pruning_dal/tests.rs @@ -96,6 +96,13 @@ async fn insert_l1_batch(conn: &mut Connection<'_, Core>, l1_batch_number: L1Bat .insert_mock_l1_batch(&header) .await 
.unwrap(); + conn.blocks_dal() + .set_l1_batch_hash( + l1_batch_number, + H256::from_low_u64_be(l1_batch_number.0.into()), + ) + .await + .unwrap(); } async fn insert_realistic_l1_batches(conn: &mut Connection<'_, Core>, l1_batches_count: u32) { @@ -121,11 +128,11 @@ async fn insert_realistic_l1_batches(conn: &mut Connection<'_, Core>, l1_batches } } -async fn assert_l1_batch_objects_exists( +async fn assert_l1_batches_exist( conn: &mut Connection<'_, Core>, l1_batches_range: ops::RangeInclusive, ) { - for l1_batch_number in l1_batches_range.start().0..l1_batches_range.end().0 { + for l1_batch_number in l1_batches_range.start().0..=l1_batches_range.end().0 { let l1_batch_number = L1BatchNumber(l1_batch_number); assert!(conn .blocks_dal() @@ -150,7 +157,7 @@ async fn assert_l1_batch_objects_exists( } } -async fn assert_l1_batch_objects_dont_exist( +async fn assert_l1_batches_not_exist( conn: &mut Connection<'_, Core>, l1_batches_range: ops::RangeInclusive, ) { @@ -159,7 +166,7 @@ async fn assert_l1_batch_objects_dont_exist( .dump_all_storage_logs_for_tests() .await; - for l1_batch_number in l1_batches_range.start().0..l1_batches_range.end().0 { + for l1_batch_number in l1_batches_range.start().0..=l1_batches_range.end().0 { let l1_batch_number = L1BatchNumber(l1_batch_number); let mut l2_block_number = L2BlockNumber(l1_batch_number.0 * 2); assert!(conn @@ -204,55 +211,60 @@ async fn soft_pruning_works() { assert_eq!( PruningInfo { - last_soft_pruned_l2_block: None, - last_soft_pruned_l1_batch: None, - last_hard_pruned_l2_block: None, - last_hard_pruned_l1_batch: None + last_soft_pruned: None, + last_hard_pruned: None, }, transaction.pruning_dal().get_pruning_info().await.unwrap() ); transaction .pruning_dal() - .soft_prune_batches_range(L1BatchNumber(5), L2BlockNumber(11)) + .insert_soft_pruning_log(L1BatchNumber(5), L2BlockNumber(11)) .await .unwrap(); assert_eq!( PruningInfo { - last_soft_pruned_l2_block: Some(L2BlockNumber(11)), - last_soft_pruned_l1_batch: 
Some(L1BatchNumber(5)), - last_hard_pruned_l2_block: None, - last_hard_pruned_l1_batch: None + last_soft_pruned: Some(SoftPruningInfo { + l2_block: L2BlockNumber(11), + l1_batch: L1BatchNumber(5), + }), + last_hard_pruned: None, }, transaction.pruning_dal().get_pruning_info().await.unwrap() ); transaction .pruning_dal() - .soft_prune_batches_range(L1BatchNumber(10), L2BlockNumber(21)) + .insert_soft_pruning_log(L1BatchNumber(10), L2BlockNumber(21)) .await .unwrap(); assert_eq!( PruningInfo { - last_soft_pruned_l2_block: Some(L2BlockNumber(21)), - last_soft_pruned_l1_batch: Some(L1BatchNumber(10)), - last_hard_pruned_l2_block: None, - last_hard_pruned_l1_batch: None + last_soft_pruned: Some(SoftPruningInfo { + l2_block: L2BlockNumber(21), + l1_batch: L1BatchNumber(10), + }), + last_hard_pruned: None, }, transaction.pruning_dal().get_pruning_info().await.unwrap() ); transaction .pruning_dal() - .hard_prune_batches_range(L1BatchNumber(10), L2BlockNumber(21)) + .insert_hard_pruning_log(L1BatchNumber(10), L2BlockNumber(21), H256::repeat_byte(23)) .await .unwrap(); assert_eq!( PruningInfo { - last_soft_pruned_l2_block: Some(L2BlockNumber(21)), - last_soft_pruned_l1_batch: Some(L1BatchNumber(10)), - last_hard_pruned_l2_block: Some(L2BlockNumber(21)), - last_hard_pruned_l1_batch: Some(L1BatchNumber(10)) + last_soft_pruned: Some(SoftPruningInfo { + l2_block: L2BlockNumber(21), + l1_batch: L1BatchNumber(10), + }), + last_hard_pruned: Some(HardPruningInfo { + l2_block: L2BlockNumber(21), + l1_batch: L1BatchNumber(10), + l1_batch_root_hash: Some(H256::repeat_byte(23)), + }), }, transaction.pruning_dal().get_pruning_info().await.unwrap() ); @@ -362,7 +374,7 @@ async fn storage_logs_pruning_works_correctly() { let stats = transaction .pruning_dal() - .hard_prune_batches_range(L1BatchNumber(10), L2BlockNumber(21)) + .hard_prune_batches_range(L1BatchNumber(9), L2BlockNumber(19)) .await .unwrap(); let actual_logs = transaction @@ -393,13 +405,13 @@ async fn 
l1_batches_can_be_hard_pruned() { let mut transaction = conn.start_transaction().await.unwrap(); insert_realistic_l1_batches(&mut transaction, 10).await; - assert_l1_batch_objects_exists(&mut transaction, L1BatchNumber(1)..=L1BatchNumber(10)).await; + assert_l1_batches_exist(&mut transaction, L1BatchNumber(1)..=L1BatchNumber(9)).await; assert!(transaction .pruning_dal() .get_pruning_info() .await .unwrap() - .last_hard_pruned_l1_batch + .last_hard_pruned .is_none()); transaction @@ -408,21 +420,12 @@ async fn l1_batches_can_be_hard_pruned() { .await .unwrap(); - assert_l1_batch_objects_dont_exist(&mut transaction, L1BatchNumber(1)..=L1BatchNumber(5)).await; - assert_l1_batch_objects_exists(&mut transaction, L1BatchNumber(6)..=L1BatchNumber(10)).await; - assert_eq!( - Some(L1BatchNumber(5)), - transaction - .pruning_dal() - .get_pruning_info() - .await - .unwrap() - .last_hard_pruned_l1_batch - ); + assert_l1_batches_not_exist(&mut transaction, L1BatchNumber(1)..=L1BatchNumber(5)).await; + assert_l1_batches_exist(&mut transaction, L1BatchNumber(6)..=L1BatchNumber(9)).await; let stats = transaction .pruning_dal() - .hard_prune_batches_range(L1BatchNumber(10), L2BlockNumber(21)) + .hard_prune_batches_range(L1BatchNumber(9), L2BlockNumber(19)) .await .unwrap(); assert_eq!(stats.deleted_l1_batches, 4); @@ -430,17 +433,7 @@ async fn l1_batches_can_be_hard_pruned() { assert_eq!(stats.deleted_events, 40); assert_eq!(stats.deleted_l2_to_l1_logs, 40); - assert_l1_batch_objects_dont_exist(&mut transaction, L1BatchNumber(1)..=L1BatchNumber(10)) - .await; - assert_eq!( - Some(L1BatchNumber(10)), - transaction - .pruning_dal() - .get_pruning_info() - .await - .unwrap() - .last_hard_pruned_l1_batch - ); + assert_l1_batches_not_exist(&mut transaction, L1BatchNumber(1)..=L1BatchNumber(9)).await; } #[tokio::test] diff --git a/core/lib/dal/src/system_dal.rs b/core/lib/dal/src/system_dal.rs index 105665fa2ec6..f4de4faf8eb3 100644 --- a/core/lib/dal/src/system_dal.rs +++ 
b/core/lib/dal/src/system_dal.rs @@ -1,5 +1,7 @@ use std::{collections::HashMap, time::Duration}; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; use zksync_db_connection::{connection::Connection, error::DalResult, instrument::InstrumentExt}; use crate::Core; @@ -12,6 +14,16 @@ pub(crate) struct TableSize { pub total_size: u64, } +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DatabaseMigration { + pub version: i64, + pub description: String, + pub installed_on: DateTime, + pub success: bool, + pub checksum: String, + pub execution_time: Duration, +} + #[derive(Debug)] pub struct SystemDal<'a, 'c> { pub(crate) storage: &'a mut Connection<'c, Core>, @@ -86,4 +98,27 @@ impl SystemDal<'_, '_> { }); Ok(table_sizes.collect()) } + + pub async fn get_last_migration(&mut self) -> DalResult { + let row = sqlx::query!( + r#" + SELECT * + FROM _sqlx_migrations + ORDER BY _sqlx_migrations.version DESC + LIMIT 1 + "# + ) + .instrument("get_last_migration") + .fetch_one(self.storage) + .await?; + + Ok(DatabaseMigration { + version: row.version, + description: row.description, + installed_on: row.installed_on, + success: row.success, + checksum: hex::encode(row.checksum), + execution_time: Duration::from_nanos(u64::try_from(row.execution_time).unwrap_or(0)), + }) + } } diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index 4d19c3ff0c8b..12761a3d6d34 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -7,6 +7,7 @@ use zksync_db_connection::{ connection::Connection, error::DalResult, instrument::{InstrumentExt, Instrumented}, + interpolate_query, match_query_as, utils::pg_interval_from_duration, }; use zksync_types::{tee_types::TeeType, L1BatchNumber}; @@ -63,72 +64,102 @@ impl TeeProofGenerationDal<'_, '_> { ) -> DalResult> { let processing_timeout = pg_interval_from_duration(processing_timeout); let min_batch_number = 
i64::from(min_batch_number.0); + let mut transaction = self.storage.start_transaction().await?; + + // Lock the entire tee_proof_generation_details table in EXCLUSIVE mode to prevent race + // conditions. Locking the table ensures that two different TEE prover instances will not + // try to prove the same batch. + sqlx::query("LOCK TABLE tee_proof_generation_details IN EXCLUSIVE MODE") + .instrument("lock_batch_for_proving#lock_table") + .execute(&mut transaction) + .await?; + + // The tee_proof_generation_details table does not have corresponding entries yet if this is + // the first time the query is invoked for a batch. + let batch_number = sqlx::query!( + r#" + SELECT + p.l1_batch_number + FROM + proof_generation_details p + LEFT JOIN + tee_proof_generation_details tee + ON + p.l1_batch_number = tee.l1_batch_number + AND tee.tee_type = $1 + WHERE + ( + p.l1_batch_number >= $5 + AND p.vm_run_data_blob_url IS NOT NULL + AND p.proof_gen_data_blob_url IS NOT NULL + ) + AND ( + tee.l1_batch_number IS NULL + OR ( + (tee.status = $2 OR tee.status = $3) + AND tee.prover_taken_at < NOW() - $4::INTERVAL + ) + ) + LIMIT 1 + "#, + tee_type.to_string(), + TeeProofGenerationJobStatus::PickedByProver.to_string(), + TeeProofGenerationJobStatus::Failed.to_string(), + processing_timeout, + min_batch_number + ) + .instrument("lock_batch_for_proving#get_batch_no") + .with_arg("tee_type", &tee_type) + .with_arg("processing_timeout", &processing_timeout) + .with_arg("min_batch_number", &min_batch_number) + .fetch_optional(&mut transaction) + .await?; + + let batch_number = match batch_number { + Some(batch) => batch.l1_batch_number, + None => { + return Ok(None); + } + }; + let locked_batch = sqlx::query_as!( StorageLockedBatch, r#" - WITH upsert AS ( - SELECT - p.l1_batch_number - FROM - proof_generation_details p - LEFT JOIN - tee_proof_generation_details tee - ON - p.l1_batch_number = tee.l1_batch_number - AND tee.tee_type = $1 - WHERE - ( - p.l1_batch_number >= $5 - AND 
p.vm_run_data_blob_url IS NOT NULL - AND p.proof_gen_data_blob_url IS NOT NULL - ) - AND ( - tee.l1_batch_number IS NULL - OR ( - (tee.status = $2 OR tee.status = $3) - AND tee.prover_taken_at < NOW() - $4::INTERVAL - ) - ) - FETCH FIRST ROW ONLY - ) - INSERT INTO tee_proof_generation_details ( l1_batch_number, tee_type, status, created_at, updated_at, prover_taken_at ) - SELECT - l1_batch_number, + VALUES + ( $1, $2, + $3, NOW(), NOW(), NOW() - FROM - upsert + ) ON CONFLICT (l1_batch_number, tee_type) DO UPDATE SET - status = $2, + status = $3, updated_at = NOW(), prover_taken_at = NOW() RETURNING l1_batch_number, created_at "#, + batch_number, tee_type.to_string(), TeeProofGenerationJobStatus::PickedByProver.to_string(), - TeeProofGenerationJobStatus::Failed.to_string(), - processing_timeout, - min_batch_number ) - .instrument("lock_batch_for_proving") + .instrument("lock_batch_for_proving#insert") + .with_arg("batch_number", &batch_number) .with_arg("tee_type", &tee_type) - .with_arg("processing_timeout", &processing_timeout) - .with_arg("l1_batch_number", &min_batch_number) - .fetch_optional(self.storage) + .fetch_optional(&mut transaction) .await? 
.map(Into::into); + transaction.commit().await?; Ok(locked_batch) } @@ -242,13 +273,16 @@ impl TeeProofGenerationDal<'_, '_> { batch_number: L1BatchNumber, tee_type: Option, ) -> DalResult> { - let query = format!( + let query = match_query_as!( + StorageTeeProof, + [ r#" SELECT tp.pubkey, tp.signature, tp.proof, tp.updated_at, + tp.status, ta.attestation FROM tee_proof_generation_details tp @@ -256,22 +290,22 @@ impl TeeProofGenerationDal<'_, '_> { tee_attestations ta ON tp.pubkey = ta.pubkey WHERE tp.l1_batch_number = $1 - AND tp.status = $2 - {} - ORDER BY tp.l1_batch_number ASC, tp.tee_type ASC "#, - tee_type.map_or_else(String::new, |_| "AND tp.tee_type = $3".to_string()) + _, + "ORDER BY tp.l1_batch_number ASC, tp.tee_type ASC" + ], + match(&tee_type) { + Some(tee_type) => + ("AND tp.tee_type = $2"; i64::from(batch_number.0), tee_type.to_string()), + None => (""; i64::from(batch_number.0)), + } ); - let mut query = sqlx::query_as(&query) - .bind(i64::from(batch_number.0)) - .bind(TeeProofGenerationJobStatus::Generated.to_string()); - - if let Some(tee_type) = tee_type { - query = query.bind(tee_type.to_string()); - } - - let proofs: Vec = query.fetch_all(self.storage.conn()).await.unwrap(); + let proofs = query + .instrument("get_tee_proofs") + .with_arg("l1_batch_number", &batch_number) + .fetch_all(self.storage) + .await?; Ok(proofs) } diff --git a/core/lib/env_config/src/eth_sender.rs b/core/lib/env_config/src/eth_sender.rs index 0fd61fd173b6..f822109ed935 100644 --- a/core/lib/env_config/src/eth_sender.rs +++ b/core/lib/env_config/src/eth_sender.rs @@ -23,6 +23,9 @@ impl FromEnv for L1Secrets { .context("ETH_CLIENT_WEB3_URL")? 
.parse() .context("ETH_CLIENT_WEB3_URL")?, + gateway_rpc_url: std::env::var("ETH_CLIENT_GATEWAY_WEB3_URL") + .ok() + .map(|url| url.parse().expect("ETH_CLIENT_GATEWAY_WEB3_URL")), }) } } @@ -53,7 +56,6 @@ mod tests { ( EthConfig { sender: Some(SenderConfig { - aggregated_proof_sizes: vec![1, 5], aggregated_block_commit_deadline: 30, aggregated_block_prove_deadline: 3_000, aggregated_block_execute_deadline: 4_000, @@ -97,6 +99,7 @@ mod tests { }, L1Secrets { l1_rpc_url: "http://127.0.0.1:8545".to_string().parse().unwrap(), + gateway_rpc_url: Some("http://127.0.0.1:8547".to_string().parse().unwrap()), }, ) } @@ -124,7 +127,6 @@ mod tests { ETH_SENDER_GAS_ADJUSTER_MAX_BLOB_BASE_FEE_SAMPLES="10" ETH_SENDER_GAS_ADJUSTER_INTERNAL_PUBDATA_PRICING_MULTIPLIER="1.0" ETH_SENDER_WAIT_FOR_PROOFS="false" - ETH_SENDER_SENDER_AGGREGATED_PROOF_SIZES="1,5" ETH_SENDER_SENDER_MAX_AGGREGATED_BLOCKS_TO_COMMIT="3" ETH_SENDER_SENDER_MAX_AGGREGATED_BLOCKS_TO_EXECUTE="4" ETH_SENDER_SENDER_AGGREGATED_BLOCK_COMMIT_DEADLINE="30" @@ -140,6 +142,7 @@ mod tests { ETH_WATCH_CONFIRMATIONS_FOR_ETH_EVENT="0" ETH_WATCH_ETH_NODE_POLL_INTERVAL="300" ETH_CLIENT_WEB3_URL="http://127.0.0.1:8545" + ETH_CLIENT_GATEWAY_WEB3_URL="http://127.0.0.1:8547" "#; lock.set_env(config); diff --git a/core/lib/env_config/src/genesis.rs b/core/lib/env_config/src/genesis.rs index 55c79eceb502..db0228d91c13 100644 --- a/core/lib/env_config/src/genesis.rs +++ b/core/lib/env_config/src/genesis.rs @@ -37,6 +37,19 @@ impl FromEnv for ContractsForGenesis { } } +// For initializing genesis file from env it's required to have an additional struct, +// because these data is not present in any other structs +#[derive(Deserialize, Serialize, Debug, Clone)] +struct CustomGenesisState { + pub path: Option, +} + +impl FromEnv for CustomGenesisState { + fn from_env() -> anyhow::Result { + envy_load("custom_genesis_state", "CUSTOM_GENESIS_STATE_") + } +} + impl FromEnv for GenesisConfig { fn from_env() -> anyhow::Result { // Getting 
genesis from environmental variables is a temporary measure, that will be @@ -44,6 +57,7 @@ impl FromEnv for GenesisConfig { // #PLA-811 let network_config = &NetworkConfig::from_env()?; let contracts_config = &ContractsForGenesis::from_env()?; + let custom_genesis_state_config = CustomGenesisState::from_env()?; let state_keeper = StateKeeperConfig::from_env()?; // This is needed for backward compatibility, so if the new variable `genesis_protocol_semantic_version` @@ -79,6 +93,7 @@ impl FromEnv for GenesisConfig { .context("Fee account required for genesis")?, dummy_verifier: false, l1_batch_commit_data_generator_mode: state_keeper.l1_batch_commit_data_generator_mode, + custom_genesis_state_path: custom_genesis_state_config.path, }) } } diff --git a/core/lib/eth_client/src/clients/http/query.rs b/core/lib/eth_client/src/clients/http/query.rs index e2c64c330fdc..414946d07710 100644 --- a/core/lib/eth_client/src/clients/http/query.rs +++ b/core/lib/eth_client/src/clients/http/query.rs @@ -430,7 +430,7 @@ where let chunk_size = chunk_end - chunk_start + 1; let fee_history = client - .fee_history(U64::from(chunk_size).into(), chunk_end.into(), vec![]) + .fee_history(U64::from(chunk_size).into(), chunk_end.into(), None) .rpc_context("fee_history") .with_arg("chunk_size", &chunk_size) .with_arg("block", &chunk_end) diff --git a/core/lib/eth_client/src/clients/mock.rs b/core/lib/eth_client/src/clients/mock.rs index 8e81b6c6f209..2b0100a39dc6 100644 --- a/core/lib/eth_client/src/clients/mock.rs +++ b/core/lib/eth_client/src/clients/mock.rs @@ -10,7 +10,7 @@ use zksync_types::{ api::FeeHistory, ethabi, web3::{self, contract::Tokenize, BlockId}, - Address, L1ChainId, L2ChainId, SLChainId, EIP_4844_TX_TYPE, H160, H256, U256, U64, + Address, L2ChainId, SLChainId, EIP_4844_TX_TYPE, H160, H256, U256, U64, }; use zksync_web3_decl::client::{MockClient, MockClientBuilder, Network, L1, L2}; @@ -237,6 +237,7 @@ pub struct MockSettlementLayerBuilder { non_ordering_confirmations: 
bool, inner: Arc>, call_handler: Box, + chain_id: u64, _network: PhantomData, } @@ -267,6 +268,7 @@ impl Default for MockSettlementLayerBuilder { call_handler: Box::new(|call, block_id| { panic!("Unexpected eth_call: {call:?}, {block_id:?}"); }), + chain_id: 9, _network: PhantomData, } } @@ -315,6 +317,10 @@ impl MockSettlementLayerBuilder { } } + pub fn with_chain_id(self, chain_id: u64) -> Self { + Self { chain_id, ..self } + } + fn get_block_by_number( fee_history: &[BaseFees], block: web3::BlockNumber, @@ -449,12 +455,12 @@ fn l2_eth_fee_history( impl SupportedMockSLNetwork for L1 { fn build_client(builder: MockSettlementLayerBuilder) -> MockClient { - const CHAIN_ID: L1ChainId = L1ChainId(9); - let base_fee_history = builder.base_fee_history.clone(); + let chain_id = builder.chain_id; + let net = SLChainId(builder.chain_id).into(); builder - .build_client_inner(CHAIN_ID.0, CHAIN_ID.into()) + .build_client_inner(chain_id, net) .method( "eth_feeHistory", move |block_count: U64, newest_block: web3::BlockNumber, _: Option>| { @@ -467,12 +473,12 @@ impl SupportedMockSLNetwork for L1 { impl SupportedMockSLNetwork for L2 { fn build_client(builder: MockSettlementLayerBuilder) -> MockClient { - let chain_id: L2ChainId = 9u64.try_into().unwrap(); - let base_fee_history = builder.base_fee_history.clone(); + let chain_id = builder.chain_id; + let net = L2ChainId::new(builder.chain_id).unwrap().into(); builder - .build_client_inner(chain_id.as_u64(), chain_id.into()) + .build_client_inner(chain_id, net) .method( "eth_feeHistory", move |block_count: U64, newest_block: web3::BlockNumber, _: Option>| { diff --git a/core/lib/health_check/src/lib.rs b/core/lib/health_check/src/lib.rs index e4e8ba3c9a58..76b1c4d8b0ff 100644 --- a/core/lib/health_check/src/lib.rs +++ b/core/lib/health_check/src/lib.rs @@ -12,10 +12,10 @@ use futures::future; use serde::Serialize; use tokio::sync::watch; -use self::metrics::{CheckResult, METRICS}; -use crate::metrics::AppHealthCheckConfig; +use 
crate::metrics::{AppHealthCheckConfig, CheckResult, METRICS}; mod metrics; + #[cfg(test)] mod tests; @@ -111,6 +111,8 @@ pub struct AppHealthCheck { #[derive(Debug, Clone)] struct AppHealthCheckInner { + /// Application-level health details. + app_details: Option, components: Vec>, slow_time_limit: Duration, hard_time_limit: Duration, @@ -133,6 +135,7 @@ impl AppHealthCheck { let inner = AppHealthCheckInner { components: Vec::default(), + app_details: None, slow_time_limit, hard_time_limit, }; @@ -178,6 +181,13 @@ impl AppHealthCheck { } } + /// Sets app-level health details. They can include build info etc. + pub fn set_details(&self, details: impl Serialize) { + let details = serde_json::to_value(details).expect("failed serializing app details"); + let mut inner = self.inner.lock().expect("`AppHealthCheck` is poisoned"); + inner.app_details = Some(details); + } + /// Inserts health check for a component. /// /// # Errors @@ -217,6 +227,7 @@ impl AppHealthCheck { // Clone `inner` so that we don't hold a lock for them across a wait point. 
let AppHealthCheckInner { components, + app_details, slow_time_limit, hard_time_limit, } = self @@ -235,7 +246,8 @@ impl AppHealthCheck { .map(|health| health.status) .max_by_key(|status| status.priority_for_aggregation()) .unwrap_or(HealthStatus::Ready); - let inner = aggregated_status.into(); + let mut inner = Health::from(aggregated_status); + inner.details = app_details.clone(); let health = AppHealth { inner, components }; if !health.inner.status.is_healthy() { diff --git a/core/lib/health_check/src/tests.rs b/core/lib/health_check/src/tests.rs index 14c610e9fd83..76863db05415 100644 --- a/core/lib/health_check/src/tests.rs +++ b/core/lib/health_check/src/tests.rs @@ -82,6 +82,7 @@ async fn aggregating_health_checks() { let (first_check, first_updater) = ReactiveHealthCheck::new("first"); let (second_check, second_updater) = ReactiveHealthCheck::new("second"); let inner = AppHealthCheckInner { + app_details: None, components: vec![Arc::new(first_check), Arc::new(second_check)], slow_time_limit: AppHealthCheck::DEFAULT_SLOW_TIME_LIMIT, hard_time_limit: AppHealthCheck::DEFAULT_HARD_TIME_LIMIT, diff --git a/core/lib/l1_contract_interface/src/i_executor/methods/commit_batches.rs b/core/lib/l1_contract_interface/src/i_executor/methods/commit_batches.rs index 67819f7d7ccd..01e362fb7d65 100644 --- a/core/lib/l1_contract_interface/src/i_executor/methods/commit_batches.rs +++ b/core/lib/l1_contract_interface/src/i_executor/methods/commit_batches.rs @@ -1,11 +1,11 @@ use zksync_types::{ commitment::{L1BatchCommitmentMode, L1BatchWithMetadata}, - ethabi::Token, + ethabi::{encode, Token}, pubdata_da::PubdataSendingMode, }; use crate::{ - i_executor::structures::{CommitBatchInfo, StoredBatchInfo}, + i_executor::structures::{CommitBatchInfo, StoredBatchInfo, SUPPORTED_ENCODING_VERSION}, Tokenizable, Tokenize, }; @@ -20,13 +20,29 @@ pub struct CommitBatches<'a> { impl Tokenize for CommitBatches<'_> { fn into_tokens(self) -> Vec { + let protocol_version = 
self.l1_batches[0].header.protocol_version.unwrap(); let stored_batch_info = StoredBatchInfo::from(self.last_committed_l1_batch).into_token(); let l1_batches_to_commit = self .l1_batches .iter() .map(|batch| CommitBatchInfo::new(self.mode, batch, self.pubdata_da).into_token()) .collect(); - - vec![stored_batch_info, Token::Array(l1_batches_to_commit)] + if protocol_version.is_pre_gateway() { + vec![stored_batch_info, Token::Array(l1_batches_to_commit)] + } else { + let mut encoded_data = encode(&[ + stored_batch_info.clone(), + Token::Array(l1_batches_to_commit), + ]); + encoded_data.insert(0, SUPPORTED_ENCODING_VERSION); + vec![ + Token::Uint((self.last_committed_l1_batch.header.number.0 + 1).into()), + Token::Uint( + (self.last_committed_l1_batch.header.number.0 + self.l1_batches.len() as u32) + .into(), + ), + Token::Bytes(encoded_data), + ] + } } } diff --git a/core/lib/l1_contract_interface/src/i_executor/methods/execute_batches.rs b/core/lib/l1_contract_interface/src/i_executor/methods/execute_batches.rs index fe5213d8c561..649a7ca2b419 100644 --- a/core/lib/l1_contract_interface/src/i_executor/methods/execute_batches.rs +++ b/core/lib/l1_contract_interface/src/i_executor/methods/execute_batches.rs @@ -1,20 +1,55 @@ -use zksync_types::{commitment::L1BatchWithMetadata, ethabi::Token}; +use zksync_types::{ + commitment::{L1BatchWithMetadata, PriorityOpsMerkleProof}, + ethabi::{encode, Token}, +}; -use crate::{i_executor::structures::StoredBatchInfo, Tokenizable, Tokenize}; +use crate::{ + i_executor::structures::{StoredBatchInfo, SUPPORTED_ENCODING_VERSION}, + Tokenizable, Tokenize, +}; /// Input required to encode `executeBatches` call. 
#[derive(Debug, Clone)] pub struct ExecuteBatches { pub l1_batches: Vec, + pub priority_ops_proofs: Vec, } impl Tokenize for &ExecuteBatches { fn into_tokens(self) -> Vec { - vec![Token::Array( - self.l1_batches - .iter() - .map(|batch| StoredBatchInfo::from(batch).into_token()) - .collect(), - )] + let protocol_version = self.l1_batches[0].header.protocol_version.unwrap(); + + if protocol_version.is_pre_gateway() { + vec![Token::Array( + self.l1_batches + .iter() + .map(|batch| StoredBatchInfo::from(batch).into_token()) + .collect(), + )] + } else { + let encoded_data = encode(&[ + Token::Array( + self.l1_batches + .iter() + .map(|batch| StoredBatchInfo::from(batch).into_token()) + .collect(), + ), + Token::Array( + self.priority_ops_proofs + .iter() + .map(|proof| proof.into_token()) + .collect(), + ), + ]); + let execute_data = [[SUPPORTED_ENCODING_VERSION].to_vec(), encoded_data] + .concat() + .to_vec(); + + vec![ + Token::Uint(self.l1_batches[0].header.number.0.into()), + Token::Uint(self.l1_batches.last().unwrap().header.number.0.into()), + Token::Bytes(execute_data), + ] + } } } diff --git a/core/lib/l1_contract_interface/src/i_executor/methods/prove_batches.rs b/core/lib/l1_contract_interface/src/i_executor/methods/prove_batches.rs index 935d8a44e0b7..a54cf407d09f 100644 --- a/core/lib/l1_contract_interface/src/i_executor/methods/prove_batches.rs +++ b/core/lib/l1_contract_interface/src/i_executor/methods/prove_batches.rs @@ -1,8 +1,14 @@ use crypto_codegen::serialize_proof; use zksync_prover_interface::outputs::L1BatchProofForL1; -use zksync_types::{commitment::L1BatchWithMetadata, ethabi::Token, U256}; +use zksync_types::{ + commitment::L1BatchWithMetadata, + ethabi::{encode, Token}, +}; -use crate::{i_executor::structures::StoredBatchInfo, Tokenizable, Tokenize}; +use crate::{ + i_executor::structures::{StoredBatchInfo, SUPPORTED_ENCODING_VERSION}, + Tokenizable, Tokenize, +}; /// Input required to encode `proveBatches` call. 
#[derive(Debug, Clone)] @@ -15,13 +21,14 @@ pub struct ProveBatches { impl Tokenize for &ProveBatches { fn into_tokens(self) -> Vec { - let prev_l1_batch = StoredBatchInfo::from(&self.prev_l1_batch).into_token(); + let prev_l1_batch_info = StoredBatchInfo::from(&self.prev_l1_batch).into_token(); let batches_arg = self .l1_batches .iter() .map(|batch| StoredBatchInfo::from(batch).into_token()) .collect(); let batches_arg = Token::Array(batches_arg); + let protocol_version = self.l1_batches[0].header.protocol_version.unwrap(); if self.should_verify { // currently we only support submitting a single proof @@ -29,40 +36,53 @@ impl Tokenize for &ProveBatches { assert_eq!(self.l1_batches.len(), 1); let L1BatchProofForL1 { - aggregation_result_coords, - scheduler_proof, - .. + scheduler_proof, .. } = self.proofs.first().unwrap(); let (_, proof) = serialize_proof(scheduler_proof); - let aggregation_result_coords = if self.l1_batches[0] - .header - .protocol_version - .unwrap() - .is_pre_boojum() - { - Token::Array( - aggregation_result_coords - .iter() - .map(|bytes| Token::Uint(U256::from_big_endian(bytes))) - .collect(), - ) + if protocol_version.is_pre_gateway() { + let proof_input = Token::Tuple(vec![ + Token::Array(Vec::new()), + Token::Array(proof.into_iter().map(Token::Uint).collect()), + ]); + + vec![prev_l1_batch_info, batches_arg, proof_input] } else { - Token::Array(Vec::new()) - }; - let proof_input = Token::Tuple(vec![ - aggregation_result_coords, - Token::Array(proof.into_iter().map(Token::Uint).collect()), - ]); + let proof_input = Token::Array(proof.into_iter().map(Token::Uint).collect()); - vec![prev_l1_batch, batches_arg, proof_input] - } else { + let encoded_data = encode(&[prev_l1_batch_info, batches_arg, proof_input]); + let prove_data = [[SUPPORTED_ENCODING_VERSION].to_vec(), encoded_data] + .concat() + .to_vec(); + + vec![ + Token::Uint((self.prev_l1_batch.header.number.0 + 1).into()), + Token::Uint( + (self.prev_l1_batch.header.number.0 + 
self.l1_batches.len() as u32).into(), + ), + Token::Bytes(prove_data), + ] + } + } else if protocol_version.is_pre_gateway() { vec![ - prev_l1_batch, + prev_l1_batch_info, batches_arg, Token::Tuple(vec![Token::Array(vec![]), Token::Array(vec![])]), ] + } else { + let encoded_data = encode(&[prev_l1_batch_info, batches_arg, Token::Array(vec![])]); + let prove_data = [[SUPPORTED_ENCODING_VERSION].to_vec(), encoded_data] + .concat() + .to_vec(); + + vec![ + Token::Uint((self.prev_l1_batch.header.number.0 + 1).into()), + Token::Uint( + (self.prev_l1_batch.header.number.0 + self.l1_batches.len() as u32).into(), + ), + Token::Bytes(prove_data), + ] } } } diff --git a/core/lib/l1_contract_interface/src/i_executor/structures/commit_batch_info.rs b/core/lib/l1_contract_interface/src/i_executor/structures/commit_batch_info.rs index 6438aeb7f55c..573b3c65a3e3 100644 --- a/core/lib/l1_contract_interface/src/i_executor/structures/commit_batch_info.rs +++ b/core/lib/l1_contract_interface/src/i_executor/structures/commit_batch_info.rs @@ -3,10 +3,10 @@ use zksync_types::{ pre_boojum_serialize_commitments, serialize_commitments, L1BatchCommitmentMode, L1BatchWithMetadata, }, - ethabi::Token, + ethabi::{ParamType, Token}, pubdata_da::PubdataSendingMode, - web3::contract::Error as ContractError, - ProtocolVersionId, U256, + web3::{contract::Error as ContractError, keccak256}, + ProtocolVersionId, H256, U256, }; use crate::{ @@ -15,9 +15,9 @@ use crate::{ }; /// These are used by the L1 Contracts to indicate what DA layer is used for pubdata -const PUBDATA_SOURCE_CALLDATA: u8 = 0; -const PUBDATA_SOURCE_BLOBS: u8 = 1; -const PUBDATA_SOURCE_CUSTOM: u8 = 2; +pub const PUBDATA_SOURCE_CALLDATA: u8 = 0; +pub const PUBDATA_SOURCE_BLOBS: u8 = 1; +pub const PUBDATA_SOURCE_CUSTOM_PRE_GATEWAY: u8 = 2; /// Encoding for `CommitBatchInfo` from `IExecutor.sol` for a contract running in rollup mode. 
#[derive(Debug)] @@ -40,6 +40,21 @@ impl<'a> CommitBatchInfo<'a> { } } + pub fn post_gateway_schema() -> ParamType { + ParamType::Tuple(vec![ + ParamType::Uint(64), // `batch_number` + ParamType::Uint(64), // `timestamp` + ParamType::Uint(64), // `index_repeated_storage_changes` + ParamType::FixedBytes(32), // `new_state_root` + ParamType::Uint(256), // `numberOfLayer1Txs` + ParamType::FixedBytes(32), // `priorityOperationsHash` + ParamType::FixedBytes(32), // `bootloaderHeapInitialContentsHash` + ParamType::FixedBytes(32), // `eventsQueueStateHash` + ParamType::Bytes, // `systemLogs` + ParamType::Bytes, // `operatorDAInput` + ]) + } + fn base_tokens(&self) -> Vec { if self .l1_batch_with_metadata @@ -199,7 +214,7 @@ impl Tokenizable for CommitBatchInfo<'_> { // Here we're not pushing any pubdata on purpose; no pubdata is sent in Validium mode. L1BatchCommitmentMode::Validium => vec![], })); - } else { + } else if protocol_version.is_pre_gateway() { tokens.push(Token::Bytes(match (self.mode, self.pubdata_da) { // Here we're not pushing any pubdata on purpose; no pubdata is sent in Validium mode. ( @@ -211,14 +226,12 @@ impl Tokenizable for CommitBatchInfo<'_> { (L1BatchCommitmentMode::Validium, PubdataSendingMode::Blobs) => { vec![PUBDATA_SOURCE_BLOBS] } - (L1BatchCommitmentMode::Rollup, PubdataSendingMode::Custom) => { panic!("Custom pubdata DA is incompatible with Rollup mode") } (L1BatchCommitmentMode::Validium, PubdataSendingMode::Custom) => { - vec![PUBDATA_SOURCE_CUSTOM] + vec![PUBDATA_SOURCE_CUSTOM_PRE_GATEWAY] } - ( L1BatchCommitmentMode::Rollup, PubdataSendingMode::Calldata | PubdataSendingMode::RelayedL2Calldata, @@ -227,7 +240,8 @@ impl Tokenizable for CommitBatchInfo<'_> { // even if we are not using blobs. 
let pubdata = self.pubdata_input(); let blob_commitment = KzgInfo::new(&pubdata).to_blob_commitment(); - std::iter::once(PUBDATA_SOURCE_CALLDATA) + [PUBDATA_SOURCE_CALLDATA] + .into_iter() .chain(pubdata) .chain(blob_commitment) .collect() @@ -239,7 +253,86 @@ impl Tokenizable for CommitBatchInfo<'_> { let kzg_info = KzgInfo::new(blob); kzg_info.to_pubdata_commitment() }); - std::iter::once(PUBDATA_SOURCE_BLOBS) + [PUBDATA_SOURCE_BLOBS] + .into_iter() + .chain(pubdata_commitments) + .collect() + } + })); + } else { + let state_diff_hash = self + .l1_batch_with_metadata + .metadata + .state_diff_hash + .expect("Failed to get state_diff_hash from metadata"); + tokens.push(Token::Bytes(match (self.mode, self.pubdata_da) { + // Validiums with custom DA need the inclusion data to be part of operator_da_input + (L1BatchCommitmentMode::Validium, PubdataSendingMode::Custom) => { + let mut operator_da_input: Vec = state_diff_hash.0.into(); + + operator_da_input.extend( + &self + .l1_batch_with_metadata + .metadata + .da_inclusion_data + .clone() + .unwrap_or_default(), + ); + + operator_da_input + } + // Here we're not pushing any pubdata on purpose; no pubdata is sent in Validium mode. + ( + L1BatchCommitmentMode::Validium, + PubdataSendingMode::Calldata + | PubdataSendingMode::RelayedL2Calldata + | PubdataSendingMode::Blobs, + ) => state_diff_hash.0.into(), + (L1BatchCommitmentMode::Rollup, PubdataSendingMode::Custom) => { + panic!("Custom pubdata DA is incompatible with Rollup mode") + } + ( + L1BatchCommitmentMode::Rollup, + PubdataSendingMode::Calldata | PubdataSendingMode::RelayedL2Calldata, + ) => { + let pubdata = self.pubdata_input(); + + let header = + compose_header_for_l1_commit_rollup(state_diff_hash, pubdata.clone()); + + // We compute and add the blob commitment to the pubdata payload so that we can verify the proof + // even if we are not using blobs. 
+ let blob_commitment = KzgInfo::new(&pubdata).to_blob_commitment(); + header + .into_iter() + .chain([PUBDATA_SOURCE_CALLDATA]) + .chain(pubdata) + .chain(blob_commitment) + .collect() + } + (L1BatchCommitmentMode::Rollup, PubdataSendingMode::Blobs) => { + let pubdata = self.pubdata_input(); + + let header = + compose_header_for_l1_commit_rollup(state_diff_hash, pubdata.clone()); + + let pubdata_commitments: Vec = pubdata + .chunks(ZK_SYNC_BYTES_PER_BLOB) + .flat_map(|blob| { + let kzg_info = KzgInfo::new(blob); + + let blob_commitment = kzg_info.to_pubdata_commitment(); + + // We also append 0s to show that we do not reuse previously published blobs. + blob_commitment + .into_iter() + .chain([0u8; 32]) + .collect::>() + }) + .collect(); + header + .into_iter() + .chain([PUBDATA_SOURCE_BLOBS]) .chain(pubdata_commitments) .collect() } @@ -249,3 +342,38 @@ impl Tokenizable for CommitBatchInfo<'_> { Token::Tuple(tokens) } } + +fn compose_header_for_l1_commit_rollup(state_diff_hash: H256, pubdata: Vec) -> Vec { + // The preimage under the hash `l2DAValidatorOutputHash` is expected to be in the following format: + // - First 32 bytes are the hash of the uncompressed state diff. + // - Then, there is a 32-byte hash of the full pubdata. + // - Then, there is the 1-byte number of blobs published. + // - Then, there are linear hashes of the published blobs, 32 bytes each. + + let mut full_header = vec![]; + + full_header.extend(state_diff_hash.0); + + let mut full_pubdata = pubdata; + let full_pubdata_hash = keccak256(&full_pubdata); + full_header.extend(full_pubdata_hash); + + // Now, we need to calculate the linear hashes of the blobs. + // Firstly, let's pad the pubdata to the size of the blob. 
+ if full_pubdata.len() % ZK_SYNC_BYTES_PER_BLOB != 0 { + full_pubdata.resize( + full_pubdata.len() + ZK_SYNC_BYTES_PER_BLOB + - full_pubdata.len() % ZK_SYNC_BYTES_PER_BLOB, + 0, + ); + } + full_header.push((full_pubdata.len() / ZK_SYNC_BYTES_PER_BLOB) as u8); + + full_pubdata + .chunks(ZK_SYNC_BYTES_PER_BLOB) + .for_each(|chunk| { + full_header.extend(keccak256(chunk)); + }); + + full_header +} diff --git a/core/lib/l1_contract_interface/src/i_executor/structures/mod.rs b/core/lib/l1_contract_interface/src/i_executor/structures/mod.rs index aa9872049015..9583e0204f75 100644 --- a/core/lib/l1_contract_interface/src/i_executor/structures/mod.rs +++ b/core/lib/l1_contract_interface/src/i_executor/structures/mod.rs @@ -6,4 +6,12 @@ mod stored_batch_info; #[cfg(test)] mod tests; -pub use self::{commit_batch_info::CommitBatchInfo, stored_batch_info::StoredBatchInfo}; +pub use self::{ + commit_batch_info::{ + CommitBatchInfo, PUBDATA_SOURCE_BLOBS, PUBDATA_SOURCE_CALLDATA, + PUBDATA_SOURCE_CUSTOM_PRE_GATEWAY, + }, + stored_batch_info::StoredBatchInfo, +}; + +pub const SUPPORTED_ENCODING_VERSION: u8 = 0; diff --git a/core/lib/l1_contract_interface/src/i_executor/structures/stored_batch_info.rs b/core/lib/l1_contract_interface/src/i_executor/structures/stored_batch_info.rs index 26f9b30392ea..d2090097dbeb 100644 --- a/core/lib/l1_contract_interface/src/i_executor/structures/stored_batch_info.rs +++ b/core/lib/l1_contract_interface/src/i_executor/structures/stored_batch_info.rs @@ -23,17 +23,17 @@ pub struct StoredBatchInfo { } impl StoredBatchInfo { - fn schema() -> Vec { - vec![ParamType::Tuple(vec![ - ParamType::Uint(64), - ParamType::FixedBytes(32), - ParamType::Uint(64), - ParamType::Uint(256), - ParamType::FixedBytes(32), - ParamType::FixedBytes(32), - ParamType::Uint(256), - ParamType::FixedBytes(32), - ])] + pub fn schema() -> ParamType { + ParamType::Tuple(vec![ + ParamType::Uint(64), // `batch_number` + ParamType::FixedBytes(32), // `batch_hash` + 
ParamType::Uint(64), // `index_repeated_storage_changes` + ParamType::Uint(256), // `number_of_layer1_txs` + ParamType::FixedBytes(32), // `priority_operations_hash` + ParamType::FixedBytes(32), // `l2_logs_tree_root` + ParamType::Uint(256), // `timestamp` + ParamType::FixedBytes(32), // `commitment` + ]) } /// Encodes the struct into RLP. @@ -43,7 +43,7 @@ impl StoredBatchInfo { /// Decodes the struct from RLP. pub fn decode(rlp: &[u8]) -> anyhow::Result { - let [token] = ethabi::decode_whole(&Self::schema(), rlp)? + let [token] = ethabi::decode_whole(&[Self::schema()], rlp)? .try_into() .unwrap(); Ok(Self::from_token(token)?) diff --git a/core/lib/mini_merkle_tree/src/lib.rs b/core/lib/mini_merkle_tree/src/lib.rs index d34f57999961..fed28edb10c0 100644 --- a/core/lib/mini_merkle_tree/src/lib.rs +++ b/core/lib/mini_merkle_tree/src/lib.rs @@ -159,7 +159,7 @@ where /// Returns the root hash and the Merkle proof for a leaf with the specified 0-based `index`. /// `index` is relative to the leftmost uncached leaf. /// # Panics - /// Panics if `index` is >= than the number of leaves in the tree. + /// Panics if `index` is >= than the number of uncached leaves in the tree. pub fn merkle_root_and_path(&self, index: usize) -> (H256, Vec) { assert!(index < self.hashes.len(), "leaf index out of bounds"); let mut end_path = vec![]; @@ -170,6 +170,15 @@ where ) } + /// Returns the root hash and the Merkle proof for a leaf with the specified 0-based `index`. + /// `index` is an absolute position of the leaf. + /// # Panics + /// Panics if leaf at `index` is cached or if `index` is >= than the number of leaves in the tree. + pub fn merkle_root_and_path_by_absolute_index(&self, index: usize) -> (H256, Vec) { + assert!(index >= self.start_index, "leaf is cached"); + self.merkle_root_and_path(index - self.start_index) + } + /// Returns the root hash and the Merkle proofs for a range of leafs. /// The range is 0..length, where `0` is the leftmost untrimmed leaf (i.e. 
leaf under `self.start_index`). /// # Panics @@ -280,6 +289,16 @@ where hashes[0] } + + /// Returns the number of non-empty merkle tree elements. + pub fn length(&self) -> usize { + self.start_index + self.hashes.len() + } + + /// Returns index of the leftmost untrimmed leaf. + pub fn start_index(&self) -> usize { + self.start_index + } } fn tree_depth_by_size(tree_size: usize) -> usize { @@ -314,6 +333,12 @@ impl HashEmptySubtree<[u8; 88]> for KeccakHasher { } } +impl HashEmptySubtree<[u8; 96]> for KeccakHasher { + fn empty_leaf_hash(&self) -> H256 { + self.hash_bytes(&[0_u8; 96]) + } +} + fn compute_empty_tree_hashes(empty_leaf_hash: H256) -> Vec { iter::successors(Some(empty_leaf_hash), |hash| { Some(KeccakHasher.compress(hash, hash)) diff --git a/core/lib/mini_merkle_tree/src/tests.rs b/core/lib/mini_merkle_tree/src/tests.rs index 5aadab1d4e6f..51a684d945fd 100644 --- a/core/lib/mini_merkle_tree/src/tests.rs +++ b/core/lib/mini_merkle_tree/src/tests.rs @@ -4,6 +4,10 @@ use std::collections::VecDeque; use super::*; +fn empty_subtree_root(depth: usize) -> H256 { + >::empty_subtree_hash(&KeccakHasher, depth) +} + #[test] fn tree_depth_is_computed_correctly() { const TREE_SIZES_AND_DEPTHS: &[(usize, usize)] = &[ @@ -29,7 +33,7 @@ fn hash_of_empty_tree_with_single_item() { let len = 1 << depth; println!("checking tree with {len} items"); let tree = MiniMerkleTree::new(iter::once([0_u8; 88]), Some(len)); - assert_eq!(tree.merkle_root(), KeccakHasher.empty_subtree_hash(depth)); + assert_eq!(tree.merkle_root(), empty_subtree_root(depth)); } } @@ -42,10 +46,10 @@ fn hash_of_large_empty_tree_with_multiple_items() { let tree = MiniMerkleTree::new(leaves.clone(), Some(tree_size)); let depth = tree_depth_by_size(tree_size); - assert_eq!(tree.merkle_root(), KeccakHasher.empty_subtree_hash(depth)); + assert_eq!(tree.merkle_root(), empty_subtree_root(depth)); let tree = MiniMerkleTree::new(leaves, None); let depth = tree_depth_by_size(tree_size); - 
assert_eq!(tree.merkle_root(), KeccakHasher.empty_subtree_hash(depth)); + assert_eq!(tree.merkle_root(), empty_subtree_root(depth)); } } @@ -285,20 +289,20 @@ fn merkle_proofs_are_valid_in_very_small_trees() { fn dynamic_merkle_tree_growth() { let mut tree = MiniMerkleTree::new(iter::empty(), None); assert_eq!(tree.binary_tree_size, 1); - assert_eq!(tree.merkle_root(), KeccakHasher.empty_subtree_hash(0)); + assert_eq!(tree.merkle_root(), empty_subtree_root(0)); for len in 1..=8_usize { tree.push([0; 88]); assert_eq!(tree.binary_tree_size, len.next_power_of_two()); let depth = tree_depth_by_size(tree.binary_tree_size); - assert_eq!(tree.merkle_root(), KeccakHasher.empty_subtree_hash(depth)); + assert_eq!(tree.merkle_root(), empty_subtree_root(depth)); } // Shouldn't shrink after caching tree.trim_start(6); assert_eq!(tree.binary_tree_size, 8); - assert_eq!(tree.merkle_root(), KeccakHasher.empty_subtree_hash(3)); + assert_eq!(tree.merkle_root(), empty_subtree_root(3)); } #[test] diff --git a/core/lib/multivm/src/versions/shadow/tests.rs b/core/lib/multivm/src/versions/shadow/tests.rs index 4466d96a96b7..354459853f11 100644 --- a/core/lib/multivm/src/versions/shadow/tests.rs +++ b/core/lib/multivm/src/versions/shadow/tests.rs @@ -3,11 +3,11 @@ use std::{collections::HashSet, rc::Rc}; use zksync_types::{writes::StateDiffRecord, StorageKey, Transaction, H256, U256}; -use zksync_vm_interface::pubdata::PubdataBuilder; use super::ShadowedFastVm; use crate::{ interface::{ + pubdata::{PubdataBuilder, PubdataInput}, utils::{ShadowMut, ShadowRef}, CurrentExecutionState, L2BlockEnv, VmExecutionResultAndLogs, }, @@ -126,6 +126,13 @@ impl TestedVm for ShadowedFastVm { ShadowMut::Shadow(vm) => vm.push_transaction_with_refund(tx.clone(), refund), }); } + + fn pubdata_input(&self) -> PubdataInput { + self.get("pubdata_input", |r| match r { + ShadowRef::Main(vm) => vm.pubdata_input(), + ShadowRef::Shadow(vm) => vm.pubdata_input(), + }) + } } mod block_tip { @@ -297,6 +304,16 @@ mod 
is_write_initial { } } +mod l1_messenger { + use crate::versions::testonly::l1_messenger::*; + + #[test] + #[ignore] // Requires post-gateway system contracts + fn rollup_da_output_hash_match() { + test_rollup_da_output_hash_match::(); + } +} + mod l1_tx_execution { use crate::versions::testonly::l1_tx_execution::*; diff --git a/core/lib/multivm/src/versions/testonly/default_aa.rs b/core/lib/multivm/src/versions/testonly/default_aa.rs index 9255854e8703..0ae4cc2bbe50 100644 --- a/core/lib/multivm/src/versions/testonly/default_aa.rs +++ b/core/lib/multivm/src/versions/testonly/default_aa.rs @@ -34,7 +34,12 @@ pub(crate) fn test_default_aa_interaction() { let result = vm.vm.execute(InspectExecutionMode::OneTx); assert!(!result.result.is_failed(), "Transaction wasn't successful"); - vm.vm.finish_batch(default_pubdata_builder()); + let batch_result = vm.vm.finish_batch(default_pubdata_builder()); + assert!( + !batch_result.block_tip_execution_result.result.is_failed(), + "Batch tip execution wasn't successful" + ); + vm.vm.get_current_execution_state(); // Both deployment and ordinary nonce should be incremented by one. 
diff --git a/core/lib/multivm/src/versions/testonly/l1_messenger.rs b/core/lib/multivm/src/versions/testonly/l1_messenger.rs new file mode 100644 index 000000000000..daf07b2750f7 --- /dev/null +++ b/core/lib/multivm/src/versions/testonly/l1_messenger.rs @@ -0,0 +1,178 @@ +use std::rc::Rc; + +use ethabi::Token; +use zksync_contracts::l1_messenger_contract; +use zksync_test_contracts::{TestContract, TxType}; +use zksync_types::{ + address_to_h256, u256_to_h256, web3::keccak256, Address, Execute, ProtocolVersionId, + L1_MESSENGER_ADDRESS, U256, +}; +use zksync_vm_interface::SystemEnv; + +use super::{default_system_env, ContractToDeploy, TestedVm, VmTesterBuilder}; +use crate::{ + interface::{ + pubdata::{PubdataBuilder, PubdataInput}, + InspectExecutionMode, TxExecutionMode, VmInterfaceExt, + }, + pubdata_builders::RollupPubdataBuilder, + vm_latest::constants::ZK_SYNC_BYTES_PER_BLOB, +}; + +const L2_DA_VALIDATOR_OUTPUT_HASH_KEY: usize = 5; +const USED_L2_DA_VALIDATOR_ADDRESS_KEY: usize = 6; + +// Bytecode is temporary hardcoded, should be removed after contracts are merged. 
+fn l2_rollup_da_validator_bytecode() -> Vec { + hex::decode("0012000000000002000a000000000002000000000301001900000060043002700000012703400197000100000031035500020000003103550003000000310355000400000031035500050000003103550006000000310355000700000031035500080000003103550009000000310355000a000000310355000b000000310355000c000000310355000d000000310355000e000000310355000f00000031035500100000003103550011000000010355000001270040019d0000008004000039000000400040043f00000001002001900000005d0000c13d000000040030008c000000fe0000413d000000000201043b00000129022001970000012a0020009c000000fe0000c13d000000a40030008c000000fe0000413d0000000002000416000000000002004b000000fe0000c13d0000008402100370000000000202043b000300000002001d0000012b0020009c000000fe0000213d00000003020000290000002302200039000000000032004b000000fe0000813d00000003020000290000000402200039000000000421034f000000000604043b0000012b0060009c000000fe0000213d0000000304000029000700240040003d0000000704600029000000000034004b000000fe0000213d0000004403100370000000000303043b000400000003001d0000006403100370000000000303043b000200000003001d000000040060008c000000fe0000413d0000002002200039000000000221034f000000000202043b000000e00220027000000058022000c90000000804200039000000000064004b000000fe0000213d00000003022000290000002802200039000000000121034f000000000101043b000500e00010027a000600000006001d000000650000c13d00000000090000190000000403000029000000000039004b000000f10000c13d0000014e0040009c000000fb0000a13d0000014001000041000000000010043f0000001101000039000000040010043f00000138010000410000049a000104300000000001000416000000000001004b000000fe0000c13d0000002001000039000001000010044300000120000004430000012801000041000004990001042e000000000800001900000000090000190000014f0040009c000000570000813d0000000403400039000000000063004b000000fe0000213d00000007024000290000001101000367000000000221034f000000000502043b000000e004500270000000000034001a000000570000413d0000000007340019000000000067004b000000fe0000213d00000000020004140000012c0050009c0000007b0000813d00
00000003000031000000840000013d000000070330002900000127053001970001000000510355000000000034001a000000570000413d0000000003340019000000000330007b000000570000413d000000000151034f000a00000009001d000800000008001d000900000007001d000001270330019700010000003103e50000012d0020009c000003c20000813d00000000013103df000000c0022002100000012e022001970000012c022001c700010000002103b500000000012103af0000801002000039049804930000040f0000000003010019000000600330027000000127033001970000000100200190000002450000613d0000001f0230003900000130052001970000003f025000390000013104200197000000400200043d0000000004420019000000000024004b000000000600003900000001060040390000012b0040009c0000023f0000213d00000001006001900000023f0000c13d000000400040043f0000000004320436000000000005004b000000b10000613d0000000005540019000000000600003100000011066003670000000007040019000000006806043c0000000007870436000000000057004b000000ad0000c13d0000012f063001980000000005640019000000ba0000613d000000000701034f0000000008040019000000007907043c0000000008980436000000000058004b000000b60000c13d0000001f03300190000000c70000613d000000000161034f0000000303300210000000000605043300000000063601cf000000000636022f000000000101043b0000010003300089000000000131022f00000000013101cf000000000161019f00000000001504350000000001020433000000200010008c0000000a05000029000004210000c13d0000000002040433000000400100043d000000400310003900000000002304350000002002100039000000000052043500000040030000390000000000310435000001320010009c0000023f0000213d0000006003100039000000400030043f000001270020009c000001270200804100000040022002100000000001010433000001270010009c00000127010080410000006001100210000000000121019f0000000002000414000001270020009c0000012702008041000000c002200210000000000112019f00000133011001c700008010020000390498048e0000040f0000000100200190000000060600002900000009040000290000000808000029000000fe0000613d000000000901043b0000000108800039000000050080006c000000670000413d000000520000013d000000400100043d000000440210003900000000009204350000002402100039000000000032043500
000134020000410000000000210435000000040210003900000000000204350000042d0000013d0000000403400039000000000063004b000001000000a13d00000000010000190000049a0001043000000007014000290000001101100367000000000101043b000400e00010027a0000025d0000c13d000000000900001900000000050300190000000003090019000000020090006c000002f20000c13d000000060050006c000002fd0000813d00000007015000290000001102000367000000000112034f000000000101043b000000f801100270000000010010008c000003030000c13d00000000060500190000014e0060009c0000000604000029000000570000213d0000000403600039000000000043004b000000fe0000213d00000003016000290000002501100039000000000112034f000000000101043b000000000043004b000002fd0000813d000000e8011002700000000703300029000000000432034f0000000503500039000000000404043b000000000031001a0000000607000029000000570000413d000a00000031001d0000000a0070006b000000fe0000213d000000050600008a0000000a0060006b000000570000213d0000000a050000290000000405500039000000000075004b000000fe0000213d0000000a08000029000300070080002d0000000306200360000000000606043b000400000006001d000000e006600272000500000006001d00090110006000cd0000013f0000613d000000090800002900000005068000fa000001100060008c000000570000c13d000000090050002a000000570000413d000200090050002d000000020070006c000000fe0000413d000000f804400270000000400a00043d0000004406a00039000000800700003900000000007604350000002406a000390000000000460435000001410400004100000000004a043500000007055000290000008404a00039000000090900002900000000009404350000000404a0003900000005060000290000000000640435000000000752034f0000001f0890018f00080000000a001d000000a405a0003900000142099001980000000006950019000001610000613d000000000a07034f000000000b05001900000000ac0a043c000000000bcb043600000000006b004b0000015d0000c13d0000000703300029000000000008004b0000016f0000613d000000000797034f0000000308800210000000000906043300000000098901cf000000000989022f000000000707043b0000010008800089000000000787022f00000000078701cf000000000797019f00000000007604350000000907000029000000000675001900000000000604350000001f0670003900
0001430660019700000000066500190000000004460049000000080500002900000064055000390000000000450435000000000432034f0000001f0510018f000000000216043600000144061001980000000003620019000001850000613d000000000704034f0000000008020019000000007907043c0000000008980436000000000038004b000001810000c13d000000000005004b000001920000613d000000000464034f0000000305500210000000000603043300000000065601cf000000000656022f000000000404043b0000010005500089000000000454022f00000000045401cf000000000464019f0000000000430435000000000312001900000000000304350000001f011000390000014501100197000000080300002900000000013100490000000001210019000001270010009c00000127010080410000006001100210000001270030009c000001270200004100000000020340190000004002200210000000000121019f0000000002000414000001270020009c0000012702008041000000c002200210000000000112019f0000800e02000039049804890000040f000000000301001900000060033002700000012703300197000000200030008c000000200400003900000000040340190000001f0640018f00000020074001900000000805700029000001b80000613d000000000801034f0000000809000029000000008a08043c0000000009a90436000000000059004b000001b40000c13d000000000006004b000001c50000613d000000000771034f0000000306600210000000000805043300000000086801cf000000000868022f000000000707043b0000010006600089000000000767022f00000000066701cf000000000686019f00000000006504350000000100200190000003480000613d0000001f01400039000000600110018f0000000802100029000000000012004b00000000010000390000000101004039000100000002001d0000012b0020009c0000023f0000213d00000001001001900000023f0000c13d0000000101000029000000400010043f000000200030008c0000000604000029000000fe0000413d00000008010000290000000001010433000800000001001d00000004010000290000012c0010009c000001e10000413d000000090200002900000005012000fa000001100010008c000000570000c13d0000000103000029000000440130003900000024023000390000000403300039000000020440006c000003660000c13d000001460400004100000001050000290000000000450435000000200400003900000000004304350000000a04000029000000000042043500000150034001980000001f0440018f00
0000000231001900000007050000290000001105500367000001fa0000613d000000000605034f0000000007010019000000006806043c0000000007870436000000000027004b000001f60000c13d000000000004004b000002070000613d000000000335034f0000000304400210000000000502043300000000054501cf000000000545022f000000000303043b0000010004400089000000000343022f00000000034301cf000000000353019f00000000003204350000000a030000290000001f023000390000015002200197000000000131001900000000000104350000004401200039000001270010009c000001270100804100000060011002100000000102000029000001270020009c00000127020080410000004002200210000000000112019f0000000002000414000001270020009c0000012702008041000000c002200210000000000112019f00008011020000390498048e0000040f000000000301001900000060033002700000001f0430018f0000012f0530019700000127033001970000000100200190000003720000613d0000000102500029000000000005004b0000022c0000613d000000000601034f0000000107000029000000006806043c0000000007870436000000000027004b000002280000c13d000000000004004b000002390000613d000000000151034f0000000304400210000000000502043300000000054501cf000000000545022f000000000101043b0000010004400089000000000141022f00000000014101cf000000000151019f00000000001204350000001f0130003900000130011001970000000101100029000900000001001d0000012b0010009c0000038a0000a13d0000014001000041000000000010043f0000004101000039000000040010043f00000138010000410000049a000104300000001f0430018f0000012f023001980000024e0000613d000000000501034f0000000006000019000000005705043c0000000006760436000000000026004b0000024a0000c13d000000000004004b0000025b0000613d000000000121034f0000000304400210000000000502043300000000054501cf000000000545022f000000000101043b0000010004400089000000000141022f00000000014101cf000000000151019f000000000012043500000060013002100000049a00010430000000000800001900000000090000190000014e0030009c000000570000213d0000000402300039000000000062004b000000fe0000213d00000007033000290000001101000367000000000331034f000000000303043b000000e00a30027000000000002a001a000000570000413d00000000072a0019000000000067004b00
0000fe0000213d0000013600300198000003130000c13d000001390030009c000003190000813d0000013a003001980000031f0000613d000000070420002900000127034001970000000002000414000100000031035500000000004a001a000000570000413d00000000044a0019000000000440007b000000570000413d00090000000a001d000a00000009001d000500000008001d000800000007001d000000000131034f000001270340019700010000003103e5000001270020009c000003c20000213d00000000013103df000000c0022002100000012e022001970000012c022001c700010000002103b500000000012103af0000000202000039049804930000040f00000000030100190000006003300270000001270330019700000001002001900000032a0000613d0000001f0230003900000130052001970000003f025000390000013104200197000000400200043d0000000004420019000000000024004b000000000600003900000001060040390000012b0040009c0000023f0000213d00000001006001900000023f0000c13d000000400040043f0000000004320436000000000005004b000000090a000029000002ad0000613d0000000005540019000000000600003100000011066003670000000007040019000000006806043c0000000007870436000000000057004b000002a90000c13d0000012f063001980000000005640019000002b60000613d000000000701034f0000000008040019000000007907043c0000000008980436000000000058004b000002b20000c13d0000001f03300190000002c30000613d000000000161034f0000000303300210000000000605043300000000063601cf000000000636022f000000000101043b0000010003300089000000000131022f00000000013101cf000000000161019f0000000000150435000000400100043d0000000002020433000000200020008c0000000a05000029000003420000c13d00000000020404330000013d02200197000000db03a002100000013e03300197000000000223019f0000013f022001c7000000400310003900000000002304350000002002100039000000000052043500000040030000390000000000310435000001320010009c0000023f0000213d0000006003100039000000400030043f000001270020009c000001270200804100000040022002100000000001010433000001270010009c00000127010080410000006001100210000000000121019f0000000002000414000001270020009c0000012702008041000000c002200210000000000112019f00000133011001c700008010020000390498048e0000040f0000000100200190000000060600002900
000008030000290000000508000029000000fe0000613d000000000901043b0000000108800039000000040080006c0000025f0000413d000001060000013d000000400100043d0000004402100039000000000032043500000024021000390000000203000029000000000032043500000134020000410000000000210435000000040210003900000001030000390000042c0000013d0000014001000041000000000010043f0000003201000039000000040010043f00000138010000410000049a00010430000000400200043d0000004403200039000000000013043500000024012000390000000103000039000000000031043500000134010000410000000000120435000000040120003900000002030000390000000000310435000001270020009c0000012702008041000000400120021000000135011001c70000049a00010430000000400100043d0000013702000041000000000021043500000004021000390000000203000039000003240000013d000000400100043d0000013702000041000000000021043500000004021000390000000103000039000003240000013d000000400100043d00000137020000410000000000210435000000040210003900000003030000390000000000320435000001270010009c0000012701008041000000400110021000000138011001c70000049a000104300000001f0430018f0000012f02300198000003330000613d000000000501034f0000000006000019000000005705043c0000000006760436000000000026004b0000032f0000c13d000000000004004b000003400000613d000000000121034f0000000304400210000000000502043300000000054501cf000000000545022f000000000101043b0000010004400089000000000141022f00000000014101cf000000000151019f000000000012043500000060013002100000049a0001043000000044021000390000013b03000041000000000032043500000024021000390000001903000039000004270000013d0000001f0530018f0000012f06300198000000400200043d0000000004620019000003530000613d000000000701034f0000000008020019000000007907043c0000000008980436000000000048004b0000034f0000c13d000000000005004b000003600000613d000000000161034f0000000305500210000000000604043300000000065601cf000000000656022f000000000101043b0000010005500089000000000151022f00000000015101cf000000000161019f00000000001404350000006001300210000001270020009c00000127020080410000004002200210000000000112019f0000049a00010430000001340500004100
0000010600002900000000005604350000000305000039000000000053043500000000000204350000000000410435000001270060009c0000012706008041000000400160021000000135011001c70000049a00010430000000400200043d0000000006520019000000000005004b0000037c0000613d000000000701034f0000000008020019000000007907043c0000000008980436000000000068004b000003780000c13d000000000004004b000003600000613d000000000151034f0000000304400210000000000506043300000000054501cf000000000545022f000000000101043b0000010004400089000000000141022f00000000014101cf000000000151019f0000000000160435000003600000013d0000000901000029000000400010043f000000200030008c000000fe0000413d000000010100002900000000010104330000012b0010009c000000fe0000213d000000010230002900000001011000290000001f03100039000000000023004b000000fe0000813d00000000140104340000012b0040009c0000023f0000213d00000005034002100000003f05300039000001470550019700000009055000290000012b0050009c0000023f0000213d000000400050043f000000090500002900000000004504350000000003130019000000000023004b000000fe0000213d000000000004004b000003ae0000613d0000000902000029000000200220003900000000140104340000000000420435000000000031004b000003a90000413d000000000100041400000011020003670000000a0000006b000003b40000c13d0000000003000031000003be0000013d00000007030000290000012704300197000100000042035500000003050000290000000a0050006c000000570000413d0000000305000029000000000350007b000000570000413d000000000242034f000001270330019700010000003203e5000001270010009c000003c90000a13d000000400100043d00000044021000390000014d03000041000000000032043500000024021000390000000803000039000004270000013d00000000023203df000000c0011002100000012e011001970000012c011001c700010000001203b500000000011203af0000801002000039049804930000040f0000000003010019000000600330027000000127033001970000000100200190000004320000613d0000001f0230003900000130052001970000003f025000390000013104200197000000400200043d0000000004420019000000000024004b000000000600003900000001060040390000012b0040009c0000023f0000213d00000001006001900000023f0000c13d000000400040043f00
00000004320436000000000005004b000003ef0000613d0000000005540019000000000600003100000011066003670000000007040019000000006806043c0000000007870436000000000057004b000003eb0000c13d0000001f0530018f0000012f063001980000000003640019000003f90000613d000000000701034f0000000008040019000000007907043c0000000008980436000000000038004b000003f50000c13d000000000005004b000004060000613d000000000161034f0000000305500210000000000603043300000000065601cf000000000656022f000000000101043b0000010005500089000000000151022f00000000015101cf000000000161019f00000000001304350000000001020433000000200010008c000004210000c13d000000400100043d00000009020000290000000002020433000001000020008c0000044a0000413d00000064021000390000014a03000041000000000032043500000044021000390000014b0300004100000000003204350000002402100039000000250300003900000000003204350000013c020000410000000000210435000000040210003900000020030000390000000000320435000001270010009c000001270100804100000040011002100000014c011001c70000049a00010430000000400100043d00000044021000390000014803000041000000000032043500000024021000390000001f0300003900000000003204350000013c020000410000000000210435000000040210003900000020030000390000000000320435000001270010009c0000012701008041000000400110021000000135011001c70000049a000104300000001f0430018f0000012f023001980000043b0000613d000000000501034f0000000006000019000000005705043c0000000006760436000000000026004b000004370000c13d000000000004004b000004480000613d000000000121034f0000000304400210000000000502043300000000054501cf000000000545022f000000000101043b0000010004400089000000000141022f00000000014101cf000000000151019f000000000012043500000060013002100000049a000104300000000003040433000000f8022002100000006004100039000000000024043500000040021000390000000000320435000000200210003900000008030000290000000000320435000000610310003900000009040000290000000004040433000000000004004b000004610000613d000000000500001900000009060000290000002006600039000900000006001d000000000606043300000000036304360000000105500039000000000045004b000004590000413d00
00000003130049000000200430008a00000000004104350000001f0330003900000150043001970000000003140019000000000043004b000000000400003900000001040040390000012b0030009c0000023f0000213d00000001004001900000023f0000c13d000000400030043f000001270020009c000001270200804100000040022002100000000001010433000001270010009c00000127010080410000006001100210000000000121019f0000000002000414000001270020009c0000012702008041000000c002200210000000000112019f00000133011001c700008010020000390498048e0000040f0000000100200190000000fe0000613d000000000101043b000000400200043d0000000000120435000001270020009c0000012702008041000000400120021000000149011001c7000004990001042e0000048c002104210000000102000039000000000001042d0000000002000019000000000001042d00000491002104230000000102000039000000000001042d0000000002000019000000000001042d00000496002104230000000102000039000000000001042d0000000002000019000000000001042d0000049800000432000004990001042e0000049a00010430000000000000000000000000000000000000000000000000000000000000000000000000ffffffff0000000200000000000000000000000000000040000001000000000000000000ffffffff0000000000000000000000000000000000000000000000000000000089f9a07200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffff0000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000ffffffff00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffe000000000000000000000000000000000000000000000000000000001ffffffe000000000000000000000000000000000000000000000000000000003ffffffe0000000000000000000000000000000000000000000000000ffffffffffffff9f02000000000000000000000000000000000000000000000000000000000000007f7b0cf70000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000001f0000000000000000000000000000000000000000000000000000000043e266b000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000024000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000007368612072657475726e656420696e76616c696420646174610000000000000008c379a00000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffff06ffffff0000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000004e487b71000000000000000000000000000000000000000000000000000000006006d8b500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffffe0000000000000000000000000000000000000000000000000000003ffffffffe00000000000000000000000000000000000000000000000000000000000ffffe00000000000000000000000000000000000000000000000000000000001ffffe018876a04000000000000000000000000000000000000000000000000000000007fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe06b656363616b3235362072657475726e656420696e76616c69642064617461000000000000000000000000000000000000000020000000000000000000000000206269747300000000000000000000000000000000000000000000000000000053616665436173743a2076616c756520646f65736e27742066697420696e203800000000000000000000000000000000000000840000000000000000000000004f766572666c6f77000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffbfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffcffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe00000000000000000000000000000000000000000000000000000000000000000e901f5bd8811df26e614332e2110b9bc002e2cbadd82065c67e102f858079d5a").unwrap() +} + +fn encoded_uncompressed_state_diffs(input: &PubdataInput) -> Vec { + let mut result = vec![]; + for state_diff in input.state_diffs.iter() { + result.extend(state_diff.encode_padded()); + } + result +} + +fn 
compose_header_for_l1_commit_rollup(input: PubdataInput) -> Vec { + // The preimage under the hash `l2DAValidatorOutputHash` is expected to be in the following format: + // - First 32 bytes are the hash of the uncompressed state diff. + // - Then, there is a 32-byte hash of the full pubdata. + // - Then, there is the 1-byte number of blobs published. + // - Then, there are linear hashes of the published blobs, 32 bytes each. + + let mut full_header = vec![]; + + let uncompressed_state_diffs = encoded_uncompressed_state_diffs(&input); + let uncompressed_state_diffs_hash = keccak256(&uncompressed_state_diffs); + full_header.extend(uncompressed_state_diffs_hash); + + let pubdata_builder = RollupPubdataBuilder::new(Address::zero()); + let mut full_pubdata = + pubdata_builder.settlement_layer_pubdata(&input, ProtocolVersionId::latest()); + let full_pubdata_hash = keccak256(&full_pubdata); + full_header.extend(full_pubdata_hash); + + // Now, we need to calculate the linear hashes of the blobs. + // Firstly, let's pad the pubdata to the size of the blob. + if full_pubdata.len() % ZK_SYNC_BYTES_PER_BLOB != 0 { + full_pubdata.resize( + full_pubdata.len() + ZK_SYNC_BYTES_PER_BLOB + - full_pubdata.len() % ZK_SYNC_BYTES_PER_BLOB, + 0, + ); + } + full_header.push((full_pubdata.len() / ZK_SYNC_BYTES_PER_BLOB) as u8); + + full_pubdata + .chunks(ZK_SYNC_BYTES_PER_BLOB) + .for_each(|chunk| { + full_header.extend(keccak256(chunk)); + }); + + full_header +} + +pub(crate) fn test_rollup_da_output_hash_match() { + // In this test, we check whether the L2 DA output hash is as expected. 
+ + let l2_da_validator_address = Address::repeat_byte(0x12); + let system_env = SystemEnv { + version: ProtocolVersionId::Version27, + ..default_system_env() + }; + let mut vm = VmTesterBuilder::new() + .with_empty_in_memory_storage() + .with_execution_mode(TxExecutionMode::VerifyExecute) + .with_rich_accounts(1) + .with_system_env(system_env) + .with_custom_contracts(vec![ContractToDeploy { + bytecode: l2_rollup_da_validator_bytecode(), + address: l2_da_validator_address, + is_account: false, + is_funded: false, + }]) + .build::(); + + let account = &mut vm.rich_accounts[0]; + + // Firstly, deploy tx. It should publish the bytecode of the "test contract" + let counter_bytecode = TestContract::counter().bytecode; + let tx = account + .get_deploy_tx(&counter_bytecode, None, TxType::L2) + .tx; + // We do not use compression here, to have the bytecode published in full. + let (_, result) = vm + .vm + .execute_transaction_with_bytecode_compression(tx, false); + assert!(!result.result.is_failed(), "Transaction wasn't successful"); + + // Then, we call the l1 messenger to also send an L2->L1 message. 
+ let l1_messenger_contract = l1_messenger_contract(); + let encoded_data = l1_messenger_contract + .function("sendToL1") + .unwrap() + .encode_input(&[Token::Bytes(vec![])]) + .unwrap(); + + let tx = account.get_l2_tx_for_execute( + Execute { + contract_address: Some(L1_MESSENGER_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + vm.vm.push_transaction(tx); + let result = vm.vm.execute(InspectExecutionMode::OneTx); + assert!(!result.result.is_failed(), "Transaction wasn't successful"); + + let pubdata_builder = RollupPubdataBuilder::new(l2_da_validator_address); + let batch_result = vm.vm.finish_batch(Rc::new(pubdata_builder)); + assert!( + !batch_result.block_tip_execution_result.result.is_failed(), + "Transaction wasn't successful {:?}", + batch_result.block_tip_execution_result.result + ); + let pubdata_input = vm.vm.pubdata_input(); + + // Just to double check that the test makes sense. + assert!(!pubdata_input.user_logs.is_empty()); + assert!(!pubdata_input.l2_to_l1_messages.is_empty()); + assert!(!pubdata_input.published_bytecodes.is_empty()); + assert!(!pubdata_input.state_diffs.is_empty()); + + let expected_header: Vec = compose_header_for_l1_commit_rollup(pubdata_input); + + let l2_da_validator_output_hash = batch_result + .block_tip_execution_result + .logs + .system_l2_to_l1_logs + .iter() + .find(|log| log.0.key == u256_to_h256(L2_DA_VALIDATOR_OUTPUT_HASH_KEY.into())) + .unwrap() + .0 + .value; + + assert_eq!( + l2_da_validator_output_hash, + keccak256(&expected_header).into() + ); + + let l2_used_da_validator_address = batch_result + .block_tip_execution_result + .logs + .system_l2_to_l1_logs + .iter() + .find(|log| log.0.key == u256_to_h256(USED_L2_DA_VALIDATOR_ADDRESS_KEY.into())) + .unwrap() + .0 + .value; + + assert_eq!( + l2_used_da_validator_address, + address_to_h256(&l2_da_validator_address) + ); +} diff --git a/core/lib/multivm/src/versions/testonly/mod.rs 
b/core/lib/multivm/src/versions/testonly/mod.rs index 38a09049b15a..a0f08546197c 100644 --- a/core/lib/multivm/src/versions/testonly/mod.rs +++ b/core/lib/multivm/src/versions/testonly/mod.rs @@ -40,6 +40,7 @@ pub(super) mod evm_emulator; pub(super) mod gas_limit; pub(super) mod get_used_contracts; pub(super) mod is_write_initial; +pub(super) mod l1_messenger; pub(super) mod l1_tx_execution; pub(super) mod l2_blocks; pub(super) mod nonce_holder; diff --git a/core/lib/multivm/src/versions/testonly/tester/mod.rs b/core/lib/multivm/src/versions/testonly/tester/mod.rs index 32499e409d82..d3cf2d6f782f 100644 --- a/core/lib/multivm/src/versions/testonly/tester/mod.rs +++ b/core/lib/multivm/src/versions/testonly/tester/mod.rs @@ -7,17 +7,16 @@ use zksync_types::{ writes::StateDiffRecord, Address, L1BatchNumber, StorageKey, Transaction, H256, U256, }; -use zksync_vm_interface::{ - pubdata::PubdataBuilder, CurrentExecutionState, InspectExecutionMode, VmExecutionResultAndLogs, - VmInterfaceHistoryEnabled, -}; pub(crate) use self::transaction_test_info::{ExpectedError, TransactionTestInfo, TxModifier}; use super::get_empty_storage; use crate::{ interface::{ + pubdata::{PubdataBuilder, PubdataInput}, storage::{InMemoryStorage, StoragePtr, StorageView}, - L1BatchEnv, L2BlockEnv, SystemEnv, TxExecutionMode, VmFactory, VmInterfaceExt, + CurrentExecutionState, InspectExecutionMode, L1BatchEnv, L2BlockEnv, SystemEnv, + TxExecutionMode, VmExecutionResultAndLogs, VmFactory, VmInterfaceExt, + VmInterfaceHistoryEnabled, }, versions::testonly::{ default_l1_batch, default_system_env, make_address_rich, ContractToDeploy, @@ -228,4 +227,7 @@ pub(crate) trait TestedVm: /// Pushes a transaction with predefined refund value. fn push_transaction_with_refund(&mut self, tx: Transaction, refund: u64); + + /// Returns pubdata input. 
+ fn pubdata_input(&self) -> PubdataInput; } diff --git a/core/lib/multivm/src/versions/vm_fast/tests/l1_messenger.rs b/core/lib/multivm/src/versions/vm_fast/tests/l1_messenger.rs new file mode 100644 index 000000000000..c7d4594d7692 --- /dev/null +++ b/core/lib/multivm/src/versions/vm_fast/tests/l1_messenger.rs @@ -0,0 +1,7 @@ +use crate::{versions::testonly::l1_messenger::test_rollup_da_output_hash_match, vm_fast::Vm}; + +#[test] +#[ignore] // Requires post-gateway system contracts +fn rollup_da_output_hash_match() { + test_rollup_da_output_hash_match::>(); +} diff --git a/core/lib/multivm/src/versions/vm_fast/tests/mod.rs b/core/lib/multivm/src/versions/vm_fast/tests/mod.rs index 0a26e895b5a7..2093d0ec496f 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/mod.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/mod.rs @@ -5,8 +5,9 @@ use zksync_types::{ }; use zksync_vm2::interface::{Event, HeapId, StateInterface}; use zksync_vm_interface::{ - pubdata::PubdataBuilder, storage::ReadStorage, CurrentExecutionState, L2BlockEnv, - VmExecutionMode, VmExecutionResultAndLogs, VmInterface, + pubdata::{PubdataBuilder, PubdataInput}, + storage::ReadStorage, + CurrentExecutionState, L2BlockEnv, VmExecutionMode, VmExecutionResultAndLogs, VmInterface, }; use super::{circuits_tracer::CircuitsTracer, Vm}; @@ -26,6 +27,7 @@ mod evm_emulator; mod gas_limit; mod get_used_contracts; mod is_write_initial; +mod l1_messenger; mod l1_tx_execution; mod l2_blocks; mod nonce_holder; @@ -167,4 +169,8 @@ impl TestedVm for Vm> { fn push_transaction_with_refund(&mut self, tx: Transaction, refund: u64) { self.push_transaction_inner(tx, refund, true); } + + fn pubdata_input(&self) -> PubdataInput { + self.bootloader_state.get_pubdata_information().clone() + } } diff --git a/core/lib/multivm/src/versions/vm_latest/tests/l1_messenger.rs b/core/lib/multivm/src/versions/vm_latest/tests/l1_messenger.rs new file mode 100644 index 000000000000..7d301f33a131 --- /dev/null +++ 
b/core/lib/multivm/src/versions/vm_latest/tests/l1_messenger.rs @@ -0,0 +1,10 @@ +use crate::{ + versions::testonly::l1_messenger::test_rollup_da_output_hash_match, + vm_latest::{HistoryEnabled, Vm}, +}; + +#[test] +#[ignore] // Requires post-gateway system contracts +fn rollup_da_output_hash_match() { + test_rollup_da_output_hash_match::>(); +} diff --git a/core/lib/multivm/src/versions/vm_latest/tests/mod.rs b/core/lib/multivm/src/versions/vm_latest/tests/mod.rs index b059c9716d89..aac3b1655b3a 100644 --- a/core/lib/multivm/src/versions/vm_latest/tests/mod.rs +++ b/core/lib/multivm/src/versions/vm_latest/tests/mod.rs @@ -12,11 +12,11 @@ use zksync_types::{ bytecode::BytecodeHash, writes::StateDiffRecord, StorageKey, StorageValue, Transaction, H256, U256, }; -use zksync_vm_interface::pubdata::PubdataBuilder; use super::{HistoryEnabled, Vm}; use crate::{ interface::{ + pubdata::{PubdataBuilder, PubdataInput}, storage::{InMemoryStorage, ReadStorage, StorageView, WriteStorage}, CurrentExecutionState, L2BlockEnv, VmExecutionMode, VmExecutionResultAndLogs, }, @@ -46,6 +46,7 @@ mod evm_emulator; mod gas_limit; mod get_used_contracts; mod is_write_initial; +mod l1_messenger; mod l1_tx_execution; mod l2_blocks; mod nonce_holder; @@ -188,6 +189,10 @@ impl TestedVm for TestedLatestVm { let overhead = tx.overhead_gas(); self.push_raw_transaction(tx, overhead, refund, true) } + + fn pubdata_input(&self) -> PubdataInput { + self.bootloader_state.get_pubdata_information().clone() + } } #[derive(Clone, Debug)] diff --git a/core/lib/protobuf_config/Cargo.toml b/core/lib/protobuf_config/Cargo.toml index 92d9bd53978c..2d910430863b 100644 --- a/core/lib/protobuf_config/Cargo.toml +++ b/core/lib/protobuf_config/Cargo.toml @@ -16,6 +16,7 @@ links = "zksync_protobuf_config_proto" serde_json.workspace = true serde_yaml.workspace = true zksync_basic_types.workspace = true +zksync_concurrency.workspace = true zksync_config.workspace = true zksync_protobuf.workspace = true 
zksync_types.workspace = true diff --git a/core/lib/protobuf_config/src/consensus.rs b/core/lib/protobuf_config/src/consensus.rs index 2219b6a82ea8..567224acd4a6 100644 --- a/core/lib/protobuf_config/src/consensus.rs +++ b/core/lib/protobuf_config/src/consensus.rs @@ -1,5 +1,6 @@ use anyhow::Context as _; use zksync_basic_types::L2ChainId; +use zksync_concurrency::time; use zksync_config::configs::consensus::{ AttesterPublicKey, ConsensusConfig, GenesisSpec, Host, NodePublicKey, ProtocolVersion, RpcConfig, ValidatorPublicKey, WeightedAttester, WeightedValidator, @@ -154,6 +155,11 @@ impl ProtoRepr for proto::Config { .context("server_addr")?, public_addr: Host(required(&self.public_addr).context("public_addr")?.clone()), max_payload_size, + view_timeout: self + .view_timeout + .as_ref() + .map(|x| time::Duration::read(x).context("view_timeout")) + .transpose()?, max_batch_size, gossip_dynamic_inbound_limit: required(&self.gossip_dynamic_inbound_limit) .and_then(|x| Ok((*x).try_into()?)) @@ -187,6 +193,7 @@ impl ProtoRepr for proto::Config { server_addr: Some(this.server_addr.to_string()), public_addr: Some(this.public_addr.0.clone()), max_payload_size: Some(this.max_payload_size.try_into().unwrap()), + view_timeout: this.view_timeout.as_ref().map(ProtoFmt::build), max_batch_size: Some(this.max_batch_size.try_into().unwrap()), gossip_dynamic_inbound_limit: Some( this.gossip_dynamic_inbound_limit.try_into().unwrap(), diff --git a/core/lib/protobuf_config/src/en.rs b/core/lib/protobuf_config/src/en.rs index 9d1a39310604..700a1f0a8104 100644 --- a/core/lib/protobuf_config/src/en.rs +++ b/core/lib/protobuf_config/src/en.rs @@ -34,11 +34,6 @@ impl ProtoRepr for proto::ExternalNode { main_node_rate_limit_rps: self .main_node_rate_limit_rps .and_then(|a| NonZeroUsize::new(a as usize)), - gateway_url: self - .gateway_url - .as_ref() - .map(|a| a.parse().context("gateway_url")) - .transpose()?, bridge_addresses_refresh_interval_sec: self 
.bridge_addresses_refresh_interval_sec .and_then(NonZeroU64::new), @@ -57,10 +52,6 @@ impl ProtoRepr for proto::ExternalNode { .into(), ), main_node_rate_limit_rps: this.main_node_rate_limit_rps.map(|a| a.get() as u64), - gateway_url: this - .gateway_url - .as_ref() - .map(|a| a.expose_str().to_string()), bridge_addresses_refresh_interval_sec: this .bridge_addresses_refresh_interval_sec .map(|a| a.get()), diff --git a/core/lib/protobuf_config/src/eth.rs b/core/lib/protobuf_config/src/eth.rs index d4ea1d9f2697..10e80810be57 100644 --- a/core/lib/protobuf_config/src/eth.rs +++ b/core/lib/protobuf_config/src/eth.rs @@ -69,13 +69,6 @@ impl ProtoRepr for proto::Sender { type Type = configs::eth_sender::SenderConfig; fn read(&self) -> anyhow::Result { Ok(Self::Type { - aggregated_proof_sizes: self - .aggregated_proof_sizes - .iter() - .enumerate() - .map(|(i, x)| (*x).try_into().context(i)) - .collect::>() - .context("aggregated_proof_sizes")?, wait_confirmations: self.wait_confirmations, tx_poll_period: *required(&self.tx_poll_period).context("tx_poll_period")?, aggregate_tx_poll_period: *required(&self.aggregate_tx_poll_period) @@ -122,11 +115,6 @@ impl ProtoRepr for proto::Sender { fn build(this: &Self::Type) -> Self { Self { - aggregated_proof_sizes: this - .aggregated_proof_sizes - .iter() - .map(|x| (*x).try_into().unwrap()) - .collect(), wait_confirmations: this.wait_confirmations, tx_poll_period: Some(this.tx_poll_period), aggregate_tx_poll_period: Some(this.aggregate_tx_poll_period), diff --git a/core/lib/protobuf_config/src/genesis.rs b/core/lib/protobuf_config/src/genesis.rs index 7ecc768100fb..469140f7b0c2 100644 --- a/core/lib/protobuf_config/src/genesis.rs +++ b/core/lib/protobuf_config/src/genesis.rs @@ -99,6 +99,7 @@ impl ProtoRepr for proto::Genesis { .and_then(|x| Ok(proto::L1BatchCommitDataGeneratorMode::try_from(*x)?)) .context("l1_batch_commit_data_generator_mode")? 
.parse(), + custom_genesis_state_path: self.custom_genesis_state_path.clone(), }) } @@ -126,6 +127,7 @@ impl ProtoRepr for proto::Genesis { ) .into(), ), + custom_genesis_state_path: this.custom_genesis_state_path.clone(), } } } diff --git a/core/lib/protobuf_config/src/proto/config/en.proto b/core/lib/protobuf_config/src/proto/config/en.proto index 69412704ea0f..a8d304c8289e 100644 --- a/core/lib/protobuf_config/src/proto/config/en.proto +++ b/core/lib/protobuf_config/src/proto/config/en.proto @@ -9,6 +9,6 @@ message ExternalNode { optional uint64 l1_chain_id = 3; // required optional uint64 main_node_rate_limit_rps = 6; // optional optional config.genesis.L1BatchCommitDataGeneratorMode l1_batch_commit_data_generator_mode = 7; // optional, default to rollup - optional string gateway_url = 8; // optional + reserved 8; reserved "gateway_url"; optional uint64 bridge_addresses_refresh_interval_sec = 9; // optional } diff --git a/core/lib/protobuf_config/src/proto/config/eth_sender.proto b/core/lib/protobuf_config/src/proto/config/eth_sender.proto index 6438573e08df..1176afd7c442 100644 --- a/core/lib/protobuf_config/src/proto/config/eth_sender.proto +++ b/core/lib/protobuf_config/src/proto/config/eth_sender.proto @@ -28,7 +28,7 @@ enum PubdataSendingMode { } message Sender { - repeated uint64 aggregated_proof_sizes = 1; // ? 
+ reserved 1; reserved "aggregated_proof_sizes"; optional uint64 wait_confirmations = 2; // optional optional uint64 tx_poll_period = 3; // required; s optional uint64 aggregate_tx_poll_period = 4; // required; s diff --git a/core/lib/protobuf_config/src/proto/config/genesis.proto b/core/lib/protobuf_config/src/proto/config/genesis.proto index e3a9a45366f9..2e9ebc82f25e 100644 --- a/core/lib/protobuf_config/src/proto/config/genesis.proto +++ b/core/lib/protobuf_config/src/proto/config/genesis.proto @@ -29,5 +29,6 @@ message Genesis { optional L1BatchCommitDataGeneratorMode l1_batch_commit_data_generator_mode = 29; // optional, default to rollup optional string genesis_protocol_semantic_version = 12; // optional; optional string evm_emulator_hash = 13; // optional; h256 + optional string custom_genesis_state_path = 14; // optional; reserved 11; reserved "shared_bridge"; } diff --git a/core/lib/protobuf_config/src/proto/config/secrets.proto b/core/lib/protobuf_config/src/proto/config/secrets.proto index 7c9d0f928237..46392920369c 100644 --- a/core/lib/protobuf_config/src/proto/config/secrets.proto +++ b/core/lib/protobuf_config/src/proto/config/secrets.proto @@ -11,6 +11,7 @@ message DatabaseSecrets { message L1Secrets { optional string l1_rpc_url = 1; // required + optional string gateway_rpc_url = 2; // optional } message ConsensusSecrets { diff --git a/core/lib/protobuf_config/src/proto/core/consensus.proto b/core/lib/protobuf_config/src/proto/core/consensus.proto index 9b0d69e7270c..0b915603cc67 100644 --- a/core/lib/protobuf_config/src/proto/core/consensus.proto +++ b/core/lib/protobuf_config/src/proto/core/consensus.proto @@ -87,6 +87,9 @@ message Config { // Maximal allowed size of the sync batches. optional uint64 max_batch_size = 10; // required; bytes + // View timeout for the consensus protocol. + optional std.Duration view_timeout = 13; // optional + // Inbound connections that should be unconditionally accepted on the gossip network. 
repeated string gossip_static_inbound = 5; // required; NodePublicKey diff --git a/core/lib/protobuf_config/src/secrets.rs b/core/lib/protobuf_config/src/secrets.rs index f5bc10a3e340..e0f41e325b79 100644 --- a/core/lib/protobuf_config/src/secrets.rs +++ b/core/lib/protobuf_config/src/secrets.rs @@ -86,12 +86,22 @@ impl ProtoRepr for proto::L1Secrets { fn read(&self) -> anyhow::Result { Ok(Self::Type { l1_rpc_url: SensitiveUrl::from_str(required(&self.l1_rpc_url).context("l1_rpc_url")?)?, + gateway_rpc_url: self + .gateway_rpc_url + .clone() + .map(|url| SensitiveUrl::from_str(&url)) + .transpose() + .context("gateway_rpc_url")?, }) } fn build(this: &Self::Type) -> Self { Self { l1_rpc_url: Some(this.l1_rpc_url.expose_str().to_string()), + gateway_rpc_url: this + .gateway_rpc_url + .as_ref() + .map(|url| url.expose_url().to_string()), } } } diff --git a/core/lib/snapshots_applier/src/lib.rs b/core/lib/snapshots_applier/src/lib.rs index 5452008987c5..142ba8ed3c85 100644 --- a/core/lib/snapshots_applier/src/lib.rs +++ b/core/lib/snapshots_applier/src/lib.rs @@ -743,16 +743,17 @@ impl<'a> SnapshotsApplier<'a> { // This allows to not deal with the corner cases when a node was recovered from a snapshot, but its pruning log is empty. 
storage_transaction .pruning_dal() - .soft_prune_batches_range( + .insert_soft_pruning_log( this.applied_snapshot_status.l1_batch_number, this.applied_snapshot_status.l2_block_number, ) .await?; storage_transaction .pruning_dal() - .hard_prune_batches_range( + .insert_hard_pruning_log( this.applied_snapshot_status.l1_batch_number, this.applied_snapshot_status.l2_block_number, + this.applied_snapshot_status.l1_batch_root_hash, ) .await?; } diff --git a/core/lib/snapshots_applier/src/tests/utils.rs b/core/lib/snapshots_applier/src/tests/utils.rs index d42b5f56bf02..99b4e05e0164 100644 --- a/core/lib/snapshots_applier/src/tests/utils.rs +++ b/core/lib/snapshots_applier/src/tests/utils.rs @@ -210,10 +210,13 @@ fn block_details_base(hash: H256) -> api::BlockDetailsBase { status: api::BlockStatus::Sealed, commit_tx_hash: None, committed_at: None, + commit_chain_id: None, prove_tx_hash: None, proven_at: None, + prove_chain_id: None, execute_tx_hash: None, executed_at: None, + execute_chain_id: None, l1_gas_price: 0, l2_fair_gas_price: 0, fair_pubdata_price: None, diff --git a/core/lib/test_contracts/contracts/transfer/ERC20.sol b/core/lib/test_contracts/contracts/transfer/ERC20.sol new file mode 100644 index 000000000000..aad741e66a56 --- /dev/null +++ b/core/lib/test_contracts/contracts/transfer/ERC20.sol @@ -0,0 +1,11 @@ +// SPDX-License-Identifier: UNLICENSED + +pragma solidity ^0.8.0; + +import "@openzeppelin/contracts/token/ERC20/ERC20.sol"; + +contract TestERC20 is ERC20("Test", "TEST") { + constructor(uint256 _toMint) { + _mint(msg.sender, _toMint); + } +} diff --git a/core/lib/test_contracts/src/contracts.rs b/core/lib/test_contracts/src/contracts.rs index 09a0535824df..36d758c46de2 100644 --- a/core/lib/test_contracts/src/contracts.rs +++ b/core/lib/test_contracts/src/contracts.rs @@ -171,6 +171,13 @@ impl TestContract { &CONTRACT } + /// Returns a test ERC20 token implementation. 
+ pub fn test_erc20() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::transfer::TestERC20)); + &CONTRACT + } + /// Returns a mock version of `ContractDeployer`. pub fn mock_deployer() -> &'static Self { static CONTRACT: Lazy = diff --git a/core/lib/types/src/aggregated_operations.rs b/core/lib/types/src/aggregated_operations.rs index dadfad265cb2..d68610ffeb41 100644 --- a/core/lib/types/src/aggregated_operations.rs +++ b/core/lib/types/src/aggregated_operations.rs @@ -1,6 +1,8 @@ use std::{fmt, str::FromStr}; -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] pub enum AggregatedActionType { Commit, PublishProofOnchain, @@ -39,3 +41,11 @@ impl FromStr for AggregatedActionType { } } } + +/// Additional gas cost of processing `Execute` operation per batch. +/// It's applicable iff SL is Ethereum. +pub const L1_BATCH_EXECUTE_BASE_COST: u32 = 30_000; + +/// Additional gas cost of processing `Execute` operation per L1->L2 tx. +/// It's applicable iff SL is Ethereum. +pub const L1_OPERATION_EXECUTE_COST: u32 = 12_500; diff --git a/core/lib/types/src/api/mod.rs b/core/lib/types/src/api/mod.rs index b5d2b3276527..4a45756823c2 100644 --- a/core/lib/types/src/api/mod.rs +++ b/core/lib/types/src/api/mod.rs @@ -5,7 +5,7 @@ use serde_with::{hex::Hex, serde_as}; use strum::Display; use zksync_basic_types::{ web3::{AccessList, Bytes, Index}, - Bloom, L1BatchNumber, H160, H256, H64, U256, U64, + Bloom, L1BatchNumber, SLChainId, H160, H256, H64, U256, U64, }; use zksync_contracts::BaseSystemContractsHashes; @@ -196,6 +196,13 @@ pub struct L2ToL1LogProof { pub root: H256, } +#[derive(Debug, Serialize, Deserialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct ChainAggProof { + pub chain_id_leaf_proof: Vec, + pub chain_id_leaf_proof_mask: u64, +} + /// A struct with the two default bridge contracts. 
#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] @@ -466,6 +473,45 @@ impl Log { } } +impl From for zksync_basic_types::web3::Log { + fn from(log: Log) -> Self { + zksync_basic_types::web3::Log { + address: log.address, + topics: log.topics, + data: log.data, + block_hash: log.block_hash, + block_number: log.block_number, + transaction_hash: log.transaction_hash, + transaction_index: log.transaction_index, + log_index: log.log_index, + transaction_log_index: log.transaction_log_index, + log_type: log.log_type, + removed: log.removed, + block_timestamp: log.block_timestamp, + } + } +} + +impl From for Log { + fn from(log: zksync_basic_types::web3::Log) -> Self { + Log { + address: log.address, + topics: log.topics, + data: log.data, + block_hash: log.block_hash, + block_number: log.block_number, + transaction_hash: log.transaction_hash, + transaction_index: log.transaction_index, + log_index: log.log_index, + transaction_log_index: log.transaction_log_index, + log_type: log.log_type, + removed: log.removed, + block_timestamp: log.block_timestamp, + l1_batch_number: None, + } + } +} + /// A log produced by a transaction. #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] @@ -819,10 +865,13 @@ pub struct BlockDetailsBase { pub status: BlockStatus, pub commit_tx_hash: Option, pub committed_at: Option>, + pub commit_chain_id: Option, pub prove_tx_hash: Option, pub proven_at: Option>, + pub prove_chain_id: Option, pub execute_tx_hash: Option, pub executed_at: Option>, + pub execute_chain_id: Option, pub l1_gas_price: u64, pub l2_fair_gas_price: u64, // Cost of publishing one byte (in wei). 
@@ -878,6 +927,7 @@ pub struct TeeProof { #[serde_as(as = "Option")] pub proof: Option>, pub proved_at: DateTime, + pub status: String, #[serde_as(as = "Option")] pub attestation: Option>, } diff --git a/core/lib/types/src/block.rs b/core/lib/types/src/block.rs index 804da61b7295..c4fd3306f2d5 100644 --- a/core/lib/types/src/block.rs +++ b/core/lib/types/src/block.rs @@ -1,5 +1,3 @@ -use std::{fmt, ops}; - use serde::{Deserialize, Serialize}; use zksync_basic_types::{commitment::PubdataParams, Address, Bloom, BloomInput, H256, U256}; use zksync_contracts::BaseSystemContractsHashes; @@ -177,51 +175,6 @@ impl L1BatchHeader { } } -#[derive(Clone, Copy, Eq, PartialEq, Default)] -pub struct BlockGasCount { - pub commit: u32, - pub prove: u32, - pub execute: u32, -} - -impl fmt::Debug for BlockGasCount { - fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - formatter, - "c:{}/p:{}/e:{}", - self.commit, self.prove, self.execute - ) - } -} - -impl BlockGasCount { - pub fn any_field_greater_than(&self, bound: u32) -> bool { - self.commit > bound || self.prove > bound || self.execute > bound - } -} - -impl ops::Add for BlockGasCount { - type Output = Self; - - fn add(self, rhs: Self) -> Self::Output { - Self { - commit: self.commit + rhs.commit, - prove: self.prove + rhs.prove, - execute: self.execute + rhs.execute, - } - } -} - -impl ops::AddAssign for BlockGasCount { - fn add_assign(&mut self, other: Self) { - *self = Self { - commit: self.commit + other.commit, - prove: self.prove + other.prove, - execute: self.execute + other.execute, - }; - } -} - /// Hasher of L2 block contents used by the VM. 
#[derive(Debug)] pub struct L2BlockHasher { diff --git a/core/lib/types/src/commitment/mod.rs b/core/lib/types/src/commitment/mod.rs index 1eba7e7a9ec0..786ce03e671d 100644 --- a/core/lib/types/src/commitment/mod.rs +++ b/core/lib/types/src/commitment/mod.rs @@ -21,6 +21,7 @@ use zksync_system_constants::{ use crate::{ blob::num_blobs_required, block::{L1BatchHeader, L1BatchTreeData}, + ethabi, l2_to_l1_log::{ l2_to_l1_logs_tree_size, parse_system_logs_for_blob_hashes_pre_gateway, L2ToL1Log, SystemL2ToL1Log, UserL2ToL1Log, @@ -73,6 +74,31 @@ pub fn serialize_commitments(values: &[I]) -> Vec { input } +#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)] +pub struct PriorityOpsMerkleProof { + pub left_path: Vec, + pub right_path: Vec, + pub hashes: Vec, +} + +impl PriorityOpsMerkleProof { + pub fn into_token(&self) -> ethabi::Token { + let array_into_token = |array: &[H256]| { + ethabi::Token::Array( + array + .iter() + .map(|hash| ethabi::Token::FixedBytes(hash.as_bytes().to_vec())) + .collect(), + ) + }; + ethabi::Token::Tuple(vec![ + array_into_token(&self.left_path), + array_into_token(&self.right_path), + array_into_token(&self.hashes), + ]) + } +} + /// Precalculated data for the L1 batch that was used in commitment and L1 transaction. #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct L1BatchMetadata { diff --git a/core/lib/types/src/eth_sender.rs b/core/lib/types/src/eth_sender.rs index 12a5a5a8fb13..e8e12993054f 100644 --- a/core/lib/types/src/eth_sender.rs +++ b/core/lib/types/src/eth_sender.rs @@ -47,7 +47,7 @@ pub struct EthTx { pub raw_tx: Vec, pub tx_type: AggregatedActionType, pub created_at_timestamp: u64, - pub predicted_gas_cost: u64, + pub predicted_gas_cost: Option, /// If this field is `Some` then it contains address of a custom operator that has sent /// this transaction. If it is set to `None` this transaction was sent by the main operator. pub from_addr: Option
, diff --git a/core/lib/types/src/l2_to_l1_log.rs b/core/lib/types/src/l2_to_l1_log.rs index 1b84a79024c7..be4275ac5fb5 100644 --- a/core/lib/types/src/l2_to_l1_log.rs +++ b/core/lib/types/src/l2_to_l1_log.rs @@ -79,6 +79,26 @@ pub fn l2_to_l1_logs_tree_size(protocol_version: ProtocolVersionId) -> usize { } } +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BatchAndChainMerklePath { + pub batch_proof_len: u32, + pub proof: Vec, +} + +pub const LOG_PROOF_SUPPORTED_METADATA_VERSION: u8 = 1; + +// keccak256("zkSync:BatchLeaf") +pub const BATCH_LEAF_PADDING: H256 = H256([ + 0xd8, 0x2f, 0xec, 0x4a, 0x37, 0xcb, 0xdc, 0x47, 0xf1, 0xe5, 0xcc, 0x4a, 0xd6, 0x4d, 0xea, 0xcf, + 0x34, 0xa4, 0x8e, 0x6f, 0x7c, 0x61, 0xfa, 0x5b, 0x68, 0xfd, 0x58, 0xe5, 0x43, 0x25, 0x9c, 0xf4, +]); + +// keccak256("zkSync:ChainIdLeaf") +pub const CHAIN_ID_LEAF_PADDING: H256 = H256([ + 0x39, 0xbc, 0x69, 0x36, 0x3b, 0xb9, 0xe2, 0x6c, 0xf1, 0x42, 0x40, 0xde, 0x4e, 0x22, 0x56, 0x9e, + 0x95, 0xcf, 0x17, 0x5c, 0xfb, 0xcf, 0x1a, 0xde, 0x1a, 0x47, 0xa2, 0x53, 0xb4, 0xbf, 0x7f, 0x61, +]); + /// Returns the blob hashes parsed out from the system logs pub fn parse_system_logs_for_blob_hashes_pre_gateway( protocol_version: &ProtocolVersionId, @@ -117,6 +137,7 @@ pub fn parse_system_logs_for_blob_hashes_pre_gateway( #[cfg(test)] mod tests { + use zksync_basic_types::web3::keccak256; use zksync_system_constants::L1_MESSENGER_ADDRESS; use super::*; @@ -142,4 +163,13 @@ mod tests { assert_eq!(expected_log_bytes, log.to_bytes()); } + + #[test] + fn check_padding_constants() { + let batch_leaf_padding_expected = keccak256("zkSync:BatchLeaf".as_bytes()); + assert_eq!(batch_leaf_padding_expected, BATCH_LEAF_PADDING.0); + + let chain_id_leaf_padding_expected = keccak256("zkSync:ChainIdLeaf".as_bytes()); + assert_eq!(chain_id_leaf_padding_expected, CHAIN_ID_LEAF_PADDING.0); + } } diff --git a/core/lib/types/src/storage/mod.rs b/core/lib/types/src/storage/mod.rs index 84a29ed8c039..0f851857e6a4 100644 --- 
a/core/lib/types/src/storage/mod.rs +++ b/core/lib/types/src/storage/mod.rs @@ -91,6 +91,11 @@ pub fn get_system_context_key(key: H256) -> StorageKey { StorageKey::new(system_context, key) } +pub fn get_message_root_log_key(key: H256) -> StorageKey { + let message_root = AccountTreeId::new(L2_MESSAGE_ROOT_ADDRESS); + StorageKey::new(message_root, key) +} + pub fn get_deployer_key(key: H256) -> StorageKey { let deployer_contract = AccountTreeId::new(CONTRACT_DEPLOYER_ADDRESS); StorageKey::new(deployer_contract, key) diff --git a/core/lib/vm_interface/src/pubdata/mod.rs b/core/lib/vm_interface/src/pubdata/mod.rs index f901687b5fa6..54d7ebf5e1b0 100644 --- a/core/lib/vm_interface/src/pubdata/mod.rs +++ b/core/lib/vm_interface/src/pubdata/mod.rs @@ -13,7 +13,7 @@ use zksync_types::{ /// bytes32 value; /// } /// ``` -#[derive(Debug, Default, Clone, PartialEq)] +#[derive(Debug, Default, Clone, PartialEq, Eq)] pub struct L1MessengerL2ToL1Log { pub l2_shard_id: u8, pub is_service: bool, @@ -63,7 +63,7 @@ impl From for L2ToL1Log { } /// Struct based on which the pubdata blob is formed -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone, Default, PartialEq, Eq)] pub struct PubdataInput { pub user_logs: Vec, pub l2_to_l1_messages: Vec>, diff --git a/core/lib/vm_interface/src/types/outputs/execution_result.rs b/core/lib/vm_interface/src/types/outputs/execution_result.rs index 9bb784fbf71c..e95721dbb461 100644 --- a/core/lib/vm_interface/src/types/outputs/execution_result.rs +++ b/core/lib/vm_interface/src/types/outputs/execution_result.rs @@ -200,6 +200,7 @@ impl VmExecutionResultAndLogs { published_bytecode_bytes, l2_l1_long_messages, l2_to_l1_logs: self.logs.total_l2_to_l1_logs_count(), + user_l2_to_l1_logs: self.logs.user_l2_to_l1_logs.len(), contracts_used: self.statistics.contracts_used, contracts_deployed, vm_events: self.logs.events.len(), diff --git a/core/lib/vm_interface/src/types/outputs/statistic.rs b/core/lib/vm_interface/src/types/outputs/statistic.rs index 
f8e3851c8321..fdb89c08f498 100644 --- a/core/lib/vm_interface/src/types/outputs/statistic.rs +++ b/core/lib/vm_interface/src/types/outputs/statistic.rs @@ -177,6 +177,7 @@ pub struct TransactionExecutionMetrics { pub published_bytecode_bytes: usize, pub l2_l1_long_messages: usize, pub l2_l1_logs: usize, + pub user_l2_l1_logs: usize, pub contracts_used: usize, pub contracts_deployed: u16, pub vm_events: usize, @@ -201,6 +202,7 @@ impl Default for TransactionExecutionMetrics { published_bytecode_bytes: 0, l2_l1_long_messages: 0, l2_l1_logs: 0, + user_l2_l1_logs: 0, contracts_used: 0, contracts_deployed: 0, vm_events: 0, @@ -221,6 +223,7 @@ pub struct VmExecutionMetrics { pub published_bytecode_bytes: usize, pub l2_l1_long_messages: usize, pub l2_to_l1_logs: usize, + pub user_l2_to_l1_logs: usize, pub contracts_used: usize, pub contracts_deployed: u16, pub vm_events: usize, @@ -238,6 +241,7 @@ impl VmExecutionMetrics { published_bytecode_bytes: tx_metrics.published_bytecode_bytes, l2_l1_long_messages: tx_metrics.l2_l1_long_messages, l2_to_l1_logs: tx_metrics.l2_l1_logs, + user_l2_to_l1_logs: tx_metrics.user_l2_l1_logs, contracts_deployed: tx_metrics.contracts_deployed, contracts_used: tx_metrics.contracts_used, gas_used: tx_metrics.gas_used, @@ -274,6 +278,7 @@ impl ops::Add for VmExecutionMetrics { contracts_used: self.contracts_used + other.contracts_used, l2_l1_long_messages: self.l2_l1_long_messages + other.l2_l1_long_messages, l2_to_l1_logs: self.l2_to_l1_logs + other.l2_to_l1_logs, + user_l2_to_l1_logs: self.user_l2_to_l1_logs + other.user_l2_to_l1_logs, gas_used: self.gas_used + other.gas_used, vm_events: self.vm_events + other.vm_events, storage_logs: self.storage_logs + other.storage_logs, diff --git a/core/lib/web3_decl/src/namespaces/eth.rs b/core/lib/web3_decl/src/namespaces/eth.rs index 40cb6300cffa..4db58a06c59d 100644 --- a/core/lib/web3_decl/src/namespaces/eth.rs +++ b/core/lib/web3_decl/src/namespaces/eth.rs @@ -183,7 +183,7 @@ pub trait EthNamespace 
{ &self, block_count: U64Number, newest_block: BlockNumber, - reward_percentiles: Vec, + reward_percentiles: Option>, ) -> RpcResult; #[method(name = "maxPriorityFeePerGas")] diff --git a/core/lib/web3_decl/src/namespaces/unstable.rs b/core/lib/web3_decl/src/namespaces/unstable.rs index e6b36dd26846..f666f02f2811 100644 --- a/core/lib/web3_decl/src/namespaces/unstable.rs +++ b/core/lib/web3_decl/src/namespaces/unstable.rs @@ -2,9 +2,9 @@ use jsonrpsee::core::RpcResult; use jsonrpsee::proc_macros::rpc; use zksync_types::{ - api::{TeeProof, TransactionExecutionInfo}, + api::{ChainAggProof, TeeProof, TransactionExecutionInfo}, tee_types::TeeType, - L1BatchNumber, H256, + L1BatchNumber, L2ChainId, H256, }; use crate::client::{ForWeb3Network, L2}; @@ -31,4 +31,11 @@ pub trait UnstableNamespace { l1_batch_number: L1BatchNumber, tee_type: Option, ) -> RpcResult>; + + #[method(name = "getChainLogProof")] + async fn get_chain_log_proof( + &self, + l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, + ) -> RpcResult>; } diff --git a/core/lib/web3_decl/src/types.rs b/core/lib/web3_decl/src/types.rs index 36ee48a54a1b..375e5c4d992c 100644 --- a/core/lib/web3_decl/src/types.rs +++ b/core/lib/web3_decl/src/types.rs @@ -5,19 +5,16 @@ //! //! These "extensions" are required to provide more ZKsync-specific information while remaining Web3-compilant. 
-use core::{ - convert::{TryFrom, TryInto}, - fmt, - marker::PhantomData, -}; +use core::convert::{TryFrom, TryInto}; use rlp::Rlp; -use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; +use serde::{Deserialize, Serialize}; pub use zksync_types::{ api::{Block, BlockNumber, Log, TransactionReceipt, TransactionRequest}, ethabi, web3::{ - BlockHeader, Bytes, CallRequest, FeeHistory, Index, SyncState, TraceFilter, U64Number, Work, + BlockHeader, Bytes, CallRequest, FeeHistory, Index, SyncState, TraceFilter, U64Number, + ValueOrArray, Work, }, Address, Transaction, H160, H256, H64, U256, U64, }; @@ -101,71 +98,6 @@ pub enum FilterChanges { Empty([u8; 0]), } -/// Either value or array of values. -/// -/// A value must serialize into a string. -#[derive(Default, Debug, PartialEq, Clone)] -pub struct ValueOrArray(pub Vec); - -impl From for ValueOrArray { - fn from(value: T) -> Self { - Self(vec![value]) - } -} - -impl Serialize for ValueOrArray { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - match self.0.len() { - 0 => serializer.serialize_none(), - 1 => Serialize::serialize(&self.0[0], serializer), - _ => Serialize::serialize(&self.0, serializer), - } - } -} - -impl<'de, T: Deserialize<'de>> Deserialize<'de> for ValueOrArray { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct Visitor(PhantomData); - - impl<'de, T: Deserialize<'de>> de::Visitor<'de> for Visitor { - type Value = ValueOrArray; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("string value or sequence of values") - } - - fn visit_str(self, value: &str) -> Result - where - E: de::Error, - { - use serde::de::IntoDeserializer; - - Deserialize::deserialize(value.into_deserializer()) - .map(|value| ValueOrArray(vec![value])) - } - - fn visit_seq(self, mut visitor: S) -> Result - where - S: de::SeqAccess<'de>, - { - let mut elements = 
Vec::with_capacity(visitor.size_hint().unwrap_or(1)); - while let Some(element) = visitor.next_element()? { - elements.push(element); - } - Ok(ValueOrArray(elements)) - } - } - - deserializer.deserialize_any(Visitor(PhantomData)) - } -} - /// Filter #[derive(Default, Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct Filter { @@ -185,6 +117,28 @@ pub struct Filter { pub block_hash: Option, } +impl From for Filter { + fn from(value: zksync_types::web3::Filter) -> Self { + let convert_block_number = |b: zksync_types::web3::BlockNumber| match b { + zksync_types::web3::BlockNumber::Finalized => BlockNumber::Finalized, + zksync_types::web3::BlockNumber::Safe => BlockNumber::Finalized, + zksync_types::web3::BlockNumber::Latest => BlockNumber::Latest, + zksync_types::web3::BlockNumber::Earliest => BlockNumber::Earliest, + zksync_types::web3::BlockNumber::Pending => BlockNumber::Pending, + zksync_types::web3::BlockNumber::Number(n) => BlockNumber::Number(n), + }; + let from_block = value.from_block.map(convert_block_number); + let to_block = value.to_block.map(convert_block_number); + Filter { + from_block, + to_block, + address: value.address, + topics: value.topics, + block_hash: value.block_hash, + } + } +} + /// Filter Builder #[derive(Default, Clone)] pub struct FilterBuilder { diff --git a/core/node/api_server/Cargo.toml b/core/node/api_server/Cargo.toml index debabb8d3666..794a0fb275f7 100644 --- a/core/node/api_server/Cargo.toml +++ b/core/node/api_server/Cargo.toml @@ -11,6 +11,7 @@ keywords.workspace = true categories.workspace = true [dependencies] +zksync_crypto_primitives.workspace = true zksync_config.workspace = true zksync_consensus_roles.workspace = true zksync_contracts.workspace = true diff --git a/core/node/api_server/src/execution_sandbox/mod.rs b/core/node/api_server/src/execution_sandbox/mod.rs index b560d161ab52..bcba200f5ebc 100644 --- a/core/node/api_server/src/execution_sandbox/mod.rs +++ 
b/core/node/api_server/src/execution_sandbox/mod.rs @@ -228,9 +228,8 @@ impl BlockStartInfo { storage: &mut Connection<'_, Core>, ) -> anyhow::Result { let cached_pruning_info = self.get_pruning_info(storage).await?; - let last_block = cached_pruning_info.last_soft_pruned_l2_block; - if let Some(L2BlockNumber(last_block)) = last_block { - return Ok(L2BlockNumber(last_block + 1)); + if let Some(pruned) = cached_pruning_info.last_soft_pruned { + return Ok(pruned.l2_block + 1); } Ok(L2BlockNumber(0)) } @@ -240,9 +239,8 @@ impl BlockStartInfo { storage: &mut Connection<'_, Core>, ) -> anyhow::Result { let cached_pruning_info = self.get_pruning_info(storage).await?; - let last_batch = cached_pruning_info.last_soft_pruned_l1_batch; - if let Some(L1BatchNumber(last_block)) = last_batch { - return Ok(L1BatchNumber(last_block + 1)); + if let Some(pruned) = cached_pruning_info.last_soft_pruned { + return Ok(pruned.l1_batch + 1); } Ok(L1BatchNumber(0)) } diff --git a/core/node/api_server/src/execution_sandbox/vm_metrics.rs b/core/node/api_server/src/execution_sandbox/vm_metrics.rs index 282d9bdf1b77..2f6d21f3a875 100644 --- a/core/node/api_server/src/execution_sandbox/vm_metrics.rs +++ b/core/node/api_server/src/execution_sandbox/vm_metrics.rs @@ -164,6 +164,7 @@ pub(super) fn collect_tx_execution_metrics( published_bytecode_bytes, l2_l1_long_messages, l2_l1_logs: result.logs.total_l2_to_l1_logs_count(), + user_l2_l1_logs: result.logs.user_l2_to_l1_logs.len(), contracts_used: result.statistics.contracts_used, contracts_deployed, vm_events: result.logs.events.len(), diff --git a/core/node/api_server/src/tx_sender/mod.rs b/core/node/api_server/src/tx_sender/mod.rs index 91fb84ab8f17..4c98fc7c455c 100644 --- a/core/node/api_server/src/tx_sender/mod.rs +++ b/core/node/api_server/src/tx_sender/mod.rs @@ -588,7 +588,7 @@ impl TxSender { } // For now, both L1 gas price and pubdata price are scaled with the same coefficient - async fn scaled_batch_fee_input(&self) -> anyhow::Result { 
+ pub(crate) async fn scaled_batch_fee_input(&self) -> anyhow::Result { self.0 .batch_fee_input_provider .get_batch_fee_input_scaled( @@ -671,7 +671,7 @@ impl TxSender { // but the API assumes we are post boojum. In this situation we will determine a tx as being executable but the StateKeeper will // still reject them as it's not. let protocol_version = ProtocolVersionId::latest(); - let seal_data = SealData::for_transaction(transaction, tx_metrics, protocol_version); + let seal_data = SealData::for_transaction(transaction, tx_metrics); if let Some(reason) = self .0 .sealer diff --git a/core/node/api_server/src/tx_sender/tests/send_tx.rs b/core/node/api_server/src/tx_sender/tests/send_tx.rs index c861f04a832e..c0b02e45ad89 100644 --- a/core/node/api_server/src/tx_sender/tests/send_tx.rs +++ b/core/node/api_server/src/tx_sender/tests/send_tx.rs @@ -6,7 +6,7 @@ use assert_matches::assert_matches; use chrono::NaiveDateTime; use test_casing::test_casing; use zksync_multivm::interface::{tracer::ValidationTraces, ExecutionResult}; -use zksync_node_fee_model::MockBatchFeeParamsProvider; +use zksync_node_fee_model::{BatchFeeModelInputProvider, MockBatchFeeParamsProvider}; use zksync_node_test_utils::create_l2_transaction; use zksync_types::K256PrivateKey; @@ -22,10 +22,9 @@ async fn submitting_tx_requires_one_connection() { .unwrap(); let l2_chain_id = L2ChainId::default(); - let fee_input = MockBatchFeeParamsProvider::default() - .get_batch_fee_input_scaled(1.0, 1.0) - .await - .unwrap(); + let fee_params_provider: &dyn BatchFeeModelInputProvider = + &MockBatchFeeParamsProvider::default(); + let fee_input = fee_params_provider.get_batch_fee_input().await.unwrap(); let (base_fee, gas_per_pubdata) = derive_base_fee_and_gas_per_pubdata(fee_input, ProtocolVersionId::latest().into()); let tx = create_l2_transaction(base_fee, gas_per_pubdata); @@ -130,10 +129,9 @@ async fn fee_validation_errors() { let l2_chain_id = L2ChainId::default(); let tx_executor = 
SandboxExecutor::mock(MockOneshotExecutor::default()).await; let (tx_sender, _) = create_test_tx_sender(pool.clone(), l2_chain_id, tx_executor).await; - let fee_input = MockBatchFeeParamsProvider::default() - .get_batch_fee_input_scaled(1.0, 1.0) - .await - .unwrap(); + let fee_params_provider: &dyn BatchFeeModelInputProvider = + &MockBatchFeeParamsProvider::default(); + let fee_input = fee_params_provider.get_batch_fee_input().await.unwrap(); let (base_fee, gas_per_pubdata) = derive_base_fee_and_gas_per_pubdata(fee_input, ProtocolVersionId::latest().into()); let tx = create_l2_transaction(base_fee, gas_per_pubdata); @@ -322,10 +320,9 @@ async fn submit_tx_with_validation_traces(actual_range: Range, expected_ran .unwrap(); let l2_chain_id = L2ChainId::default(); - let fee_input = MockBatchFeeParamsProvider::default() - .get_batch_fee_input_scaled(1.0, 1.0) - .await - .unwrap(); + let fee_params_provider: &dyn BatchFeeModelInputProvider = + &MockBatchFeeParamsProvider::default(); + let fee_input = fee_params_provider.get_batch_fee_input().await.unwrap(); let (base_fee, gas_per_pubdata) = derive_base_fee_and_gas_per_pubdata(fee_input, ProtocolVersionId::latest().into()); let tx = create_l2_transaction(base_fee, gas_per_pubdata); diff --git a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/eth.rs b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/eth.rs index 342756013752..93f0205c77fa 100644 --- a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/eth.rs +++ b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/eth.rs @@ -262,11 +262,15 @@ impl EthNamespaceServer for EthNamespace { &self, block_count: U64Number, newest_block: BlockNumber, - reward_percentiles: Vec, + reward_percentiles: Option>, ) -> RpcResult { - self.fee_history_impl(block_count.into(), newest_block, reward_percentiles) - .await - .map_err(|err| self.current_method().map_err(err)) + self.fee_history_impl( + block_count.into(), + newest_block, + 
reward_percentiles.unwrap_or_default(), + ) + .await + .map_err(|err| self.current_method().map_err(err)) } async fn max_priority_fee_per_gas(&self) -> RpcResult { diff --git a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs index 91330aa7d949..cfa8c84b05b0 100644 --- a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs +++ b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs @@ -1,7 +1,7 @@ use zksync_types::{ - api::{TeeProof, TransactionExecutionInfo}, + api::{ChainAggProof, TeeProof, TransactionExecutionInfo}, tee_types::TeeType, - L1BatchNumber, H256, + L1BatchNumber, L2ChainId, H256, }; use zksync_web3_decl::{ jsonrpsee::core::{async_trait, RpcResult}, @@ -30,4 +30,14 @@ impl UnstableNamespaceServer for UnstableNamespace { .await .map_err(|err| self.current_method().map_err(err)) } + + async fn get_chain_log_proof( + &self, + l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, + ) -> RpcResult> { + self.get_chain_log_proof_impl(l1_batch_number, chain_id) + .await + .map_err(|err| self.current_method().map_err(err)) + } } diff --git a/core/node/api_server/src/web3/namespaces/debug.rs b/core/node/api_server/src/web3/namespaces/debug.rs index 4fd32c1b5223..8e72f5b45991 100644 --- a/core/node/api_server/src/web3/namespaces/debug.rs +++ b/core/node/api_server/src/web3/namespaces/debug.rs @@ -96,16 +96,22 @@ impl DebugNamespace { CallType::NearCall => unreachable!("We have to filter our near calls before"), }; - let (result, error) = if let Some(error) = call.revert_reason { - (None, Some(error)) - } else { - ( + let (result, error) = match (call.revert_reason, call.error) { + (Some(revert_reason), _) => { + // If revert_reason exists, it takes priority over VM error + (None, Some(revert_reason)) + } + (None, Some(vm_error)) => { + // If no revert_reason but VM error exists + (None, Some(vm_error)) + } + (None, None) => ( 
Some(CallResult { output: web3::Bytes::from(call.output), gas_used: U256::from(call.gas_used), }), None, - ) + ), }; calls.push(DebugCallFlat { @@ -244,12 +250,7 @@ impl DebugNamespace { // It is important to drop a DB connection before calling the provider, since it acquires a connection internally // on the main node. drop(connection); - let scale_factor = self.state.api_config.estimate_gas_scale_factor; - let fee_input_provider = &self.state.tx_sender.0.batch_fee_input_provider; - // For now, the same scaling is used for both the L1 gas price and the pubdata price - fee_input_provider - .get_batch_fee_input_scaled(scale_factor, scale_factor) - .await? + self.state.tx_sender.scaled_batch_fee_input().await? } else { let fee_input = block_args.historical_fee_input(&mut connection).await?; drop(connection); diff --git a/core/node/api_server/src/web3/namespaces/en.rs b/core/node/api_server/src/web3/namespaces/en.rs index a09a0cb92fc7..f990144a4acd 100644 --- a/core/node/api_server/src/web3/namespaces/en.rs +++ b/core/node/api_server/src/web3/namespaces/en.rs @@ -151,18 +151,18 @@ impl EnNamespace { Ok(self .state .api_config - .bridgehub_proxy_addr + .l1_bridgehub_proxy_addr .map(|bridgehub_proxy_addr| EcosystemContracts { bridgehub_proxy_addr, state_transition_proxy_addr: self .state .api_config - .state_transition_proxy_addr + .l1_state_transition_proxy_addr .unwrap(), transparent_proxy_admin_addr: self .state .api_config - .transparent_proxy_admin_addr + .l1_transparent_proxy_admin_addr .unwrap(), }) .context("Shared bridge doesn't supported")?) 
@@ -222,6 +222,8 @@ impl EnNamespace { .state .api_config .l1_batch_commit_data_generator_mode, + // external node should initialise itself from a snapshot + custom_genesis_state_path: None, }; Ok(config) } diff --git a/core/node/api_server/src/web3/namespaces/unstable.rs b/core/node/api_server/src/web3/namespaces/unstable.rs deleted file mode 100644 index 783088cdc36a..000000000000 --- a/core/node/api_server/src/web3/namespaces/unstable.rs +++ /dev/null @@ -1,62 +0,0 @@ -use chrono::{DateTime, Utc}; -use zksync_dal::{CoreDal, DalError}; -use zksync_types::{ - api::{TeeProof, TransactionExecutionInfo}, - tee_types::TeeType, - L1BatchNumber, -}; -use zksync_web3_decl::{error::Web3Error, types::H256}; - -use crate::web3::{backend_jsonrpsee::MethodTracer, RpcState}; - -#[derive(Debug)] -pub(crate) struct UnstableNamespace { - state: RpcState, -} - -impl UnstableNamespace { - pub fn new(state: RpcState) -> Self { - Self { state } - } - - pub(crate) fn current_method(&self) -> &MethodTracer { - &self.state.current_method - } - - pub async fn transaction_execution_info_impl( - &self, - hash: H256, - ) -> Result, Web3Error> { - let mut storage = self.state.acquire_connection().await?; - Ok(storage - .transactions_web3_dal() - .get_unstable_transaction_execution_info(hash) - .await - .map_err(DalError::generalize)? - .map(|execution_info| TransactionExecutionInfo { execution_info })) - } - - pub async fn get_tee_proofs_impl( - &self, - l1_batch_number: L1BatchNumber, - tee_type: Option, - ) -> Result, Web3Error> { - let mut storage = self.state.acquire_connection().await?; - Ok(storage - .tee_proof_generation_dal() - .get_tee_proofs(l1_batch_number, tee_type) - .await - .map_err(DalError::generalize)? 
- .into_iter() - .map(|proof| TeeProof { - l1_batch_number, - tee_type, - pubkey: proof.pubkey, - signature: proof.signature, - proof: proof.proof, - proved_at: DateTime::::from_naive_utc_and_offset(proof.updated_at, Utc), - attestation: proof.attestation, - }) - .collect::>()) - } -} diff --git a/core/node/api_server/src/web3/namespaces/unstable/mod.rs b/core/node/api_server/src/web3/namespaces/unstable/mod.rs new file mode 100644 index 000000000000..47e43f10282b --- /dev/null +++ b/core/node/api_server/src/web3/namespaces/unstable/mod.rs @@ -0,0 +1,142 @@ +use chrono::{DateTime, Utc}; +use itertools::Itertools; +use utils::{ + chain_id_leaf_preimage, get_chain_count, get_chain_id_from_index, get_chain_root_from_id, +}; +use zksync_crypto_primitives::hasher::keccak::KeccakHasher; +use zksync_dal::{CoreDal, DalError}; +use zksync_mini_merkle_tree::MiniMerkleTree; +use zksync_types::{ + api::{ChainAggProof, TeeProof, TransactionExecutionInfo}, + tee_types::TeeType, + L1BatchNumber, L2ChainId, +}; +use zksync_web3_decl::{error::Web3Error, types::H256}; + +use crate::web3::{backend_jsonrpsee::MethodTracer, RpcState}; + +mod utils; + +#[derive(Debug)] +pub(crate) struct UnstableNamespace { + state: RpcState, +} + +impl UnstableNamespace { + pub fn new(state: RpcState) -> Self { + Self { state } + } + + pub(crate) fn current_method(&self) -> &MethodTracer { + &self.state.current_method + } + + pub async fn transaction_execution_info_impl( + &self, + hash: H256, + ) -> Result, Web3Error> { + let mut storage = self.state.acquire_connection().await?; + Ok(storage + .transactions_web3_dal() + .get_unstable_transaction_execution_info(hash) + .await + .map_err(DalError::generalize)? 
+ .map(|execution_info| TransactionExecutionInfo { execution_info })) + } + + pub async fn get_tee_proofs_impl( + &self, + l1_batch_number: L1BatchNumber, + tee_type: Option, + ) -> Result, Web3Error> { + let mut storage = self.state.acquire_connection().await?; + let proofs = storage + .tee_proof_generation_dal() + .get_tee_proofs(l1_batch_number, tee_type) + .await + .map_err(DalError::generalize)? + .into_iter() + .map(|proof| TeeProof { + l1_batch_number, + tee_type, + pubkey: proof.pubkey, + signature: proof.signature, + proof: proof.proof, + proved_at: DateTime::::from_naive_utc_and_offset(proof.updated_at, Utc), + status: proof.status, + attestation: proof.attestation, + }) + .collect::>(); + + Ok(proofs) + } + + pub async fn get_chain_log_proof_impl( + &self, + l1_batch_number: L1BatchNumber, + l2_chain_id: L2ChainId, + ) -> Result, Web3Error> { + let mut connection = self.state.acquire_connection().await?; + self.state + .start_info + .ensure_not_pruned(l1_batch_number, &mut connection) + .await?; + + let Some((_, l2_block_number)) = connection + .blocks_dal() + .get_l2_block_range_of_l1_batch(l1_batch_number) + .await + .map_err(DalError::generalize)? 
+ else { + return Ok(None); + }; + let chain_count_integer = get_chain_count(&mut connection, l2_block_number).await?; + + let mut chain_ids = Vec::new(); + for chain_index in 0..chain_count_integer { + chain_ids.push( + get_chain_id_from_index(&mut connection, chain_index, l2_block_number).await?, + ); + } + + let Some((chain_id_leaf_proof_mask, _)) = chain_ids + .iter() + .find_position(|id| **id == H256::from_low_u64_be(l2_chain_id.as_u64())) + else { + return Ok(None); + }; + + let mut leaves = Vec::new(); + for chain_id in chain_ids { + let chain_root = + get_chain_root_from_id(&mut connection, chain_id, l2_block_number).await?; + leaves.push(chain_id_leaf_preimage(chain_root, chain_id)); + } + + let chain_merkle_tree = + MiniMerkleTree::<[u8; 96], KeccakHasher>::new(leaves.into_iter(), None); + + let mut chain_id_leaf_proof = chain_merkle_tree + .merkle_root_and_path(chain_id_leaf_proof_mask) + .1; + + let Some(local_root) = connection + .blocks_dal() + .get_l1_batch_local_root(l1_batch_number) + .await + .map_err(DalError::generalize)? + else { + return Ok(None); + }; + + // Chain tree is the right subtree of the aggregated tree. + // We append root of the left subtree to form full proof. 
+ let chain_id_leaf_proof_mask = chain_id_leaf_proof_mask | (1 << chain_id_leaf_proof.len()); + chain_id_leaf_proof.push(local_root); + + Ok(Some(ChainAggProof { + chain_id_leaf_proof, + chain_id_leaf_proof_mask: chain_id_leaf_proof_mask as u64, + })) + } +} diff --git a/core/node/api_server/src/web3/namespaces/unstable/utils.rs b/core/node/api_server/src/web3/namespaces/unstable/utils.rs new file mode 100644 index 000000000000..2d3187fab6b8 --- /dev/null +++ b/core/node/api_server/src/web3/namespaces/unstable/utils.rs @@ -0,0 +1,104 @@ +use zksync_dal::{Connection, Core, CoreDal, DalError}; +use zksync_multivm::circuit_sequencer_api_latest::boojum::ethereum_types::U256; +use zksync_system_constants::{ + message_root::{CHAIN_COUNT_KEY, CHAIN_INDEX_TO_ID_KEY, CHAIN_TREE_KEY}, + L2_MESSAGE_ROOT_ADDRESS, +}; +use zksync_types::{ + h256_to_u256, l2_to_l1_log::CHAIN_ID_LEAF_PADDING, u256_to_h256, web3::keccak256, + AccountTreeId, L2BlockNumber, StorageKey, H256, +}; +use zksync_web3_decl::error::Web3Error; + +pub(super) async fn get_chain_count( + connection: &mut Connection<'_, Core>, + block_number: L2BlockNumber, +) -> anyhow::Result { + let chain_count_key = CHAIN_COUNT_KEY; + let chain_count_storage_key = + message_root_log_key(H256::from_low_u64_be(chain_count_key as u64)); + let chain_count = connection + .storage_web3_dal() + .get_historical_value_unchecked(chain_count_storage_key.hashed_key(), block_number) + .await + .map_err(DalError::generalize)?; + if h256_to_u256(chain_count) > u8::MAX.into() { + anyhow::bail!("Chain count doesn't fit in `u8`"); + } + Ok(chain_count.0[31]) +} + +pub(super) async fn get_chain_id_from_index( + connection: &mut Connection<'_, Core>, + chain_index: u8, + block_number: L2BlockNumber, +) -> Result { + let key = H256::from_slice(&keccak256( + &[ + H256::from_low_u64_be(chain_index as u64).0, + H256::from_low_u64_be(CHAIN_INDEX_TO_ID_KEY as u64).0, + ] + .concat(), + )); + let storage_key = message_root_log_key(key); + let 
chain_id = connection + .storage_web3_dal() + .get_historical_value_unchecked(storage_key.hashed_key(), block_number) + .await + .map_err(DalError::generalize)?; + Ok(chain_id) +} + +pub(super) async fn get_chain_root_from_id( + connection: &mut Connection<'_, Core>, + chain_id: H256, + block_number: L2BlockNumber, +) -> Result { + let chain_tree_key = H256::from_slice(&keccak256( + &[chain_id.0, H256::from_low_u64_be(CHAIN_TREE_KEY as u64).0].concat(), + )); + let chain_sides_len_key = + u256_to_h256(h256_to_u256(chain_tree_key).overflowing_add(U256::one()).0); + let chain_sides_len_storage_key = message_root_log_key(chain_sides_len_key); + let chain_sides_len = connection + .storage_web3_dal() + .get_historical_value_unchecked(chain_sides_len_storage_key.hashed_key(), block_number) + .await + .map_err(DalError::generalize)?; + + let last_element_pos = { + let length = h256_to_u256(chain_sides_len); + assert!( + length > U256::zero(), + "_sides.length is zero, chain is not registered" + ); + + length - 1 + }; + let sides_data_start_key = H256(keccak256(chain_sides_len_key.as_bytes())); + let chain_root_key = h256_to_u256(sides_data_start_key) + .overflowing_add(last_element_pos) + .0; + let chain_root_storage_key = message_root_log_key(u256_to_h256(chain_root_key)); + let chain_root = connection + .storage_web3_dal() + .get_historical_value_unchecked(chain_root_storage_key.hashed_key(), block_number) + .await + .map_err(DalError::generalize)?; + Ok(chain_root) +} + +pub(super) fn chain_id_leaf_preimage(chain_root: H256, chain_id: H256) -> [u8; 96] { + let mut full_preimage = [0u8; 96]; + + full_preimage[0..32].copy_from_slice(CHAIN_ID_LEAF_PADDING.as_bytes()); + full_preimage[32..64].copy_from_slice(&chain_root.0); + full_preimage[64..96].copy_from_slice(&chain_id.0); + + full_preimage +} + +fn message_root_log_key(key: H256) -> StorageKey { + let message_root = AccountTreeId::new(L2_MESSAGE_ROOT_ADDRESS); + StorageKey::new(message_root, key) +} diff --git 
a/core/node/api_server/src/web3/namespaces/zks.rs b/core/node/api_server/src/web3/namespaces/zks.rs index 05c90f0b0140..c692494d5091 100644 --- a/core/node/api_server/src/web3/namespaces/zks.rs +++ b/core/node/api_server/src/web3/namespaces/zks.rs @@ -1,6 +1,7 @@ use std::collections::HashMap; use anyhow::Context as _; +use zksync_crypto_primitives::hasher::{keccak::KeccakHasher, Hasher}; use zksync_dal::{Connection, Core, CoreDal, DalError}; use zksync_metadata_calculator::api_server::TreeApiError; use zksync_mini_merkle_tree::MiniMerkleTree; @@ -17,7 +18,7 @@ use zksync_types::{ h256_to_u256, l1::L1Tx, l2::L2Tx, - l2_to_l1_log::{l2_to_l1_logs_tree_size, L2ToL1Log}, + l2_to_l1_log::{l2_to_l1_logs_tree_size, L2ToL1Log, LOG_PROOF_SUPPORTED_METADATA_VERSION}, tokens::ETHEREUM_ADDRESS, transaction_request::CallRequest, utils::storage_key_for_standard_token_balance, @@ -137,11 +138,11 @@ impl ZksNamespace { } pub fn get_bridgehub_contract_impl(&self) -> Option
{ - self.state.api_config.bridgehub_proxy_addr + self.state.api_config.l1_bridgehub_proxy_addr } pub fn get_main_contract_impl(&self) -> Address { - self.state.api_config.diamond_proxy_addr + self.state.api_config.l1_diamond_proxy_addr } pub fn get_testnet_paymaster_impl(&self) -> Option
{ @@ -321,9 +322,9 @@ impl ZksNamespace { return Ok(None); }; - let Some(batch) = storage + let Some(batch_with_metadata) = storage .blocks_dal() - .get_l1_batch_header(l1_batch_number) + .get_l1_batch_metadata(l1_batch_number) .await .map_err(DalError::generalize)? else { @@ -332,13 +333,71 @@ impl ZksNamespace { let merkle_tree_leaves = all_l1_logs_in_batch.iter().map(L2ToL1Log::to_bytes); - let protocol_version = batch + let protocol_version = batch_with_metadata + .header .protocol_version .unwrap_or_else(ProtocolVersionId::last_potentially_undefined); let tree_size = l2_to_l1_logs_tree_size(protocol_version); - let (root, proof) = MiniMerkleTree::new(merkle_tree_leaves, Some(tree_size)) + let (local_root, proof) = MiniMerkleTree::new(merkle_tree_leaves, Some(tree_size)) .merkle_root_and_path(l1_log_index); + + if protocol_version.is_pre_gateway() { + return Ok(Some(L2ToL1LogProof { + proof, + root: local_root, + id: l1_log_index as u32, + })); + } + + let aggregated_root = batch_with_metadata + .metadata + .aggregation_root + .expect("`aggregation_root` must be present for post-gateway branch"); + let root = KeccakHasher.compress(&local_root, &aggregated_root); + + let mut log_leaf_proof = proof; + log_leaf_proof.push(aggregated_root); + + let Some(sl_chain_id) = storage + .eth_sender_dal() + .get_batch_commit_chain_id(l1_batch_number) + .await + .map_err(DalError::generalize)? + else { + return Ok(None); + }; + + let (batch_proof_len, batch_chain_proof) = + if sl_chain_id.0 != self.state.api_config.l1_chain_id.0 { + let Some(batch_chain_proof) = storage + .blocks_dal() + .get_l1_batch_chain_merkle_path(l1_batch_number) + .await + .map_err(DalError::generalize)? 
+ else { + return Ok(None); + }; + + (batch_chain_proof.batch_proof_len, batch_chain_proof.proof) + } else { + (0, Vec::new()) + }; + + let proof = { + let mut metadata = [0u8; 32]; + metadata[0] = LOG_PROOF_SUPPORTED_METADATA_VERSION; + metadata[1] = log_leaf_proof.len() as u8; + metadata[2] = batch_proof_len as u8; + + let mut result = vec![H256(metadata)]; + + result.extend(log_leaf_proof); + result.extend(batch_chain_proof); + + result + }; + Ok(Some(L2ToL1LogProof { proof, root, @@ -361,6 +420,11 @@ impl ZksNamespace { return Ok(None); }; + self.state + .start_info + .ensure_not_pruned(l1_batch_number, &mut storage) + .await?; + let log_proof = self .get_l2_to_l1_log_proof_inner( &mut storage, diff --git a/core/node/api_server/src/web3/state.rs b/core/node/api_server/src/web3/state.rs index d43771811ee0..900cd165c045 100644 --- a/core/node/api_server/src/web3/state.rs +++ b/core/node/api_server/src/web3/state.rs @@ -20,9 +20,8 @@ use zksync_dal::{Connection, ConnectionPool, Core, CoreDal, DalError}; use zksync_metadata_calculator::api_server::TreeApiClient; use zksync_node_sync::SyncState; use zksync_types::{ - api, api::BridgeAddresses, commitment::L1BatchCommitmentMode, l2::L2Tx, - transaction_request::CallRequest, Address, L1BatchNumber, L1ChainId, L2BlockNumber, L2ChainId, - H256, U256, U64, + api, commitment::L1BatchCommitmentMode, l2::L2Tx, transaction_request::CallRequest, Address, + L1BatchNumber, L1ChainId, L2BlockNumber, L2ChainId, H256, U256, U64, }; use zksync_web3_decl::{error::Web3Error, types::Filter}; @@ -104,10 +103,10 @@ pub struct InternalApiConfig { pub estimate_gas_acceptable_overestimation: u32, pub estimate_gas_optimize_search: bool, pub bridge_addresses: api::BridgeAddresses, - pub bridgehub_proxy_addr: Option
, - pub state_transition_proxy_addr: Option
, - pub transparent_proxy_admin_addr: Option
, - pub diamond_proxy_addr: Address, + pub l1_bridgehub_proxy_addr: Option
, + pub l1_state_transition_proxy_addr: Option
, + pub l1_transparent_proxy_admin_addr: Option
, + pub l1_diamond_proxy_addr: Address, pub l2_testnet_paymaster_addr: Option
, pub req_entities_limit: usize, pub fee_history_limit: u64, @@ -149,19 +148,19 @@ impl InternalApiConfig { ), l2_legacy_shared_bridge: contracts_config.l2_legacy_shared_bridge_addr, }, - bridgehub_proxy_addr: contracts_config + l1_bridgehub_proxy_addr: contracts_config .ecosystem_contracts .as_ref() .map(|a| a.bridgehub_proxy_addr), - state_transition_proxy_addr: contracts_config + l1_state_transition_proxy_addr: contracts_config .ecosystem_contracts .as_ref() .map(|a| a.state_transition_proxy_addr), - transparent_proxy_admin_addr: contracts_config + l1_transparent_proxy_admin_addr: contracts_config .ecosystem_contracts .as_ref() .map(|a| a.transparent_proxy_admin_addr), - diamond_proxy_addr: contracts_config.diamond_proxy_addr, + l1_diamond_proxy_addr: contracts_config.diamond_proxy_addr, l2_testnet_paymaster_addr: contracts_config.l2_testnet_paymaster_addr, req_entities_limit: web3_config.req_entities_limit(), fee_history_limit: web3_config.fee_history_limit(), @@ -213,18 +212,18 @@ impl SealedL2BlockNumber { } #[derive(Debug, Clone)] -pub struct BridgeAddressesHandle(Arc>); +pub struct BridgeAddressesHandle(Arc>); impl BridgeAddressesHandle { - pub fn new(bridge_addresses: BridgeAddresses) -> Self { + pub fn new(bridge_addresses: api::BridgeAddresses) -> Self { Self(Arc::new(RwLock::new(bridge_addresses))) } - pub async fn update(&self, bridge_addresses: BridgeAddresses) { + pub async fn update(&self, bridge_addresses: api::BridgeAddresses) { *self.0.write().await = bridge_addresses; } - pub async fn read(&self) -> BridgeAddresses { + pub async fn read(&self) -> api::BridgeAddresses { self.0.read().await.clone() } } diff --git a/core/node/api_server/src/web3/tests/mod.rs b/core/node/api_server/src/web3/tests/mod.rs index feac8eb8d17f..f447f135917a 100644 --- a/core/node/api_server/src/web3/tests/mod.rs +++ b/core/node/api_server/src/web3/tests/mod.rs @@ -1313,7 +1313,7 @@ impl HttpTest for FeeHistoryTest { .map(U256::from); let history = client - 
.fee_history(1_000.into(), api::BlockNumber::Latest, vec![]) + .fee_history(1_000.into(), api::BlockNumber::Latest, Some(vec![])) .await?; assert_eq!(history.inner.oldest_block, 0.into()); assert_eq!( @@ -1346,7 +1346,11 @@ impl HttpTest for FeeHistoryTest { // Check partial histories: blocks 0..=1 let history = client - .fee_history(1_000.into(), api::BlockNumber::Number(1.into()), vec![]) + .fee_history( + 1_000.into(), + api::BlockNumber::Number(1.into()), + Some(vec![]), + ) .await?; assert_eq!(history.inner.oldest_block, 0.into()); assert_eq!( @@ -1357,7 +1361,7 @@ impl HttpTest for FeeHistoryTest { // Blocks 1..=2 let history = client - .fee_history(2.into(), api::BlockNumber::Latest, vec![]) + .fee_history(2.into(), api::BlockNumber::Latest, Some(vec![])) .await?; assert_eq!(history.inner.oldest_block, 1.into()); assert_eq!( @@ -1368,7 +1372,7 @@ impl HttpTest for FeeHistoryTest { // Blocks 1..=1 let history = client - .fee_history(1.into(), api::BlockNumber::Number(1.into()), vec![]) + .fee_history(1.into(), api::BlockNumber::Number(1.into()), Some(vec![])) .await?; assert_eq!(history.inner.oldest_block, 1.into()); assert_eq!(history.inner.base_fee_per_gas, [100, 100].map(U256::from)); @@ -1376,7 +1380,11 @@ impl HttpTest for FeeHistoryTest { // Non-existing newest block. 
let err = client - .fee_history(1000.into(), api::BlockNumber::Number(100.into()), vec![]) + .fee_history( + 1000.into(), + api::BlockNumber::Number(100.into()), + Some(vec![]), + ) .await .unwrap_err(); assert_matches!( diff --git a/core/node/commitment_generator/src/lib.rs b/core/node/commitment_generator/src/lib.rs index 2ce0152abab6..71b019e230a7 100644 --- a/core/node/commitment_generator/src/lib.rs +++ b/core/node/commitment_generator/src/lib.rs @@ -293,13 +293,13 @@ impl CommitmentGenerator { }; let aggregation_root = if protocol_version.is_pre_gateway() { + H256::zero() + } else { let mut connection = self .connection_pool .connection_tagged("commitment_generator") .await?; read_aggregation_root(&mut connection, l1_batch_number).await? - } else { - H256::zero() }; CommitmentInput::PostBoojum { diff --git a/core/node/consensus/src/config.rs b/core/node/consensus/src/config.rs index 4ad7a551ab42..2cb6045151bd 100644 --- a/core/node/consensus/src/config.rs +++ b/core/node/consensus/src/config.rs @@ -3,7 +3,7 @@ use std::collections::{BTreeMap, HashMap}; use anyhow::Context as _; use secrecy::{ExposeSecret as _, Secret}; -use zksync_concurrency::{limiter, net, time}; +use zksync_concurrency::net; use zksync_config::{ configs, configs::consensus::{ConsensusConfig, ConsensusSecrets, Host, NodePublicKey}, @@ -152,11 +152,6 @@ pub(super) fn executor( let mut rpc = executor::RpcConfig::default(); rpc.get_block_rate = cfg.rpc().get_block_rate(); - // Disable batch syncing, because it is not implemented. - rpc.get_batch_rate = limiter::Rate { - burst: 0, - refresh: time::Duration::ZERO, - }; let debug_page = cfg.debug_page_addr.map(|addr| network::debug_page::Config { addr, @@ -169,6 +164,7 @@ pub(super) fn executor( server_addr: cfg.server_addr, public_addr: net::Host(cfg.public_addr.0.clone()), max_payload_size: cfg.max_payload_size, + view_timeout: cfg.view_timeout(), node_key: node_key(secrets) .context("node_key")? 
.context("missing node_key")?, diff --git a/core/node/consensus/src/storage/testonly.rs b/core/node/consensus/src/storage/testonly.rs index 0f29e2468267..295ae4fc1790 100644 --- a/core/node/consensus/src/storage/testonly.rs +++ b/core/node/consensus/src/storage/testonly.rs @@ -247,15 +247,20 @@ impl ConnectionPool { .await .wrap("get_l2_block_range_of_l1_batch()")? .context("batch not found")?; - let last_batch = L1BatchNumber(last_batch.0.try_into().context("oveflow")?); - let last_block = L2BlockNumber(last_block.0.try_into().context("oveflow")?); + let last_batch = L1BatchNumber(last_batch.0.try_into().context("overflow")?); + let last_batch_root_hash = ctx + .wait(conn.0.blocks_dal().get_l1_batch_state_root(last_batch)) + .await? + .context("get_l1_batch_state_root()")? + .unwrap_or_default(); + let last_block = L2BlockNumber(last_block.0.try_into().context("overflow")?); ctx.wait( conn.0 .pruning_dal() - .soft_prune_batches_range(last_batch, last_block), + .insert_soft_pruning_log(last_batch, last_block), ) .await? - .context("soft_prune_batches_range()")?; + .context("insert_soft_pruning_log()")?; ctx.wait( conn.0 .pruning_dal() @@ -263,6 +268,13 @@ impl ConnectionPool { ) .await? .context("hard_prune_batches_range()")?; + ctx.wait(conn.0.pruning_dal().insert_hard_pruning_log( + last_batch, + last_block, + last_batch_root_hash, + )) + .await? 
+ .context("insert_hard_pruning_log()")?; Ok(()) } } diff --git a/core/node/consensus/src/testonly.rs b/core/node/consensus/src/testonly.rs index 225a38aee760..1a4da71f85a0 100644 --- a/core/node/consensus/src/testonly.rs +++ b/core/node/consensus/src/testonly.rs @@ -149,6 +149,7 @@ fn make_config( public_addr: config::Host(cfg.public_addr.0.clone()), max_payload_size: usize::MAX, max_batch_size: usize::MAX, + view_timeout: None, gossip_dynamic_inbound_limit: cfg.gossip.dynamic_inbound_limit, gossip_static_inbound: cfg .gossip @@ -584,7 +585,6 @@ impl StateKeeperRunner { let stop_recv = stop_recv.clone(); async { ZkSyncStateKeeper::new( - stop_recv, Box::new(io), Box::new(executor_factory), OutputHandler::new(Box::new(persistence.with_tx_insertion())) @@ -592,7 +592,7 @@ impl StateKeeperRunner { Arc::new(NoopSealer), Arc::new(async_cache), ) - .run() + .run(stop_recv) .await .context("ZkSyncStateKeeper::run()")?; Ok(()) @@ -665,7 +665,6 @@ impl StateKeeperRunner { let stop_recv = stop_recv.clone(); async { ZkSyncStateKeeper::new( - stop_recv, Box::new(io), Box::new(MockBatchExecutor), OutputHandler::new(Box::new(persistence.with_tx_insertion())) @@ -674,7 +673,7 @@ impl StateKeeperRunner { Arc::new(NoopSealer), Arc::new(MockReadStorageFactory), ) - .run() + .run(stop_recv) .await .context("ZkSyncStateKeeper::run()")?; Ok(()) diff --git a/core/node/consistency_checker/src/lib.rs b/core/node/consistency_checker/src/lib.rs index e13e479117cc..a73adc44b83e 100644 --- a/core/node/consistency_checker/src/lib.rs +++ b/core/node/consistency_checker/src/lib.rs @@ -1,9 +1,12 @@ -use std::{borrow::Cow, collections::HashSet, fmt, time::Duration}; +use std::{borrow::Cow, cmp::Ordering, collections::HashSet, fmt, time::Duration}; use anyhow::Context as _; use serde::Serialize; use tokio::sync::watch; -use zksync_contracts::PRE_BOOJUM_COMMIT_FUNCTION; +use zksync_contracts::{ + bridgehub_contract, POST_BOOJUM_COMMIT_FUNCTION, POST_SHARED_BRIDGE_COMMIT_FUNCTION, + 
PRE_BOOJUM_COMMIT_FUNCTION, +}; use zksync_dal::{Connection, ConnectionPool, Core, CoreDal}; use zksync_eth_client::{ clients::{DynClient, L1}, @@ -11,16 +14,23 @@ use zksync_eth_client::{ }; use zksync_health_check::{Health, HealthStatus, HealthUpdater, ReactiveHealthCheck}; use zksync_l1_contract_interface::{ - i_executor::{commit::kzg::ZK_SYNC_BYTES_PER_BLOB, structures::CommitBatchInfo}, + i_executor::{ + commit::kzg::ZK_SYNC_BYTES_PER_BLOB, + structures::{ + CommitBatchInfo, StoredBatchInfo, PUBDATA_SOURCE_BLOBS, PUBDATA_SOURCE_CALLDATA, + PUBDATA_SOURCE_CUSTOM_PRE_GATEWAY, SUPPORTED_ENCODING_VERSION, + }, + }, Tokenizable, }; use zksync_shared_metrics::{CheckerComponent, EN_METRICS}; use zksync_types::{ commitment::{L1BatchCommitmentMode, L1BatchWithMetadata}, ethabi, - ethabi::Token, + ethabi::{ParamType, Token}, pubdata_da::PubdataSendingMode, - Address, L1BatchNumber, ProtocolVersionId, H256, U256, + Address, L1BatchNumber, L2ChainId, ProtocolVersionId, SLChainId, H256, L2_BRIDGEHUB_ADDRESS, + U256, }; #[cfg(test)] @@ -33,10 +43,10 @@ enum CheckError { #[error("error calling L1 contract")] ContractCall(#[from] ContractCallError), /// Error that is caused by the main node providing incorrect information etc. - #[error("failed validating commit transaction")] + #[error("failed validating commit transaction: {0}")] Validation(anyhow::Error), /// Error that is caused by violating invariants internal to *this* node (e.g., not having expected data in Postgres). - #[error("internal error")] + #[error("internal error: {0}")] Internal(anyhow::Error), } @@ -213,6 +223,13 @@ impl LocalL1BatchCommitData { .map_or(true, |version| version.is_pre_shared_bridge()) } + fn is_pre_gateway(&self) -> bool { + self.l1_batch + .header + .protocol_version + .map_or(true, |version| version.is_pre_gateway()) + } + /// All returned errors are validation errors. 
fn verify_commitment(&self, reference: ðabi::Token) -> anyhow::Result<()> { let protocol_version = self @@ -220,11 +237,13 @@ impl LocalL1BatchCommitData { .header .protocol_version .unwrap_or_else(ProtocolVersionId::last_potentially_undefined); - let da = detect_da(protocol_version, reference) + let da = detect_da(protocol_version, reference, self.commitment_mode) .context("cannot detect DA source from reference commitment token")?; - // For `PubdataDA::Calldata`, it's required that the pubdata fits into a single blob. - if matches!(da, PubdataSendingMode::Calldata) { + // For rollups with `PubdataSendingMode::Calldata`, it's required that the pubdata fits into a single blob. + if matches!(self.commitment_mode, L1BatchCommitmentMode::Rollup) + && matches!(da, PubdataSendingMode::Calldata) + { let pubdata_len = self .l1_batch .header @@ -258,12 +277,8 @@ impl LocalL1BatchCommitData { pub fn detect_da( protocol_version: ProtocolVersionId, reference: &Token, + commitment_mode: L1BatchCommitmentMode, ) -> Result { - /// These are used by the L1 Contracts to indicate what DA layer is used for pubdata - const PUBDATA_SOURCE_CALLDATA: u8 = 0; - const PUBDATA_SOURCE_BLOBS: u8 = 1; - const PUBDATA_SOURCE_CUSTOM: u8 = 2; - fn parse_error(message: impl Into>) -> ethabi::Error { ethabi::Error::Other(message.into()) } @@ -290,28 +305,80 @@ pub fn detect_da( "last reference token has unexpected shape; expected bytes, got {last_reference_token:?}" ))), }; - match last_reference_token.first() { - Some(&byte) if byte == PUBDATA_SOURCE_CALLDATA => Ok(PubdataSendingMode::Calldata), - Some(&byte) if byte == PUBDATA_SOURCE_BLOBS => Ok(PubdataSendingMode::Blobs), - Some(&byte) if byte == PUBDATA_SOURCE_CUSTOM => Ok(PubdataSendingMode::Custom), - Some(&byte) => Err(parse_error(format!( - "unexpected first byte of the last reference token; expected one of [{PUBDATA_SOURCE_CALLDATA}, {PUBDATA_SOURCE_BLOBS}], \ + + if protocol_version.is_pre_gateway() { + return match 
last_reference_token.first() { + Some(&byte) if byte == PUBDATA_SOURCE_CALLDATA => Ok(PubdataSendingMode::Calldata), + Some(&byte) if byte == PUBDATA_SOURCE_BLOBS => Ok(PubdataSendingMode::Blobs), + Some(&byte) if byte == PUBDATA_SOURCE_CUSTOM_PRE_GATEWAY => Ok(PubdataSendingMode::Custom), + Some(&byte) => Err(parse_error(format!( + "unexpected first byte of the last reference token; expected one of [{PUBDATA_SOURCE_CALLDATA}, {PUBDATA_SOURCE_BLOBS}, {PUBDATA_SOURCE_CUSTOM_PRE_GATEWAY}], \ + got {byte}" + ))), + None => Err(parse_error("last reference token is empty")), + }; + } + + match commitment_mode { + L1BatchCommitmentMode::Validium => { + // `Calldata`, `RelayedL2Calldata` and `Blobs` are encoded exactly the same way, + // token is just a `state_diff_hash` for them. + // For `Custom` it's `state_diff_hash` followed by `da_inclusion_data`. We can't distinguish + // between `Calldata`/`RelayedL2Calldata`/`Blobs`/`Custom` with empty `da_inclusion_data`, + // but it's ok to just return a `Calldata` given they are all encoded the same. + match last_reference_token.len().cmp(&32) { + Ordering::Equal => Ok(PubdataSendingMode::Calldata), + Ordering::Greater => Ok(PubdataSendingMode::Custom), + Ordering::Less => Err(parse_error( + "unexpected last reference token len for post-gateway version validium", + )), + } + } + L1BatchCommitmentMode::Rollup => { + // For rollup the format of this token (`operatorDAInput`) is: + // 32 bytes - `state_diff_hash` + // 32 bytes - hash of the full pubdata + // 1 byte - number of blobs + // 32 bytes for each blob - hashes of blobs + // 1 byte - pubdata source + // X bytes - blob/pubdata commitments + + let number_of_blobs = last_reference_token.get(64).copied().ok_or_else(|| { + parse_error(format!( + "last reference token is too short; expected at least 65 bytes, got {}", + last_reference_token.len() + )) + })? 
as usize; + + match last_reference_token.get(65 + 32 * number_of_blobs) { + Some(&byte) if byte == PUBDATA_SOURCE_CALLDATA => Ok(PubdataSendingMode::Calldata), + Some(&byte) if byte == PUBDATA_SOURCE_BLOBS => Ok(PubdataSendingMode::Blobs), + Some(&byte) => Err(parse_error(format!( + "unexpected first byte of the last reference token for rollup; expected one of [{PUBDATA_SOURCE_CALLDATA}, {PUBDATA_SOURCE_BLOBS}], \ got {byte}" - ))), - None => Err(parse_error("last reference token is empty")), + ))), + None => Err(parse_error(format!("last reference token is too short; expected at least 65 bytes, got {}", last_reference_token.len()))), + } + } } } +#[derive(Debug)] +pub struct SLChainAccess { + client: Box>, + chain_id: SLChainId, + diamond_proxy_addr: Option
, +} + #[derive(Debug)] pub struct ConsistencyChecker { /// ABI of the ZKsync contract contract: ethabi::Contract, - /// Address of the ZKsync diamond proxy on L1 - diamond_proxy_addr: Option
, /// How many past batches to check when starting max_batches_to_recheck: u32, sleep_interval: Duration, - l1_client: Box>, + l1_chain_data: SLChainAccess, + gateway_chain_data: Option, event_handler: Box, l1_data_mismatch_behavior: L1DataMismatchBehavior, pool: ConnectionPool, @@ -322,19 +389,49 @@ pub struct ConsistencyChecker { impl ConsistencyChecker { const DEFAULT_SLEEP_INTERVAL: Duration = Duration::from_secs(5); - pub fn new( + pub async fn new( l1_client: Box>, + gateway_client: Option>>, max_batches_to_recheck: u32, pool: ConnectionPool, commitment_mode: L1BatchCommitmentMode, + l2_chain_id: L2ChainId, ) -> anyhow::Result { let (health_check, health_updater) = ConsistencyCheckerHealthUpdater::new(); + let l1_chain_id = l1_client.fetch_chain_id().await?; + let l1_chain_data = SLChainAccess { + client: l1_client.for_component("consistency_checker"), + chain_id: l1_chain_id, + diamond_proxy_addr: None, + }; + + let gateway_chain_data = if let Some(client) = gateway_client { + let contract = bridgehub_contract(); + let function_name = if contract.function("getZKChain").is_ok() { + "getZKChain" + } else { + "getHyperchain" + }; + let gateway_diamond_proxy = + CallFunctionArgs::new(function_name, Token::Uint(l2_chain_id.as_u64().into())) + .for_contract(L2_BRIDGEHUB_ADDRESS, &contract) + .call(&client) + .await?; + let chain_id = client.fetch_chain_id().await?; + Some(SLChainAccess { + client: client.for_component("consistency_checker"), + chain_id, + diamond_proxy_addr: Some(gateway_diamond_proxy), + }) + } else { + None + }; Ok(Self { contract: zksync_contracts::hyperchain_contract(), - diamond_proxy_addr: None, max_batches_to_recheck, sleep_interval: Self::DEFAULT_SLEEP_INTERVAL, - l1_client: l1_client.for_component("consistency_checker"), + l1_chain_data, + gateway_chain_data, event_handler: Box::new(health_updater), l1_data_mismatch_behavior: L1DataMismatchBehavior::Log, pool, @@ -343,8 +440,8 @@ impl ConsistencyChecker { }) } - pub fn 
with_diamond_proxy_addr(mut self, address: Address) -> Self { - self.diamond_proxy_addr = Some(address); + pub fn with_l1_diamond_proxy_addr(mut self, address: Address) -> Self { + self.l1_chain_data.diamond_proxy_addr = Some(address); self } @@ -361,11 +458,36 @@ impl ConsistencyChecker { let commit_tx_hash = local.commit_tx_hash; tracing::info!("Checking commit tx {commit_tx_hash} for L1 batch #{batch_number}"); - let commit_tx_status = self - .l1_client + let sl_chain_id = self + .pool + .connection_tagged("consistency_checker") + .await + .map_err(|err| CheckError::Internal(err.into()))? + .eth_sender_dal() + .get_batch_commit_chain_id(batch_number) + .await + .map_err(|err| CheckError::Internal(err.into()))?; + let chain_data = match sl_chain_id { + Some(chain_id) => { + let Some(chain_data) = self.chain_data_by_id(chain_id) else { + return Err(CheckError::Validation(anyhow::anyhow!( + "failed to find client for chain id {chain_id}" + ))); + }; + chain_data + } + None => &self.l1_chain_data, + }; + let commit_tx_status = chain_data + .client .get_tx_status(commit_tx_hash) .await? - .with_context(|| format!("receipt for tx {commit_tx_hash:?} not found on L1")) + .with_context(|| { + format!( + "receipt for tx {commit_tx_hash:?} not found on target chain with id {}", + chain_data.chain_id + ) + }) .map_err(CheckError::Validation)?; if !commit_tx_status.success { let err = anyhow::anyhow!("main node gave us a failed commit tx {commit_tx_hash:?}"); @@ -373,14 +495,14 @@ impl ConsistencyChecker { } // We can't get tx calldata from the DB because it can be fake. - let commit_tx = self - .l1_client + let commit_tx = chain_data + .client .get_tx(commit_tx_hash) .await? 
.with_context(|| format!("commit transaction {commit_tx_hash:?} not found on L1")) .map_err(CheckError::Internal)?; // we've got a transaction receipt previously, thus an internal error - if let Some(diamond_proxy_addr) = self.diamond_proxy_addr { + if let Some(diamond_proxy_addr) = chain_data.diamond_proxy_addr { let event = self .contract .event("BlockCommit") @@ -423,10 +545,9 @@ impl ConsistencyChecker { let commit_function = if local.is_pre_boojum() { &*PRE_BOOJUM_COMMIT_FUNCTION } else if local.is_pre_shared_bridge() { - self.contract - .function("commitBatches") - .context("L1 contract does not have `commitBatches` function") - .map_err(CheckError::Internal)? + &*POST_BOOJUM_COMMIT_FUNCTION + } else if local.is_pre_gateway() { + &*POST_SHARED_BRIDGE_COMMIT_FUNCTION } else { self.contract .function("commitBatchesSharedBridge") @@ -434,12 +555,16 @@ impl ConsistencyChecker { .map_err(CheckError::Internal)? }; - let commitment = - Self::extract_commit_data(&commit_tx.input.0, commit_function, batch_number) - .with_context(|| { - format!("failed extracting commit data for transaction {commit_tx_hash:?}") - }) - .map_err(CheckError::Validation)?; + let commitment = Self::extract_commit_data( + &commit_tx.input.0, + commit_function, + batch_number, + local.is_pre_gateway(), + ) + .with_context(|| { + format!("failed extracting commit data for transaction {commit_tx_hash:?}") + }) + .map_err(CheckError::Validation)?; local .verify_commitment(&commitment) .map_err(CheckError::Validation) @@ -450,6 +575,7 @@ impl ConsistencyChecker { commit_tx_input_data: &[u8], commit_function: ðabi::Function, batch_number: L1BatchNumber, + pre_gateway: bool, ) -> anyhow::Result { let expected_solidity_selector = commit_function.short_signature(); let actual_solidity_selector = &commit_tx_input_data[..4]; @@ -461,11 +587,45 @@ impl ConsistencyChecker { let mut commit_input_tokens = commit_function .decode_input(&commit_tx_input_data[4..]) .context("Failed decoding calldata for L1 
commit function")?; - let mut commitments = commit_input_tokens - .pop() - .context("Unexpected signature for L1 commit function")? - .into_array() - .context("Unexpected signature for L1 commit function")?; + let mut commitments: Vec; + if pre_gateway { + commitments = commit_input_tokens + .pop() + .context("Unexpected signature for L1 commit function")? + .into_array() + .context("Unexpected signature for L1 commit function")?; + } else { + let commitments_popped = commit_input_tokens + .pop() + .context("Unexpected signature for L1 commit function: no tokens")?; + let commitment_bytes = match commitments_popped { + Token::Bytes(arr) => arr, + _ => anyhow::bail!( + "Unexpected signature for L1 commit function: last token is not bytes" + ), + }; + let (version, encoded_data) = commitment_bytes.split_at(1); + anyhow::ensure!( + version[0] == SUPPORTED_ENCODING_VERSION, + "Unexpected encoding version: {}", + version[0] + ); + let decoded_data = ethabi::decode( + &[ + StoredBatchInfo::schema(), + ParamType::Array(Box::new(CommitBatchInfo::post_gateway_schema())), + ], + encoded_data, + ) + .context("Failed to decode commitData")?; + if let Some(Token::Array(batch_commitments)) = &decoded_data.get(1) { + // Now you have access to `stored_batch_info` and `l1_batches_to_commit` + // Process them as needed + commitments = batch_commitments.clone(); + } else { + anyhow::bail!("Unexpected data format"); + } + } // Commit transactions usually publish multiple commitments at once, so we need to find // the one that corresponds to the batch we're checking. 
@@ -473,15 +633,15 @@ impl ConsistencyChecker { .first() .context("L1 batch commitment is empty")?; let ethabi::Token::Tuple(first_batch_commitment) = first_batch_commitment else { - anyhow::bail!("Unexpected signature for L1 commit function"); + anyhow::bail!("Unexpected signature for L1 commit function 3"); }; let first_batch_number = first_batch_commitment .first() - .context("Unexpected signature for L1 commit function")?; + .context("Unexpected signature for L1 commit function 4")?; let first_batch_number = first_batch_number .clone() .into_uint() - .context("Unexpected signature for L1 commit function")?; + .context("Unexpected signature for L1 commit function 5")?; let first_batch_number = usize::try_from(first_batch_number) .map_err(|_| anyhow::anyhow!("Integer overflow for L1 batch number"))?; // ^ `TryFrom` has `&str` error here, so we can't use `.context()`. @@ -511,24 +671,31 @@ impl ConsistencyChecker { } async fn sanity_check_diamond_proxy_addr(&self) -> Result<(), CheckError> { - let Some(address) = self.diamond_proxy_addr else { - return Ok(()); - }; - tracing::debug!("Performing sanity checks for diamond proxy contract {address:?}"); + for client_data in std::iter::once(&self.l1_chain_data).chain(&self.gateway_chain_data) { + let Some(address) = client_data.diamond_proxy_addr else { + continue; + }; + let chain_id = client_data.chain_id; + tracing::debug!("Performing sanity checks for chain id {chain_id}, diamond proxy contract {address:?}"); - let version: U256 = CallFunctionArgs::new("getProtocolVersion", ()) - .for_contract(address, &self.contract) - .call(&self.l1_client) - .await?; - tracing::info!("Checked diamond proxy {address:?} (protocol version: {version})"); + let version: U256 = CallFunctionArgs::new("getProtocolVersion", ()) + .for_contract(address, &self.contract) + .call(&client_data.client) + .await?; + tracing::info!("Checked chain id {chain_id}, diamond proxy {address:?} (protocol version: {version})"); + } Ok(()) } pub async fn 
run(mut self, mut stop_receiver: watch::Receiver) -> anyhow::Result<()> { tracing::info!( - "Starting consistency checker with diamond proxy contract: {:?}, sleep interval: {:?}, \ - max historic L1 batches to check: {}", - self.diamond_proxy_addr, + "Starting consistency checker with l1 diamond proxy contract: {:?}, \ + gateway diamond proxy contract: {:?}, \ + sleep interval: {:?}, max historic L1 batches to check: {}", + self.l1_chain_data.diamond_proxy_addr, + self.gateway_chain_data + .as_ref() + .map(|d| d.diamond_proxy_addr), self.sleep_interval, self.max_batches_to_recheck ); @@ -658,6 +825,16 @@ impl ConsistencyChecker { tracing::info!("Stop signal received, consistency_checker is shutting down"); Ok(()) } + + fn chain_data_by_id(&self, searched_chain_id: SLChainId) -> Option<&SLChainAccess> { + if searched_chain_id == self.l1_chain_data.chain_id { + Some(&self.l1_chain_data) + } else if Some(searched_chain_id) == self.gateway_chain_data.as_ref().map(|d| d.chain_id) { + self.gateway_chain_data.as_ref() + } else { + None + } + } } /// Repeatedly polls the DB until there is an L1 batch with metadata. 
We may not have such a batch initially diff --git a/core/node/consistency_checker/src/tests/mod.rs b/core/node/consistency_checker/src/tests/mod.rs index b09ef2b2272c..1635bddffb83 100644 --- a/core/node/consistency_checker/src/tests/mod.rs +++ b/core/node/consistency_checker/src/tests/mod.rs @@ -7,7 +7,7 @@ use test_casing::{test_casing, Product}; use tokio::sync::mpsc; use zksync_config::GenesisConfig; use zksync_dal::Connection; -use zksync_eth_client::{clients::MockSettlementLayer, Options}; +use zksync_eth_client::{clients::MockSettlementLayer, EthInterface, Options}; use zksync_l1_contract_interface::{i_executor::methods::CommitBatches, Tokenizable, Tokenize}; use zksync_node_genesis::{insert_genesis_batch, mock_genesis_config, GenesisParams}; use zksync_node_test_utils::{ @@ -30,9 +30,12 @@ pub(crate) fn create_l1_batch_with_metadata(number: u32) -> L1BatchWithMetadata } const PRE_BOOJUM_PROTOCOL_VERSION: ProtocolVersionId = ProtocolVersionId::Version10; -const DIAMOND_PROXY_ADDR: Address = Address::repeat_byte(1); +const L1_DIAMOND_PROXY_ADDR: Address = Address::repeat_byte(1); +const GATEWAY_DIAMOND_PROXY_ADDR: Address = Address::repeat_byte(2); const VALIDATOR_TIMELOCK_ADDR: Address = Address::repeat_byte(23); -const CHAIN_ID: u32 = 270; +const ERA_CHAIN_ID: u64 = 270; +const L1_CHAIN_ID: u64 = 9; +const GATEWAY_CHAIN_ID: u64 = 505; const COMMITMENT_MODES: [L1BatchCommitmentMode; 2] = [ L1BatchCommitmentMode::Rollup, L1BatchCommitmentMode::Validium, @@ -72,14 +75,10 @@ pub(crate) fn build_commit_tx_input_data( if protocol_version.is_pre_boojum() { PRE_BOOJUM_COMMIT_FUNCTION.encode_input(&tokens).unwrap() } else if protocol_version.is_pre_shared_bridge() { - contract - .function("commitBatches") - .unwrap() - .encode_input(&tokens) - .unwrap() + POST_BOOJUM_COMMIT_FUNCTION.encode_input(&tokens).unwrap() } else { // Post shared bridge transactions also require chain id - let tokens: Vec<_> = vec![Token::Uint(CHAIN_ID.into())] + let tokens: Vec<_> = 
vec![Token::Uint(ERA_CHAIN_ID.into())] .into_iter() .chain(tokens) .collect(); @@ -91,18 +90,25 @@ pub(crate) fn build_commit_tx_input_data( } } -pub(crate) fn create_mock_checker( +pub(crate) async fn create_mock_checker( client: MockSettlementLayer, pool: ConnectionPool, commitment_mode: L1BatchCommitmentMode, ) -> ConsistencyChecker { let (health_check, health_updater) = ConsistencyCheckerHealthUpdater::new(); + let client = client.into_client(); + let chain_id = client.fetch_chain_id().await.unwrap(); + let l1_chain_data = SLChainAccess { + client: Box::new(client), + chain_id, + diamond_proxy_addr: Some(L1_DIAMOND_PROXY_ADDR), + }; ConsistencyChecker { contract: zksync_contracts::hyperchain_contract(), - diamond_proxy_addr: Some(DIAMOND_PROXY_ADDR), max_batches_to_recheck: 100, sleep_interval: Duration::from_millis(10), - l1_client: Box::new(client.into_client()), + l1_chain_data, + gateway_chain_data: None, event_handler: Box::new(health_updater), l1_data_mismatch_behavior: L1DataMismatchBehavior::Bail, pool, @@ -111,23 +117,51 @@ pub(crate) fn create_mock_checker( } } -fn create_mock_ethereum() -> MockSettlementLayer { - let mock = MockSettlementLayer::builder().with_call_handler(|call, _block_id| { - assert_eq!(call.to, Some(DIAMOND_PROXY_ADDR)); - let packed_semver = ProtocolVersionId::latest().into_packed_semver_with_patch(0); - let contract = zksync_contracts::hyperchain_contract(); - let expected_input = contract - .function("getProtocolVersion") - .unwrap() - .encode_input(&[]) - .unwrap(); - assert_eq!(call.data, Some(expected_input.into())); +fn create_mock_sl(chain_id: u64, with_get_zk_chain: bool) -> MockSettlementLayer { + let mock = MockSettlementLayer::builder() + .with_call_handler(move |call, _block_id| match call.to { + Some(addr) if addr == L1_DIAMOND_PROXY_ADDR || addr == GATEWAY_DIAMOND_PROXY_ADDR => { + let packed_semver = ProtocolVersionId::latest().into_packed_semver_with_patch(0); + let contract = 
zksync_contracts::hyperchain_contract(); + let expected_input = contract + .function("getProtocolVersion") + .unwrap() + .encode_input(&[]) + .unwrap(); + assert_eq!(call.data, Some(expected_input.into())); - ethabi::Token::Uint(packed_semver) - }); + ethabi::Token::Uint(packed_semver) + } + Some(addr) if with_get_zk_chain && addr == L2_BRIDGEHUB_ADDRESS => { + let contract = zksync_contracts::bridgehub_contract(); + let function_name = if contract.function("getZKChain").is_ok() { + "getZKChain" + } else { + "getHyperchain" + }; + let expected_input = contract + .function(function_name) + .unwrap() + .encode_input(&[Token::Uint(ERA_CHAIN_ID.into())]) + .unwrap(); + assert_eq!(call.data, Some(expected_input.into())); + + ethabi::Token::Address(GATEWAY_DIAMOND_PROXY_ADDR) + } + _ => panic!("Received unexpected call"), + }) + .with_chain_id(chain_id); mock.build() } +fn create_mock_ethereum() -> MockSettlementLayer { + create_mock_sl(L1_CHAIN_ID, false) +} + +fn create_mock_gateway() -> MockSettlementLayer { + create_mock_sl(GATEWAY_CHAIN_ID, true) +} + impl HandleConsistencyCheckerEvent for mpsc::UnboundedSender { fn initialize(&mut self) { // Do nothing @@ -141,8 +175,8 @@ impl HandleConsistencyCheckerEvent for mpsc::UnboundedSender { self.send(last_checked_batch).ok(); } - fn report_inconsistent_batch(&mut self, _number: L1BatchNumber, _err: &anyhow::Error) { - // Do nothing + fn report_inconsistent_batch(&mut self, number: L1BatchNumber, err: &anyhow::Error) { + panic!("Error on batch #{number}: {err}"); } } @@ -163,6 +197,11 @@ fn build_commit_tx_input_data_is_correct(commitment_mode: L1BatchCommitmentMode) &commit_tx_input_data, commit_function, batch.header.number, + batch + .header + .protocol_version + .map(|v| v.is_pre_gateway()) + .unwrap_or(true), ) .unwrap(); assert_eq!( @@ -174,8 +213,7 @@ fn build_commit_tx_input_data_is_correct(commitment_mode: L1BatchCommitmentMode) #[test] fn extracting_commit_data_for_boojum_batch() { - let contract = 
zksync_contracts::hyperchain_contract(); - let commit_function = contract.function("commitBatches").unwrap(); + let commit_function = &*POST_BOOJUM_COMMIT_FUNCTION; // Calldata taken from the commit transaction for `https://sepolia.explorer.zksync.io/batch/4470`; // `https://sepolia.etherscan.io/tx/0x300b9115037028b1f8aa2177abf98148c3df95c9b04f95a4e25baf4dfee7711f` let commit_tx_input_data = include_bytes!("commit_l1_batch_4470_testnet_sepolia.calldata"); @@ -184,6 +222,7 @@ fn extracting_commit_data_for_boojum_batch() { commit_tx_input_data, commit_function, L1BatchNumber(4_470), + true, ) .unwrap(); @@ -197,6 +236,7 @@ fn extracting_commit_data_for_boojum_batch() { commit_tx_input_data, commit_function, L1BatchNumber(bogus_l1_batch), + true, ) .unwrap_err(); } @@ -204,8 +244,7 @@ fn extracting_commit_data_for_boojum_batch() { #[test] fn extracting_commit_data_for_multiple_batches() { - let contract = zksync_contracts::hyperchain_contract(); - let commit_function = contract.function("commitBatches").unwrap(); + let commit_function = &*POST_BOOJUM_COMMIT_FUNCTION; // Calldata taken from the commit transaction for `https://explorer.zksync.io/batch/351000`; // `https://etherscan.io/tx/0xbd8dfe0812df0da534eb95a2d2a4382d65a8172c0b648a147d60c1c2921227fd` let commit_tx_input_data = include_bytes!("commit_l1_batch_351000-351004_mainnet.calldata"); @@ -215,6 +254,7 @@ fn extracting_commit_data_for_multiple_batches() { commit_tx_input_data, commit_function, L1BatchNumber(l1_batch), + true, ) .unwrap(); @@ -229,6 +269,7 @@ fn extracting_commit_data_for_multiple_batches() { commit_tx_input_data, commit_function, L1BatchNumber(bogus_l1_batch), + true, ) .unwrap_err(); } @@ -244,6 +285,7 @@ fn extracting_commit_data_for_pre_boojum_batch() { commit_tx_input_data, &PRE_BOOJUM_COMMIT_FUNCTION, L1BatchNumber(200_000), + true, ) .unwrap(); @@ -265,6 +307,7 @@ impl SaveAction<'_> { self, storage: &mut Connection<'_, Core>, commit_tx_hash_by_l1_batch: &HashMap, + chain_id_by_l1_batch: 
&HashMap, ) { match self { Self::InsertBatch(l1_batch) => { @@ -291,6 +334,7 @@ impl SaveAction<'_> { } Self::InsertCommitTx(l1_batch_number) => { let commit_tx_hash = commit_tx_hash_by_l1_batch[&l1_batch_number]; + let chain_id = chain_id_by_l1_batch.get(&l1_batch_number).copied(); storage .eth_sender_dal() .insert_bogus_confirmed_eth_tx( @@ -298,6 +342,7 @@ impl SaveAction<'_> { AggregatedActionType::Commit, commit_tx_hash, chrono::Utc::now(), + chain_id, ) .await .unwrap(); @@ -367,7 +412,7 @@ fn l1_batch_commit_log(l1_batch: &L1BatchWithMetadata) -> Log { }); Log { - address: DIAMOND_PROXY_ADDR, + address: L1_DIAMOND_PROXY_ADDR, topics: vec![ *BLOCK_COMMIT_EVENT_HASH, H256::from_low_u64_be(l1_batch.header.number.0.into()), // batch number @@ -432,7 +477,7 @@ async fn normal_checker_function( let (l1_batch_updates_sender, mut l1_batch_updates_receiver) = mpsc::unbounded_channel(); let checker = ConsistencyChecker { event_handler: Box::new(l1_batch_updates_sender), - ..create_mock_checker(client, pool.clone(), commitment_mode) + ..create_mock_checker(client, pool.clone(), commitment_mode).await }; let (stop_sender, stop_receiver) = watch::channel(false); @@ -441,7 +486,112 @@ async fn normal_checker_function( // Add new batches to the storage. for save_action in save_actions_mapper(&l1_batches) { save_action - .apply(&mut storage, &commit_tx_hash_by_l1_batch) + .apply( + &mut storage, + &commit_tx_hash_by_l1_batch, + &Default::default(), + ) + .await; + tokio::time::sleep(Duration::from_millis(7)).await; + } + + // Wait until all batches are checked. + loop { + let checked_batch = l1_batch_updates_receiver.recv().await.unwrap(); + if checked_batch == l1_batches.last().unwrap().header.number { + break; + } + } + + // Send the stop signal to the checker and wait for it to stop. + stop_sender.send_replace(true); + checker_task.await.unwrap().unwrap(); +} + +#[tokio::test] +async fn checker_works_with_different_settlement_layers() { + // Use default action mapper. 
+ let save_actions_mapper = SAVE_ACTION_MAPPERS[0].1; + let commitment_mode = L1BatchCommitmentMode::Rollup; + + let pool = ConnectionPool::::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + insert_genesis_batch(&mut storage, &GenesisParams::mock()) + .await + .unwrap(); + + let l1_batches: Vec<_> = (1..=10).map(create_l1_batch_with_metadata).collect(); + let mut commit_tx_hash_by_l1_batch = HashMap::with_capacity(l1_batches.len()); + let mut chain_id_by_l1_batch = HashMap::with_capacity(l1_batches.len()); + let l1_client = create_mock_ethereum(); + let gateway_client = create_mock_gateway(); + + let clients = [l1_client, gateway_client]; + let diamond_proxies = [L1_DIAMOND_PROXY_ADDR, GATEWAY_DIAMOND_PROXY_ADDR]; + + for (i, l1_batches) in l1_batches.chunks(2).enumerate() { + let client = &clients[i & 1]; + let input_data = build_commit_tx_input_data(l1_batches, commitment_mode); + let signed_tx = client.sign_prepared_tx( + input_data.clone(), + VALIDATOR_TIMELOCK_ADDR, + Options { + nonce: Some((i / 2).into()), + ..Options::default() + }, + ); + let signed_tx = signed_tx.unwrap(); + client.as_ref().send_raw_tx(signed_tx.raw_tx).await.unwrap(); + client.execute_tx(signed_tx.hash, true, 1).with_logs( + l1_batches + .iter() + .map(|batch| { + let mut log = l1_batch_commit_log(batch); + log.address = diamond_proxies[i & 1]; + log + }) + .collect(), + ); + + commit_tx_hash_by_l1_batch.extend( + l1_batches + .iter() + .map(|batch| (batch.header.number, signed_tx.hash)), + ); + let chain_id = client.as_ref().fetch_chain_id().await.unwrap(); + chain_id_by_l1_batch.extend( + l1_batches + .iter() + .map(|batch| (batch.header.number, chain_id)), + ) + } + + let (l1_batch_updates_sender, mut l1_batch_updates_receiver) = mpsc::unbounded_channel(); + let mut checker = ConsistencyChecker::new( + Box::new(clients[0].clone().into_client()), + Some(Box::new(clients[1].clone().into_client())), + 100, + pool.clone(), + commitment_mode, + 
L2ChainId::new(ERA_CHAIN_ID).unwrap(), + ) + .await + .unwrap(); + checker.sleep_interval = Duration::from_millis(10); + checker.event_handler = Box::new(l1_batch_updates_sender); + checker.l1_data_mismatch_behavior = L1DataMismatchBehavior::Bail; + + let (stop_sender, stop_receiver) = watch::channel(false); + let checker_task = tokio::spawn(checker.run(stop_receiver)); + + // Add new batches to the storage. + for save_action in save_actions_mapper(&l1_batches) { + save_action + .apply( + &mut storage, + &commit_tx_hash_by_l1_batch, + &chain_id_by_l1_batch, + ) .await; tokio::time::sleep(Duration::from_millis(7)).await; } @@ -515,7 +665,7 @@ async fn checker_processes_pre_boojum_batches( let (l1_batch_updates_sender, mut l1_batch_updates_receiver) = mpsc::unbounded_channel(); let checker = ConsistencyChecker { event_handler: Box::new(l1_batch_updates_sender), - ..create_mock_checker(client, pool.clone(), commitment_mode) + ..create_mock_checker(client, pool.clone(), commitment_mode).await }; let (stop_sender, stop_receiver) = watch::channel(false); @@ -524,7 +674,11 @@ async fn checker_processes_pre_boojum_batches( // Add new batches to the storage. 
for save_action in save_actions_mapper(&l1_batches) { save_action - .apply(&mut storage, &commit_tx_hash_by_l1_batch) + .apply( + &mut storage, + &commit_tx_hash_by_l1_batch, + &Default::default(), + ) .await; tokio::time::sleep(Duration::from_millis(7)).await; } @@ -586,7 +740,11 @@ async fn checker_functions_after_snapshot_recovery( if !delay_batch_insertion { for &save_action in &save_actions { save_action - .apply(&mut storage, &commit_tx_hash_by_l1_batch) + .apply( + &mut storage, + &commit_tx_hash_by_l1_batch, + &Default::default(), + ) .await; } } @@ -594,7 +752,7 @@ async fn checker_functions_after_snapshot_recovery( let (l1_batch_updates_sender, mut l1_batch_updates_receiver) = mpsc::unbounded_channel(); let checker = ConsistencyChecker { event_handler: Box::new(l1_batch_updates_sender), - ..create_mock_checker(client, pool.clone(), commitment_mode) + ..create_mock_checker(client, pool.clone(), commitment_mode).await }; let (stop_sender, stop_receiver) = watch::channel(false); let checker_task = tokio::spawn(checker.run(stop_receiver)); @@ -603,7 +761,11 @@ async fn checker_functions_after_snapshot_recovery( tokio::time::sleep(Duration::from_millis(10)).await; for &save_action in &save_actions { save_action - .apply(&mut storage, &commit_tx_hash_by_l1_batch) + .apply( + &mut storage, + &commit_tx_hash_by_l1_batch, + &Default::default(), + ) .await; } } @@ -654,7 +816,7 @@ impl IncorrectDataKind { l1_batch: &L1BatchWithMetadata, commitment_mode: L1BatchCommitmentMode, ) -> H256 { - let mut log_origin = Some(DIAMOND_PROXY_ADDR); + let mut log_origin = Some(L1_DIAMOND_PROXY_ADDR); let (commit_tx_input_data, successful_status) = match self { Self::MissingStatus => { return H256::zero(); // Do not execute the transaction @@ -771,12 +933,16 @@ async fn checker_detects_incorrect_tx_data( ]; for save_action in save_actions { save_action - .apply(&mut storage, &commit_tx_hash_by_l1_batch) + .apply( + &mut storage, + &commit_tx_hash_by_l1_batch, + 
&Default::default(), + ) .await; } drop(storage); - let checker = create_mock_checker(client, pool, commitment_mode); + let checker = create_mock_checker(client, pool, commitment_mode).await; let (_stop_sender, stop_receiver) = watch::channel(false); // The checker must stop with an error. tokio::time::timeout(Duration::from_secs(30), checker.run(stop_receiver)) diff --git a/core/node/db_pruner/src/lib.rs b/core/node/db_pruner/src/lib.rs index 4b4a53c68aa0..6ec8b2ce4602 100644 --- a/core/node/db_pruner/src/lib.rs +++ b/core/node/db_pruner/src/lib.rs @@ -8,7 +8,10 @@ use std::{ use anyhow::Context as _; use serde::{Deserialize, Serialize}; use tokio::sync::watch; -use zksync_dal::{pruning_dal::PruningInfo, Connection, ConnectionPool, Core, CoreDal}; +use zksync_dal::{ + pruning_dal::{HardPruningInfo, PruningInfo, SoftPruningInfo}, + Connection, ConnectionPool, Core, CoreDal, +}; use zksync_health_check::{Health, HealthStatus, HealthUpdater, ReactiveHealthCheck}; use zksync_types::{L1BatchNumber, L2BlockNumber}; @@ -53,10 +56,10 @@ struct DbPrunerHealth { impl From for DbPrunerHealth { fn from(info: PruningInfo) -> Self { Self { - last_soft_pruned_l1_batch: info.last_soft_pruned_l1_batch, - last_soft_pruned_l2_block: info.last_soft_pruned_l2_block, - last_hard_pruned_l1_batch: info.last_hard_pruned_l1_batch, - last_hard_pruned_l2_block: info.last_hard_pruned_l2_block, + last_soft_pruned_l1_batch: info.last_soft_pruned.map(|info| info.l1_batch), + last_soft_pruned_l2_block: info.last_soft_pruned.map(|info| info.l2_block), + last_hard_pruned_l1_batch: info.last_hard_pruned.map(|info| info.l1_batch), + last_hard_pruned_l2_block: info.last_hard_pruned.map(|info| info.l2_block), } } } @@ -188,13 +191,10 @@ impl DbPruner { let mut transaction = storage.start_transaction().await?; let mut current_pruning_info = transaction.pruning_dal().get_pruning_info().await?; - let next_l1_batch_to_prune = L1BatchNumber( - current_pruning_info - .last_soft_pruned_l1_batch - 
.unwrap_or(L1BatchNumber(0)) - .0 - + self.config.pruned_batch_chunk_size, - ); + let next_l1_batch_to_prune = current_pruning_info + .last_soft_pruned + .map_or(L1BatchNumber(0), |info| info.l1_batch) + + self.config.pruned_batch_chunk_size; if !self.is_l1_batch_prunable(next_l1_batch_to_prune).await { METRICS.pruning_chunk_duration[&PruneType::NoOp].observe(start.elapsed()); return Ok(false); @@ -207,7 +207,7 @@ impl DbPruner { .with_context(|| format!("L1 batch #{next_l1_batch_to_prune} is ready to be pruned, but has no L2 blocks"))?; transaction .pruning_dal() - .soft_prune_batches_range(next_l1_batch_to_prune, next_l2_block_to_prune) + .insert_soft_pruning_log(next_l1_batch_to_prune, next_l2_block_to_prune) .await?; transaction.commit().await?; @@ -218,8 +218,10 @@ impl DbPruner { "Soft pruned db l1_batches up to {next_l1_batch_to_prune} and L2 blocks up to {next_l2_block_to_prune}, operation took {latency:?}", ); - current_pruning_info.last_soft_pruned_l1_batch = Some(next_l1_batch_to_prune); - current_pruning_info.last_soft_pruned_l2_block = Some(next_l2_block_to_prune); + current_pruning_info.last_soft_pruned = Some(SoftPruningInfo { + l1_batch: next_l1_batch_to_prune, + l2_block: next_l2_block_to_prune, + }); self.update_health(current_pruning_info); Ok(true) } @@ -233,20 +235,26 @@ impl DbPruner { let mut transaction = storage.start_transaction().await?; let mut current_pruning_info = transaction.pruning_dal().get_pruning_info().await?; - let last_soft_pruned_l1_batch = - current_pruning_info.last_soft_pruned_l1_batch.with_context(|| { - format!("bogus pruning info {current_pruning_info:?}: trying to hard-prune data, but there is no soft-pruned L1 batch") - })?; - let last_soft_pruned_l2_block = - current_pruning_info.last_soft_pruned_l2_block.with_context(|| { - format!("bogus pruning info {current_pruning_info:?}: trying to hard-prune data, but there is no soft-pruned L2 block") + let soft_pruned = current_pruning_info.last_soft_pruned.with_context(|| { 
+ format!("bogus pruning info {current_pruning_info:?}: trying to hard-prune data, but there is no soft-pruned data") + })?; + + let last_pruned_l1_batch_root_hash = transaction + .blocks_dal() + .get_l1_batch_state_root(soft_pruned.l1_batch) + .await? + .with_context(|| { + format!( + "hard-pruned L1 batch #{} does not have root hash", + soft_pruned.l1_batch + ) })?; let mut dal = transaction.pruning_dal(); let stats = tokio::select! { result = dal.hard_prune_batches_range( - last_soft_pruned_l1_batch, - last_soft_pruned_l2_block, + soft_pruned.l1_batch, + soft_pruned.l2_block, ) => result?, _ = stop_receiver.changed() => { @@ -258,15 +266,23 @@ impl DbPruner { } }; METRICS.observe_hard_pruning(stats); + + dal.insert_hard_pruning_log( + soft_pruned.l1_batch, + soft_pruned.l2_block, + last_pruned_l1_batch_root_hash, + ) + .await?; transaction.commit().await?; let latency = latency.observe(); - tracing::info!( - "Hard pruned db l1_batches up to {last_soft_pruned_l1_batch} and L2 blocks up to {last_soft_pruned_l2_block}, \ - operation took {latency:?}" - ); - current_pruning_info.last_hard_pruned_l1_batch = Some(last_soft_pruned_l1_batch); - current_pruning_info.last_hard_pruned_l2_block = Some(last_soft_pruned_l2_block); + let hard_pruning_info = HardPruningInfo { + l1_batch: soft_pruned.l1_batch, + l2_block: soft_pruned.l2_block, + l1_batch_root_hash: Some(last_pruned_l1_batch_root_hash), + }; + tracing::info!("Hard pruned data up to {hard_pruning_info:?}, operation took {latency:?}"); + current_pruning_info.last_hard_pruned = Some(hard_pruning_info); self.update_health(current_pruning_info); Ok(PruningIterationOutcome::Pruned) } @@ -280,9 +296,7 @@ impl DbPruner { self.update_health(current_pruning_info); // If this `if` is not entered, it means that the node has restarted after soft pruning - if current_pruning_info.last_soft_pruned_l1_batch - == current_pruning_info.last_hard_pruned_l1_batch - { + if current_pruning_info.is_caught_up() { let pruning_done = 
self.soft_prune(&mut storage).await?; if !pruning_done { return Ok(PruningIterationOutcome::NoOp); diff --git a/core/node/db_pruner/src/tests.rs b/core/node/db_pruner/src/tests.rs index 99fbada423dc..7afd5044c73c 100644 --- a/core/node/db_pruner/src/tests.rs +++ b/core/node/db_pruner/src/tests.rs @@ -12,8 +12,7 @@ use zksync_node_test_utils::{ l1_batch_metadata_to_commitment_artifacts, }; use zksync_types::{ - aggregated_operations::AggregatedActionType, block::L2BlockHeader, Address, L2BlockNumber, - ProtocolVersion, H256, + aggregated_operations::AggregatedActionType, L2BlockNumber, ProtocolVersion, H256, }; use super::*; @@ -95,8 +94,8 @@ async fn is_l1_batch_prunable_works() { async fn insert_l2_blocks( conn: &mut Connection<'_, Core>, - l1_batches_count: u64, - l2_blocks_per_batch: u64, + l1_batches_count: u32, + l2_blocks_per_batch: u32, ) { conn.protocol_versions_dal() .save_protocol_version_with_tx(&ProtocolVersion::default()) @@ -104,36 +103,31 @@ async fn insert_l2_blocks( .unwrap(); for l1_batch_number in 0..l1_batches_count { + let l1_batch_number = L1BatchNumber(l1_batch_number); for l2_block_index in 0..l2_blocks_per_batch { - let l2_block_number = - L2BlockNumber((l1_batch_number * l2_blocks_per_batch + l2_block_index) as u32); - let l2_block_header = L2BlockHeader { - number: l2_block_number, - timestamp: 0, - hash: H256::from_low_u64_be(u64::from(l2_block_number.0)), - l1_tx_count: 0, - l2_tx_count: 0, - fee_account_address: Address::repeat_byte(1), - base_fee_per_gas: 0, - gas_per_pubdata_limit: 0, - batch_fee_input: Default::default(), - base_system_contracts_hashes: Default::default(), - protocol_version: Some(Default::default()), - virtual_blocks: 0, - gas_limit: 0, - logs_bloom: Default::default(), - pubdata_params: Default::default(), - }; + let l2_block_number = l1_batch_number.0 * l2_blocks_per_batch + l2_block_index; + let l2_block_header = create_l2_block(l2_block_number); conn.blocks_dal() .insert_l2_block(&l2_block_header) .await 
.unwrap(); conn.blocks_dal() - .mark_l2_blocks_as_executed_in_l1_batch(L1BatchNumber(l1_batch_number as u32)) + .mark_l2_blocks_as_executed_in_l1_batch(l1_batch_number) .await .unwrap(); } + + let l1_batch_header = create_l1_batch(l1_batch_number.0); + conn.blocks_dal() + .insert_mock_l1_batch(&l1_batch_header) + .await + .unwrap(); + let root_hash = H256::from_low_u64_be(l1_batch_number.0.into()); + conn.blocks_dal() + .set_l1_batch_hash(l1_batch_number, root_hash) + .await + .unwrap(); } } @@ -144,7 +138,7 @@ async fn hard_pruning_ignores_conditions_checks() { insert_l2_blocks(&mut conn, 10, 2).await; conn.pruning_dal() - .soft_prune_batches_range(L1BatchNumber(2), L2BlockNumber(5)) + .insert_soft_pruning_log(L1BatchNumber(2), L2BlockNumber(5)) .await .unwrap(); @@ -167,24 +161,34 @@ async fn hard_pruning_ignores_conditions_checks() { .unwrap(); assert_eq!( - PruningInfo { - last_soft_pruned_l1_batch: Some(L1BatchNumber(2)), - last_soft_pruned_l2_block: Some(L2BlockNumber(5)), - last_hard_pruned_l1_batch: Some(L1BatchNumber(2)), - last_hard_pruned_l2_block: Some(L2BlockNumber(5)), - }, + test_pruning_info(2, 5), conn.pruning_dal().get_pruning_info().await.unwrap() ); let health = health_check.check_health().await; assert_matches!(health.status(), HealthStatus::Ready); } + +fn test_pruning_info(l1_batch: u32, l2_block: u32) -> PruningInfo { + PruningInfo { + last_soft_pruned: Some(SoftPruningInfo { + l1_batch: L1BatchNumber(l1_batch), + l2_block: L2BlockNumber(l2_block), + }), + last_hard_pruned: Some(HardPruningInfo { + l1_batch: L1BatchNumber(l1_batch), + l2_block: L2BlockNumber(l2_block), + l1_batch_root_hash: Some(H256::from_low_u64_be(l1_batch.into())), + }), + } +} + #[test(tokio::test)] async fn pruner_catches_up_with_hard_pruning_up_to_soft_pruning_boundary_ignoring_chunk_size() { let pool = ConnectionPool::::test_pool().await; let mut conn = pool.connection().await.unwrap(); insert_l2_blocks(&mut conn, 10, 2).await; conn.pruning_dal() - 
.soft_prune_batches_range(L1BatchNumber(2), L2BlockNumber(5)) + .insert_soft_pruning_log(L1BatchNumber(2), L2BlockNumber(5)) .await .unwrap(); @@ -205,12 +209,7 @@ async fn pruner_catches_up_with_hard_pruning_up_to_soft_pruning_boundary_ignorin .unwrap(); assert_eq!( - PruningInfo { - last_soft_pruned_l1_batch: Some(L1BatchNumber(2)), - last_soft_pruned_l2_block: Some(L2BlockNumber(5)), - last_hard_pruned_l1_batch: Some(L1BatchNumber(2)), - last_hard_pruned_l2_block: Some(L2BlockNumber(5)), - }, + test_pruning_info(2, 5), conn.pruning_dal().get_pruning_info().await.unwrap() ); @@ -219,12 +218,7 @@ async fn pruner_catches_up_with_hard_pruning_up_to_soft_pruning_boundary_ignorin .await .unwrap(); assert_eq!( - PruningInfo { - last_soft_pruned_l1_batch: Some(L1BatchNumber(7)), - last_soft_pruned_l2_block: Some(L2BlockNumber(15)), - last_hard_pruned_l1_batch: Some(L1BatchNumber(7)), - last_hard_pruned_l2_block: Some(L2BlockNumber(15)), - }, + test_pruning_info(7, 15), conn.pruning_dal().get_pruning_info().await.unwrap() ); } @@ -253,12 +247,7 @@ async fn unconstrained_pruner_with_fresh_database() { .unwrap(); assert_eq!( - PruningInfo { - last_soft_pruned_l1_batch: Some(L1BatchNumber(3)), - last_soft_pruned_l2_block: Some(L2BlockNumber(7)), - last_hard_pruned_l1_batch: Some(L1BatchNumber(3)), - last_hard_pruned_l2_block: Some(L2BlockNumber(7)), - }, + test_pruning_info(3, 7), conn.pruning_dal().get_pruning_info().await.unwrap() ); @@ -267,12 +256,7 @@ async fn unconstrained_pruner_with_fresh_database() { .await .unwrap(); assert_eq!( - PruningInfo { - last_soft_pruned_l1_batch: Some(L1BatchNumber(6)), - last_soft_pruned_l2_block: Some(L2BlockNumber(13)), - last_hard_pruned_l1_batch: Some(L1BatchNumber(6)), - last_hard_pruned_l2_block: Some(L2BlockNumber(13)), - }, + test_pruning_info(6, 13), conn.pruning_dal().get_pruning_info().await.unwrap() ); } @@ -302,12 +286,7 @@ async fn pruning_blocked_after_first_chunk() { .unwrap(); assert_eq!( - PruningInfo { - 
last_soft_pruned_l1_batch: Some(L1BatchNumber(3)), - last_soft_pruned_l2_block: Some(L2BlockNumber(7)), - last_hard_pruned_l1_batch: Some(L1BatchNumber(3)), - last_hard_pruned_l2_block: Some(L2BlockNumber(7)), - }, + test_pruning_info(3, 7), conn.pruning_dal().get_pruning_info().await.unwrap() ); @@ -318,12 +297,7 @@ async fn pruning_blocked_after_first_chunk() { assert_matches!(outcome, PruningIterationOutcome::NoOp); // pruning shouldn't have progressed as chunk 6 cannot be pruned assert_eq!( - PruningInfo { - last_soft_pruned_l1_batch: Some(L1BatchNumber(3)), - last_soft_pruned_l2_block: Some(L2BlockNumber(7)), - last_hard_pruned_l1_batch: Some(L1BatchNumber(3)), - last_hard_pruned_l2_block: Some(L2BlockNumber(7)), - }, + test_pruning_info(3, 7), conn.pruning_dal().get_pruning_info().await.unwrap() ); } @@ -417,6 +391,7 @@ async fn mark_l1_batch_as_executed(storage: &mut Connection<'_, Core>, number: u AggregatedActionType::Execute, H256::from_low_u64_be(number.into()), chrono::Utc::now(), + None, ) .await .unwrap(); diff --git a/core/node/eth_sender/Cargo.toml b/core/node/eth_sender/Cargo.toml index a33536baa986..90b5727d9500 100644 --- a/core/node/eth_sender/Cargo.toml +++ b/core/node/eth_sender/Cargo.toml @@ -11,12 +11,14 @@ keywords.workspace = true categories.workspace = true [dependencies] +serde.workspace = true vise.workspace = true zksync_types.workspace = true zksync_dal.workspace = true zksync_config.workspace = true zksync_contracts.workspace = true zksync_eth_client.workspace = true +zksync_health_check.workspace = true zksync_l1_contract_interface.workspace = true zksync_object_store.workspace = true zksync_prover_interface.workspace = true diff --git a/core/node/eth_sender/src/aggregator.rs b/core/node/eth_sender/src/aggregator.rs index 432804a21b2e..8b0d8dfecea0 100644 --- a/core/node/eth_sender/src/aggregator.rs +++ b/core/node/eth_sender/src/aggregator.rs @@ -12,13 +12,14 @@ use zksync_types::{ helpers::unix_timestamp_ms, 
protocol_version::{L1VerifierConfig, ProtocolSemanticVersion}, pubdata_da::PubdataSendingMode, - L1BatchNumber, ProtocolVersionId, + settlement::SettlementMode, + Address, L1BatchNumber, ProtocolVersionId, }; use super::{ aggregated_operations::AggregatedOperation, publish_criterion::{ - DataSizeCriterion, GasCriterion, L1BatchPublishCriterion, NumberCriterion, + GasCriterionKind, L1BatchPublishCriterion, L1GasCriterion, NumberCriterion, TimestampDeadlineCriterion, }, }; @@ -44,65 +45,89 @@ impl Aggregator { pub fn new( config: SenderConfig, blob_store: Arc, - operate_4844_mode: bool, + custom_commit_sender_addr: Option
, commitment_mode: L1BatchCommitmentMode, + settlement_mode: SettlementMode, ) -> Self { let pubdata_da = config.pubdata_sending_mode; - Self { - commit_criteria: vec![ + let operate_4844_mode = + custom_commit_sender_addr.is_some() && !settlement_mode.is_gateway(); + + // We do not have a reliable lower bound for gas needed to execute batches on gateway so we do not aggregate. + let execute_criteria: Vec> = if settlement_mode + .is_gateway() + { + if config.max_aggregated_blocks_to_execute > 1 { + tracing::warn!( + "config.max_aggregated_blocks_to_execute is set to {} but \ + aggregator does not support aggregating execute operations when settling on gateway", + config.max_aggregated_blocks_to_execute + ); + } + + vec![Box::from(NumberCriterion { + op: AggregatedActionType::Execute, + limit: 1, + })] + } else { + vec![ Box::from(NumberCriterion { - op: AggregatedActionType::Commit, - limit: config.max_aggregated_blocks_to_commit, + op: AggregatedActionType::Execute, + limit: config.max_aggregated_blocks_to_execute, + }), + Box::from(TimestampDeadlineCriterion { + op: AggregatedActionType::Execute, + deadline_seconds: config.aggregated_block_execute_deadline, + max_allowed_lag: Some(config.timestamp_criteria_max_allowed_lag), }), - Box::from(GasCriterion::new( - AggregatedActionType::Commit, + Box::from(L1GasCriterion::new( config.max_aggregated_tx_gas, + GasCriterionKind::Execute, )), - Box::from(DataSizeCriterion { + ] + }; + + // It only makes sense to aggregate commit operation when validium chain settles to L1. 
+ let commit_criteria: Vec> = if settlement_mode + == SettlementMode::SettlesToL1 + && commitment_mode == L1BatchCommitmentMode::Validium + { + vec![ + Box::from(NumberCriterion { op: AggregatedActionType::Commit, - data_limit: config.max_eth_tx_data_size, - pubdata_da, - commitment_mode, + limit: config.max_aggregated_blocks_to_commit, }), Box::from(TimestampDeadlineCriterion { op: AggregatedActionType::Commit, deadline_seconds: config.aggregated_block_commit_deadline, max_allowed_lag: Some(config.timestamp_criteria_max_allowed_lag), }), - ], - proof_criteria: vec![ - Box::from(NumberCriterion { - op: AggregatedActionType::PublishProofOnchain, - limit: *config.aggregated_proof_sizes.iter().max().unwrap() as u32, - }), - Box::from(GasCriterion::new( - AggregatedActionType::PublishProofOnchain, - config.max_aggregated_tx_gas, - )), - Box::from(TimestampDeadlineCriterion { - op: AggregatedActionType::PublishProofOnchain, - deadline_seconds: config.aggregated_block_prove_deadline, - // Currently, we can't use this functionality for proof criterion - // since we don't send dummy and real proofs in the same range, - // so even small ranges must be closed. 
- max_allowed_lag: None, - }), - ], - execute_criteria: vec![ - Box::from(NumberCriterion { - op: AggregatedActionType::Execute, - limit: config.max_aggregated_blocks_to_execute, - }), - Box::from(GasCriterion::new( - AggregatedActionType::Execute, + Box::from(L1GasCriterion::new( config.max_aggregated_tx_gas, + GasCriterionKind::CommitValidium, )), - Box::from(TimestampDeadlineCriterion { - op: AggregatedActionType::Execute, - deadline_seconds: config.aggregated_block_execute_deadline, - max_allowed_lag: Some(config.timestamp_criteria_max_allowed_lag), - }), - ], + ] + } else { + if config.max_aggregated_blocks_to_commit > 1 { + tracing::warn!( + "config.max_aggregated_blocks_to_commit is set to {} but \ + aggregator does not support aggregating commit operations anymore", + config.max_aggregated_blocks_to_commit + ); + } + vec![Box::from(NumberCriterion { + op: AggregatedActionType::Commit, + limit: 1, + })] + }; + + Self { + commit_criteria, + proof_criteria: vec![Box::from(NumberCriterion { + op: AggregatedActionType::PublishProofOnchain, + limit: 1, + })], + execute_criteria, config, blob_store, operate_4844_mode, @@ -137,12 +162,7 @@ impl Aggregator { { Some(AggregatedOperation::Execute(op)) } else if let Some(op) = self - .get_proof_operation( - storage, - *self.config.aggregated_proof_sizes.iter().max().unwrap(), - last_sealed_l1_batch_number, - l1_verifier_config, - ) + .get_proof_operation(storage, last_sealed_l1_batch_number, l1_verifier_config) .await { Some(AggregatedOperation::PublishProofOnchain(op)) @@ -181,7 +201,10 @@ impl Aggregator { ) .await; - l1_batches.map(|l1_batches| ExecuteBatches { l1_batches }) + l1_batches.map(|l1_batches| ExecuteBatches { + l1_batches, + priority_ops_proofs: Vec::new(), + }) } async fn get_commit_operation( @@ -247,12 +270,11 @@ impl Aggregator { async fn load_dummy_proof_operations( storage: &mut Connection<'_, Core>, - limit: usize, is_4844_mode: bool, ) -> Vec { let mut ready_for_proof_l1_batches = storage 
.blocks_dal() - .get_ready_for_dummy_proof_l1_batches(limit) + .get_ready_for_dummy_proof_l1_batches(1) .await .unwrap(); @@ -421,7 +443,6 @@ impl Aggregator { async fn get_proof_operation( &mut self, storage: &mut Connection<'_, Core>, - limit: usize, last_sealed_l1_batch: L1BatchNumber, l1_verifier_config: L1VerifierConfig, ) -> Option { @@ -438,7 +459,7 @@ impl Aggregator { ProofSendingMode::SkipEveryProof => { let ready_for_proof_l1_batches = - Self::load_dummy_proof_operations(storage, limit, self.operate_4844_mode).await; + Self::load_dummy_proof_operations(storage, self.operate_4844_mode).await; self.prepare_dummy_proof_operation( storage, ready_for_proof_l1_batches, @@ -461,7 +482,7 @@ impl Aggregator { } else { let ready_for_proof_batches = storage .blocks_dal() - .get_skipped_for_proof_l1_batches(limit) + .get_skipped_for_proof_l1_batches(1) .await .unwrap(); self.prepare_dummy_proof_operation( diff --git a/core/node/eth_sender/src/eth_tx_aggregator.rs b/core/node/eth_sender/src/eth_tx_aggregator.rs index ac9ed4aaaadb..dbef6da7b47b 100644 --- a/core/node/eth_sender/src/eth_tx_aggregator.rs +++ b/core/node/eth_sender/src/eth_tx_aggregator.rs @@ -3,6 +3,7 @@ use zksync_config::configs::eth_sender::SenderConfig; use zksync_contracts::BaseSystemContractsHashes; use zksync_dal::{Connection, ConnectionPool, Core, CoreDal}; use zksync_eth_client::{BoundEthInterface, CallFunctionArgs}; +use zksync_health_check::{Health, HealthStatus, HealthUpdater, ReactiveHealthCheck}; use zksync_l1_contract_interface::{ i_executor::{ commit::kzg::{KzgInfo, ZK_SYNC_BYTES_PER_BLOB}, @@ -14,7 +15,7 @@ use zksync_l1_contract_interface::{ use zksync_shared_metrics::BlockL1Stage; use zksync_types::{ aggregated_operations::AggregatedActionType, - commitment::{L1BatchWithMetadata, SerializeCommitment}, + commitment::{L1BatchCommitmentMode, L1BatchWithMetadata, SerializeCommitment}, eth_sender::{EthTx, EthTxBlobSidecar, EthTxBlobSidecarV1, SidecarBlobV1}, ethabi::{Function, Token}, 
l2_to_l1_log::UserL2ToL1Log, @@ -27,8 +28,9 @@ use zksync_types::{ use super::aggregated_operations::AggregatedOperation; use crate::{ + health::{EthTxAggregatorHealthDetails, EthTxDetails}, metrics::{PubdataKind, METRICS}, - utils::agg_l1_batch_base_cost, + publish_criterion::L1GasCriterion, zksync_functions::ZkSyncFunctions, Aggregator, EthSenderError, }; @@ -65,6 +67,7 @@ pub struct EthTxAggregator { pool: ConnectionPool, settlement_mode: SettlementMode, sl_chain_id: SLChainId, + health_updater: HealthUpdater, } struct TxData { @@ -119,10 +122,14 @@ impl EthTxAggregator { pool, settlement_mode, sl_chain_id, + health_updater: ReactiveHealthCheck::new("eth_tx_aggregator").1, } } pub async fn run(mut self, stop_receiver: watch::Receiver) -> anyhow::Result<()> { + self.health_updater + .update(Health::from(HealthStatus::Ready)); + let pool = self.pool.clone(); loop { let mut storage = pool.connection_tagged("eth_sender").await.unwrap(); @@ -380,7 +387,6 @@ impl EthTxAggregator { tracing::error!("Failed to get multicall data {err:?}"); err })?; - let contracts_are_pre_shared_bridge = protocol_version_id.is_pre_shared_bridge(); let snark_wrapper_vk_hash = self .get_snark_wrapper_vk_hash(verifier_address) @@ -422,15 +428,15 @@ impl EthTxAggregator { return Ok(()); } let is_gateway = self.settlement_mode.is_gateway(); - let tx = self - .save_eth_tx( - storage, - &agg_op, - contracts_are_pre_shared_bridge, - is_gateway, - ) - .await?; + let tx = self.save_eth_tx(storage, &agg_op, is_gateway).await?; Self::report_eth_tx_saving(storage, &agg_op, &tx).await; + + self.health_updater.update( + EthTxAggregatorHealthDetails { + last_saved_tx: EthTxDetails::new(&tx, None), + } + .into(), + ); } Ok(()) } @@ -468,19 +474,9 @@ impl EthTxAggregator { .await; } - fn encode_aggregated_op( - &self, - op: &AggregatedOperation, - contracts_are_pre_shared_bridge: bool, - ) -> TxData { - let operation_is_pre_shared_bridge = op.protocol_version().is_pre_shared_bridge(); - - // The post 
shared bridge contracts support pre-shared bridge operations, but vice versa is not true. - if contracts_are_pre_shared_bridge { - assert!(operation_is_pre_shared_bridge); - } - + fn encode_aggregated_op(&self, op: &AggregatedOperation) -> TxData { let mut args = vec![Token::Uint(self.rollup_chain_id.as_u64().into())]; + let is_op_pre_gateway = op.protocol_version().is_pre_gateway(); let (calldata, sidecar) = match op { AggregatedOperation::Commit(last_committed_l1_batch, l1_batches, pubdata_da) => { @@ -492,17 +488,12 @@ impl EthTxAggregator { }; let commit_data_base = commit_batches.into_tokens(); - let (encoding_fn, commit_data) = if contracts_are_pre_shared_bridge { - (&self.functions.pre_shared_bridge_commit, commit_data_base) + args.extend(commit_data_base); + let commit_data = args; + let encoding_fn = if is_op_pre_gateway { + &self.functions.post_shared_bridge_commit } else { - args.extend(commit_data_base); - ( - self.functions - .post_shared_bridge_commit - .as_ref() - .expect("Missing ABI for commitBatchesSharedBridge"), - args, - ) + &self.functions.post_gateway_commit }; let l1_batch_for_sidecar = @@ -515,37 +506,27 @@ impl EthTxAggregator { Self::encode_commit_data(encoding_fn, &commit_data, l1_batch_for_sidecar) } AggregatedOperation::PublishProofOnchain(op) => { - let calldata = if contracts_are_pre_shared_bridge { - self.functions - .pre_shared_bridge_prove - .encode_input(&op.into_tokens()) - .expect("Failed to encode prove transaction data") + args.extend(op.into_tokens()); + let encoding_fn = if is_op_pre_gateway { + &self.functions.post_shared_bridge_prove } else { - args.extend(op.into_tokens()); - self.functions - .post_shared_bridge_prove - .as_ref() - .expect("Missing ABI for proveBatchesSharedBridge") - .encode_input(&args) - .expect("Failed to encode prove transaction data") + &self.functions.post_gateway_prove }; + let calldata = encoding_fn + .encode_input(&args) + .expect("Failed to encode prove transaction data"); (calldata, None) } 
AggregatedOperation::Execute(op) => { - let calldata = if contracts_are_pre_shared_bridge { - self.functions - .pre_shared_bridge_execute - .encode_input(&op.into_tokens()) - .expect("Failed to encode execute transaction data") + args.extend(op.into_tokens()); + let encoding_fn = if is_op_pre_gateway { + &self.functions.post_shared_bridge_execute } else { - args.extend(op.into_tokens()); - self.functions - .post_shared_bridge_execute - .as_ref() - .expect("Missing ABI for executeBatchesSharedBridge") - .encode_input(&args) - .expect("Failed to encode execute transaction data") + &self.functions.post_gateway_execute }; + let calldata = encoding_fn + .encode_input(&args) + .expect("Failed to encode execute transaction data"); (calldata, None) } }; @@ -593,7 +574,6 @@ impl EthTxAggregator { &self, storage: &mut Connection<'_, Core>, aggregated_op: &AggregatedOperation, - contracts_are_pre_shared_bridge: bool, is_gateway: bool, ) -> Result { let mut transaction = storage.start_transaction().await.unwrap(); @@ -606,16 +586,22 @@ impl EthTxAggregator { (_, _) => None, }; let nonce = self.get_next_nonce(&mut transaction, sender_addr).await?; - let encoded_aggregated_op = - self.encode_aggregated_op(aggregated_op, contracts_are_pre_shared_bridge); + let encoded_aggregated_op = self.encode_aggregated_op(aggregated_op); let l1_batch_number_range = aggregated_op.l1_batch_range(); - let predicted_gas_for_batches = transaction - .blocks_dal() - .get_l1_batches_predicted_gas(l1_batch_number_range.clone(), op_type) - .await - .unwrap(); - let eth_tx_predicted_gas = agg_l1_batch_base_cost(op_type) + predicted_gas_for_batches; + let eth_tx_predicted_gas = match (op_type, is_gateway, self.aggregator.mode()) { + (AggregatedActionType::Execute, false, _) => Some( + L1GasCriterion::total_execute_gas_amount( + &mut transaction, + l1_batch_number_range.clone(), + ) + .await, + ), + (AggregatedActionType::Commit, false, L1BatchCommitmentMode::Validium) => Some( + 
L1GasCriterion::total_validium_commit_gas_amount(l1_batch_number_range.clone()), + ), + _ => None, + }; let eth_tx = transaction .eth_sender_dal() @@ -670,4 +656,9 @@ impl EthTxAggregator { ) }) } + + /// Returns the health check for eth tx aggregator. + pub fn health_check(&self) -> ReactiveHealthCheck { + self.health_updater.subscribe() + } } diff --git a/core/node/eth_sender/src/eth_tx_manager.rs b/core/node/eth_sender/src/eth_tx_manager.rs index 6992bea1007c..17f71b69303a 100644 --- a/core/node/eth_sender/src/eth_tx_manager.rs +++ b/core/node/eth_sender/src/eth_tx_manager.rs @@ -9,6 +9,7 @@ use zksync_dal::{Connection, ConnectionPool, Core, CoreDal}; use zksync_eth_client::{ encode_blob_tx_with_sidecar, BoundEthInterface, ExecutedTxStatus, RawTransactionBytes, }; +use zksync_health_check::{Health, HealthStatus, HealthUpdater, ReactiveHealthCheck}; use zksync_node_fee_model::l1_gas_price::TxParamsProvider; use zksync_shared_metrics::BlockL1Stage; use zksync_types::{eth_sender::EthTx, Address, L1BlockNumber, H256, U256}; @@ -19,6 +20,7 @@ use crate::{ AbstractL1Interface, L1BlockNumbers, OperatorNonce, OperatorType, RealL1Interface, }, eth_fees_oracle::{EthFees, EthFeesOracle, GasAdjusterFeesOracle}, + health::{EthTxDetails, EthTxManagerHealthDetails}, metrics::TransactionType, }; @@ -33,6 +35,7 @@ pub struct EthTxManager { config: SenderConfig, fees_oracle: Box, pool: ConnectionPool, + health_updater: HealthUpdater, } impl EthTxManager { @@ -67,6 +70,7 @@ impl EthTxManager { config, fees_oracle: Box::new(fees_oracle), pool, + health_updater: ReactiveHealthCheck::new("eth_tx_manager").1, } } @@ -417,6 +421,14 @@ impl EthTxManager { ) { let receipt_block_number = tx_status.receipt.block_number.unwrap().as_u32(); if receipt_block_number <= finalized_block.0 { + self.health_updater.update( + EthTxManagerHealthDetails { + last_mined_tx: EthTxDetails::new(tx, Some((&tx_status).into())), + finalized_block, + } + .into(), + ); + if tx_status.success { 
self.confirm_tx(storage, tx, tx_status).await; } else { @@ -488,13 +500,14 @@ impl EthTxManager { .track_eth_tx_metrics(storage, BlockL1Stage::Mined, tx) .await; - if gas_used > U256::from(tx.predicted_gas_cost) { - tracing::error!( - "Predicted gas {} lower than used gas {gas_used} for tx {:?} {}", - tx.predicted_gas_cost, - tx.tx_type, - tx.id - ); + if let Some(predicted_gas_cost) = tx.predicted_gas_cost { + if gas_used > U256::from(predicted_gas_cost) { + tracing::error!( + "Predicted gas {predicted_gas_cost} lower than used gas {gas_used} for tx {:?} {}", + tx.tx_type, + tx.id + ); + } } tracing::info!( "eth_tx {} with hash {tx_hash:?} for {} is confirmed. Gas spent: {gas_used:?}", @@ -522,6 +535,9 @@ impl EthTxManager { } pub async fn run(mut self, stop_receiver: watch::Receiver) -> anyhow::Result<()> { + self.health_updater + .update(Health::from(HealthStatus::Ready)); + let pool = self.pool.clone(); loop { @@ -682,4 +698,9 @@ impl EthTxManager { } } } + + /// Returns the health check for eth tx manager. 
+ pub fn health_check(&self) -> ReactiveHealthCheck { + self.health_updater.subscribe() + } } diff --git a/core/node/eth_sender/src/health.rs b/core/node/eth_sender/src/health.rs new file mode 100644 index 000000000000..1a07a61e2892 --- /dev/null +++ b/core/node/eth_sender/src/health.rs @@ -0,0 +1,68 @@ +use serde::{Deserialize, Serialize}; +use zksync_eth_client::ExecutedTxStatus; +use zksync_health_check::{Health, HealthStatus}; +use zksync_types::{ + aggregated_operations::AggregatedActionType, eth_sender::EthTx, web3::TransactionReceipt, + L1BlockNumber, Nonce, H256, +}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TxStatus { + pub tx_hash: H256, + pub success: bool, + pub receipt: TransactionReceipt, +} + +impl From<&ExecutedTxStatus> for TxStatus { + fn from(status: &ExecutedTxStatus) -> Self { + Self { + tx_hash: status.tx_hash, + success: status.success, + receipt: status.receipt.clone(), + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct EthTxAggregatorHealthDetails { + pub last_saved_tx: EthTxDetails, +} + +impl From for Health { + fn from(details: EthTxAggregatorHealthDetails) -> Self { + Self::from(HealthStatus::Ready).with_details(details) + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct EthTxDetails { + pub nonce: Nonce, + pub tx_type: AggregatedActionType, + pub created_at_timestamp: u64, + pub predicted_gas_cost: Option, + pub status: Option, +} + +impl EthTxDetails { + pub fn new(tx: &EthTx, status: Option) -> Self { + Self { + nonce: tx.nonce, + tx_type: tx.tx_type, + created_at_timestamp: tx.created_at_timestamp, + predicted_gas_cost: tx.predicted_gas_cost, + status, + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct EthTxManagerHealthDetails { + pub last_mined_tx: EthTxDetails, + pub finalized_block: L1BlockNumber, +} + +impl From for Health { + fn from(details: EthTxManagerHealthDetails) -> Self { + Self::from(HealthStatus::Ready).with_details(details) + } +} diff --git 
a/core/node/eth_sender/src/lib.rs b/core/node/eth_sender/src/lib.rs index 747ece93b811..feec550b011d 100644 --- a/core/node/eth_sender/src/lib.rs +++ b/core/node/eth_sender/src/lib.rs @@ -3,9 +3,9 @@ mod aggregator; mod error; mod eth_tx_aggregator; mod eth_tx_manager; +mod health; mod metrics; mod publish_criterion; -mod utils; mod zksync_functions; mod abstract_l1_interface; diff --git a/core/node/eth_sender/src/publish_criterion.rs b/core/node/eth_sender/src/publish_criterion.rs index 30f0820b148a..28ca44697bb6 100644 --- a/core/node/eth_sender/src/publish_criterion.rs +++ b/core/node/eth_sender/src/publish_criterion.rs @@ -1,18 +1,17 @@ -use std::fmt; +use std::{fmt, ops}; use async_trait::async_trait; use chrono::Utc; use zksync_dal::{Connection, Core, CoreDal}; -use zksync_l1_contract_interface::{i_executor::structures::CommitBatchInfo, Tokenizable}; use zksync_types::{ - aggregated_operations::AggregatedActionType, - commitment::{L1BatchCommitmentMode, L1BatchWithMetadata}, - ethabi, - pubdata_da::PubdataSendingMode, + aggregated_operations::{ + AggregatedActionType, L1_BATCH_EXECUTE_BASE_COST, L1_OPERATION_EXECUTE_COST, + }, + commitment::L1BatchWithMetadata, L1BatchNumber, }; -use super::{metrics::METRICS, utils::agg_l1_batch_base_cost}; +use super::metrics::METRICS; #[async_trait] pub trait L1BatchPublishCriterion: fmt::Debug + Send + Sync { @@ -123,32 +122,82 @@ impl L1BatchPublishCriterion for TimestampDeadlineCriterion { } } +#[derive(Debug, Clone, Copy)] +pub enum GasCriterionKind { + CommitValidium, + Execute, +} + +impl From for AggregatedActionType { + fn from(value: GasCriterionKind) -> Self { + match value { + GasCriterionKind::CommitValidium => AggregatedActionType::Commit, + GasCriterionKind::Execute => AggregatedActionType::Execute, + } + } +} + #[derive(Debug)] -pub struct GasCriterion { - pub op: AggregatedActionType, +pub struct L1GasCriterion { pub gas_limit: u32, + pub kind: GasCriterionKind, } -impl GasCriterion { - pub fn new(op: 
AggregatedActionType, gas_limit: u32) -> GasCriterion { - GasCriterion { op, gas_limit } +impl L1GasCriterion { + /// Base gas cost of processing aggregated `Execute` operation. + /// It's applicable iff SL is Ethereum. + const AGGR_L1_BATCH_EXECUTE_BASE_COST: u32 = 241_000; + + /// Base gas cost of processing aggregated `Commit` operation. + /// It's applicable iff SL is Ethereum. + const AGGR_L1_BATCH_COMMIT_BASE_COST: u32 = 242_000; + + /// Additional gas cost of processing `Commit` operation per batch. + /// It's applicable iff SL is Ethereum. + pub const L1_BATCH_COMMIT_BASE_COST: u32 = 31_000; + + pub fn new(gas_limit: u32, kind: GasCriterionKind) -> L1GasCriterion { + L1GasCriterion { gas_limit, kind } + } + + pub async fn total_execute_gas_amount( + storage: &mut Connection<'_, Core>, + batch_numbers: ops::RangeInclusive, + ) -> u32 { + let mut total = Self::AGGR_L1_BATCH_EXECUTE_BASE_COST; + + for batch_number in batch_numbers.start().0..=batch_numbers.end().0 { + total += Self::get_execute_gas_amount(storage, batch_number.into()).await; + } + + total + } + + pub fn total_validium_commit_gas_amount( + batch_numbers: ops::RangeInclusive, + ) -> u32 { + Self::AGGR_L1_BATCH_COMMIT_BASE_COST + + (batch_numbers.end().0 - batch_numbers.start().0 + 1) + * Self::L1_BATCH_COMMIT_BASE_COST } - async fn get_gas_amount( - &self, + async fn get_execute_gas_amount( storage: &mut Connection<'_, Core>, batch_number: L1BatchNumber, ) -> u32 { - storage + let header = storage .blocks_dal() - .get_l1_batches_predicted_gas(batch_number..=batch_number, self.op) + .get_l1_batch_header(batch_number) .await .unwrap() + .unwrap_or_else(|| panic!("Missing L1 batch header in DB for #{batch_number}")); + + L1_BATCH_EXECUTE_BASE_COST + u32::from(header.l1_tx_count) * L1_OPERATION_EXECUTE_COST } } #[async_trait] -impl L1BatchPublishCriterion for GasCriterion { +impl L1BatchPublishCriterion for L1GasCriterion { fn name(&self) -> &'static str { "gas_limit" } @@ -159,17 +208,25 @@ impl 
L1BatchPublishCriterion for GasCriterion { consecutive_l1_batches: &[L1BatchWithMetadata], _last_sealed_l1_batch: L1BatchNumber, ) -> Option { - let base_cost = agg_l1_batch_base_cost(self.op); + let aggr_cost = match self.kind { + GasCriterionKind::Execute => Self::AGGR_L1_BATCH_EXECUTE_BASE_COST, + GasCriterionKind::CommitValidium => Self::AGGR_L1_BATCH_COMMIT_BASE_COST, + }; assert!( - self.gas_limit > base_cost, + self.gas_limit > aggr_cost, "Config max gas cost for operations is too low" ); // We're not sure our predictions are accurate, so it's safer to lower the gas limit by 10% - let mut gas_left = (self.gas_limit as f64 * 0.9).round() as u32 - base_cost; + let mut gas_left = (self.gas_limit as f64 * 0.9).round() as u32 - aggr_cost; let mut last_l1_batch = None; for (index, l1_batch) in consecutive_l1_batches.iter().enumerate() { - let batch_gas = self.get_gas_amount(storage, l1_batch.header.number).await; + let batch_gas = match self.kind { + GasCriterionKind::Execute => { + Self::get_execute_gas_amount(storage, l1_batch.header.number).await + } + GasCriterionKind::CommitValidium => Self::L1_BATCH_COMMIT_BASE_COST, + }; if batch_gas >= gas_left { if index == 0 { panic!( @@ -185,70 +242,16 @@ impl L1BatchPublishCriterion for GasCriterion { } if let Some(last_l1_batch) = last_l1_batch { + let op: AggregatedActionType = self.kind.into(); let first_l1_batch_number = consecutive_l1_batches.first().unwrap().header.number.0; tracing::debug!( "`gas_limit` publish criterion (gas={}) triggered for op {} with L1 batch range {:?}", self.gas_limit - gas_left, - self.op, + op, first_l1_batch_number..=last_l1_batch.0 ); - METRICS.block_aggregation_reason[&(self.op, "gas").into()].inc(); + METRICS.block_aggregation_reason[&(op, "gas").into()].inc(); } last_l1_batch } } - -#[derive(Debug)] -pub struct DataSizeCriterion { - pub op: AggregatedActionType, - pub data_limit: usize, - pub pubdata_da: PubdataSendingMode, - pub commitment_mode: L1BatchCommitmentMode, -} - 
-#[async_trait] -impl L1BatchPublishCriterion for DataSizeCriterion { - fn name(&self) -> &'static str { - "data_size" - } - - async fn last_l1_batch_to_publish( - &mut self, - _storage: &mut Connection<'_, Core>, - consecutive_l1_batches: &[L1BatchWithMetadata], - _last_sealed_l1_batch: L1BatchNumber, - ) -> Option { - const STORED_BLOCK_INFO_SIZE: usize = 96; // size of `StoredBlockInfo` solidity struct - let mut data_size_left = self.data_limit - STORED_BLOCK_INFO_SIZE; - - for (index, l1_batch) in consecutive_l1_batches.iter().enumerate() { - // TODO (PLA-771): Make sure that this estimation is correct. - let commit_token = - CommitBatchInfo::new(self.commitment_mode, l1_batch, self.pubdata_da).into_token(); - let l1_commit_data_size = ethabi::encode(&[commit_token]).len(); - - if data_size_left < l1_commit_data_size { - if index == 0 { - panic!( - "L1 batch #{} requires {} data, which is more than the range limit of {}", - l1_batch.header.number, l1_commit_data_size, self.data_limit - ); - } - - let first_l1_batch_number = consecutive_l1_batches.first().unwrap().header.number.0; - let output = l1_batch.header.number - 1; - tracing::debug!( - "`data_size` publish criterion (data={}) triggered for op {} with L1 batch range {:?}", - self.data_limit - data_size_left, - self.op, - first_l1_batch_number..=output.0 - ); - METRICS.block_aggregation_reason[&(self.op, "data_size").into()].inc(); - return Some(output); - } - data_size_left -= l1_commit_data_size; - } - - None - } -} diff --git a/core/node/eth_sender/src/tester.rs b/core/node/eth_sender/src/tester.rs index 646df1dc1a7b..3caf0cae5f87 100644 --- a/core/node/eth_sender/src/tester.rs +++ b/core/node/eth_sender/src/tester.rs @@ -146,7 +146,6 @@ impl EthSenderTester { PubdataSendingMode::Calldata }; let aggregator_config = SenderConfig { - aggregated_proof_sizes: vec![1], pubdata_sending_mode, ..eth_sender_config.clone().sender.unwrap() }; @@ -256,8 +255,9 @@ impl EthSenderTester { Aggregator::new( 
aggregator_config.clone(), MockObjectStore::arc(), - aggregator_operate_4844_mode, + custom_commit_sender_addr, commitment_mode, + SettlementMode::SettlesToL1, ), gateway.clone(), // ZKsync contract address @@ -414,6 +414,7 @@ impl EthSenderTester { .into_iter() .map(l1_batch_with_metadata) .collect(), + priority_ops_proofs: Vec::new(), }); self.next_l1_batch_number_to_execute += 1; self.save_operation(operation).await @@ -514,7 +515,6 @@ impl EthSenderTester { .save_eth_tx( &mut self.conn.connection().await.unwrap(), &aggregated_operation, - false, self.is_l2, ) .await diff --git a/core/node/eth_sender/src/tests.rs b/core/node/eth_sender/src/tests.rs index 8e5032a69cfc..aab6d2e43d76 100644 --- a/core/node/eth_sender/src/tests.rs +++ b/core/node/eth_sender/src/tests.rs @@ -34,6 +34,7 @@ fn get_dummy_operation(number: u32) -> AggregatedOperation { metadata: default_l1_batch_metadata(), raw_published_factory_deps: Vec::new(), }], + priority_ops_proofs: Vec::new(), }) } @@ -208,7 +209,6 @@ async fn resend_each_block(commitment_mode: L1BatchCommitmentMode) -> anyhow::Re &mut tester.conn.connection().await.unwrap(), &get_dummy_operation(0), false, - false, ) .await?; diff --git a/core/node/eth_sender/src/utils.rs b/core/node/eth_sender/src/utils.rs deleted file mode 100644 index 98139c201993..000000000000 --- a/core/node/eth_sender/src/utils.rs +++ /dev/null @@ -1,14 +0,0 @@ -use zksync_types::aggregated_operations::AggregatedActionType; - -// TODO(QIT-32): Remove constants(except `L1_OPERATION_EXECUTE_COST`) and logic that use them -const AGGR_L1_BATCH_COMMIT_BASE_COST: u32 = 242_000; -const AGGR_L1_BATCH_PROVE_BASE_COST: u32 = 1_000_000; -const AGGR_L1_BATCH_EXECUTE_BASE_COST: u32 = 241_000; - -pub fn agg_l1_batch_base_cost(op: AggregatedActionType) -> u32 { - match op { - AggregatedActionType::Commit => AGGR_L1_BATCH_COMMIT_BASE_COST, - AggregatedActionType::PublishProofOnchain => AGGR_L1_BATCH_PROVE_BASE_COST, - AggregatedActionType::Execute => 
AGGR_L1_BATCH_EXECUTE_BASE_COST, - } -} diff --git a/core/node/eth_sender/src/zksync_functions.rs b/core/node/eth_sender/src/zksync_functions.rs index 85508c71c03d..f3e4998ef37c 100644 --- a/core/node/eth_sender/src/zksync_functions.rs +++ b/core/node/eth_sender/src/zksync_functions.rs @@ -1,14 +1,19 @@ -use zksync_contracts::{hyperchain_contract, multicall_contract, verifier_contract}; +use zksync_contracts::{ + hyperchain_contract, multicall_contract, verifier_contract, POST_SHARED_BRIDGE_COMMIT_FUNCTION, + POST_SHARED_BRIDGE_EXECUTE_FUNCTION, POST_SHARED_BRIDGE_PROVE_FUNCTION, +}; use zksync_types::ethabi::{Contract, Function}; #[derive(Debug)] pub(super) struct ZkSyncFunctions { - pub(super) pre_shared_bridge_commit: Function, - pub(super) post_shared_bridge_commit: Option, - pub(super) pre_shared_bridge_prove: Function, - pub(super) post_shared_bridge_prove: Option, - pub(super) pre_shared_bridge_execute: Function, - pub(super) post_shared_bridge_execute: Option, + pub(super) post_shared_bridge_commit: Function, + pub(super) post_shared_bridge_prove: Function, + pub(super) post_shared_bridge_execute: Function, + + pub(super) post_gateway_commit: Function, + pub(super) post_gateway_prove: Function, + pub(super) post_gateway_execute: Function, + pub(super) get_l2_bootloader_bytecode_hash: Function, pub(super) get_l2_default_account_bytecode_hash: Function, pub(super) get_verifier: Function, @@ -47,15 +52,14 @@ impl Default for ZkSyncFunctions { let verifier_contract = verifier_contract(); let multicall_contract = multicall_contract(); - let pre_shared_bridge_commit = get_function(&zksync_contract, "commitBatches"); - let post_shared_bridge_commit = - get_optional_function(&zksync_contract, "commitBatchesSharedBridge"); - let pre_shared_bridge_prove = get_function(&zksync_contract, "proveBatches"); - let post_shared_bridge_prove = - get_optional_function(&zksync_contract, "proveBatchesSharedBridge"); - let pre_shared_bridge_execute = 
get_function(&zksync_contract, "executeBatches"); - let post_shared_bridge_execute = - get_optional_function(&zksync_contract, "executeBatchesSharedBridge"); + let post_shared_bridge_commit = POST_SHARED_BRIDGE_COMMIT_FUNCTION.clone(); + let post_shared_bridge_prove = POST_SHARED_BRIDGE_PROVE_FUNCTION.clone(); + let post_shared_bridge_execute = POST_SHARED_BRIDGE_EXECUTE_FUNCTION.clone(); + + let post_gateway_commit = get_function(&zksync_contract, "commitBatchesSharedBridge"); + let post_gateway_prove = get_function(&zksync_contract, "proveBatchesSharedBridge"); + let post_gateway_execute = get_function(&zksync_contract, "executeBatchesSharedBridge"); + let get_l2_bootloader_bytecode_hash = get_function(&zksync_contract, "getL2BootloaderBytecodeHash"); let get_l2_default_account_bytecode_hash = @@ -69,12 +73,12 @@ impl Default for ZkSyncFunctions { let verification_key_hash = get_function(&verifier_contract, "verificationKeyHash"); ZkSyncFunctions { - pre_shared_bridge_commit, post_shared_bridge_commit, - pre_shared_bridge_prove, post_shared_bridge_prove, - pre_shared_bridge_execute, post_shared_bridge_execute, + post_gateway_commit, + post_gateway_prove, + post_gateway_execute, get_l2_bootloader_bytecode_hash, get_l2_default_account_bytecode_hash, get_evm_emulator_bytecode_hash, diff --git a/core/node/eth_watch/Cargo.toml b/core/node/eth_watch/Cargo.toml index 985649c35daf..2a2374cef70e 100644 --- a/core/node/eth_watch/Cargo.toml +++ b/core/node/eth_watch/Cargo.toml @@ -18,6 +18,8 @@ zksync_contracts.workspace = true zksync_system_constants.workspace = true zksync_eth_client.workspace = true zksync_shared_metrics.workspace = true +zksync_mini_merkle_tree.workspace = true +zksync_web3_decl.workspace = true tokio = { workspace = true, features = ["time"] } anyhow.workspace = true @@ -25,7 +27,10 @@ thiserror.workspace = true async-trait.workspace = true tracing.workspace = true async-recursion.workspace = true +itertools.workspace = true [dev-dependencies] 
zksync_concurrency.workspace = true test-log.workspace = true +hex.workspace = true +bincode.workspace = true diff --git a/core/node/eth_watch/src/client.rs b/core/node/eth_watch/src/client.rs index ac5fc86c6e9f..18e49aad0813 100644 --- a/core/node/eth_watch/src/client.rs +++ b/core/node/eth_watch/src/client.rs @@ -1,21 +1,28 @@ -use std::fmt; +use std::{fmt, sync::Arc}; use anyhow::Context; use zksync_contracts::{ getters_facet_contract, state_transition_manager_contract, verifier_contract, + MESSAGE_ROOT_CONTRACT, }; use zksync_eth_client::{ clients::{DynClient, L1}, CallFunctionArgs, ClientError, ContractCallError, EnrichedClientError, EnrichedClientResult, EthInterface, }; +use zksync_system_constants::L2_MESSAGE_ROOT_ADDRESS; use zksync_types::{ + api::{ChainAggProof, Log}, ethabi::Contract, - web3::{BlockId, BlockNumber, FilterBuilder, Log}, - Address, SLChainId, H256, U256, + web3::{BlockId, BlockNumber, Filter, FilterBuilder}, + Address, L1BatchNumber, L2ChainId, SLChainId, H256, U256, U64, +}; +use zksync_web3_decl::{ + client::{Network, L2}, + namespaces::{EthNamespaceClient, UnstableNamespaceClient, ZksNamespaceClient}, }; -/// L1 client functionality used by [`EthWatch`](crate::EthWatch) and constituent event processors. +/// Common L1 and L2 client functionality used by [`EthWatch`](crate::EthWatch) and constituent event processors. #[async_trait::async_trait] pub trait EthClient: 'static + fmt::Debug + Send + Sync { /// Returns events in a given block range. @@ -27,6 +34,10 @@ pub trait EthClient: 'static + fmt::Debug + Send + Sync { topic2: Option, retries_left: usize, ) -> EnrichedClientResult>; + + /// Returns either finalized L1 block number or block number that satisfies `self.confirmations_for_eth_event` if it's set. + async fn confirmed_block_number(&self) -> EnrichedClientResult; + /// Returns finalized L1 block number. 
async fn finalized_block_number(&self) -> EnrichedClientResult; @@ -40,7 +51,17 @@ pub trait EthClient: 'static + fmt::Debug + Send + Sync { packed_version: H256, ) -> EnrichedClientResult>>; + /// Returns ID of the chain. async fn chain_id(&self) -> EnrichedClientResult; + + /// Returns chain root for `l2_chain_id` at the moment right after `block_number`. + /// `block_number` is block number on SL. + /// `l2_chain_id` is chain id of L2. + async fn get_chain_root( + &self, + block_number: U64, + l2_chain_id: L2ChainId, + ) -> Result; } pub const RETRY_LIMIT: usize = 5; @@ -50,10 +71,10 @@ const TOO_MANY_RESULTS_RETH: &str = "length limit exceeded"; const TOO_BIG_RANGE_RETH: &str = "query exceeds max block range"; const TOO_MANY_RESULTS_CHAINSTACK: &str = "range limit exceeded"; -/// Implementation of [`EthClient`] based on HTTP JSON-RPC (encapsulated via [`EthInterface`]). +/// Implementation of [`EthClient`] based on HTTP JSON-RPC. #[derive(Debug, Clone)] -pub struct EthHttpQueryClient { - client: Box>, +pub struct EthHttpQueryClient { + client: Box>, diamond_proxy_addr: Address, governance_address: Address, new_upgrade_cut_data_signature: H256, @@ -62,12 +83,16 @@ pub struct EthHttpQueryClient { chain_admin_address: Option
, verifier_contract_abi: Contract, getters_facet_contract_abi: Contract, + message_root_abi: Contract, confirmations_for_eth_event: Option, } -impl EthHttpQueryClient { +impl EthHttpQueryClient +where + Box>: GetLogsClient, +{ pub fn new( - client: Box>, + client: Box>, diamond_proxy_addr: Address, state_transition_manager_address: Option
, chain_admin_address: Option
, @@ -92,6 +117,7 @@ impl EthHttpQueryClient { .signature(), verifier_contract_abi: verifier_contract(), getters_facet_contract_abi: getters_facet_contract(), + message_root_abi: MESSAGE_ROOT_CONTRACT.clone(), confirmations_for_eth_event, } } @@ -102,6 +128,7 @@ impl EthHttpQueryClient { Some(self.governance_address), self.state_transition_manager_address, self.chain_admin_address, + Some(L2_MESSAGE_ROOT_ADDRESS), ] .into_iter() .flatten() @@ -126,7 +153,7 @@ impl EthHttpQueryClient { builder = builder.address(addresses); } let filter = builder.build(); - let mut result = self.client.logs(&filter).await; + let mut result = self.client.get_logs(filter).await; // This code is compatible with both Infura and Alchemy API providers. // Note: we don't handle rate-limits here - assumption is that we're never going to hit them. @@ -216,7 +243,10 @@ impl EthHttpQueryClient { } #[async_trait::async_trait] -impl EthClient for EthHttpQueryClient { +impl EthClient for EthHttpQueryClient +where + Box>: EthInterface + GetLogsClient, +{ async fn scheduler_vk_hash( &self, verifier_address: Address, @@ -274,27 +304,31 @@ impl EthClient for EthHttpQueryClient { .await } - async fn finalized_block_number(&self) -> EnrichedClientResult { + async fn confirmed_block_number(&self) -> EnrichedClientResult { if let Some(confirmations) = self.confirmations_for_eth_event { let latest_block_number = self.client.block_number().await?.as_u64(); Ok(latest_block_number.saturating_sub(confirmations)) } else { - let block = self - .client - .block(BlockId::Number(BlockNumber::Finalized)) - .await? 
- .ok_or_else(|| { - let err = ClientError::Custom("Finalized block must be present on L1".into()); - EnrichedClientError::new(err, "block") - })?; - let block_number = block.number.ok_or_else(|| { - let err = ClientError::Custom("Finalized block must contain number".into()); - EnrichedClientError::new(err, "block").with_arg("block", &block) - })?; - Ok(block_number.as_u64()) + self.finalized_block_number().await } } + async fn finalized_block_number(&self) -> EnrichedClientResult { + let block = self + .client + .block(BlockId::Number(BlockNumber::Finalized)) + .await? + .ok_or_else(|| { + let err = ClientError::Custom("Finalized block must be present on L1".into()); + EnrichedClientError::new(err, "block") + })?; + let block_number = block.number.ok_or_else(|| { + let err = ClientError::Custom("Finalized block must contain number".into()); + EnrichedClientError::new(err, "block").with_arg("block", &block) + })?; + Ok(block_number.as_u64()) + } + async fn get_total_priority_txs(&self) -> Result { CallFunctionArgs::new("getTotalPriorityTxs", ()) .for_contract(self.diamond_proxy_addr, &self.getters_facet_contract_abi) @@ -304,6 +338,157 @@ impl EthClient for EthHttpQueryClient { } async fn chain_id(&self) -> EnrichedClientResult { - Ok(self.client.fetch_chain_id().await?) + self.client.fetch_chain_id().await + } + + async fn get_chain_root( + &self, + block_number: U64, + l2_chain_id: L2ChainId, + ) -> Result { + CallFunctionArgs::new("getChainRoot", U256::from(l2_chain_id.as_u64())) + .with_block(BlockId::Number(block_number.into())) + .for_contract(L2_MESSAGE_ROOT_ADDRESS, &self.message_root_abi) + .call(&self.client) + .await + } +} + +/// Encapsulates `eth_getLogs` calls. +#[async_trait::async_trait] +pub trait GetLogsClient: 'static + fmt::Debug + Send + Sync { + /// Returns L2 version of [`Log`] with L2-specific fields, e.g. `l1_batch_number`. + /// L1 clients fill such fields with `None`. 
+ async fn get_logs(&self, filter: Filter) -> EnrichedClientResult>; +} + +#[async_trait::async_trait] +impl GetLogsClient for Box> { + async fn get_logs(&self, filter: Filter) -> EnrichedClientResult> { + Ok(self + .logs(&filter) + .await? + .into_iter() + .map(Into::into) + .collect()) + } +} + +#[async_trait::async_trait] +impl GetLogsClient for Box> { + async fn get_logs(&self, filter: Filter) -> EnrichedClientResult> { + EthNamespaceClient::get_logs(self, filter.into()) + .await + .map_err(|err| EnrichedClientError::new(err, "eth_getLogs")) + } +} + +/// L2 client functionality used by [`EthWatch`](crate::EthWatch) and constituent event processors. +/// Trait extension for [`EthClient`]. +#[async_trait::async_trait] +pub trait L2EthClient: EthClient { + async fn get_chain_log_proof( + &self, + l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, + ) -> EnrichedClientResult>; + + async fn get_chain_root_l2( + &self, + l1_batch_number: L1BatchNumber, + l2_chain_id: L2ChainId, + ) -> Result, ContractCallError>; +} + +#[async_trait::async_trait] +impl L2EthClient for EthHttpQueryClient { + async fn get_chain_log_proof( + &self, + l1_batch_number: L1BatchNumber, + chain_id: L2ChainId, + ) -> EnrichedClientResult> { + self.client + .get_chain_log_proof(l1_batch_number, chain_id) + .await + .map_err(|err| EnrichedClientError::new(err, "unstable_getChainLogProof")) + } + + async fn get_chain_root_l2( + &self, + l1_batch_number: L1BatchNumber, + l2_chain_id: L2ChainId, + ) -> Result, ContractCallError> { + let l2_block_range = self + .client + .get_l2_block_range(l1_batch_number) + .await + .map_err(|err| EnrichedClientError::new(err, "zks_getL1BatchBlockRange"))?; + if let Some((_, l2_block_number)) = l2_block_range { + self.get_chain_root(l2_block_number, l2_chain_id) + .await + .map(Some) + } else { + Ok(None) + } + } +} + +/// Wrapper for L2 client object. 
+/// It is used for L2EthClient -> EthClient dyn upcasting coercion: +/// Arc -> L2EthClientW -> Arc +#[derive(Debug, Clone)] +pub struct L2EthClientW(pub Arc); + +#[async_trait::async_trait] +impl EthClient for L2EthClientW { + async fn get_events( + &self, + from: BlockNumber, + to: BlockNumber, + topic1: H256, + topic2: Option, + retries_left: usize, + ) -> EnrichedClientResult> { + self.0 + .get_events(from, to, topic1, topic2, retries_left) + .await + } + + async fn confirmed_block_number(&self) -> EnrichedClientResult { + self.0.confirmed_block_number().await + } + + async fn finalized_block_number(&self) -> EnrichedClientResult { + self.0.finalized_block_number().await + } + + async fn get_total_priority_txs(&self) -> Result { + self.0.get_total_priority_txs().await + } + + async fn scheduler_vk_hash( + &self, + verifier_address: Address, + ) -> Result { + self.0.scheduler_vk_hash(verifier_address).await + } + + async fn diamond_cut_by_version( + &self, + packed_version: H256, + ) -> EnrichedClientResult>> { + self.0.diamond_cut_by_version(packed_version).await + } + + async fn chain_id(&self) -> EnrichedClientResult { + self.0.chain_id().await + } + + async fn get_chain_root( + &self, + block_number: U64, + l2_chain_id: L2ChainId, + ) -> Result { + self.0.get_chain_root(block_number, l2_chain_id).await } } diff --git a/core/node/eth_watch/src/event_processors/appended_chain_batch_root.rs b/core/node/eth_watch/src/event_processors/appended_chain_batch_root.rs new file mode 100644 index 000000000000..68f731120c65 --- /dev/null +++ b/core/node/eth_watch/src/event_processors/appended_chain_batch_root.rs @@ -0,0 +1,238 @@ +use std::sync::Arc; + +use anyhow::Context; +use itertools::Itertools; +use zksync_dal::{eth_watcher_dal::EventType, Connection, Core, CoreDal, DalError}; +use zksync_mini_merkle_tree::MiniMerkleTree; +use zksync_types::{ + api::{ChainAggProof, Log}, + ethabi, h256_to_u256, + l2_to_l1_log::{ + BatchAndChainMerklePath, BATCH_LEAF_PADDING, 
LOG_PROOF_SUPPORTED_METADATA_VERSION, + }, + u256_to_h256, L1BatchNumber, L2ChainId, SLChainId, H256, U256, +}; + +use crate::{ + client::L2EthClient, + event_processors::{EventProcessor, EventProcessorError, EventsSource}, +}; + +/// Listens to `AppendedChainBatchRoot` events and saves `BatchAndChainMerklePath` for batches. +/// These events are emitted on SL each time L1 batch is executed. Processor uses them to track which batches are already executed +/// and group them by SL's batch number they are executed in as this data is required to build `BatchAndChainMerklePath`. +#[derive(Debug)] +pub struct BatchRootProcessor { + next_batch_number_lower_bound: L1BatchNumber, + appended_chain_batch_root_signature: H256, + merkle_tree: MiniMerkleTree<[u8; 96]>, + l2_chain_id: L2ChainId, + sl_l2_client: Arc, +} + +impl BatchRootProcessor { + pub fn new( + next_batch_number_lower_bound: L1BatchNumber, + merkle_tree: MiniMerkleTree<[u8; 96]>, + l2_chain_id: L2ChainId, + sl_l2_client: Arc, + ) -> Self { + Self { + next_batch_number_lower_bound, + appended_chain_batch_root_signature: ethabi::long_signature( + "AppendedChainBatchRoot", + &[ + ethabi::ParamType::Uint(256), + ethabi::ParamType::Uint(256), + ethabi::ParamType::FixedBytes(32), + ], + ), + merkle_tree, + l2_chain_id, + sl_l2_client, + } + } +} + +#[async_trait::async_trait] +impl EventProcessor for BatchRootProcessor { + async fn process_events( + &mut self, + storage: &mut Connection<'_, Core>, + events: Vec, + ) -> Result { + let events_count = events.len(); + let mut transaction = storage + .start_transaction() + .await + .map_err(DalError::generalize)?; + + let grouped_events: Vec<_> = events + .into_iter() + .map(|log| { + let sl_l1_batch_number = L1BatchNumber( + log.l1_batch_number + .expect("Missing L1 batch number for finalized event") + .as_u32(), + ); + let chain_l1_batch_number = L1BatchNumber(h256_to_u256(log.topics[2]).as_u32()); + let logs_root_hash = H256::from_slice(&log.data.0); + + 
(sl_l1_batch_number, chain_l1_batch_number, logs_root_hash) + }) + .group_by(|(sl_l1_batch_number, _, _)| *sl_l1_batch_number) + .into_iter() + .map(|(sl_l1_batch_number, group)| { + let group: Vec<_> = group + .into_iter() + .map(|(_, chain_l1_batch_number, logs_root_hash)| { + (chain_l1_batch_number, logs_root_hash) + }) + .collect(); + + (sl_l1_batch_number, group) + }) + .collect(); + + let next_batch_number_lower_bound = self.next_batch_number_lower_bound; + let new_events = grouped_events + .into_iter() + .skip_while(|(_sl_l1_batch_number, events)| { + let first_event = events.first().unwrap(); + let last_event = events.last().unwrap(); + + match ( + first_event.0 < next_batch_number_lower_bound, + last_event.0 < next_batch_number_lower_bound, + ) { + (true, true) => true, // skip + (false, false) => false, // do not skip + _ => { + panic!("batch range was partially processed"); + } + } + }); + + let sl_chain_id = self.sl_l2_client.chain_id().await?; + for (sl_l1_batch_number, chain_batches) in new_events { + let chain_agg_proof = self + .sl_l2_client + .get_chain_log_proof(sl_l1_batch_number, self.l2_chain_id) + .await? + .context("Missing chain log proof for finalized batch")?; + let chain_proof_vector = + Self::chain_proof_vector(sl_l1_batch_number, chain_agg_proof, sl_chain_id); + + for (batch_number, batch_root) in &chain_batches { + let root_from_db = transaction + .blocks_dal() + .get_l1_batch_l2_l1_merkle_root(*batch_number) + .await + .map_err(DalError::generalize)? 
+ .context("Missing l2_l1_merkle_root for finalized batch")?; + assert_eq!(root_from_db, *batch_root); + + self.merkle_tree + .push(Self::batch_leaf_preimage(*batch_root, *batch_number)); + self.next_batch_number_lower_bound = *batch_number + 1; + } + + let chain_root_local = self.merkle_tree.merkle_root(); + let chain_root_remote = self + .sl_l2_client + .get_chain_root_l2(sl_l1_batch_number, self.l2_chain_id) + .await?; + assert_eq!( + chain_root_local, + chain_root_remote.unwrap(), + "Chain root mismatch, l1 batch number #{sl_l1_batch_number}" + ); + + let number_of_leaves = self.merkle_tree.length(); + let batch_proofs = (0..chain_batches.len()).map(|i| { + let leaf_position = number_of_leaves - chain_batches.len() + i; + let batch_proof = self + .merkle_tree + .merkle_root_and_path_by_absolute_index(leaf_position) + .1; + let batch_proof_len = batch_proof.len() as u32; + let mut proof = vec![H256::from_low_u64_be(leaf_position as u64)]; + proof.extend(batch_proof); + proof.extend(chain_proof_vector.clone()); + + BatchAndChainMerklePath { + batch_proof_len, + proof, + } + }); + + for ((batch_number, _), proof) in chain_batches.iter().zip(batch_proofs) { + tracing::info!(%batch_number, "Saving batch-chain merkle path"); + transaction + .blocks_dal() + .set_batch_chain_merkle_path(*batch_number, proof) + .await + .map_err(DalError::generalize)?; + } + } + + transaction.commit().await.map_err(DalError::generalize)?; + + Ok(events_count) + } + + fn topic1(&self) -> H256 { + self.appended_chain_batch_root_signature + } + + fn topic2(&self) -> Option { + Some(H256::from_low_u64_be(self.l2_chain_id.as_u64())) + } + + fn event_source(&self) -> EventsSource { + EventsSource::SL + } + + fn event_type(&self) -> EventType { + EventType::ChainBatchRoot + } + + fn only_finalized_block(&self) -> bool { + true + } +} + +impl BatchRootProcessor { + pub(crate) fn batch_leaf_preimage(batch_root: H256, batch_number: L1BatchNumber) -> [u8; 96] { + let mut full_preimage = [0u8; 96]; 
+ + full_preimage[0..32].copy_from_slice(BATCH_LEAF_PADDING.as_bytes()); + full_preimage[32..64].copy_from_slice(batch_root.as_bytes()); + full_preimage[64..96] + .copy_from_slice(H256::from_low_u64_be(batch_number.0 as u64).as_bytes()); + + full_preimage + } + + fn chain_proof_vector( + sl_l1_batch_number: L1BatchNumber, + chain_agg_proof: ChainAggProof, + sl_chain_id: SLChainId, + ) -> Vec { + let sl_encoded_data = + (U256::from(sl_l1_batch_number.0) << 128u32) + chain_agg_proof.chain_id_leaf_proof_mask; + + let mut metadata = [0u8; 32]; + metadata[0] = LOG_PROOF_SUPPORTED_METADATA_VERSION; + metadata[1] = chain_agg_proof.chain_id_leaf_proof.len() as u8; + + let mut chain_proof_vector = vec![ + u256_to_h256(sl_encoded_data), + H256::from_low_u64_be(sl_chain_id.0), + H256(metadata), + ]; + chain_proof_vector.extend(chain_agg_proof.chain_id_leaf_proof); + + chain_proof_vector + } +} diff --git a/core/node/eth_watch/src/event_processors/decentralized_upgrades.rs b/core/node/eth_watch/src/event_processors/decentralized_upgrades.rs index aa43e7239f88..3f4b0f3cf5ab 100644 --- a/core/node/eth_watch/src/event_processors/decentralized_upgrades.rs +++ b/core/node/eth_watch/src/event_processors/decentralized_upgrades.rs @@ -1,7 +1,9 @@ +use std::sync::Arc; + use anyhow::Context as _; use zksync_dal::{eth_watcher_dal::EventType, Connection, Core, CoreDal, DalError}; use zksync_types::{ - ethabi::Contract, protocol_version::ProtocolSemanticVersion, web3::Log, ProtocolUpgrade, H256, + api::Log, ethabi::Contract, protocol_version::ProtocolSemanticVersion, ProtocolUpgrade, H256, U256, }; @@ -17,12 +19,14 @@ pub struct DecentralizedUpgradesEventProcessor { /// Last protocol version seen. Used to skip events for already known upgrade proposals. 
last_seen_protocol_version: ProtocolSemanticVersion, update_upgrade_timestamp_signature: H256, + sl_client: Arc, } impl DecentralizedUpgradesEventProcessor { pub fn new( last_seen_protocol_version: ProtocolSemanticVersion, chain_admin_contract: &Contract, + sl_client: Arc, ) -> Self { Self { last_seen_protocol_version, @@ -31,6 +35,7 @@ impl DecentralizedUpgradesEventProcessor { .context("UpdateUpgradeTimestamp event is missing in ABI") .unwrap() .signature(), + sl_client, } } } @@ -40,7 +45,6 @@ impl EventProcessor for DecentralizedUpgradesEventProcessor { async fn process_events( &mut self, storage: &mut Connection<'_, Core>, - sl_client: &dyn EthClient, events: Vec, ) -> Result { let mut upgrades = Vec::new(); @@ -51,7 +55,8 @@ impl EventProcessor for DecentralizedUpgradesEventProcessor { .ok() .context("upgrade timestamp is too big")?; - let diamond_cut = sl_client + let diamond_cut = self + .sl_client .diamond_cut_by_version(version) .await? .context("missing upgrade data on STM")?; @@ -62,7 +67,7 @@ impl EventProcessor for DecentralizedUpgradesEventProcessor { }; // Scheduler VK is not present in proposal event. It is hard coded in verifier contract. let scheduler_vk_hash = if let Some(address) = upgrade.verifier_address { - Some(sl_client.scheduler_vk_hash(address).await?) + Some(self.sl_client.scheduler_vk_hash(address).await?) 
} else { None }; @@ -128,7 +133,7 @@ impl EventProcessor for DecentralizedUpgradesEventProcessor { Ok(events.len()) } - fn relevant_topic(&self) -> H256 { + fn topic1(&self) -> H256 { self.update_upgrade_timestamp_signature } diff --git a/core/node/eth_watch/src/event_processors/mod.rs b/core/node/eth_watch/src/event_processors/mod.rs index f145181b0cf9..ddbf84e65937 100644 --- a/core/node/eth_watch/src/event_processors/mod.rs +++ b/core/node/eth_watch/src/event_processors/mod.rs @@ -2,16 +2,17 @@ use std::fmt; use zksync_dal::{eth_watcher_dal::EventType, Connection, Core}; use zksync_eth_client::{ContractCallError, EnrichedClientError}; -use zksync_types::{web3::Log, H256}; +use zksync_types::{api::Log, H256}; pub(crate) use self::{ + appended_chain_batch_root::BatchRootProcessor, decentralized_upgrades::DecentralizedUpgradesEventProcessor, priority_ops::PriorityOpsEventProcessor, }; -use crate::client::EthClient; +mod appended_chain_batch_root; mod decentralized_upgrades; -pub mod priority_ops; +mod priority_ops; /// Errors issued by an [`EventProcessor`]. #[derive(Debug, thiserror::Error)] @@ -50,19 +51,28 @@ impl EventProcessorError { /// feeds events to all processors one-by-one. #[async_trait::async_trait] pub(super) trait EventProcessor: 'static + fmt::Debug + Send + Sync { - /// Processes given events. All events are guaranteed to match [`Self::relevant_topic()`]. + /// Processes given events. All events are guaranteed to match [`Self::topic1()`] and [`Self::topic2()`]. /// Returns number of processed events, this result is used to update last processed block. 
async fn process_events( &mut self, storage: &mut Connection<'_, Core>, - sl_client: &dyn EthClient, events: Vec, ) -> Result; - /// Relevant topic which defines what events to be processed - fn relevant_topic(&self) -> H256; + /// Relevant topic1 which defines what events to be processed + fn topic1(&self) -> H256; + + /// Relevant topic2 which defines what events to be processed + fn topic2(&self) -> Option { + None + } fn event_source(&self) -> EventsSource; fn event_type(&self) -> EventType; + + /// Whether processor expect events only from finalized blocks. + fn only_finalized_block(&self) -> bool { + false + } } diff --git a/core/node/eth_watch/src/event_processors/priority_ops.rs b/core/node/eth_watch/src/event_processors/priority_ops.rs index 051c076850e9..cbb224da6396 100644 --- a/core/node/eth_watch/src/event_processors/priority_ops.rs +++ b/core/node/eth_watch/src/event_processors/priority_ops.rs @@ -1,10 +1,10 @@ -use std::convert::TryFrom; +use std::{convert::TryFrom, sync::Arc}; use anyhow::Context; use zksync_contracts::hyperchain_contract; use zksync_dal::{eth_watcher_dal::EventType, Connection, Core, CoreDal, DalError}; use zksync_shared_metrics::{TxStage, APP_METRICS}; -use zksync_types::{l1::L1Tx, web3::Log, PriorityOpId, H256}; +use zksync_types::{api::Log, l1::L1Tx, PriorityOpId, H256}; use crate::{ client::EthClient, @@ -17,16 +17,21 @@ use crate::{ pub struct PriorityOpsEventProcessor { next_expected_priority_id: PriorityOpId, new_priority_request_signature: H256, + sl_client: Arc, } impl PriorityOpsEventProcessor { - pub fn new(next_expected_priority_id: PriorityOpId) -> anyhow::Result { + pub fn new( + next_expected_priority_id: PriorityOpId, + sl_client: Arc, + ) -> anyhow::Result { Ok(Self { next_expected_priority_id, new_priority_request_signature: hyperchain_contract() .event("NewPriorityRequest") .context("NewPriorityRequest event is missing in ABI")? 
.signature(), + sl_client, }) } } @@ -36,14 +41,13 @@ impl EventProcessor for PriorityOpsEventProcessor { async fn process_events( &mut self, storage: &mut Connection<'_, Core>, - sl_client: &dyn EthClient, events: Vec, ) -> Result { let mut priority_ops = Vec::new(); let events_count = events.len(); for event in events { assert_eq!(event.topics[0], self.new_priority_request_signature); // guaranteed by the watcher - let tx = L1Tx::try_from(event) + let tx = L1Tx::try_from(Into::::into(event)) .map_err(|err| EventProcessorError::log_parse(err, "priority op"))?; priority_ops.push(tx); } @@ -84,7 +88,7 @@ impl EventProcessor for PriorityOpsEventProcessor { let stage_latency = METRICS.poll_eth_node[&PollStage::PersistL1Txs].start(); APP_METRICS.processed_txs[&TxStage::added_to_mempool()].inc(); APP_METRICS.processed_l1_txs[&TxStage::added_to_mempool()].inc(); - let processed_priority_transactions = sl_client.get_total_priority_txs().await?; + let processed_priority_transactions = self.sl_client.get_total_priority_txs().await?; let ops_to_insert: Vec<&L1Tx> = new_ops .iter() .take_while(|op| processed_priority_transactions > op.serial_id().0) @@ -105,7 +109,7 @@ impl EventProcessor for PriorityOpsEventProcessor { Ok(skipped_ops + ops_to_insert.len()) } - fn relevant_topic(&self) -> H256 { + fn topic1(&self) -> H256 { self.new_priority_request_signature } diff --git a/core/node/eth_watch/src/lib.rs b/core/node/eth_watch/src/lib.rs index 4185878d2ac4..908ff4da37f1 100644 --- a/core/node/eth_watch/src/lib.rs +++ b/core/node/eth_watch/src/lib.rs @@ -2,24 +2,27 @@ //! protocol upgrades etc. //! New events are accepted to the ZKsync network once they have the sufficient amount of L1 confirmations. 
-use std::time::Duration; +use std::{sync::Arc, time::Duration}; use anyhow::Context as _; use tokio::sync::watch; use zksync_dal::{Connection, ConnectionPool, Core, CoreDal, DalError}; +use zksync_mini_merkle_tree::MiniMerkleTree; use zksync_system_constants::PRIORITY_EXPIRATION; use zksync_types::{ ethabi::Contract, protocol_version::ProtocolSemanticVersion, - web3::BlockNumber as Web3BlockNumber, PriorityOpId, + web3::BlockNumber as Web3BlockNumber, L1BatchNumber, L2ChainId, PriorityOpId, }; -pub use self::client::EthHttpQueryClient; +pub use self::client::{EthClient, EthHttpQueryClient, L2EthClient}; use self::{ - client::{EthClient, RETRY_LIMIT}, + client::{L2EthClientW, RETRY_LIMIT}, event_processors::{EventProcessor, EventProcessorError, PriorityOpsEventProcessor}, metrics::METRICS, }; -use crate::event_processors::{DecentralizedUpgradesEventProcessor, EventsSource}; +use crate::event_processors::{ + BatchRootProcessor, DecentralizedUpgradesEventProcessor, EventsSource, +}; mod client; mod event_processors; @@ -31,42 +34,63 @@ mod tests; struct EthWatchState { last_seen_protocol_version: ProtocolSemanticVersion, next_expected_priority_id: PriorityOpId, + chain_batch_root_number_lower_bound: L1BatchNumber, + batch_merkle_tree: MiniMerkleTree<[u8; 96]>, } /// Ethereum watcher component. 
#[derive(Debug)] pub struct EthWatch { - l1_client: Box, - sl_client: Box, + l1_client: Arc, + sl_client: Arc, poll_interval: Duration, event_processors: Vec>, pool: ConnectionPool, } impl EthWatch { + #[allow(clippy::too_many_arguments)] pub async fn new( chain_admin_contract: &Contract, l1_client: Box, - sl_client: Box, + sl_l2_client: Option>, pool: ConnectionPool, poll_interval: Duration, + chain_id: L2ChainId, ) -> anyhow::Result { let mut storage = pool.connection_tagged("eth_watch").await?; - let state = Self::initialize_state(&mut storage).await?; + let l1_client: Arc = l1_client.into(); + let sl_l2_client: Option> = sl_l2_client.map(Into::into); + let sl_client: Arc = if let Some(sl_l2_client) = sl_l2_client.clone() { + Arc::new(L2EthClientW(sl_l2_client)) + } else { + l1_client.clone() + }; + + let state = Self::initialize_state(&mut storage, sl_client.as_ref()).await?; tracing::info!("initialized state: {state:?}"); drop(storage); let priority_ops_processor = - PriorityOpsEventProcessor::new(state.next_expected_priority_id)?; + PriorityOpsEventProcessor::new(state.next_expected_priority_id, sl_client.clone())?; let decentralized_upgrades_processor = DecentralizedUpgradesEventProcessor::new( state.last_seen_protocol_version, chain_admin_contract, + sl_client.clone(), ); - let event_processors: Vec> = vec![ + let mut event_processors: Vec> = vec![ Box::new(priority_ops_processor), Box::new(decentralized_upgrades_processor), ]; - + if let Some(sl_l2_client) = sl_l2_client { + let batch_root_processor = BatchRootProcessor::new( + state.chain_batch_root_number_lower_bound, + state.batch_merkle_tree, + chain_id, + sl_l2_client, + ); + event_processors.push(Box::new(batch_root_processor)); + } Ok(Self { l1_client, sl_client, @@ -77,7 +101,10 @@ impl EthWatch { } #[tracing::instrument(name = "EthWatch::initialize_state", skip_all)] - async fn initialize_state(storage: &mut Connection<'_, Core>) -> anyhow::Result { + async fn initialize_state( + storage: &mut 
Connection<'_, Core>, + sl_client: &dyn EthClient, + ) -> anyhow::Result { let next_expected_priority_id: PriorityOpId = storage .transactions_dal() .last_priority_id() @@ -90,9 +117,26 @@ impl EthWatch { .await? .context("expected at least one (genesis) version to be present in DB")?; + let sl_chain_id = sl_client.chain_id().await?; + let batch_hashes = storage + .blocks_dal() + .get_executed_batch_roots_on_sl(sl_chain_id) + .await?; + + let chain_batch_root_number_lower_bound = batch_hashes + .last() + .map(|(n, _)| *n + 1) + .unwrap_or(L1BatchNumber(0)); + let tree_leaves = batch_hashes.into_iter().map(|(batch_number, batch_root)| { + BatchRootProcessor::batch_leaf_preimage(batch_root, batch_number) + }); + let batch_merkle_tree = MiniMerkleTree::new(tree_leaves, None); + Ok(EthWatchState { next_expected_priority_id, last_seen_protocol_version, + chain_batch_root_number_lower_bound, + batch_merkle_tree, }) } @@ -137,37 +181,42 @@ impl EthWatch { EventsSource::SL => self.sl_client.as_ref(), }; let chain_id = client.chain_id().await?; - let finalized_block = client.finalized_block_number().await?; + let to_block = if processor.only_finalized_block() { + client.finalized_block_number().await? + } else { + client.confirmed_block_number().await? 
+ }; let from_block = storage .eth_watcher_dal() .get_or_set_next_block_to_process( processor.event_type(), chain_id, - finalized_block.saturating_sub(PRIORITY_EXPIRATION), + to_block.saturating_sub(PRIORITY_EXPIRATION), ) .await .map_err(DalError::generalize)?; // There are no new blocks so there is nothing to be done - if from_block > finalized_block { + if from_block > to_block { continue; } + let processor_events = client .get_events( Web3BlockNumber::Number(from_block.into()), - Web3BlockNumber::Number(finalized_block.into()), - processor.relevant_topic(), - None, + Web3BlockNumber::Number(to_block.into()), + processor.topic1(), + processor.topic2(), RETRY_LIMIT, ) .await?; let processed_events_count = processor - .process_events(storage, &*self.sl_client, processor_events.clone()) + .process_events(storage, processor_events.clone()) .await?; let next_block_to_process = if processed_events_count == processor_events.len() { - finalized_block + 1 + to_block + 1 } else if processed_events_count == 0 { //nothing was processed from_block diff --git a/core/node/eth_watch/src/tests.rs b/core/node/eth_watch/src/tests.rs deleted file mode 100644 index 12ac8bdbf3f7..000000000000 --- a/core/node/eth_watch/src/tests.rs +++ /dev/null @@ -1,791 +0,0 @@ -use std::{collections::HashMap, convert::TryInto, sync::Arc}; - -use tokio::sync::RwLock; -use zksync_contracts::{ - chain_admin_contract, hyperchain_contract, state_transition_manager_contract, -}; -use zksync_dal::{Connection, ConnectionPool, Core, CoreDal}; -use zksync_eth_client::{ContractCallError, EnrichedClientResult}; -use zksync_types::{ - abi, - abi::ProposedUpgrade, - ethabi, - ethabi::Token, - l1::{L1Tx, OpProcessingType, PriorityQueueType}, - protocol_upgrade::{ProtocolUpgradeTx, ProtocolUpgradeTxCommonData}, - protocol_version::ProtocolSemanticVersion, - web3::{contract::Tokenizable, BlockNumber, Log}, - Address, Execute, L1TxCommonData, PriorityOpId, ProtocolUpgrade, ProtocolVersion, - ProtocolVersionId, 
SLChainId, Transaction, H160, H256, U256, U64, -}; - -use crate::{ - client::{EthClient, RETRY_LIMIT}, - EthWatch, -}; - -#[derive(Debug)] -struct FakeEthClientData { - transactions: HashMap>, - diamond_upgrades: HashMap>, - upgrade_timestamp: HashMap>, - last_finalized_block_number: u64, - chain_id: SLChainId, - processed_priority_transactions_count: u64, -} - -impl FakeEthClientData { - fn new(chain_id: SLChainId) -> Self { - Self { - transactions: Default::default(), - diamond_upgrades: Default::default(), - upgrade_timestamp: Default::default(), - last_finalized_block_number: 0, - chain_id, - processed_priority_transactions_count: 0, - } - } - - fn add_transactions(&mut self, transactions: &[L1Tx]) { - for transaction in transactions { - let eth_block = transaction.eth_block(); - self.transactions - .entry(eth_block.0 as u64) - .or_default() - .push(tx_into_log(transaction.clone())); - self.processed_priority_transactions_count += 1; - } - } - - fn add_upgrade_timestamp(&mut self, upgrades: &[(ProtocolUpgrade, u64)]) { - for (upgrade, eth_block) in upgrades { - self.upgrade_timestamp - .entry(*eth_block) - .or_default() - .push(upgrade_timestamp_log(*eth_block)); - self.diamond_upgrades - .entry(*eth_block) - .or_default() - .push(diamond_upgrade_log(upgrade.clone(), *eth_block)); - } - } - - fn set_last_finalized_block_number(&mut self, number: u64) { - self.last_finalized_block_number = number; - } - - fn set_processed_priority_transactions_count(&mut self, number: u64) { - self.processed_priority_transactions_count = number; - } -} - -#[derive(Debug, Clone)] -struct MockEthClient { - inner: Arc>, -} - -impl MockEthClient { - fn new(chain_id: SLChainId) -> Self { - Self { - inner: Arc::new(RwLock::new(FakeEthClientData::new(chain_id))), - } - } - - async fn add_transactions(&mut self, transactions: &[L1Tx]) { - self.inner.write().await.add_transactions(transactions); - } - - async fn add_upgrade_timestamp(&mut self, upgrades: &[(ProtocolUpgrade, u64)]) { - 
self.inner.write().await.add_upgrade_timestamp(upgrades); - } - - async fn set_last_finalized_block_number(&mut self, number: u64) { - self.inner - .write() - .await - .set_last_finalized_block_number(number); - } - - async fn set_processed_priority_transactions_count(&mut self, number: u64) { - self.inner - .write() - .await - .set_processed_priority_transactions_count(number) - } - - async fn block_to_number(&self, block: BlockNumber) -> u64 { - match block { - BlockNumber::Earliest => 0, - BlockNumber::Number(number) => number.as_u64(), - BlockNumber::Pending - | BlockNumber::Latest - | BlockNumber::Finalized - | BlockNumber::Safe => unreachable!(), - } - } -} - -#[async_trait::async_trait] -impl EthClient for MockEthClient { - async fn get_events( - &self, - from: BlockNumber, - to: BlockNumber, - topic1: H256, - topic2: Option, - _retries_left: usize, - ) -> EnrichedClientResult> { - let from = self.block_to_number(from).await; - let to = self.block_to_number(to).await; - let mut logs = vec![]; - for number in from..=to { - if let Some(ops) = self.inner.read().await.transactions.get(&number) { - logs.extend_from_slice(ops); - } - if let Some(ops) = self.inner.read().await.diamond_upgrades.get(&number) { - logs.extend_from_slice(ops); - } - if let Some(ops) = self.inner.read().await.upgrade_timestamp.get(&number) { - logs.extend_from_slice(ops); - } - } - Ok(logs - .into_iter() - .filter(|log| { - log.topics.first() == Some(&topic1) - && (topic2.is_none() || log.topics.get(1) == topic2.as_ref()) - }) - .collect()) - } - - async fn scheduler_vk_hash( - &self, - _verifier_address: Address, - ) -> Result { - Ok(H256::zero()) - } - - async fn finalized_block_number(&self) -> EnrichedClientResult { - Ok(self.inner.read().await.last_finalized_block_number) - } - - async fn diamond_cut_by_version( - &self, - packed_version: H256, - ) -> EnrichedClientResult>> { - let from_block = *self - .inner - .read() - .await - .diamond_upgrades - .keys() - .min() - 
.unwrap_or(&0); - let to_block = *self - .inner - .read() - .await - .diamond_upgrades - .keys() - .max() - .unwrap_or(&0); - - let logs = self - .get_events( - U64::from(from_block).into(), - U64::from(to_block).into(), - state_transition_manager_contract() - .event("NewUpgradeCutData") - .unwrap() - .signature(), - Some(packed_version), - RETRY_LIMIT, - ) - .await?; - - Ok(logs.into_iter().next().map(|log| log.data.0)) - } - - async fn get_total_priority_txs(&self) -> Result { - Ok(self - .inner - .read() - .await - .processed_priority_transactions_count) - } - - async fn chain_id(&self) -> EnrichedClientResult { - Ok(self.inner.read().await.chain_id) - } -} - -fn build_l1_tx(serial_id: u64, eth_block: u64) -> L1Tx { - let tx = L1Tx { - execute: Execute { - contract_address: Some(Address::repeat_byte(0x11)), - calldata: vec![1, 2, 3], - factory_deps: vec![], - value: U256::zero(), - }, - common_data: L1TxCommonData { - serial_id: PriorityOpId(serial_id), - sender: [1u8; 20].into(), - eth_block, - gas_limit: Default::default(), - max_fee_per_gas: Default::default(), - gas_per_pubdata_limit: 1u32.into(), - full_fee: Default::default(), - layer_2_tip_fee: U256::from(10u8), - refund_recipient: Address::zero(), - to_mint: Default::default(), - priority_queue_type: PriorityQueueType::Deque, - op_processing_type: OpProcessingType::Common, - canonical_tx_hash: H256::default(), - }, - received_timestamp_ms: 0, - }; - // Convert to abi::Transaction and back, so that canonical_tx_hash is computed. 
- let tx = Transaction::from_abi( - abi::Transaction::try_from(Transaction::from(tx)).unwrap(), - false, - ) - .unwrap(); - tx.try_into().unwrap() -} - -fn build_upgrade_tx(id: ProtocolVersionId, eth_block: u64) -> ProtocolUpgradeTx { - let tx = ProtocolUpgradeTx { - execute: Execute { - contract_address: Some(Address::repeat_byte(0x11)), - calldata: vec![1, 2, 3], - factory_deps: vec![], - value: U256::zero(), - }, - common_data: ProtocolUpgradeTxCommonData { - upgrade_id: id, - sender: [1u8; 20].into(), - eth_block, - gas_limit: Default::default(), - max_fee_per_gas: Default::default(), - gas_per_pubdata_limit: 1u32.into(), - refund_recipient: Address::zero(), - to_mint: Default::default(), - canonical_tx_hash: H256::zero(), - }, - received_timestamp_ms: 0, - }; - // Convert to abi::Transaction and back, so that canonical_tx_hash is computed. - Transaction::from_abi( - abi::Transaction::try_from(Transaction::from(tx)).unwrap(), - false, - ) - .unwrap() - .try_into() - .unwrap() -} - -async fn create_test_watcher( - connection_pool: ConnectionPool, - is_gateway: bool, -) -> (EthWatch, MockEthClient, MockEthClient) { - let l1_client = MockEthClient::new(SLChainId(42)); - let sl_client = if is_gateway { - MockEthClient::new(SLChainId(123)) - } else { - l1_client.clone() - }; - let watcher = EthWatch::new( - &chain_admin_contract(), - Box::new(l1_client.clone()), - Box::new(sl_client.clone()), - connection_pool, - std::time::Duration::from_nanos(1), - ) - .await - .unwrap(); - - (watcher, l1_client, sl_client) -} - -async fn create_l1_test_watcher( - connection_pool: ConnectionPool, -) -> (EthWatch, MockEthClient) { - let (watcher, l1_client, _) = create_test_watcher(connection_pool, false).await; - (watcher, l1_client) -} - -async fn create_gateway_test_watcher( - connection_pool: ConnectionPool, -) -> (EthWatch, MockEthClient, MockEthClient) { - create_test_watcher(connection_pool, true).await -} - -#[test_log::test(tokio::test)] -async fn 
test_normal_operation_l1_txs() { - let connection_pool = ConnectionPool::::test_pool().await; - setup_db(&connection_pool).await; - let (mut watcher, mut client) = create_l1_test_watcher(connection_pool.clone()).await; - - let mut storage = connection_pool.connection().await.unwrap(); - client - .add_transactions(&[build_l1_tx(0, 10), build_l1_tx(1, 14), build_l1_tx(2, 18)]) - .await; - client.set_last_finalized_block_number(15).await; - // second tx will not be processed, as it's block is not finalized yet. - watcher.loop_iteration(&mut storage).await.unwrap(); - let db_txs = get_all_db_txs(&mut storage).await; - let mut db_txs: Vec = db_txs - .into_iter() - .map(|tx| tx.try_into().unwrap()) - .collect(); - db_txs.sort_by_key(|tx| tx.common_data.serial_id); - assert_eq!(db_txs.len(), 2); - let db_tx = db_txs[0].clone(); - assert_eq!(db_tx.common_data.serial_id.0, 0); - let db_tx = db_txs[1].clone(); - assert_eq!(db_tx.common_data.serial_id.0, 1); - - client.set_last_finalized_block_number(20).await; - // now the second tx will be processed - watcher.loop_iteration(&mut storage).await.unwrap(); - let db_txs = get_all_db_txs(&mut storage).await; - let mut db_txs: Vec = db_txs - .into_iter() - .map(|tx| tx.try_into().unwrap()) - .collect(); - db_txs.sort_by_key(|tx| tx.common_data.serial_id); - assert_eq!(db_txs.len(), 3); - let db_tx = db_txs[2].clone(); - assert_eq!(db_tx.common_data.serial_id.0, 2); -} - -#[test_log::test(tokio::test)] -async fn test_gap_in_upgrade_timestamp() { - let connection_pool = ConnectionPool::::test_pool().await; - setup_db(&connection_pool).await; - let (mut watcher, mut client) = create_l1_test_watcher(connection_pool.clone()).await; - - let mut storage = connection_pool.connection().await.unwrap(); - client - .add_upgrade_timestamp(&[( - ProtocolUpgrade { - version: ProtocolSemanticVersion { - minor: ProtocolVersionId::next(), - patch: 0.into(), - }, - tx: None, - ..Default::default() - }, - 10, - )]) - .await; - 
client.set_last_finalized_block_number(15).await; - watcher.loop_iteration(&mut storage).await.unwrap(); - - let db_versions = storage.protocol_versions_dal().all_versions().await; - // there should be genesis version and just added version - assert_eq!(db_versions.len(), 2); - - let previous_version = (ProtocolVersionId::latest() as u16 - 1).try_into().unwrap(); - let next_version = ProtocolVersionId::next(); - assert_eq!(db_versions[0].minor, previous_version); - assert_eq!(db_versions[1].minor, next_version); -} - -#[test_log::test(tokio::test)] -async fn test_normal_operation_upgrade_timestamp() { - zksync_concurrency::testonly::abort_on_panic(); - let connection_pool = ConnectionPool::::test_pool().await; - setup_db(&connection_pool).await; - - let mut client = MockEthClient::new(SLChainId(42)); - let mut watcher = EthWatch::new( - &chain_admin_contract(), - Box::new(client.clone()), - Box::new(client.clone()), - connection_pool.clone(), - std::time::Duration::from_nanos(1), - ) - .await - .unwrap(); - - let mut storage = connection_pool.connection().await.unwrap(); - client - .add_upgrade_timestamp(&[ - ( - ProtocolUpgrade { - tx: None, - ..Default::default() - }, - 10, - ), - ( - ProtocolUpgrade { - version: ProtocolSemanticVersion { - minor: ProtocolVersionId::next(), - patch: 0.into(), - }, - tx: Some(build_upgrade_tx(ProtocolVersionId::next(), 18)), - ..Default::default() - }, - 18, - ), - ( - ProtocolUpgrade { - version: ProtocolSemanticVersion { - minor: ProtocolVersionId::next(), - patch: 1.into(), - }, - tx: None, - ..Default::default() - }, - 19, - ), - ]) - .await; - client.set_last_finalized_block_number(15).await; - // The second upgrade will not be processed, as it has less than 5 confirmations. - watcher.loop_iteration(&mut storage).await.unwrap(); - - let db_versions = storage.protocol_versions_dal().all_versions().await; - // There should be genesis version and just added version. 
- assert_eq!(db_versions.len(), 2); - assert_eq!(db_versions[1].minor, ProtocolVersionId::latest()); - - client.set_last_finalized_block_number(20).await; - // Now the second and the third upgrades will be processed. - watcher.loop_iteration(&mut storage).await.unwrap(); - let db_versions = storage.protocol_versions_dal().all_versions().await; - let mut expected_version = ProtocolSemanticVersion { - minor: ProtocolVersionId::next(), - patch: 0.into(), - }; - assert_eq!(db_versions.len(), 4); - assert_eq!(db_versions[2], expected_version); - expected_version.patch += 1; - assert_eq!(db_versions[3], expected_version); - - // Check that tx was saved with the second upgrade. - let tx = storage - .protocol_versions_dal() - .get_protocol_upgrade_tx(ProtocolVersionId::next()) - .await - .unwrap() - .expect("no protocol upgrade transaction"); - assert_eq!(tx.common_data.upgrade_id, ProtocolVersionId::next()); -} - -#[test_log::test(tokio::test)] -#[should_panic] -async fn test_gap_in_single_batch() { - let connection_pool = ConnectionPool::::test_pool().await; - setup_db(&connection_pool).await; - let (mut watcher, mut client) = create_l1_test_watcher(connection_pool.clone()).await; - - let mut storage = connection_pool.connection().await.unwrap(); - client - .add_transactions(&[ - build_l1_tx(0, 10), - build_l1_tx(1, 14), - build_l1_tx(2, 14), - build_l1_tx(3, 14), - build_l1_tx(5, 14), - ]) - .await; - client.set_last_finalized_block_number(15).await; - watcher.loop_iteration(&mut storage).await.unwrap(); -} - -#[test_log::test(tokio::test)] -#[should_panic] -async fn test_gap_between_batches() { - let connection_pool = ConnectionPool::::test_pool().await; - setup_db(&connection_pool).await; - let (mut watcher, mut client) = create_l1_test_watcher(connection_pool.clone()).await; - - let mut storage = connection_pool.connection().await.unwrap(); - client - .add_transactions(&[ - // this goes to the first batch - build_l1_tx(0, 10), - build_l1_tx(1, 14), - build_l1_tx(2, 
14), - // this goes to the second batch - build_l1_tx(4, 20), - build_l1_tx(5, 22), - ]) - .await; - client.set_last_finalized_block_number(15).await; - watcher.loop_iteration(&mut storage).await.unwrap(); - - let db_txs = get_all_db_txs(&mut storage).await; - assert_eq!(db_txs.len(), 3); - client.set_last_finalized_block_number(25).await; - watcher.loop_iteration(&mut storage).await.unwrap(); -} - -#[test_log::test(tokio::test)] -async fn test_overlapping_batches() { - zksync_concurrency::testonly::abort_on_panic(); - let connection_pool = ConnectionPool::::test_pool().await; - setup_db(&connection_pool).await; - let (mut watcher, mut client) = create_l1_test_watcher(connection_pool.clone()).await; - - let mut storage = connection_pool.connection().await.unwrap(); - client - .add_transactions(&[ - // this goes to the first batch - build_l1_tx(0, 10), - build_l1_tx(1, 14), - build_l1_tx(2, 14), - // this goes to the second batch - build_l1_tx(1, 20), - build_l1_tx(2, 22), - build_l1_tx(3, 23), - build_l1_tx(4, 23), - ]) - .await; - client.set_last_finalized_block_number(15).await; - watcher.loop_iteration(&mut storage).await.unwrap(); - - let db_txs = get_all_db_txs(&mut storage).await; - assert_eq!(db_txs.len(), 3); - - client.set_last_finalized_block_number(25).await; - watcher.loop_iteration(&mut storage).await.unwrap(); - - let db_txs = get_all_db_txs(&mut storage).await; - assert_eq!(db_txs.len(), 5); - let mut db_txs: Vec = db_txs - .into_iter() - .map(|tx| tx.try_into().unwrap()) - .collect(); - db_txs.sort_by_key(|tx| tx.common_data.serial_id); - let tx = db_txs[2].clone(); - assert_eq!(tx.common_data.serial_id.0, 2); - let tx = db_txs[4].clone(); - assert_eq!(tx.common_data.serial_id.0, 4); -} - -#[test_log::test(tokio::test)] -async fn test_transactions_get_gradually_processed_by_gateway() { - zksync_concurrency::testonly::abort_on_panic(); - let connection_pool = ConnectionPool::::test_pool().await; - setup_db(&connection_pool).await; - let (mut watcher, 
mut l1_client, mut gateway_client) = - create_gateway_test_watcher(connection_pool.clone()).await; - - let mut storage = connection_pool.connection().await.unwrap(); - l1_client - .add_transactions(&[ - build_l1_tx(0, 10), - build_l1_tx(1, 14), - build_l1_tx(2, 14), - build_l1_tx(3, 20), - build_l1_tx(4, 22), - ]) - .await; - l1_client.set_last_finalized_block_number(15).await; - gateway_client - .set_processed_priority_transactions_count(2) - .await; - watcher.loop_iteration(&mut storage).await.unwrap(); - - let db_txs = get_all_db_txs(&mut storage).await; - assert_eq!(db_txs.len(), 2); - - l1_client.set_last_finalized_block_number(25).await; - gateway_client - .set_processed_priority_transactions_count(4) - .await; - watcher.loop_iteration(&mut storage).await.unwrap(); - - let db_txs = get_all_db_txs(&mut storage).await; - assert_eq!(db_txs.len(), 4); - let mut db_txs: Vec = db_txs - .into_iter() - .map(|tx| tx.try_into().unwrap()) - .collect(); - db_txs.sort_by_key(|tx| tx.common_data.serial_id); - let tx = db_txs[2].clone(); - assert_eq!(tx.common_data.serial_id.0, 2); - let tx = db_txs[3].clone(); - assert_eq!(tx.common_data.serial_id.0, 3); -} - -async fn get_all_db_txs(storage: &mut Connection<'_, Core>) -> Vec { - storage.transactions_dal().reset_mempool().await.unwrap(); - storage - .transactions_dal() - .sync_mempool(&[], &[], 0, 0, 1000) - .await - .unwrap() - .into_iter() - .map(|x| x.0) - .collect() -} - -fn tx_into_log(tx: L1Tx) -> Log { - let tx = abi::Transaction::try_from(Transaction::from(tx)).unwrap(); - let abi::Transaction::L1 { - tx, - factory_deps, - eth_block, - .. 
- } = tx - else { - unreachable!() - }; - - let data = ethabi::encode( - &abi::NewPriorityRequest { - tx_id: tx.nonce, - tx_hash: tx.hash().into(), - expiration_timestamp: u64::MAX, - transaction: tx, - factory_deps, - } - .encode(), - ); - - Log { - address: Address::repeat_byte(0x1), - topics: vec![hyperchain_contract() - .event("NewPriorityRequest") - .expect("NewPriorityRequest event is missing in abi") - .signature()], - data: data.into(), - block_hash: Some(H256::repeat_byte(0x11)), - block_number: Some(eth_block.into()), - transaction_hash: Some(H256::default()), - transaction_index: Some(0u64.into()), - log_index: Some(0u64.into()), - transaction_log_index: Some(0u64.into()), - log_type: None, - removed: None, - block_timestamp: None, - } -} - -fn init_calldata(protocol_upgrade: ProtocolUpgrade) -> Vec { - let upgrade_token = upgrade_into_diamond_cut(protocol_upgrade); - - let encoded_params = ethabi::encode(&[upgrade_token]); - - let execute_upgrade_selector = hyperchain_contract() - .function("executeUpgrade") - .unwrap() - .short_signature(); - - // Concatenate the function selector with the encoded parameters - let mut calldata = Vec::with_capacity(4 + encoded_params.len()); - calldata.extend_from_slice(&execute_upgrade_selector); - calldata.extend_from_slice(&encoded_params); - - calldata -} - -fn diamond_upgrade_log(upgrade: ProtocolUpgrade, eth_block: u64) -> Log { - // struct DiamondCutData { - // FacetCut[] facetCuts; - // address initAddress; - // bytes initCalldata; - // } - let final_data = ethabi::encode(&[Token::Tuple(vec![ - Token::Array(vec![]), - Token::Address(H160::zero()), - Token::Bytes(init_calldata(upgrade.clone())), - ])]); - tracing::info!("{:?}", Token::Bytes(init_calldata(upgrade))); - - Log { - address: Address::repeat_byte(0x1), - topics: vec![ - state_transition_manager_contract() - .event("NewUpgradeCutData") - .unwrap() - .signature(), - H256::from_low_u64_be(eth_block), - ], - data: final_data.into(), - block_hash: 
Some(H256::repeat_byte(0x11)), - block_number: Some(eth_block.into()), - transaction_hash: Some(H256::random()), - transaction_index: Some(0u64.into()), - log_index: Some(0u64.into()), - transaction_log_index: Some(0u64.into()), - log_type: None, - removed: None, - block_timestamp: None, - } -} -fn upgrade_timestamp_log(eth_block: u64) -> Log { - let final_data = ethabi::encode(&[U256::from(12345).into_token()]); - - Log { - address: Address::repeat_byte(0x1), - topics: vec![ - chain_admin_contract() - .event("UpdateUpgradeTimestamp") - .expect("UpdateUpgradeTimestamp event is missing in ABI") - .signature(), - H256::from_low_u64_be(eth_block), - ], - data: final_data.into(), - block_hash: Some(H256::repeat_byte(0x11)), - block_number: Some(eth_block.into()), - transaction_hash: Some(H256::random()), - transaction_index: Some(0u64.into()), - log_index: Some(0u64.into()), - transaction_log_index: Some(0u64.into()), - log_type: None, - removed: None, - block_timestamp: None, - } -} - -fn upgrade_into_diamond_cut(upgrade: ProtocolUpgrade) -> Token { - let abi::Transaction::L1 { - tx, factory_deps, .. 
- } = upgrade - .tx - .map(|tx| Transaction::from(tx).try_into().unwrap()) - .unwrap_or(abi::Transaction::L1 { - tx: Default::default(), - factory_deps: vec![], - eth_block: 0, - }) - else { - unreachable!() - }; - ProposedUpgrade { - l2_protocol_upgrade_tx: tx, - factory_deps, - bootloader_hash: upgrade.bootloader_code_hash.unwrap_or_default().into(), - default_account_hash: upgrade.default_account_code_hash.unwrap_or_default().into(), - verifier: upgrade.verifier_address.unwrap_or_default(), - verifier_params: upgrade.verifier_params.unwrap_or_default().into(), - l1_contracts_upgrade_calldata: vec![], - post_upgrade_calldata: vec![], - upgrade_timestamp: upgrade.timestamp.into(), - new_protocol_version: upgrade.version.pack(), - } - .encode() -} - -async fn setup_db(connection_pool: &ConnectionPool) { - connection_pool - .connection() - .await - .unwrap() - .protocol_versions_dal() - .save_protocol_version_with_tx(&ProtocolVersion { - version: ProtocolSemanticVersion { - minor: (ProtocolVersionId::latest() as u16 - 1).try_into().unwrap(), - patch: 0.into(), - }, - ..Default::default() - }) - .await - .unwrap(); -} diff --git a/core/node/eth_watch/src/tests/client.rs b/core/node/eth_watch/src/tests/client.rs new file mode 100644 index 000000000000..04825f22d815 --- /dev/null +++ b/core/node/eth_watch/src/tests/client.rs @@ -0,0 +1,487 @@ +use std::{collections::HashMap, convert::TryInto, sync::Arc}; + +use tokio::sync::RwLock; +use zksync_contracts::{ + chain_admin_contract, hyperchain_contract, state_transition_manager_contract, +}; +use zksync_eth_client::{ContractCallError, EnrichedClientResult}; +use zksync_types::{ + abi, + abi::ProposedUpgrade, + api::{ChainAggProof, Log}, + ethabi, + ethabi::Token, + l1::L1Tx, + u256_to_h256, + web3::{contract::Tokenizable, BlockNumber}, + Address, L1BatchNumber, L2ChainId, ProtocolUpgrade, SLChainId, Transaction, H256, U256, U64, +}; + +use crate::client::{EthClient, L2EthClient, RETRY_LIMIT}; + +#[derive(Debug)] +pub 
struct FakeEthClientData { + transactions: HashMap>, + diamond_upgrades: HashMap>, + upgrade_timestamp: HashMap>, + last_finalized_block_number: u64, + chain_id: SLChainId, + processed_priority_transactions_count: u64, + chain_log_proofs: HashMap, + batch_roots: HashMap>, + chain_roots: HashMap, +} + +impl FakeEthClientData { + fn new(chain_id: SLChainId) -> Self { + Self { + transactions: Default::default(), + diamond_upgrades: Default::default(), + upgrade_timestamp: Default::default(), + last_finalized_block_number: 0, + chain_id, + processed_priority_transactions_count: 0, + chain_log_proofs: Default::default(), + batch_roots: Default::default(), + chain_roots: Default::default(), + } + } + + fn add_transactions(&mut self, transactions: &[L1Tx]) { + for transaction in transactions { + let eth_block = transaction.eth_block(); + self.transactions + .entry(eth_block.0 as u64) + .or_default() + .push(tx_into_log(transaction.clone())); + self.processed_priority_transactions_count += 1; + } + } + + fn add_upgrade_timestamp(&mut self, upgrades: &[(ProtocolUpgrade, u64)]) { + for (upgrade, eth_block) in upgrades { + self.upgrade_timestamp + .entry(*eth_block) + .or_default() + .push(upgrade_timestamp_log(*eth_block)); + self.diamond_upgrades + .entry(*eth_block) + .or_default() + .push(diamond_upgrade_log(upgrade.clone(), *eth_block)); + } + } + + fn set_last_finalized_block_number(&mut self, number: u64) { + self.last_finalized_block_number = number; + } + + fn set_processed_priority_transactions_count(&mut self, number: u64) { + self.processed_priority_transactions_count = number; + } + + fn add_batch_roots(&mut self, batch_roots: &[(u64, u64, H256)]) { + for (sl_block, l2_batch_number, batch_root) in batch_roots { + self.batch_roots + .entry(*sl_block) + .or_default() + .push(batch_root_to_log(*sl_block, *l2_batch_number, *batch_root)); + } + } + + fn add_chain_roots(&mut self, chain_roots: &[(u64, H256)]) { + for (batch, root) in chain_roots { + 
self.chain_roots.insert(*batch, *root); + } + } + + fn add_chain_log_proofs(&mut self, chain_log_proofs: Vec<(L1BatchNumber, ChainAggProof)>) { + for (batch, proof) in chain_log_proofs { + self.chain_log_proofs.insert(batch, proof); + } + } +} + +#[derive(Debug, Clone)] +pub struct MockEthClient { + inner: Arc>, +} + +impl MockEthClient { + pub fn new(chain_id: SLChainId) -> Self { + Self { + inner: Arc::new(RwLock::new(FakeEthClientData::new(chain_id))), + } + } + + pub async fn add_transactions(&mut self, transactions: &[L1Tx]) { + self.inner.write().await.add_transactions(transactions); + } + + pub async fn add_upgrade_timestamp(&mut self, upgrades: &[(ProtocolUpgrade, u64)]) { + self.inner.write().await.add_upgrade_timestamp(upgrades); + } + + pub async fn set_last_finalized_block_number(&mut self, number: u64) { + self.inner + .write() + .await + .set_last_finalized_block_number(number); + } + + pub async fn set_processed_priority_transactions_count(&mut self, number: u64) { + self.inner + .write() + .await + .set_processed_priority_transactions_count(number) + } + + pub async fn block_to_number(&self, block: BlockNumber) -> u64 { + match block { + BlockNumber::Earliest => 0, + BlockNumber::Number(number) => number.as_u64(), + BlockNumber::Pending + | BlockNumber::Latest + | BlockNumber::Finalized + | BlockNumber::Safe => unreachable!(), + } + } + + pub async fn add_batch_roots(&mut self, batch_roots: &[(u64, u64, H256)]) { + self.inner.write().await.add_batch_roots(batch_roots); + } + + pub async fn add_chain_roots(&mut self, chain_roots: &[(u64, H256)]) { + self.inner.write().await.add_chain_roots(chain_roots); + } + + pub async fn add_chain_log_proofs( + &mut self, + chain_log_proofs: Vec<(L1BatchNumber, ChainAggProof)>, + ) { + self.inner + .write() + .await + .add_chain_log_proofs(chain_log_proofs); + } +} + +#[async_trait::async_trait] +impl EthClient for MockEthClient { + async fn get_events( + &self, + from: BlockNumber, + to: BlockNumber, + topic1: 
H256, + topic2: Option, + _retries_left: usize, + ) -> EnrichedClientResult> { + let from = self.block_to_number(from).await; + let to = self.block_to_number(to).await; + let mut logs = vec![]; + for number in from..=to { + if let Some(ops) = self.inner.read().await.transactions.get(&number) { + logs.extend_from_slice(ops); + } + if let Some(ops) = self.inner.read().await.diamond_upgrades.get(&number) { + logs.extend_from_slice(ops); + } + if let Some(ops) = self.inner.read().await.upgrade_timestamp.get(&number) { + logs.extend_from_slice(ops); + } + if let Some(ops) = self.inner.read().await.batch_roots.get(&number) { + logs.extend_from_slice(ops); + } + } + Ok(logs + .into_iter() + .filter(|log| { + log.topics.first() == Some(&topic1) + && (topic2.is_none() || log.topics.get(1) == topic2.as_ref()) + }) + .collect()) + } + + async fn scheduler_vk_hash( + &self, + _verifier_address: Address, + ) -> Result { + Ok(H256::zero()) + } + + async fn finalized_block_number(&self) -> EnrichedClientResult { + Ok(self.inner.read().await.last_finalized_block_number) + } + + async fn confirmed_block_number(&self) -> EnrichedClientResult { + Ok(self.inner.read().await.last_finalized_block_number) + } + + async fn diamond_cut_by_version( + &self, + packed_version: H256, + ) -> EnrichedClientResult>> { + let from_block = *self + .inner + .read() + .await + .diamond_upgrades + .keys() + .min() + .unwrap_or(&0); + let to_block = *self + .inner + .read() + .await + .diamond_upgrades + .keys() + .max() + .unwrap_or(&0); + + let logs = self + .get_events( + U64::from(from_block).into(), + U64::from(to_block).into(), + state_transition_manager_contract() + .event("NewUpgradeCutData") + .unwrap() + .signature(), + Some(packed_version), + RETRY_LIMIT, + ) + .await?; + + Ok(logs.into_iter().next().map(|log| log.data.0)) + } + + async fn get_total_priority_txs(&self) -> Result { + Ok(self + .inner + .read() + .await + .processed_priority_transactions_count) + } + + async fn chain_id(&self) 
-> EnrichedClientResult { + Ok(self.inner.read().await.chain_id) + } + + async fn get_chain_root( + &self, + _block_number: U64, + _l2_chain_id: L2ChainId, + ) -> Result { + unimplemented!() + } +} + +#[async_trait::async_trait] +impl L2EthClient for MockEthClient { + async fn get_chain_log_proof( + &self, + l1_batch_number: L1BatchNumber, + _chain_id: L2ChainId, + ) -> EnrichedClientResult> { + Ok(self + .inner + .read() + .await + .chain_log_proofs + .get(&l1_batch_number) + .cloned()) + } + + async fn get_chain_root_l2( + &self, + l1_batch_number: L1BatchNumber, + _l2_chain_id: L2ChainId, + ) -> Result, ContractCallError> { + Ok(self + .inner + .read() + .await + .chain_roots + .get(&l1_batch_number.0.into()) + .cloned()) + } +} + +fn tx_into_log(tx: L1Tx) -> Log { + let tx = abi::Transaction::try_from(Transaction::from(tx)).unwrap(); + let abi::Transaction::L1 { + tx, + factory_deps, + eth_block, + .. + } = tx + else { + unreachable!() + }; + + let data = ethabi::encode( + &abi::NewPriorityRequest { + tx_id: tx.nonce, + tx_hash: tx.hash().into(), + expiration_timestamp: u64::MAX, + transaction: tx, + factory_deps, + } + .encode(), + ); + + Log { + address: Address::repeat_byte(0x1), + topics: vec![hyperchain_contract() + .event("NewPriorityRequest") + .expect("NewPriorityRequest event is missing in abi") + .signature()], + data: data.into(), + block_hash: Some(H256::repeat_byte(0x11)), + block_number: Some(eth_block.into()), + l1_batch_number: None, + transaction_hash: Some(H256::default()), + transaction_index: Some(0u64.into()), + log_index: Some(0u64.into()), + transaction_log_index: Some(0u64.into()), + log_type: None, + removed: None, + block_timestamp: None, + } +} + +fn init_calldata(protocol_upgrade: ProtocolUpgrade) -> Vec { + let upgrade_token = upgrade_into_diamond_cut(protocol_upgrade); + + let encoded_params = ethabi::encode(&[upgrade_token]); + + let execute_upgrade_selector = hyperchain_contract() + .function("executeUpgrade") + .unwrap() + 
.short_signature(); + + // Concatenate the function selector with the encoded parameters + let mut calldata = Vec::with_capacity(4 + encoded_params.len()); + calldata.extend_from_slice(&execute_upgrade_selector); + calldata.extend_from_slice(&encoded_params); + + calldata +} + +fn diamond_upgrade_log(upgrade: ProtocolUpgrade, eth_block: u64) -> Log { + // struct DiamondCutData { + // FacetCut[] facetCuts; + // address initAddress; + // bytes initCalldata; + // } + let final_data = ethabi::encode(&[Token::Tuple(vec![ + Token::Array(vec![]), + Token::Address(Address::zero()), + Token::Bytes(init_calldata(upgrade.clone())), + ])]); + tracing::info!("{:?}", Token::Bytes(init_calldata(upgrade))); + + Log { + address: Address::repeat_byte(0x1), + topics: vec![ + state_transition_manager_contract() + .event("NewUpgradeCutData") + .unwrap() + .signature(), + H256::from_low_u64_be(eth_block), + ], + data: final_data.into(), + block_hash: Some(H256::repeat_byte(0x11)), + block_number: Some(eth_block.into()), + l1_batch_number: None, + transaction_hash: Some(H256::random()), + transaction_index: Some(0u64.into()), + log_index: Some(0u64.into()), + transaction_log_index: Some(0u64.into()), + log_type: None, + removed: None, + block_timestamp: None, + } +} +fn upgrade_timestamp_log(eth_block: u64) -> Log { + let final_data = ethabi::encode(&[U256::from(12345).into_token()]); + + Log { + address: Address::repeat_byte(0x1), + topics: vec![ + chain_admin_contract() + .event("UpdateUpgradeTimestamp") + .expect("UpdateUpgradeTimestamp event is missing in ABI") + .signature(), + H256::from_low_u64_be(eth_block), + ], + data: final_data.into(), + block_hash: Some(H256::repeat_byte(0x11)), + block_number: Some(eth_block.into()), + l1_batch_number: None, + transaction_hash: Some(H256::random()), + transaction_index: Some(0u64.into()), + log_index: Some(0u64.into()), + transaction_log_index: Some(0u64.into()), + log_type: None, + removed: None, + block_timestamp: None, + } +} + +fn 
upgrade_into_diamond_cut(upgrade: ProtocolUpgrade) -> Token { + let abi::Transaction::L1 { + tx, factory_deps, .. + } = upgrade + .tx + .map(|tx| Transaction::from(tx).try_into().unwrap()) + .unwrap_or(abi::Transaction::L1 { + tx: Default::default(), + factory_deps: vec![], + eth_block: 0, + }) + else { + unreachable!() + }; + ProposedUpgrade { + l2_protocol_upgrade_tx: tx, + factory_deps, + bootloader_hash: upgrade.bootloader_code_hash.unwrap_or_default().into(), + default_account_hash: upgrade.default_account_code_hash.unwrap_or_default().into(), + verifier: upgrade.verifier_address.unwrap_or_default(), + verifier_params: upgrade.verifier_params.unwrap_or_default().into(), + l1_contracts_upgrade_calldata: vec![], + post_upgrade_calldata: vec![], + upgrade_timestamp: upgrade.timestamp.into(), + new_protocol_version: upgrade.version.pack(), + } + .encode() +} + +fn batch_root_to_log(sl_block_number: u64, l2_batch_number: u64, batch_root: H256) -> Log { + let topic1 = ethabi::long_signature( + "AppendedChainBatchRoot", + &[ + ethabi::ParamType::Uint(256), + ethabi::ParamType::Uint(256), + ethabi::ParamType::FixedBytes(32), + ], + ); + let topic2 = u256_to_h256(L2ChainId::default().as_u64().into()); + let topic3 = u256_to_h256(l2_batch_number.into()); + let data = ethabi::encode(&[batch_root.into_token()]); + + Log { + address: Address::repeat_byte(0x1), + topics: vec![topic1, topic2, topic3], + data: data.into(), + block_hash: Some(H256::repeat_byte(0x11)), + block_number: Some(sl_block_number.into()), + l1_batch_number: Some(sl_block_number.into()), + transaction_hash: Some(H256::random()), + transaction_index: Some(0u64.into()), + log_index: Some(0u64.into()), + transaction_log_index: Some(0u64.into()), + log_type: None, + removed: None, + block_timestamp: None, + } +} diff --git a/core/node/eth_watch/src/tests/mod.rs b/core/node/eth_watch/src/tests/mod.rs new file mode 100644 index 000000000000..df91074beb18 --- /dev/null +++ 
b/core/node/eth_watch/src/tests/mod.rs @@ -0,0 +1,827 @@ +use std::convert::TryInto; + +use zksync_contracts::chain_admin_contract; +use zksync_dal::{Connection, ConnectionPool, Core, CoreDal}; +use zksync_types::{ + abi, + aggregated_operations::AggregatedActionType, + api::ChainAggProof, + block::L1BatchHeader, + commitment::L1BatchCommitmentArtifacts, + l1::{L1Tx, OpProcessingType, PriorityQueueType}, + l2_to_l1_log::BatchAndChainMerklePath, + protocol_upgrade::{ProtocolUpgradeTx, ProtocolUpgradeTxCommonData}, + protocol_version::ProtocolSemanticVersion, + Address, Execute, L1BatchNumber, L1TxCommonData, L2ChainId, PriorityOpId, ProtocolUpgrade, + ProtocolVersion, ProtocolVersionId, SLChainId, Transaction, H256, U256, +}; + +use crate::{tests::client::MockEthClient, EthWatch, L2EthClient}; + +mod client; + +const SL_CHAIN_ID: SLChainId = SLChainId(505); + +fn build_l1_tx(serial_id: u64, eth_block: u64) -> L1Tx { + let tx = L1Tx { + execute: Execute { + contract_address: Some(Address::repeat_byte(0x11)), + calldata: vec![1, 2, 3], + factory_deps: vec![], + value: U256::zero(), + }, + common_data: L1TxCommonData { + serial_id: PriorityOpId(serial_id), + sender: [1u8; 20].into(), + eth_block, + gas_limit: Default::default(), + max_fee_per_gas: Default::default(), + gas_per_pubdata_limit: 1u32.into(), + full_fee: Default::default(), + layer_2_tip_fee: U256::from(10u8), + refund_recipient: Address::zero(), + to_mint: Default::default(), + priority_queue_type: PriorityQueueType::Deque, + op_processing_type: OpProcessingType::Common, + canonical_tx_hash: H256::default(), + }, + received_timestamp_ms: 0, + }; + // Convert to abi::Transaction and back, so that canonical_tx_hash is computed. 
+ let tx = Transaction::from_abi( + abi::Transaction::try_from(Transaction::from(tx)).unwrap(), + false, + ) + .unwrap(); + tx.try_into().unwrap() +} + +fn build_upgrade_tx(id: ProtocolVersionId, eth_block: u64) -> ProtocolUpgradeTx { + let tx = ProtocolUpgradeTx { + execute: Execute { + contract_address: Some(Address::repeat_byte(0x11)), + calldata: vec![1, 2, 3], + factory_deps: vec![], + value: U256::zero(), + }, + common_data: ProtocolUpgradeTxCommonData { + upgrade_id: id, + sender: [1u8; 20].into(), + eth_block, + gas_limit: Default::default(), + max_fee_per_gas: Default::default(), + gas_per_pubdata_limit: 1u32.into(), + refund_recipient: Address::zero(), + to_mint: Default::default(), + canonical_tx_hash: H256::zero(), + }, + received_timestamp_ms: 0, + }; + // Convert to abi::Transaction and back, so that canonical_tx_hash is computed. + Transaction::from_abi( + abi::Transaction::try_from(Transaction::from(tx)).unwrap(), + false, + ) + .unwrap() + .try_into() + .unwrap() +} + +async fn create_test_watcher( + connection_pool: ConnectionPool, + is_gateway: bool, +) -> (EthWatch, MockEthClient, MockEthClient) { + let l1_client = MockEthClient::new(SLChainId(42)); + let sl_client = MockEthClient::new(SL_CHAIN_ID); + let sl_l2_client: Option> = if is_gateway { + Some(Box::new(sl_client.clone())) + } else { + None + }; + let watcher = EthWatch::new( + &chain_admin_contract(), + Box::new(l1_client.clone()), + sl_l2_client, + connection_pool, + std::time::Duration::from_nanos(1), + L2ChainId::default(), + ) + .await + .unwrap(); + + (watcher, l1_client, sl_client) +} + +async fn create_l1_test_watcher( + connection_pool: ConnectionPool, +) -> (EthWatch, MockEthClient) { + let (watcher, l1_client, _) = create_test_watcher(connection_pool, false).await; + (watcher, l1_client) +} + +async fn create_gateway_test_watcher( + connection_pool: ConnectionPool, +) -> (EthWatch, MockEthClient, MockEthClient) { + create_test_watcher(connection_pool, true).await +} + 
+#[test_log::test(tokio::test)] +async fn test_normal_operation_l1_txs() { + let connection_pool = ConnectionPool::::test_pool().await; + setup_db(&connection_pool).await; + let (mut watcher, mut client) = create_l1_test_watcher(connection_pool.clone()).await; + + let mut storage = connection_pool.connection().await.unwrap(); + client + .add_transactions(&[build_l1_tx(0, 10), build_l1_tx(1, 14), build_l1_tx(2, 18)]) + .await; + client.set_last_finalized_block_number(15).await; + // second tx will not be processed, as it's block is not finalized yet. + watcher.loop_iteration(&mut storage).await.unwrap(); + let db_txs = get_all_db_txs(&mut storage).await; + let mut db_txs: Vec = db_txs + .into_iter() + .map(|tx| tx.try_into().unwrap()) + .collect(); + db_txs.sort_by_key(|tx| tx.common_data.serial_id); + assert_eq!(db_txs.len(), 2); + let db_tx = db_txs[0].clone(); + assert_eq!(db_tx.common_data.serial_id.0, 0); + let db_tx = db_txs[1].clone(); + assert_eq!(db_tx.common_data.serial_id.0, 1); + + client.set_last_finalized_block_number(20).await; + // now the second tx will be processed + watcher.loop_iteration(&mut storage).await.unwrap(); + let db_txs = get_all_db_txs(&mut storage).await; + let mut db_txs: Vec = db_txs + .into_iter() + .map(|tx| tx.try_into().unwrap()) + .collect(); + db_txs.sort_by_key(|tx| tx.common_data.serial_id); + assert_eq!(db_txs.len(), 3); + let db_tx = db_txs[2].clone(); + assert_eq!(db_tx.common_data.serial_id.0, 2); +} + +#[test_log::test(tokio::test)] +async fn test_gap_in_upgrade_timestamp() { + let connection_pool = ConnectionPool::::test_pool().await; + setup_db(&connection_pool).await; + let (mut watcher, mut client) = create_l1_test_watcher(connection_pool.clone()).await; + + let mut storage = connection_pool.connection().await.unwrap(); + client + .add_upgrade_timestamp(&[( + ProtocolUpgrade { + version: ProtocolSemanticVersion { + minor: ProtocolVersionId::next(), + patch: 0.into(), + }, + tx: None, + ..Default::default() + }, + 
10, + )]) + .await; + client.set_last_finalized_block_number(15).await; + watcher.loop_iteration(&mut storage).await.unwrap(); + + let db_versions = storage.protocol_versions_dal().all_versions().await; + // there should be genesis version and just added version + assert_eq!(db_versions.len(), 2); + + let previous_version = (ProtocolVersionId::latest() as u16 - 1).try_into().unwrap(); + let next_version = ProtocolVersionId::next(); + assert_eq!(db_versions[0].minor, previous_version); + assert_eq!(db_versions[1].minor, next_version); +} + +#[test_log::test(tokio::test)] +async fn test_normal_operation_upgrade_timestamp() { + zksync_concurrency::testonly::abort_on_panic(); + let connection_pool = ConnectionPool::::test_pool().await; + setup_db(&connection_pool).await; + + let mut client = MockEthClient::new(SLChainId(42)); + let mut watcher = EthWatch::new( + &chain_admin_contract(), + Box::new(client.clone()), + None, + connection_pool.clone(), + std::time::Duration::from_nanos(1), + L2ChainId::default(), + ) + .await + .unwrap(); + + let mut storage = connection_pool.connection().await.unwrap(); + client + .add_upgrade_timestamp(&[ + ( + ProtocolUpgrade { + tx: None, + ..Default::default() + }, + 10, + ), + ( + ProtocolUpgrade { + version: ProtocolSemanticVersion { + minor: ProtocolVersionId::next(), + patch: 0.into(), + }, + tx: Some(build_upgrade_tx(ProtocolVersionId::next(), 18)), + ..Default::default() + }, + 18, + ), + ( + ProtocolUpgrade { + version: ProtocolSemanticVersion { + minor: ProtocolVersionId::next(), + patch: 1.into(), + }, + tx: None, + ..Default::default() + }, + 19, + ), + ]) + .await; + client.set_last_finalized_block_number(15).await; + // The second upgrade will not be processed, as it has less than 5 confirmations. + watcher.loop_iteration(&mut storage).await.unwrap(); + + let db_versions = storage.protocol_versions_dal().all_versions().await; + // There should be genesis version and just added version. 
+ assert_eq!(db_versions.len(), 2); + assert_eq!(db_versions[1].minor, ProtocolVersionId::latest()); + + client.set_last_finalized_block_number(20).await; + // Now the second and the third upgrades will be processed. + watcher.loop_iteration(&mut storage).await.unwrap(); + let db_versions = storage.protocol_versions_dal().all_versions().await; + let mut expected_version = ProtocolSemanticVersion { + minor: ProtocolVersionId::next(), + patch: 0.into(), + }; + assert_eq!(db_versions.len(), 4); + assert_eq!(db_versions[2], expected_version); + expected_version.patch += 1; + assert_eq!(db_versions[3], expected_version); + + // Check that tx was saved with the second upgrade. + let tx = storage + .protocol_versions_dal() + .get_protocol_upgrade_tx(ProtocolVersionId::next()) + .await + .unwrap() + .expect("no protocol upgrade transaction"); + assert_eq!(tx.common_data.upgrade_id, ProtocolVersionId::next()); +} + +#[test_log::test(tokio::test)] +#[should_panic] +async fn test_gap_in_single_batch() { + let connection_pool = ConnectionPool::::test_pool().await; + setup_db(&connection_pool).await; + let (mut watcher, mut client) = create_l1_test_watcher(connection_pool.clone()).await; + + let mut storage = connection_pool.connection().await.unwrap(); + client + .add_transactions(&[ + build_l1_tx(0, 10), + build_l1_tx(1, 14), + build_l1_tx(2, 14), + build_l1_tx(3, 14), + build_l1_tx(5, 14), + ]) + .await; + client.set_last_finalized_block_number(15).await; + watcher.loop_iteration(&mut storage).await.unwrap(); +} + +#[test_log::test(tokio::test)] +#[should_panic] +async fn test_gap_between_batches() { + let connection_pool = ConnectionPool::::test_pool().await; + setup_db(&connection_pool).await; + let (mut watcher, mut client) = create_l1_test_watcher(connection_pool.clone()).await; + + let mut storage = connection_pool.connection().await.unwrap(); + client + .add_transactions(&[ + // this goes to the first batch + build_l1_tx(0, 10), + build_l1_tx(1, 14), + build_l1_tx(2, 
14), + // this goes to the second batch + build_l1_tx(4, 20), + build_l1_tx(5, 22), + ]) + .await; + client.set_last_finalized_block_number(15).await; + watcher.loop_iteration(&mut storage).await.unwrap(); + + let db_txs = get_all_db_txs(&mut storage).await; + assert_eq!(db_txs.len(), 3); + client.set_last_finalized_block_number(25).await; + watcher.loop_iteration(&mut storage).await.unwrap(); +} + +#[test_log::test(tokio::test)] +async fn test_overlapping_batches() { + zksync_concurrency::testonly::abort_on_panic(); + let connection_pool = ConnectionPool::::test_pool().await; + setup_db(&connection_pool).await; + let (mut watcher, mut client) = create_l1_test_watcher(connection_pool.clone()).await; + + let mut storage = connection_pool.connection().await.unwrap(); + client + .add_transactions(&[ + // this goes to the first batch + build_l1_tx(0, 10), + build_l1_tx(1, 14), + build_l1_tx(2, 14), + // this goes to the second batch + build_l1_tx(1, 20), + build_l1_tx(2, 22), + build_l1_tx(3, 23), + build_l1_tx(4, 23), + ]) + .await; + client.set_last_finalized_block_number(15).await; + watcher.loop_iteration(&mut storage).await.unwrap(); + + let db_txs = get_all_db_txs(&mut storage).await; + assert_eq!(db_txs.len(), 3); + + client.set_last_finalized_block_number(25).await; + watcher.loop_iteration(&mut storage).await.unwrap(); + + let db_txs = get_all_db_txs(&mut storage).await; + assert_eq!(db_txs.len(), 5); + let mut db_txs: Vec = db_txs + .into_iter() + .map(|tx| tx.try_into().unwrap()) + .collect(); + db_txs.sort_by_key(|tx| tx.common_data.serial_id); + let tx = db_txs[2].clone(); + assert_eq!(tx.common_data.serial_id.0, 2); + let tx = db_txs[4].clone(); + assert_eq!(tx.common_data.serial_id.0, 4); +} + +#[test_log::test(tokio::test)] +async fn test_transactions_get_gradually_processed_by_gateway() { + zksync_concurrency::testonly::abort_on_panic(); + let connection_pool = ConnectionPool::::test_pool().await; + setup_db(&connection_pool).await; + let (mut watcher, 
mut l1_client, mut gateway_client) = + create_gateway_test_watcher(connection_pool.clone()).await; + + let mut storage = connection_pool.connection().await.unwrap(); + l1_client + .add_transactions(&[ + build_l1_tx(0, 10), + build_l1_tx(1, 14), + build_l1_tx(2, 14), + build_l1_tx(3, 20), + build_l1_tx(4, 22), + ]) + .await; + l1_client.set_last_finalized_block_number(15).await; + gateway_client + .set_processed_priority_transactions_count(2) + .await; + watcher.loop_iteration(&mut storage).await.unwrap(); + + let db_txs = get_all_db_txs(&mut storage).await; + assert_eq!(db_txs.len(), 2); + + l1_client.set_last_finalized_block_number(25).await; + gateway_client + .set_processed_priority_transactions_count(4) + .await; + watcher.loop_iteration(&mut storage).await.unwrap(); + + let db_txs = get_all_db_txs(&mut storage).await; + assert_eq!(db_txs.len(), 4); + let mut db_txs: Vec = db_txs + .into_iter() + .map(|tx| tx.try_into().unwrap()) + .collect(); + db_txs.sort_by_key(|tx| tx.common_data.serial_id); + let tx = db_txs[2].clone(); + assert_eq!(tx.common_data.serial_id.0, 2); + let tx = db_txs[3].clone(); + assert_eq!(tx.common_data.serial_id.0, 3); +} + +#[test_log::test(tokio::test)] +async fn test_batch_root_processor_from_genesis() { + let connection_pool = ConnectionPool::::test_pool().await; + setup_db(&connection_pool).await; + setup_batch_roots(&connection_pool, 0).await; + let (mut watcher, _, mut sl_client) = + create_gateway_test_watcher(connection_pool.clone()).await; + + let batch_roots = batch_roots(); + sl_client + .add_batch_roots(&[ + (5, 1, batch_roots[0]), + (9, 2, batch_roots[1]), + (11, 3, batch_roots[2]), + ]) + .await; + sl_client + .add_chain_roots(&[ + ( + 5, + H256::from_slice( + &hex::decode( + "10a2ef76e709d318b459be49f1e8d7f02d7120f2b501bc0afddd935f1a813c67", + ) + .unwrap(), + ), + ), + ( + 9, + H256::from_slice( + &hex::decode( + "e0c3330f674b6b2d578f958a1dbd66f164d068b0bb5a9fb077eca013976fda6f", + ) + .unwrap(), + ), + ), + ( + 11, + 
H256::from_slice( + &hex::decode( + "d22fc9a7b005fefecd33bb56cdbf70bcc23610e693cd21295f9920227c2cb1cc", + ) + .unwrap(), + ), + ), + ]) + .await; + let chain_log_proofs = chain_log_proofs(); + sl_client.add_chain_log_proofs(chain_log_proofs).await; + + sl_client.set_last_finalized_block_number(5).await; + + let mut connection = connection_pool.connection().await.unwrap(); + watcher.loop_iteration(&mut connection).await.unwrap(); + + let proof1 = connection + .blocks_dal() + .get_l1_batch_chain_merkle_path(L1BatchNumber(1)) + .await + .unwrap() + .unwrap(); + let proof1 = hex::encode(&bincode::serialize(&proof1).unwrap()); + assert_eq!(proof1, "000000000600000000000000420000000000000030783030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303030303030303030303030303030303030303030303030303030303030303530303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307833373561356266393039636230323134336533363935636136353865303634316537333961613539306630303034646261393335373263343463646239643264"); + + sl_client.set_last_finalized_block_number(11).await; + watcher.loop_iteration(&mut connection).await.unwrap(); + + let proof2 = connection + .blocks_dal() + .get_l1_batch_chain_merkle_path(L1BatchNumber(2)) + .await + .unwrap() + .unwrap(); + let proof2 = hex::encode(&bincode::serialize(&proof2).unwrap()); + assert_eq!(proof2, 
"0100000007000000000000004200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303031420000000000000030783130613265663736653730396433313862343539626534396631653864376630326437313230663262353031626330616664646439333566316138313363363742000000000000003078303030303030303030303030303030303030303030303030303030303030303930303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307861333738613230636132376237616533303731643162643763326164613030343639616263353765343239646436663438613833303932646237303539613138"); + + let proof3 = connection + .blocks_dal() + .get_l1_batch_chain_merkle_path(L1BatchNumber(3)) + .await + .unwrap() + .unwrap(); + let proof3 = hex::encode(&bincode::serialize(&proof3).unwrap()); + assert_eq!(proof3, 
"02000000080000000000000042000000000000003078303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030324200000000000000307834363730306234643430616335633335616632633232646461323738376139316562353637623036633932346138666238616539613035623230633038633231420000000000000030786530633333333066363734623662326435373866393538613164626436366631363464303638623062623561396662303737656361303133393736666461366642000000000000003078303030303030303030303030303030303030303030303030303030303030306230303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307833373561356266393039636230323134336533363935636136353865303634316537333961613539306630303034646261393335373263343463646239643264"); +} + +#[test_log::test(tokio::test)] +async fn test_batch_root_processor_restart() { + let connection_pool = ConnectionPool::::test_pool().await; + setup_db(&connection_pool).await; + setup_batch_roots(&connection_pool, 2).await; + let (mut watcher, _, mut sl_client) = + create_gateway_test_watcher(connection_pool.clone()).await; + + let batch_roots = batch_roots(); + sl_client + .add_batch_roots(&[ + (11, 3, batch_roots[2]), + (13, 4, batch_roots[3]), + (14, 5, batch_roots[4]), + (14, 6, batch_roots[5]), + ]) + .await; + sl_client + .add_chain_roots(&[ + ( + 11, + H256::from_slice( + &hex::decode( + "d22fc9a7b005fefecd33bb56cdbf70bcc23610e693cd21295f9920227c2cb1cc", + ) + .unwrap(), + ), + ), + ( + 13, + H256::from_slice( + &hex::decode( + 
"53edc1f5ad79c5999bd578dfc135f9c51ebd7fafa4585b64f71d15b2dce1b728", + ) + .unwrap(), + ), + ), + ( + 14, + H256::from_slice( + &hex::decode( + "61b35796307159a6da8aa45448e6941e3438380582e2f3cb358db59598ae156f", + ) + .unwrap(), + ), + ), + ]) + .await; + let chain_log_proofs = chain_log_proofs(); + sl_client.add_chain_log_proofs(chain_log_proofs).await; + + sl_client.set_last_finalized_block_number(14).await; + + let mut connection = connection_pool.connection().await.unwrap(); + watcher.loop_iteration(&mut connection).await.unwrap(); + + let proof = connection + .blocks_dal() + .get_l1_batch_chain_merkle_path(L1BatchNumber(3)) + .await + .unwrap() + .unwrap(); + let proof = hex::encode(&bincode::serialize(&proof).unwrap()); + assert_eq!(proof, "02000000080000000000000042000000000000003078303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030324200000000000000307834363730306234643430616335633335616632633232646461323738376139316562353637623036633932346138666238616539613035623230633038633231420000000000000030786530633333333066363734623662326435373866393538613164626436366631363464303638623062623561396662303737656361303133393736666461366642000000000000003078303030303030303030303030303030303030303030303030303030303030306230303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307833373561356266393039636230323134336533363935636136353865303634316537333961613539306630303034646261393335373263343463646239643264"); + + let proof = connection + 
.blocks_dal() + .get_l1_batch_chain_merkle_path(L1BatchNumber(4)) + .await + .unwrap() + .unwrap(); + let proof = hex::encode(&bincode::serialize(&proof).unwrap()); + assert_eq!(proof, "02000000080000000000000042000000000000003078303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030334200000000000000307837623765373735373139343639366666393634616233353837393131373362636337663735356132656161393334653935373061636533393139383435313265420000000000000030786530633333333066363734623662326435373866393538613164626436366631363464303638623062623561396662303737656361303133393736666461366642000000000000003078303030303030303030303030303030303030303030303030303030303030306430303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307835353063313735316338653764626166633839303939326634353532333636663064643565623665343362653535353936386264616338633732656466316261"); + + let proof = connection + .blocks_dal() + .get_l1_batch_chain_merkle_path(L1BatchNumber(5)) + .await + .unwrap() + .unwrap(); + let proof = hex::encode(&bincode::serialize(&proof).unwrap()); + assert_eq!(proof, 
"030000000900000000000000420000000000000030783030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303442000000000000003078303235663065363031353230366661626364326263613930316432633438396536336263356564346231356266356330633963363066396531363735383564614200000000000000307863633463343165646230633230333133343862323932623736386539626163316565386339326330396566386133323737633265636534303963313264383661420000000000000030783533656463316635616437396335393939626435373864666331333566396335316562643766616661343538356236346637316431356232646365316237323842000000000000003078303030303030303030303030303030303030303030303030303030303030306530303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307833373561356266393039636230323134336533363935636136353865303634316537333961613539306630303034646261393335373263343463646239643264"); + + let proof = connection + .blocks_dal() + .get_l1_batch_chain_merkle_path(L1BatchNumber(6)) + .await + .unwrap() + .unwrap(); + let proof = hex::encode(&bincode::serialize(&proof).unwrap()); + assert_eq!(proof, 
"030000000900000000000000420000000000000030783030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303542000000000000003078323465653435363834376535373364313635613832333634306632303834383139636331613865333433316562633635633865363064333435343266313637324200000000000000307863633463343165646230633230333133343862323932623736386539626163316565386339326330396566386133323737633265636534303963313264383661420000000000000030783533656463316635616437396335393939626435373864666331333566396335316562643766616661343538356236346637316431356232646365316237323842000000000000003078303030303030303030303030303030303030303030303030303030303030306530303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307833373561356266393039636230323134336533363935636136353865303634316537333961613539306630303034646261393335373263343463646239643264"); +} + +async fn get_all_db_txs(storage: &mut Connection<'_, Core>) -> Vec { + storage.transactions_dal().reset_mempool().await.unwrap(); + storage + .transactions_dal() + .sync_mempool(&[], &[], 0, 0, 1000) + .await + .unwrap() + .into_iter() + .map(|x| x.0) + .collect() +} + +async fn setup_db(connection_pool: &ConnectionPool) { + connection_pool + .connection() + .await + .unwrap() + .protocol_versions_dal() + .save_protocol_version_with_tx(&ProtocolVersion { + version: ProtocolSemanticVersion { + minor: (ProtocolVersionId::latest() as u16 - 1).try_into().unwrap(), + patch: 0.into(), + }, + ..Default::default() + }) + .await + 
.unwrap(); +} + +fn batch_roots() -> Vec { + [ + "5EEBBC173358620F7F61B69D80AFE503F76190396918EB7B27CEF4DB7C51D60A", + "B7E66115CDAAF5FFE70B53EF0AC6D0FF7D7BEB4341FEC6352A670B805AE15935", + "09BD2AD9C01C05F760BBEC6E59BF728566551B48C0DCBD01DB797D1C703122F8", + "B6E530FF878093B2D0CAF87780451A8F07922570E2D820B7A8541114E0D70FB5", + "B4F195844BA1792F3C1FB57C826B2DA60EA6EEBB90BF53F706120E49BB0486EF", + "118F6FAC96824D4E0845F7C7DF716969378F3F2038D9E9D0FEAD1FE01BA11A93", + ] + .into_iter() + .map(|s| H256::from_slice(&hex::decode(s).unwrap())) + .collect() +} + +fn chain_log_proofs() -> Vec<(L1BatchNumber, ChainAggProof)> { + vec![ + ( + L1BatchNumber(5), + ChainAggProof { + chain_id_leaf_proof: vec![ + H256::from_slice( + &hex::decode( + "0924928c1377a6cf24c39c2d46f8eb9df23e811b26dc3527e548396fd4e173b1", + ) + .unwrap(), + ), + H256::from_slice( + &hex::decode( + "375a5bf909cb02143e3695ca658e0641e739aa590f0004dba93572c44cdb9d2d", + ) + .unwrap(), + ), + ], + chain_id_leaf_proof_mask: 3, + }, + ), + ( + L1BatchNumber(9), + ChainAggProof { + chain_id_leaf_proof: vec![ + H256::from_slice( + &hex::decode( + "0924928c1377a6cf24c39c2d46f8eb9df23e811b26dc3527e548396fd4e173b1", + ) + .unwrap(), + ), + H256::from_slice( + &hex::decode( + "a378a20ca27b7ae3071d1bd7c2ada00469abc57e429dd6f48a83092db7059a18", + ) + .unwrap(), + ), + ], + chain_id_leaf_proof_mask: 3, + }, + ), + ( + L1BatchNumber(11), + ChainAggProof { + chain_id_leaf_proof: vec![ + H256::from_slice( + &hex::decode( + "0924928c1377a6cf24c39c2d46f8eb9df23e811b26dc3527e548396fd4e173b1", + ) + .unwrap(), + ), + H256::from_slice( + &hex::decode( + "375a5bf909cb02143e3695ca658e0641e739aa590f0004dba93572c44cdb9d2d", + ) + .unwrap(), + ), + ], + chain_id_leaf_proof_mask: 3, + }, + ), + ( + L1BatchNumber(13), + ChainAggProof { + chain_id_leaf_proof: vec![ + H256::from_slice( + &hex::decode( + "0924928c1377a6cf24c39c2d46f8eb9df23e811b26dc3527e548396fd4e173b1", + ) + .unwrap(), + ), + H256::from_slice( + &hex::decode( + 
"550c1751c8e7dbafc890992f4552366f0dd5eb6e43be555968bdac8c72edf1ba", + ) + .unwrap(), + ), + ], + chain_id_leaf_proof_mask: 3, + }, + ), + ( + L1BatchNumber(14), + ChainAggProof { + chain_id_leaf_proof: vec![ + H256::from_slice( + &hex::decode( + "0924928c1377a6cf24c39c2d46f8eb9df23e811b26dc3527e548396fd4e173b1", + ) + .unwrap(), + ), + H256::from_slice( + &hex::decode( + "375a5bf909cb02143e3695ca658e0641e739aa590f0004dba93572c44cdb9d2d", + ) + .unwrap(), + ), + ], + chain_id_leaf_proof_mask: 3, + }, + ), + ] +} + +async fn setup_batch_roots( + connection_pool: &ConnectionPool, + number_of_processed_batches: usize, +) { + let batch_roots = batch_roots(); + + let mut connection = connection_pool.connection().await.unwrap(); + + assert!(number_of_processed_batches <= batch_roots.len()); + for (i, root) in batch_roots.into_iter().enumerate() { + let batch_number = L1BatchNumber(i as u32 + 1); + let header = L1BatchHeader::new( + batch_number, + i as u64, + Default::default(), + (ProtocolVersionId::latest() as u16 - 1).try_into().unwrap(), + ); + connection + .blocks_dal() + .insert_mock_l1_batch(&header) + .await + .unwrap(); + connection + .blocks_dal() + .save_l1_batch_commitment_artifacts( + batch_number, + &L1BatchCommitmentArtifacts { + l2_l1_merkle_root: root, + ..Default::default() + }, + ) + .await + .unwrap(); + + let eth_tx_id = connection + .eth_sender_dal() + .save_eth_tx( + i as u64, + Default::default(), + AggregatedActionType::Execute, + Default::default(), + Default::default(), + Default::default(), + Default::default(), + true, + ) + .await + .unwrap() + .id; + connection + .eth_sender_dal() + .set_chain_id(eth_tx_id, SL_CHAIN_ID.0) + .await + .unwrap(); + connection + .blocks_dal() + .set_eth_tx_id( + batch_number..=batch_number, + eth_tx_id, + AggregatedActionType::Execute, + ) + .await + .unwrap(); + + if i < number_of_processed_batches { + connection + .blocks_dal() + .set_batch_chain_merkle_path( + batch_number, + BatchAndChainMerklePath { + 
batch_proof_len: 0, + proof: Vec::new(), + }, + ) + .await + .unwrap() + } + } +} diff --git a/core/node/genesis/Cargo.toml b/core/node/genesis/Cargo.toml index d625d7186bdf..4607db282728 100644 --- a/core/node/genesis/Cargo.toml +++ b/core/node/genesis/Cargo.toml @@ -26,3 +26,4 @@ anyhow.workspace = true itertools.workspace = true thiserror.workspace = true tracing.workspace = true +bincode.workspace = true diff --git a/core/node/genesis/src/lib.rs b/core/node/genesis/src/lib.rs index b0fb37b7f537..0d5387f44352 100644 --- a/core/node/genesis/src/lib.rs +++ b/core/node/genesis/src/lib.rs @@ -2,7 +2,7 @@ //! It initializes the Merkle tree with the basic setup (such as fields of special service accounts), //! setups the required databases, and outputs the data required to initialize a smart contract. -use std::fmt::Formatter; +use std::{collections::HashMap, fmt::Formatter}; use anyhow::Context as _; use zksync_config::GenesisConfig; @@ -10,13 +10,13 @@ use zksync_contracts::{ hyperchain_contract, verifier_contract, BaseSystemContracts, BaseSystemContractsHashes, SET_CHAIN_ID_EVENT, }; -use zksync_dal::{Connection, Core, CoreDal, DalError}; +use zksync_dal::{custom_genesis_export_dal::GenesisState, Connection, Core, CoreDal, DalError}; use zksync_eth_client::{CallFunctionArgs, EthInterface}; use zksync_merkle_tree::{domain::ZkSyncTree, TreeInstruction}; use zksync_multivm::utils::get_max_gas_per_pubdata_byte; use zksync_system_constants::PRIORITY_EXPIRATION; use zksync_types::{ - block::{BlockGasCount, DeployedContract, L1BatchHeader, L2BlockHasher, L2BlockHeader}, + block::{DeployedContract, L1BatchHeader, L2BlockHasher, L2BlockHeader}, bytecode::BytecodeHash, commitment::{CommitmentInput, L1BatchCommitment}, fee_model::BatchFeeInput, @@ -25,18 +25,19 @@ use zksync_types::{ system_contracts::get_system_smart_contracts, u256_to_h256, web3::{BlockNumber, FilterBuilder}, + zk_evm_types::LogQuery, AccountTreeId, Address, Bloom, L1BatchNumber, L1ChainId, L2BlockNumber, 
L2ChainId, - ProtocolVersion, ProtocolVersionId, StorageKey, H256, U256, + ProtocolVersion, ProtocolVersionId, StorageKey, StorageLog, H256, U256, }; use crate::utils::{ add_eth_token, get_deduped_log_queries, insert_base_system_contracts_to_factory_deps, - insert_system_contracts, save_genesis_l1_batch_metadata, + insert_deduplicated_writes_and_protective_reads, insert_factory_deps, insert_storage_logs, + save_genesis_l1_batch_metadata, }; - #[cfg(test)] mod tests; -mod utils; +pub mod utils; pub use crate::utils::get_storage_logs; @@ -186,53 +187,103 @@ pub fn mock_genesis_config() -> GenesisConfig { fee_account: Default::default(), dummy_verifier: false, l1_batch_commit_data_generator_mode: Default::default(), + custom_genesis_state_path: None, } } -// Insert genesis batch into the database -pub async fn insert_genesis_batch( +pub fn make_genesis_batch_params( + deduped_log_queries: Vec, + base_system_contract_hashes: BaseSystemContractsHashes, + protocol_version: ProtocolVersionId, +) -> (GenesisBatchParams, L1BatchCommitment) { + let storage_logs = deduped_log_queries + .into_iter() + .filter(|log_query| log_query.rw_flag) // only writes + .enumerate() + .map(|(index, log)| { + TreeInstruction::write( + StorageKey::new(AccountTreeId::new(log.address), u256_to_h256(log.key)) + .hashed_key_u256(), + (index + 1) as u64, + u256_to_h256(log.written_value), + ) + }) + .collect::>(); + + let metadata = ZkSyncTree::process_genesis_batch(&storage_logs); + let root_hash = metadata.root_hash; + let rollup_last_leaf_index = metadata.leaf_count + 1; + + let commitment_input = CommitmentInput::for_genesis_batch( + root_hash, + rollup_last_leaf_index, + base_system_contract_hashes, + protocol_version, + ); + let block_commitment = L1BatchCommitment::new(commitment_input); + let commitment = block_commitment.hash().commitment; + + ( + GenesisBatchParams { + root_hash, + commitment, + rollup_last_leaf_index, + }, + block_commitment, + ) +} + +pub async fn 
insert_genesis_batch_with_custom_state( storage: &mut Connection<'_, Core>, genesis_params: &GenesisParams, + custom_genesis_state: Option<GenesisState>, ) -> Result<GenesisBatchParams, GenesisError> { let mut transaction = storage.start_transaction().await?; let verifier_config = L1VerifierConfig { snark_wrapper_vk_hash: genesis_params.config.snark_wrapper_vk_hash, }; - create_genesis_l1_batch( + // if a custom genesis state was provided, read storage logs and factory dependencies from there + let (storage_logs, factory_deps): (Vec<StorageLog>, HashMap<H256, Vec<u8>>) = + match custom_genesis_state { + Some(r) => ( + r.storage_logs + .into_iter() + .map(|x| StorageLog::from(&x)) + .collect(), + r.factory_deps + .into_iter() + .map(|f| (H256(f.bytecode_hash), f.bytecode)) + .collect(), + ), + None => ( + get_storage_logs(&genesis_params.system_contracts), + genesis_params + .system_contracts + .iter() + .map(|c| { + ( + BytecodeHash::for_bytecode(&c.bytecode).value(), + c.bytecode.clone(), + ) + }) + .collect(), + ), + }; + + // This action disregards how leaf indices used to be ordered before, and it reorders them by + // sorting by <address, key>, which is required for calculating genesis parameters.
+ let deduped_log_queries = create_genesis_l1_batch_from_storage_logs_and_factory_deps( &mut transaction, genesis_params.protocol_version(), genesis_params.base_system_contracts(), - genesis_params.system_contracts(), + &storage_logs, + factory_deps, verifier_config, ) .await?; tracing::info!("chain_schema_genesis is complete"); - let deduped_log_queries = - get_deduped_log_queries(&get_storage_logs(genesis_params.system_contracts())); - - let (deduplicated_writes, _): (Vec<_>, Vec<_>) = deduped_log_queries - .into_iter() - .partition(|log_query| log_query.rw_flag); - - let storage_logs: Vec = deduplicated_writes - .iter() - .enumerate() - .map(|(index, log)| { - TreeInstruction::write( - StorageKey::new(AccountTreeId::new(log.address), u256_to_h256(log.key)) - .hashed_key_u256(), - (index + 1) as u64, - u256_to_h256(log.written_value), - ) - }) - .collect(); - - let metadata = ZkSyncTree::process_genesis_batch(&storage_logs); - let genesis_root_hash = metadata.root_hash; - let rollup_last_leaf_index = metadata.leaf_count + 1; - let base_system_contract_hashes = BaseSystemContractsHashes { bootloader: genesis_params .config @@ -244,27 +295,31 @@ pub async fn insert_genesis_batch( .ok_or(GenesisError::MalformedConfig("default_aa_hash"))?, evm_emulator: genesis_params.config.evm_emulator_hash, }; - let commitment_input = CommitmentInput::for_genesis_batch( - genesis_root_hash, - rollup_last_leaf_index, + + let (genesis_batch_params, block_commitment) = make_genesis_batch_params( + deduped_log_queries, base_system_contract_hashes, genesis_params.minor_protocol_version(), ); - let block_commitment = L1BatchCommitment::new(commitment_input); save_genesis_l1_batch_metadata( &mut transaction, block_commitment.clone(), - genesis_root_hash, - rollup_last_leaf_index, + genesis_batch_params.root_hash, + genesis_batch_params.rollup_last_leaf_index, ) .await?; transaction.commit().await?; - Ok(GenesisBatchParams { - root_hash: genesis_root_hash, - commitment: 
block_commitment.hash().commitment, - rollup_last_leaf_index, - }) + + Ok(genesis_batch_params) +} + +// Insert genesis batch into the database +pub async fn insert_genesis_batch( + storage: &mut Connection<'_, Core>, + genesis_params: &GenesisParams, +) -> Result { + insert_genesis_batch_with_custom_state(storage, genesis_params, None).await } pub async fn is_genesis_needed(storage: &mut Connection<'_, Core>) -> Result { @@ -317,6 +372,7 @@ pub async fn validate_genesis_params( pub async fn ensure_genesis_state( storage: &mut Connection<'_, Core>, genesis_params: &GenesisParams, + custom_genesis_state: Option, ) -> Result { let mut transaction = storage.start_transaction().await?; @@ -334,7 +390,12 @@ pub async fn ensure_genesis_state( root_hash, commitment, rollup_last_leaf_index, - } = insert_genesis_batch(&mut transaction, genesis_params).await?; + } = insert_genesis_batch_with_custom_state( + &mut transaction, + genesis_params, + custom_genesis_state, + ) + .await?; let expected_root_hash = genesis_params .config @@ -372,14 +433,14 @@ pub async fn ensure_genesis_state( Ok(root_hash) } -#[allow(clippy::too_many_arguments)] -pub async fn create_genesis_l1_batch( +pub(crate) async fn create_genesis_l1_batch_from_storage_logs_and_factory_deps( storage: &mut Connection<'_, Core>, protocol_version: ProtocolSemanticVersion, base_system_contracts: &BaseSystemContracts, - system_contracts: &[DeployedContract], + storage_logs: &[StorageLog], + factory_deps: HashMap>, l1_verifier_config: L1VerifierConfig, -) -> Result<(), GenesisError> { +) -> Result, GenesisError> { let version = ProtocolVersion { version: protocol_version, timestamp: 0, @@ -426,14 +487,7 @@ pub async fn create_genesis_l1_batch( .await?; transaction .blocks_dal() - .mark_l1_batch_as_sealed( - &genesis_l1_batch_header, - &[], - BlockGasCount::default(), - &[], - &[], - Default::default(), - ) + .mark_l1_batch_as_sealed(&genesis_l1_batch_header, &[], &[], &[], Default::default()) .await?; transaction 
.blocks_dal() @@ -444,6 +498,33 @@ pub async fn create_genesis_l1_batch( .mark_l2_blocks_as_executed_in_l1_batch(L1BatchNumber(0)) .await?; + insert_base_system_contracts_to_factory_deps(&mut transaction, base_system_contracts).await?; + + let mut genesis_transaction = transaction.start_transaction().await?; + + insert_storage_logs(&mut genesis_transaction, storage_logs).await?; + let dedup_log_queries = get_deduped_log_queries(storage_logs); + insert_deduplicated_writes_and_protective_reads( + &mut genesis_transaction, + dedup_log_queries.as_slice(), + ) + .await?; + insert_factory_deps(&mut genesis_transaction, factory_deps).await?; + genesis_transaction.commit().await?; + add_eth_token(&mut transaction).await?; + + transaction.commit().await?; + Ok(dedup_log_queries) +} + +#[allow(clippy::too_many_arguments)] +pub async fn create_genesis_l1_batch( + storage: &mut Connection<'_, Core>, + protocol_version: ProtocolSemanticVersion, + base_system_contracts: &BaseSystemContracts, + system_contracts: &[DeployedContract], + l1_verifier_config: L1VerifierConfig, +) -> Result<(), GenesisError> { let storage_logs = get_storage_logs(system_contracts); let factory_deps = system_contracts @@ -456,11 +537,15 @@ pub async fn create_genesis_l1_batch( }) .collect(); - insert_base_system_contracts_to_factory_deps(&mut transaction, base_system_contracts).await?; - insert_system_contracts(&mut transaction, factory_deps, &storage_logs).await?; - add_eth_token(&mut transaction).await?; - - transaction.commit().await?; + create_genesis_l1_batch_from_storage_logs_and_factory_deps( + storage, + protocol_version, + base_system_contracts, + &storage_logs, + factory_deps, + l1_verifier_config, + ) + .await?; Ok(()) } diff --git a/core/node/genesis/src/utils.rs b/core/node/genesis/src/utils.rs index 849c282a6d49..71c9e295da75 100644 --- a/core/node/genesis/src/utils.rs +++ b/core/node/genesis/src/utils.rs @@ -74,7 +74,7 @@ pub fn get_storage_logs(system_contracts: &[DeployedContract]) -> 
Vec Vec { +pub fn get_deduped_log_queries(storage_logs: &[StorageLog]) -> Vec { // we don't produce proof for the genesis block, // but we still need to populate the table // to have the correct initial state of the merkle tree @@ -172,30 +172,32 @@ pub(super) async fn save_genesis_l1_batch_metadata( Ok(()) } -pub(super) async fn insert_system_contracts( - storage: &mut Connection<'_, Core>, - factory_deps: HashMap>, +pub(super) async fn insert_storage_logs( + transaction: &mut Connection<'_, Core>, storage_logs: &[StorageLog], ) -> Result<(), GenesisError> { - tracing::warn!("Inserting {} system contracts", factory_deps.len()); - let (deduplicated_writes, protective_reads): (Vec<_>, Vec<_>) = - get_deduped_log_queries(storage_logs) - .into_iter() - .partition(|log_query| log_query.rw_flag); - - let mut transaction = storage.start_transaction().await?; transaction .storage_logs_dal() .insert_storage_logs(L2BlockNumber(0), storage_logs) .await?; + Ok(()) +} + +pub(super) async fn insert_deduplicated_writes_and_protective_reads( + transaction: &mut Connection<'_, Core>, + deduped_log_queries: &[LogQuery], +) -> Result<(), GenesisError> { + let (deduplicated_writes, protective_reads): (Vec<_>, Vec<_>) = deduped_log_queries + .iter() + .partition(|log_query| log_query.rw_flag); transaction .storage_logs_dedup_dal() .insert_protective_reads( L1BatchNumber(0), &protective_reads - .into_iter() - .map(StorageLog::from) + .iter() + .map(|log_query: &LogQuery| StorageLog::from(*log_query)) // Pass the log_query to from() .collect::>(), ) .await?; @@ -206,16 +208,22 @@ pub(super) async fn insert_system_contracts( StorageKey::new(AccountTreeId::new(log.address), u256_to_h256(log.key)).hashed_key() }) .collect(); + transaction .storage_logs_dedup_dal() .insert_initial_writes(L1BatchNumber(0), &written_storage_keys) .await?; + Ok(()) +} + +pub(super) async fn insert_factory_deps( + transaction: &mut Connection<'_, Core>, + factory_deps: HashMap>, +) -> Result<(), GenesisError> { 
transaction .factory_deps_dal() .insert_factory_deps(L2BlockNumber(0), &factory_deps) .await?; - - transaction.commit().await?; Ok(()) } diff --git a/core/node/house_keeper/src/blocks_state_reporter.rs b/core/node/house_keeper/src/blocks_state_reporter.rs index 6f85aa0fbb09..abd2c6e8802d 100644 --- a/core/node/house_keeper/src/blocks_state_reporter.rs +++ b/core/node/house_keeper/src/blocks_state_reporter.rs @@ -22,7 +22,10 @@ impl L1BatchMetricsReporter { async fn report_metrics(&self) -> anyhow::Result<()> { let mut block_metrics = vec![]; - let mut conn = self.connection_pool.connection().await?; + let mut conn = self + .connection_pool + .connection_tagged("house_keeper") + .await?; let last_l1_batch = conn.blocks_dal().get_sealed_l1_batch_number().await?; if let Some(number) = last_l1_batch { block_metrics.push((number, BlockStage::Sealed)); diff --git a/core/node/metadata_calculator/src/pruning.rs b/core/node/metadata_calculator/src/pruning.rs index 4ac05e55c302..77e4c30dc1d1 100644 --- a/core/node/metadata_calculator/src/pruning.rs +++ b/core/node/metadata_calculator/src/pruning.rs @@ -101,8 +101,8 @@ impl MerkleTreePruningTask { let pruning_info = storage.pruning_dal().get_pruning_info().await?; drop(storage); - if let Some(l1_batch_number) = pruning_info.last_hard_pruned_l1_batch { - let target_retained_l1_batch_number = l1_batch_number + 1; + if let Some(pruned) = pruning_info.last_hard_pruned { + let target_retained_l1_batch_number = pruned.l1_batch + 1; let target_retained_version = u64::from(target_retained_l1_batch_number.0); let Ok(prev_target_version) = pruner_handle.set_target_retained_version(target_retained_version) @@ -148,7 +148,7 @@ mod tests { use test_casing::test_casing; use zksync_node_genesis::{insert_genesis_batch, GenesisParams}; use zksync_node_test_utils::prepare_recovery_snapshot; - use zksync_types::{L1BatchNumber, L2BlockNumber}; + use zksync_types::{L1BatchNumber, L2BlockNumber, H256}; use super::*; use crate::{ @@ -194,6 +194,11 
@@ mod tests { .hard_prune_batches_range(L1BatchNumber(3), L2BlockNumber(3)) .await .unwrap(); + storage + .pruning_dal() + .insert_hard_pruning_log(L1BatchNumber(3), L2BlockNumber(3), H256::zero()) + .await + .unwrap(); while reader.clone().info().await.min_l1_batch_number.unwrap() <= L1BatchNumber(3) { tokio::time::sleep(POLL_INTERVAL).await; @@ -322,9 +327,10 @@ mod tests { // Prune first 3 created batches in Postgres. storage .pruning_dal() - .hard_prune_batches_range( + .insert_hard_pruning_log( snapshot_recovery.l1_batch_number + 3, snapshot_recovery.l2_block_number + 3, + H256::zero(), // not used ) .await .unwrap(); diff --git a/core/node/metadata_calculator/src/recovery/mod.rs b/core/node/metadata_calculator/src/recovery/mod.rs index ce7207471791..372cbc96a6df 100644 --- a/core/node/metadata_calculator/src/recovery/mod.rs +++ b/core/node/metadata_calculator/src/recovery/mod.rs @@ -132,7 +132,7 @@ impl InitParameters { let (l1_batch, l2_block); let mut expected_root_hash = None; - match (recovery_status, pruning_info.last_hard_pruned_l2_block) { + match (recovery_status, pruning_info.last_hard_pruned) { (Some(recovery), None) => { tracing::warn!( "Snapshot recovery {recovery:?} is present on the node, but pruning info is empty; assuming no pruning happened" @@ -141,21 +141,20 @@ impl InitParameters { l2_block = recovery.l2_block_number; expected_root_hash = Some(recovery.l1_batch_root_hash); } - (Some(recovery), Some(pruned_l2_block)) => { + (Some(recovery), Some(pruned)) => { // We have both recovery and some pruning on top of it. 
- l2_block = pruned_l2_block.max(recovery.l2_block_number); - l1_batch = pruning_info - .last_hard_pruned_l1_batch - .with_context(|| format!("malformed pruning info: {pruning_info:?}"))?; - if l1_batch == recovery.l1_batch_number { + l2_block = pruned.l2_block.max(recovery.l2_block_number); + l1_batch = pruned.l1_batch; + if let Some(root_hash) = pruned.l1_batch_root_hash { + expected_root_hash = Some(root_hash); + } else if l1_batch == recovery.l1_batch_number { expected_root_hash = Some(recovery.l1_batch_root_hash); } } - (None, Some(pruned_l2_block)) => { - l2_block = pruned_l2_block; - l1_batch = pruning_info - .last_hard_pruned_l1_batch - .with_context(|| format!("malformed pruning info: {pruning_info:?}"))?; + (None, Some(pruned)) => { + l2_block = pruned.l2_block; + l1_batch = pruned.l1_batch; + expected_root_hash = pruned.l1_batch_root_hash; } (None, None) => return Ok(None), }; @@ -384,9 +383,9 @@ impl AsyncTreeRecovery { snapshot_l2_block: L2BlockNumber, ) -> anyhow::Result<()> { let pruning_info = storage.pruning_dal().get_pruning_info().await?; - if let Some(last_hard_pruned_l2_block) = pruning_info.last_hard_pruned_l2_block { + if let Some(pruned) = pruning_info.last_hard_pruned { anyhow::ensure!( - last_hard_pruned_l2_block == snapshot_l2_block, + pruned.l2_block == snapshot_l2_block, "Additional data was pruned compared to tree recovery L2 block #{snapshot_l2_block}: {pruning_info:?}. \ Continuing recovery is impossible; to recover the tree, drop its RocksDB directory, stop pruning and restart recovery" ); diff --git a/core/node/metadata_calculator/src/recovery/tests.rs b/core/node/metadata_calculator/src/recovery/tests.rs index 4b2ba578a5b6..8b5371441162 100644 --- a/core/node/metadata_calculator/src/recovery/tests.rs +++ b/core/node/metadata_calculator/src/recovery/tests.rs @@ -1,6 +1,6 @@ //! Tests for metadata calculator snapshot recovery. 
-use std::{collections::HashMap, path::Path, sync::Mutex}; +use std::{path::Path, sync::Mutex}; use assert_matches::assert_matches; use tempfile::TempDir; @@ -16,7 +16,7 @@ use zksync_merkle_tree::{domain::ZkSyncTree, recovery::PersistenceThreadHandle, use zksync_node_genesis::{insert_genesis_batch, GenesisParams}; use zksync_node_test_utils::prepare_recovery_snapshot; use zksync_storage::RocksDB; -use zksync_types::{L1BatchNumber, U256}; +use zksync_types::L1BatchNumber; use super::*; use crate::{ @@ -28,6 +28,8 @@ use crate::{ MetadataCalculator, MetadataCalculatorConfig, }; +impl HandleRecoveryEvent for () {} + #[test] fn calculating_chunk_count() { let mut snapshot = InitParameters { @@ -116,9 +118,16 @@ async fn prune_storage(pool: &ConnectionPool, pruned_l1_batch: L1BatchNumb .await .unwrap() .expect("L1 batch not present in Postgres"); + let root_hash = storage + .blocks_dal() + .get_l1_batch_state_root(pruned_l1_batch) + .await + .unwrap() + .expect("L1 batch does not have root hash"); + storage .pruning_dal() - .soft_prune_batches_range(pruned_l1_batch, pruned_l2_block) + .insert_soft_pruning_log(pruned_l1_batch, pruned_l2_block) .await .unwrap(); let pruning_stats = storage @@ -130,6 +139,11 @@ async fn prune_storage(pool: &ConnectionPool, pruned_l1_batch: L1BatchNumb pruning_stats.deleted_l1_batches > 0 && pruning_stats.deleted_l2_blocks > 0, "{pruning_stats:?}" ); + storage + .pruning_dal() + .insert_hard_pruning_log(pruned_l1_batch, pruned_l2_block, root_hash) + .await + .unwrap(); } #[tokio::test] @@ -425,8 +439,7 @@ async fn entire_recovery_workflow(case: RecoveryWorkflowCase) { calculator_task.await.expect("calculator panicked").unwrap(); } -/// `pruned_batches == 0` is a sanity check. 
-#[test_casing(4, [0, 1, 2, 4])] +#[test_casing(3, [1, 2, 4])] #[tokio::test] async fn recovery_with_further_pruning(pruned_batches: u32) { const NEW_BATCH_COUNT: usize = 5; @@ -459,41 +472,80 @@ async fn recovery_with_further_pruning(pruned_batches: u32) { .await; db_transaction.commit().await.unwrap(); - let all_logs = storage - .storage_logs_dal() - .dump_all_storage_logs_for_tests() - .await; - assert_eq!(all_logs.len(), 400); - let initial_writes = storage - .storage_logs_dedup_dal() - .dump_all_initial_writes_for_tests() - .await; - let initial_writes: HashMap<_, _> = initial_writes - .into_iter() - .map(|write| (write.hashed_key, write.index)) - .collect(); - drop(storage); + // Run the first tree instance to compute root hashes for all batches. + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let (calculator, _) = + setup_calculator(&temp_dir.path().join("first"), pool.clone(), true).await; + let expected_root_hash = run_calculator(calculator).await; - let instructions: Vec<_> = all_logs - .iter() - .map(|log| { - let leaf_index = initial_writes[&log.hashed_key]; - let key = U256::from_little_endian(log.hashed_key.as_bytes()); - TreeInstruction::write(key, leaf_index, log.value) - }) - .collect(); - let expected_root_hash = ZkSyncTree::process_genesis_batch(&instructions).root_hash; - - if pruned_batches > 0 { - prune_storage(&pool, snapshot_recovery.l1_batch_number + pruned_batches).await; - } + prune_storage(&pool, snapshot_recovery.l1_batch_number + pruned_batches).await; // Create a new tree instance. It should recover and process the remaining batches. 
let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); - let (calculator, _) = setup_calculator(temp_dir.path(), pool, true).await; + let (calculator, _) = setup_calculator(&temp_dir.path().join("new"), pool, true).await; assert_eq!(run_calculator(calculator).await, expected_root_hash); } +#[tokio::test] +async fn detecting_root_hash_mismatch_after_pruning() { + let pool = ConnectionPool::<Core>::test_pool().await; + let snapshot_logs = gen_storage_logs(100..300, 1).pop().unwrap(); + let mut storage = pool.connection().await.unwrap(); + let mut db_transaction = storage.start_transaction().await.unwrap(); + let snapshot_recovery = prepare_recovery_snapshot( + &mut db_transaction, + L1BatchNumber(23), + L2BlockNumber(42), + &snapshot_logs, + ) + .await; + + let logs = gen_storage_logs(200..400, 5); + extend_db_state_from_l1_batch( + &mut db_transaction, + snapshot_recovery.l1_batch_number + 1, + snapshot_recovery.l2_block_number + 1, + logs, + ) + .await; + // Intentionally add an incorrect root hash of the batch to be pruned.
+ db_transaction + .blocks_dal() + .set_l1_batch_hash(snapshot_recovery.l1_batch_number + 1, H256::repeat_byte(42)) + .await + .unwrap(); + db_transaction.commit().await.unwrap(); + + prune_storage(&pool, snapshot_recovery.l1_batch_number + 1).await; + + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let config = MetadataCalculatorRecoveryConfig::default(); + let (tree, _) = create_tree_recovery(temp_dir.path(), L1BatchNumber(1), &config).await; + let (_stop_sender, stop_receiver) = watch::channel(false); + let recovery_options = RecoveryOptions { + chunk_count: 5, + concurrency_limit: 1, + events: Box::new(()), + }; + let init_params = InitParameters::new(&pool, &config) + .await + .unwrap() + .expect("no init params"); + assert_eq!(init_params.expected_root_hash, Some(H256::repeat_byte(42))); + + let err = tree + .recover(init_params, recovery_options, &pool, &stop_receiver) + .await + .unwrap_err(); + let err = format!("{err:#}").to_lowercase(); + assert!(err.contains("root hash"), "{err}"); + + // Because of an abrupt error, terminating a RocksDB instance needs to be handled explicitly. + tokio::task::spawn_blocking(RocksDB::await_rocksdb_termination) + .await + .unwrap(); +} + #[derive(Debug)] struct PruningEventListener { pool: ConnectionPool, @@ -519,6 +571,11 @@ async fn pruning_during_recovery_is_detected() { let logs = gen_storage_logs(200..400, 5); extend_db_state(&mut storage, logs).await; drop(storage); + + // Set root hashes for all L1 batches in Postgres. 
+ let (calculator, _) = + setup_calculator(&temp_dir.path().join("first"), pool.clone(), true).await; + run_calculator(calculator).await; prune_storage(&pool, L1BatchNumber(1)).await; let tree_path = temp_dir.path().join("recovery"); diff --git a/core/node/metadata_calculator/src/tests.rs b/core/node/metadata_calculator/src/tests.rs index 9717ce5682ce..ff939d1ae582 100644 --- a/core/node/metadata_calculator/src/tests.rs +++ b/core/node/metadata_calculator/src/tests.rs @@ -402,7 +402,7 @@ async fn error_on_pruned_next_l1_batch(sealed_protective_reads: bool) { extend_db_state(&mut storage, new_logs).await; storage .pruning_dal() - .soft_prune_batches_range(L1BatchNumber(5), L2BlockNumber(5)) + .insert_soft_pruning_log(L1BatchNumber(5), L2BlockNumber(5)) .await .unwrap(); storage @@ -410,6 +410,11 @@ async fn error_on_pruned_next_l1_batch(sealed_protective_reads: bool) { .hard_prune_batches_range(L1BatchNumber(5), L2BlockNumber(5)) .await .unwrap(); + storage + .pruning_dal() + .insert_hard_pruning_log(L1BatchNumber(5), L2BlockNumber(5), H256::zero()) + .await + .unwrap(); // Sanity check: there should be no pruned batch headers. 
let next_l1_batch_header = storage .blocks_dal() @@ -696,7 +701,7 @@ async fn setup_calculator_with_options( ) -> MetadataCalculator { let mut storage = pool.connection().await.unwrap(); let pruning_info = storage.pruning_dal().get_pruning_info().await.unwrap(); - let has_pruning_logs = pruning_info.last_hard_pruned_l1_batch.is_some(); + let has_pruning_logs = pruning_info.last_hard_pruned.is_some(); if !has_pruning_logs && storage.blocks_dal().is_genesis_needed().await.unwrap() { insert_genesis_batch(&mut storage, &GenesisParams::mock()) .await diff --git a/core/node/metadata_calculator/src/updater.rs b/core/node/metadata_calculator/src/updater.rs index 17fd5d900eab..2758bd1501c0 100644 --- a/core/node/metadata_calculator/src/updater.rs +++ b/core/node/metadata_calculator/src/updater.rs @@ -186,7 +186,7 @@ impl TreeUpdater { ) -> anyhow::Result<()> { let pruning_info = storage.pruning_dal().get_pruning_info().await?; anyhow::ensure!( - Some(l1_batch_number) > pruning_info.last_soft_pruned_l1_batch, + pruning_info.last_soft_pruned.map_or(true, |info| info.l1_batch < l1_batch_number), "L1 batch #{l1_batch_number}, next to be processed by the tree, is pruned; the tree cannot continue operating" ); Ok(()) diff --git a/core/node/node_framework/Cargo.toml b/core/node/node_framework/Cargo.toml index e999735e93b5..7e3f57da3c36 100644 --- a/core/node/node_framework/Cargo.toml +++ b/core/node/node_framework/Cargo.toml @@ -54,6 +54,7 @@ zksync_node_storage_init.workspace = true zksync_external_price_api.workspace = true zksync_external_proof_integration_api.workspace = true zksync_logs_bloom_backfill.workspace = true +zksync_shared_metrics.workspace = true zksync_l1_recovery.workspace = true pin-project-lite.workspace = true @@ -62,6 +63,7 @@ thiserror.workspace = true async-trait.workspace = true futures.workspace = true anyhow.workspace = true +serde.workspace = true tokio = { workspace = true, features = ["rt"] } ctrlc.workspace = true semver.workspace = true diff --git 
a/core/node/node_framework/src/implementations/layers/consistency_checker.rs b/core/node/node_framework/src/implementations/layers/consistency_checker.rs index a9e99eb89ac4..a76b358b53b4 100644 --- a/core/node/node_framework/src/implementations/layers/consistency_checker.rs +++ b/core/node/node_framework/src/implementations/layers/consistency_checker.rs @@ -1,9 +1,9 @@ use zksync_consistency_checker::ConsistencyChecker; -use zksync_types::{commitment::L1BatchCommitmentMode, Address}; +use zksync_types::{commitment::L1BatchCommitmentMode, Address, L2ChainId}; use crate::{ implementations::resources::{ - eth_interface::EthInterfaceResource, + eth_interface::{EthInterfaceResource, GatewayEthInterfaceResource}, healthcheck::AppHealthCheckResource, pools::{MasterPool, PoolResource}, }, @@ -16,15 +16,17 @@ use crate::{ /// Wiring layer for the `ConsistencyChecker` (used by the external node). #[derive(Debug)] pub struct ConsistencyCheckerLayer { - diamond_proxy_addr: Address, + l1_diamond_proxy_addr: Address, max_batches_to_recheck: u32, commitment_mode: L1BatchCommitmentMode, + l2_chain_id: L2ChainId, } #[derive(Debug, FromContext)] #[context(crate = crate)] pub struct Input { pub l1_client: EthInterfaceResource, + pub gateway_client: Option, pub master_pool: PoolResource, #[context(default)] pub app_health: AppHealthCheckResource, @@ -39,14 +41,16 @@ pub struct Output { impl ConsistencyCheckerLayer { pub fn new( - diamond_proxy_addr: Address, + l1_diamond_proxy_addr: Address, max_batches_to_recheck: u32, commitment_mode: L1BatchCommitmentMode, + l2_chain_id: L2ChainId, ) -> ConsistencyCheckerLayer { Self { - diamond_proxy_addr, + l1_diamond_proxy_addr, max_batches_to_recheck, commitment_mode, + l2_chain_id, } } } @@ -63,17 +67,21 @@ impl WiringLayer for ConsistencyCheckerLayer { async fn wire(self, input: Self::Input) -> Result { // Get resources. 
let l1_client = input.l1_client.0; + let gateway_client = input.gateway_client.map(|c| c.0); let singleton_pool = input.master_pool.get_singleton().await?; let consistency_checker = ConsistencyChecker::new( l1_client, + gateway_client, self.max_batches_to_recheck, singleton_pool, self.commitment_mode, + self.l2_chain_id, ) + .await .map_err(WiringError::Internal)? - .with_diamond_proxy_addr(self.diamond_proxy_addr); + .with_l1_diamond_proxy_addr(self.l1_diamond_proxy_addr); input .app_health diff --git a/core/node/node_framework/src/implementations/layers/eth_sender/aggregator.rs b/core/node/node_framework/src/implementations/layers/eth_sender/aggregator.rs index 310580aeb3a3..d2be0b383393 100644 --- a/core/node/node_framework/src/implementations/layers/eth_sender/aggregator.rs +++ b/core/node/node_framework/src/implementations/layers/eth_sender/aggregator.rs @@ -9,6 +9,7 @@ use crate::{ implementations::resources::{ circuit_breakers::CircuitBreakersResource, eth_interface::{BoundEthInterfaceForBlobsResource, BoundEthInterfaceResource}, + healthcheck::AppHealthCheckResource, object_store::ObjectStoreResource, pools::{MasterPool, PoolResource, ReplicaPool}, }, @@ -55,6 +56,8 @@ pub struct Input { pub object_store: ObjectStoreResource, #[context(default)] pub circuit_breakers: CircuitBreakersResource, + #[context(default)] + pub app_health: AppHealthCheckResource, } #[derive(Debug, IntoContext)] @@ -108,8 +111,9 @@ impl WiringLayer for EthTxAggregatorLayer { let aggregator = Aggregator::new( config.clone(), object_store, - eth_client_blobs_addr.is_some(), + eth_client_blobs_addr, self.l1_batch_commit_data_generator_mode, + self.settlement_mode, ); let eth_tx_aggregator = EthTxAggregator::new( @@ -133,6 +137,12 @@ impl WiringLayer for EthTxAggregatorLayer { .insert(Box::new(FailedL1TransactionChecker { pool: replica_pool })) .await; + input + .app_health + .0 + .insert_component(eth_tx_aggregator.health_check()) + .map_err(WiringError::internal)?; + Ok(Output { 
eth_tx_aggregator }) } } diff --git a/core/node/node_framework/src/implementations/layers/eth_sender/manager.rs b/core/node/node_framework/src/implementations/layers/eth_sender/manager.rs index 5462fa575f94..e9ce4cc19e1a 100644 --- a/core/node/node_framework/src/implementations/layers/eth_sender/manager.rs +++ b/core/node/node_framework/src/implementations/layers/eth_sender/manager.rs @@ -8,6 +8,7 @@ use crate::{ circuit_breakers::CircuitBreakersResource, eth_interface::{BoundEthInterfaceForBlobsResource, BoundEthInterfaceResource}, gas_adjuster::GasAdjusterResource, + healthcheck::AppHealthCheckResource, pools::{MasterPool, PoolResource, ReplicaPool}, }, service::StopReceiver, @@ -48,6 +49,8 @@ pub struct Input { pub gas_adjuster: GasAdjusterResource, #[context(default)] pub circuit_breakers: CircuitBreakersResource, + #[context(default)] + pub app_health: AppHealthCheckResource, } #[derive(Debug, IntoContext)] @@ -114,6 +117,12 @@ impl WiringLayer for EthTxManagerLayer { .insert(Box::new(FailedL1TransactionChecker { pool: replica_pool })) .await; + input + .app_health + .0 + .insert_component(eth_tx_manager.health_check()) + .map_err(WiringError::internal)?; + Ok(Output { eth_tx_manager }) } } diff --git a/core/node/node_framework/src/implementations/layers/eth_watch.rs b/core/node/node_framework/src/implementations/layers/eth_watch.rs index e19828d85ccd..e871f5661d22 100644 --- a/core/node/node_framework/src/implementations/layers/eth_watch.rs +++ b/core/node/node_framework/src/implementations/layers/eth_watch.rs @@ -1,6 +1,7 @@ use zksync_config::{ContractsConfig, EthWatchConfig}; use zksync_contracts::chain_admin_contract; use zksync_eth_watch::{EthHttpQueryClient, EthWatch}; +use zksync_types::L2ChainId; use crate::{ implementations::resources::{ @@ -21,6 +22,7 @@ use crate::{ pub struct EthWatchLayer { eth_watch_config: EthWatchConfig, contracts_config: ContractsConfig, + chain_id: L2ChainId, } #[derive(Debug, FromContext)] @@ -38,10 +40,15 @@ pub struct 
Output { } impl EthWatchLayer { - pub fn new(eth_watch_config: EthWatchConfig, contracts_config: ContractsConfig) -> Self { + pub fn new( + eth_watch_config: EthWatchConfig, + contracts_config: ContractsConfig, + chain_id: L2ChainId, + ) -> Self { Self { eth_watch_config, contracts_config, + chain_id, } } } @@ -72,10 +79,11 @@ impl WiringLayer for EthWatchLayer { let eth_watch = EthWatch::new( &chain_admin_contract(), - Box::new(eth_client.clone()), Box::new(eth_client), + None, main_pool, self.eth_watch_config.poll_interval(), + self.chain_id, ) .await?; diff --git a/core/node/node_framework/src/implementations/layers/healtcheck_server.rs b/core/node/node_framework/src/implementations/layers/healtcheck_server.rs index 83a74c63cb45..3a4e3ca11569 100644 --- a/core/node/node_framework/src/implementations/layers/healtcheck_server.rs +++ b/core/node/node_framework/src/implementations/layers/healtcheck_server.rs @@ -3,6 +3,8 @@ use std::sync::Arc; use zksync_config::configs::api::HealthCheckConfig; use zksync_health_check::AppHealthCheck; use zksync_node_api_server::healthcheck::HealthCheckHandle; +use zksync_shared_metrics::metadata::{GitMetadata, RustMetadata, GIT_METRICS, RUST_METRICS}; +use zksync_web3_decl::jsonrpsee::core::Serialize; use crate::{ implementations::resources::healthcheck::AppHealthCheckResource, @@ -12,6 +14,13 @@ use crate::{ FromContext, IntoContext, }; +/// Full metadata of the compiled binary. 
+#[derive(Debug, Serialize)] +pub struct BinMetadata { + pub rust: &'static RustMetadata, + pub git: &'static GitMetadata, +} + /// Wiring layer for health check server /// /// Expects other layers to insert different components' health checks @@ -73,8 +82,12 @@ impl Task for HealthCheckTask { } async fn run(mut self: Box, mut stop_receiver: StopReceiver) -> anyhow::Result<()> { + self.app_health_check.set_details(BinMetadata { + rust: RUST_METRICS.initialize(), + git: GIT_METRICS.initialize(), + }); let handle = - HealthCheckHandle::spawn_server(self.config.bind_addr(), self.app_health_check.clone()); + HealthCheckHandle::spawn_server(self.config.bind_addr(), self.app_health_check); stop_receiver.0.changed().await?; handle.stop().await; diff --git a/core/node/node_framework/src/implementations/layers/house_keeper.rs b/core/node/node_framework/src/implementations/layers/house_keeper.rs index 1e2bc568d50f..af59a73554ac 100644 --- a/core/node/node_framework/src/implementations/layers/house_keeper.rs +++ b/core/node/node_framework/src/implementations/layers/house_keeper.rs @@ -56,7 +56,7 @@ impl WiringLayer for HouseKeeperLayer { let l1_batch_metrics_reporter = L1BatchMetricsReporter::new( self.house_keeper_config .l1_batch_metrics_reporting_interval_ms, - replica_pool.clone(), + replica_pool, ); Ok(Output { diff --git a/core/node/node_framework/src/implementations/layers/mod.rs b/core/node/node_framework/src/implementations/layers/mod.rs index c96de9eb3e69..a4dc3272ae4e 100644 --- a/core/node/node_framework/src/implementations/layers/mod.rs +++ b/core/node/node_framework/src/implementations/layers/mod.rs @@ -24,7 +24,7 @@ pub mod node_storage_init; pub mod object_store; pub mod pk_signing_eth_client; pub mod pools_layer; -pub mod postgres_metrics; +pub mod postgres; pub mod prometheus_exporter; pub mod proof_data_handler; pub mod pruning; diff --git a/core/node/node_framework/src/implementations/layers/postgres.rs 
b/core/node/node_framework/src/implementations/layers/postgres.rs new file mode 100644 index 000000000000..bf602f1de631 --- /dev/null +++ b/core/node/node_framework/src/implementations/layers/postgres.rs @@ -0,0 +1,171 @@ +use std::{ + sync::Arc, + time::{Duration, Instant}, +}; + +use async_trait::async_trait; +use serde::Serialize; +use tokio::sync::RwLock; +use zksync_dal::{ + metrics::PostgresMetrics, system_dal::DatabaseMigration, ConnectionPool, Core, CoreDal, +}; + +use zksync_health_check::{CheckHealth, Health, HealthStatus}; + +use crate::{ + implementations::resources::{ + healthcheck::AppHealthCheckResource, + pools::{PoolResource, ReplicaPool}, + }, + service::StopReceiver, + task::{Task, TaskId, TaskKind}, + wiring_layer::{WiringError, WiringLayer}, + FromContext, IntoContext, +}; + +/// Execution interval for Postgres metrics and healthcheck tasks +const TASK_EXECUTION_INTERVAL: Duration = Duration::from_secs(60); + +/// Wiring layer for the Postgres metrics exporter and healthcheck.
+#[derive(Debug)] +pub struct PostgresLayer; + +#[derive(Debug, FromContext)] +#[context(crate = crate)] +pub struct Input { + pub replica_pool: PoolResource, + #[context(default)] + pub app_health: AppHealthCheckResource, +} + +#[derive(Debug, IntoContext)] +#[context(crate = crate)] +pub struct Output { + #[context(task)] + pub metrics_task: PostgresMetricsScrapingTask, +} + +#[async_trait::async_trait] +impl WiringLayer for PostgresLayer { + type Input = Input; + type Output = Output; + + fn layer_name(&self) -> &'static str { + "postgres_layer" + } + + async fn wire(self, input: Self::Input) -> Result { + let pool = input.replica_pool.get().await?; + let metrics_task = PostgresMetricsScrapingTask { + pool_for_metrics: pool.clone(), + }; + + let app_health = input.app_health.0; + app_health + .insert_custom_component(Arc::new(DatabaseHealthCheck { + polling_interval: TASK_EXECUTION_INTERVAL, + pool, + cached: RwLock::default(), + })) + .map_err(WiringError::internal)?; + + Ok(Output { metrics_task }) + } +} + +#[derive(Debug)] +pub struct PostgresMetricsScrapingTask { + pool_for_metrics: ConnectionPool, +} + +#[async_trait::async_trait] +impl Task for PostgresMetricsScrapingTask { + fn kind(&self) -> TaskKind { + TaskKind::UnconstrainedTask + } + + fn id(&self) -> TaskId { + "postgres_metrics_scraping".into() + } + + async fn run(self: Box, mut stop_receiver: StopReceiver) -> anyhow::Result<()> { + tokio::select! 
{ + () = PostgresMetrics::run_scraping(self.pool_for_metrics, TASK_EXECUTION_INTERVAL) => { + tracing::warn!("Postgres metrics scraping unexpectedly stopped"); + } + _ = stop_receiver.0.changed() => { + tracing::info!("Stop signal received, Postgres metrics scraping is shutting down"); + } + } + Ok(()) + } +} + +#[derive(Debug, Clone, Serialize)] +pub struct DatabaseInfo { + last_migration: DatabaseMigration, +} + +impl From for Health { + fn from(details: DatabaseInfo) -> Self { + Self::from(HealthStatus::Ready).with_details(details) + } +} + +#[derive(Debug)] +struct DatabaseHealthCheck { + polling_interval: Duration, + pool: ConnectionPool, + cached: RwLock>, +} + +impl DatabaseHealthCheck { + async fn update(&self) -> anyhow::Result { + let mut conn = self.pool.connection_tagged("postgres_healthcheck").await?; + let last_migration = conn.system_dal().get_last_migration().await?; + Ok(DatabaseInfo { last_migration }) + } + + fn validate_cache(&self, cache: Option<&(DatabaseInfo, Instant)>) -> Option { + let now = Instant::now(); + if let Some((cached, cached_at)) = cache { + let elapsed = now + .checked_duration_since(*cached_at) + .unwrap_or(Duration::ZERO); + (elapsed <= self.polling_interval).then(|| cached.clone()) + } else { + None + } + } +} + +#[async_trait] +impl CheckHealth for DatabaseHealthCheck { + fn name(&self) -> &'static str { + "database" + } + + // If the DB malfunctions, this method would time out, which would lead to the health check marked as failed. + async fn check_health(&self) -> Health { + let cached = self.cached.read().await.clone(); + if let Some(cache) = self.validate_cache(cached.as_ref()) { + return cache.into(); + } + + let mut cached_lock = self.cached.write().await; + // The cached value may have been updated by another task. 
+ if let Some(cache) = self.validate_cache(cached_lock.as_ref()) { + return cache.into(); + } + + match self.update().await { + Ok(info) => { + *cached_lock = Some((info.clone(), Instant::now())); + info.into() + } + Err(err) => { + tracing::warn!("Error updating database health: {err:#}"); + cached.map_or_else(|| HealthStatus::Affected.into(), |(info, _)| info.into()) + } + } + } +} diff --git a/core/node/node_framework/src/implementations/layers/postgres_metrics.rs b/core/node/node_framework/src/implementations/layers/postgres_metrics.rs deleted file mode 100644 index 238bee578678..000000000000 --- a/core/node/node_framework/src/implementations/layers/postgres_metrics.rs +++ /dev/null @@ -1,75 +0,0 @@ -use std::time::Duration; - -use zksync_dal::{metrics::PostgresMetrics, ConnectionPool, Core}; - -use crate::{ - implementations::resources::pools::{PoolResource, ReplicaPool}, - service::StopReceiver, - task::{Task, TaskId, TaskKind}, - wiring_layer::{WiringError, WiringLayer}, - FromContext, IntoContext, -}; - -const SCRAPE_INTERVAL: Duration = Duration::from_secs(60); - -/// Wiring layer for the Postgres metrics exporter. 
-#[derive(Debug)] -pub struct PostgresMetricsLayer; - -#[derive(Debug, FromContext)] -#[context(crate = crate)] -pub struct Input { - pub replica_pool: PoolResource, -} - -#[derive(Debug, IntoContext)] -#[context(crate = crate)] -pub struct Output { - #[context(task)] - pub task: PostgresMetricsScrapingTask, -} - -#[async_trait::async_trait] -impl WiringLayer for PostgresMetricsLayer { - type Input = Input; - type Output = Output; - - fn layer_name(&self) -> &'static str { - "postgres_metrics_layer" - } - - async fn wire(self, input: Self::Input) -> Result { - let pool_for_metrics = input.replica_pool.get_singleton().await?; - let task = PostgresMetricsScrapingTask { pool_for_metrics }; - - Ok(Output { task }) - } -} - -#[derive(Debug)] -pub struct PostgresMetricsScrapingTask { - pool_for_metrics: ConnectionPool, -} - -#[async_trait::async_trait] -impl Task for PostgresMetricsScrapingTask { - fn kind(&self) -> TaskKind { - TaskKind::UnconstrainedTask - } - - fn id(&self) -> TaskId { - "postgres_metrics_scraping".into() - } - - async fn run(self: Box, mut stop_receiver: StopReceiver) -> anyhow::Result<()> { - tokio::select! 
{ - () = PostgresMetrics::run_scraping(self.pool_for_metrics, SCRAPE_INTERVAL) => { - tracing::warn!("Postgres metrics scraping unexpectedly stopped"); - } - _ = stop_receiver.0.changed() => { - tracing::info!("Stop signal received, Postgres metrics scraping is shutting down"); - } - } - Ok(()) - } -} diff --git a/core/node/node_framework/src/implementations/layers/query_eth_client.rs b/core/node/node_framework/src/implementations/layers/query_eth_client.rs index 116823d92d8a..73d28f6a2aae 100644 --- a/core/node/node_framework/src/implementations/layers/query_eth_client.rs +++ b/core/node/node_framework/src/implementations/layers/query_eth_client.rs @@ -1,9 +1,11 @@ use anyhow::Context; -use zksync_types::{settlement::SettlementMode, url::SensitiveUrl, L2ChainId, SLChainId}; +use zksync_types::{url::SensitiveUrl, L2ChainId, SLChainId}; use zksync_web3_decl::client::Client; use crate::{ - implementations::resources::eth_interface::{EthInterfaceResource, L2InterfaceResource}, + implementations::resources::eth_interface::{ + EthInterfaceResource, GatewayEthInterfaceResource, L2InterfaceResource, + }, wiring_layer::{WiringError, WiringLayer}, IntoContext, }; @@ -13,19 +15,19 @@ use crate::{ pub struct QueryEthClientLayer { chain_id: SLChainId, web3_url: SensitiveUrl, - settlement_mode: SettlementMode, + gateway_web3_url: Option, } impl QueryEthClientLayer { pub fn new( chain_id: SLChainId, web3_url: SensitiveUrl, - settlement_mode: SettlementMode, + gateway_web3_url: Option, ) -> Self { Self { chain_id, web3_url, - settlement_mode, + gateway_web3_url, } } } @@ -35,6 +37,7 @@ impl QueryEthClientLayer { pub struct Output { query_client_l1: EthInterfaceResource, query_client_l2: Option, + query_client_gateway: Option, } #[async_trait::async_trait] @@ -47,7 +50,7 @@ impl WiringLayer for QueryEthClientLayer { } async fn wire(self, _input: Self::Input) -> Result { - // Both the L1 and L2 client have the same URL, but provide different type guarantees. 
+ // Both `query_client_gateway` and `query_client_l2` use the same URL, but provide different type guarantees. Ok(Output { query_client_l1: EthInterfaceResource(Box::new( Client::http(self.web3_url.clone()) @@ -55,9 +58,9 @@ impl WiringLayer for QueryEthClientLayer { .for_network(self.chain_id.into()) .build(), )), - query_client_l2: if self.settlement_mode.is_gateway() { + query_client_l2: if let Some(gateway_web3_url) = self.gateway_web3_url.clone() { Some(L2InterfaceResource(Box::new( - Client::http(self.web3_url.clone()) + Client::http(gateway_web3_url) .context("Client::new()")? .for_network(L2ChainId::try_from(self.chain_id.0).unwrap().into()) .build(), @@ -65,6 +68,15 @@ impl WiringLayer for QueryEthClientLayer { } else { None }, + query_client_gateway: if let Some(gateway_web3_url) = self.gateway_web3_url { + Some(GatewayEthInterfaceResource(Box::new( + Client::http(gateway_web3_url) + .context("Client::new()")? + .build(), + ))) + } else { + None + }, }) } } diff --git a/core/node/node_framework/src/implementations/layers/state_keeper/mod.rs b/core/node/node_framework/src/implementations/layers/state_keeper/mod.rs index 55defd095be8..6f21a321eb1a 100644 --- a/core/node/node_framework/src/implementations/layers/state_keeper/mod.rs +++ b/core/node/node_framework/src/implementations/layers/state_keeper/mod.rs @@ -1,17 +1,15 @@ use std::sync::Arc; use anyhow::Context; +use zksync_health_check::ReactiveHealthCheck; +use zksync_state::AsyncCatchupTask; pub use zksync_state::RocksdbStorageOptions; -use zksync_state::{AsyncCatchupTask, OwnedStorage, ReadStorageFactory}; -use zksync_state_keeper::{ - seal_criteria::ConditionalSealer, AsyncRocksdbCache, OutputHandler, StateKeeperIO, - ZkSyncStateKeeper, -}; +use zksync_state_keeper::{AsyncRocksdbCache, ZkSyncStateKeeper}; use zksync_storage::RocksDB; -use zksync_vm_executor::interface::BatchExecutorFactory; use crate::{ implementations::resources::{ + healthcheck::AppHealthCheckResource, pools::{MasterPool, 
PoolResource}, state_keeper::{ BatchExecutorResource, ConditionalSealerResource, OutputHandlerResource, @@ -44,6 +42,8 @@ pub struct Input { pub output_handler: OutputHandlerResource, pub conditional_sealer: ConditionalSealerResource, pub master_pool: PoolResource, + #[context(default)] + pub app_health: AppHealthCheckResource, } #[derive(Debug, IntoContext)] @@ -99,13 +99,21 @@ impl WiringLayer for StateKeeperLayer { self.rocksdb_options, ); - let state_keeper = StateKeeperTask { + let state_keeper = ZkSyncStateKeeper::new( io, - executor_factory: batch_executor_base, + batch_executor_base, output_handler, sealer, - storage_factory: Arc::new(storage_factory), - }; + Arc::new(storage_factory), + ); + + let state_keeper = StateKeeperTask { state_keeper }; + + input + .app_health + .0 + .insert_component(state_keeper.health_check()) + .map_err(WiringError::internal)?; let rocksdb_termination_hook = ShutdownHook::new("rocksdb_terminaton", async { // Wait for all the instances of RocksDB to be destroyed. @@ -123,11 +131,14 @@ impl WiringLayer for StateKeeperLayer { #[derive(Debug)] pub struct StateKeeperTask { - io: Box, - executor_factory: Box>, - output_handler: OutputHandler, - sealer: Arc, - storage_factory: Arc, + state_keeper: ZkSyncStateKeeper, +} + +impl StateKeeperTask { + /// Returns the health check for state keeper. 
+ pub fn health_check(&self) -> ReactiveHealthCheck { + self.state_keeper.health_check() + } } #[async_trait::async_trait] @@ -137,15 +148,7 @@ impl Task for StateKeeperTask { } async fn run(self: Box, stop_receiver: StopReceiver) -> anyhow::Result<()> { - let state_keeper = ZkSyncStateKeeper::new( - stop_receiver.0, - self.io, - self.executor_factory, - self.output_handler, - self.sealer, - self.storage_factory, - ); - state_keeper.run().await + self.state_keeper.run(stop_receiver.0).await } } diff --git a/core/node/node_framework/src/implementations/layers/tree_data_fetcher.rs b/core/node/node_framework/src/implementations/layers/tree_data_fetcher.rs index ca2e80142401..cdf0700a0e73 100644 --- a/core/node/node_framework/src/implementations/layers/tree_data_fetcher.rs +++ b/core/node/node_framework/src/implementations/layers/tree_data_fetcher.rs @@ -1,9 +1,9 @@ use zksync_node_sync::tree_data_fetcher::TreeDataFetcher; -use zksync_types::Address; +use zksync_types::{Address, L2ChainId}; use crate::{ implementations::resources::{ - eth_interface::EthInterfaceResource, + eth_interface::{EthInterfaceResource, GatewayEthInterfaceResource}, healthcheck::AppHealthCheckResource, main_node_client::MainNodeClientResource, pools::{MasterPool, PoolResource}, @@ -17,7 +17,8 @@ use crate::{ /// Wiring layer for [`TreeDataFetcher`]. 
#[derive(Debug)] pub struct TreeDataFetcherLayer { - diamond_proxy_addr: Address, + l1_diamond_proxy_addr: Address, + l2_chain_id: L2ChainId, } #[derive(Debug, FromContext)] @@ -25,7 +26,8 @@ pub struct TreeDataFetcherLayer { pub struct Input { pub master_pool: PoolResource, pub main_node_client: MainNodeClientResource, - pub eth_client: EthInterfaceResource, + pub l1_client: EthInterfaceResource, + pub gateway_client: Option, #[context(default)] pub app_health: AppHealthCheckResource, } @@ -38,8 +40,11 @@ pub struct Output { } impl TreeDataFetcherLayer { - pub fn new(diamond_proxy_addr: Address) -> Self { - Self { diamond_proxy_addr } + pub fn new(l1_diamond_proxy_addr: Address, l2_chain_id: L2ChainId) -> Self { + Self { + l1_diamond_proxy_addr, + l2_chain_id, + } } } @@ -55,14 +60,21 @@ impl WiringLayer for TreeDataFetcherLayer { async fn wire(self, input: Self::Input) -> Result { let pool = input.master_pool.get().await?; let MainNodeClientResource(client) = input.main_node_client; - let EthInterfaceResource(eth_client) = input.eth_client; + let EthInterfaceResource(l1_client) = input.l1_client; + let gateway_client = input.gateway_client.map(|c| c.0); tracing::warn!( "Running tree data fetcher (allows a node to operate w/o a Merkle tree or w/o waiting the tree to catch up). 
\ This is an experimental feature; do not use unless you know what you're doing" ); - let task = - TreeDataFetcher::new(client, pool).with_l1_data(eth_client, self.diamond_proxy_addr)?; + let task = TreeDataFetcher::new(client, pool) + .with_l1_data( + l1_client, + self.l1_diamond_proxy_addr, + gateway_client, + self.l2_chain_id, + ) + .await?; // Insert healthcheck input diff --git a/core/node/node_framework/src/implementations/resources/eth_interface.rs b/core/node/node_framework/src/implementations/resources/eth_interface.rs index 24b7df327f63..f1bc17027f90 100644 --- a/core/node/node_framework/src/implementations/resources/eth_interface.rs +++ b/core/node/node_framework/src/implementations/resources/eth_interface.rs @@ -13,6 +13,15 @@ impl Resource for EthInterfaceResource { } } +#[derive(Debug, Clone)] +pub struct GatewayEthInterfaceResource(pub Box>); + +impl Resource for GatewayEthInterfaceResource { + fn name() -> String { + "common/gateway_eth_interface".into() + } +} + /// A resource that provides L2 interface object to the service. /// It is expected to have the same URL as the `EthInterfaceResource`, but have different capabilities. /// diff --git a/core/node/node_storage_init/src/external_node/genesis.rs b/core/node/node_storage_init/src/external_node/genesis.rs index b7a7efa9cf53..d5c487535bf4 100644 --- a/core/node/node_storage_init/src/external_node/genesis.rs +++ b/core/node/node_storage_init/src/external_node/genesis.rs @@ -22,10 +22,14 @@ impl InitializeStorage for ExternalNodeGenesis { _stop_receiver: watch::Receiver, ) -> anyhow::Result<()> { let mut storage = self.pool.connection_tagged("en").await?; + + // Custom genesis state for external nodes is not supported. If the main node has a custom genesis, + // its external nodes should be started from a snapshot instead. 
zksync_node_sync::genesis::perform_genesis_if_needed( &mut storage, self.l2_chain_id, &self.client.clone().for_component("genesis"), + None, ) .await .context("performing genesis failed") diff --git a/core/node/node_storage_init/src/main_node/genesis.rs b/core/node/node_storage_init/src/main_node/genesis.rs index e98473840370..a5d6c0e628ac 100644 --- a/core/node/node_storage_init/src/main_node/genesis.rs +++ b/core/node/node_storage_init/src/main_node/genesis.rs @@ -1,8 +1,11 @@ +use std::fs::File; + use anyhow::Context as _; use tokio::sync::watch; use zksync_config::{ContractsConfig, GenesisConfig}; use zksync_dal::{ConnectionPool, Core, CoreDal as _}; use zksync_node_genesis::GenesisParams; +use zksync_object_store::bincode; use zksync_web3_decl::client::{DynClient, L1}; use crate::traits::InitializeStorage; @@ -36,7 +39,21 @@ impl InitializeStorage for MainNodeGenesis { self.contracts.diamond_proxy_addr, ) .await?; - zksync_node_genesis::ensure_genesis_state(&mut storage, ¶ms).await?; + + let custom_genesis_state_reader = match &self.genesis.custom_genesis_state_path { + Some(path) => match File::open(path) { + Ok(file) => Some(bincode::deserialize_from(file)?), + Err(e) => return Err(e.into()), // Propagate other errors + }, + None => None, + }; + + zksync_node_genesis::ensure_genesis_state( + &mut storage, + ¶ms, + custom_genesis_state_reader, + ) + .await?; if let Some(ecosystem_contracts) = &self.contracts.ecosystem_contracts { zksync_node_genesis::save_set_chain_id_tx( diff --git a/core/node/node_sync/src/batch_status_updater/mod.rs b/core/node/node_sync/src/batch_status_updater/mod.rs index 3f6bb9ff33f8..de3bb88f8b93 100644 --- a/core/node/node_sync/src/batch_status_updater/mod.rs +++ b/core/node/node_sync/src/batch_status_updater/mod.rs @@ -13,7 +13,7 @@ use zksync_dal::{Connection, ConnectionPool, Core, CoreDal}; use zksync_health_check::{Health, HealthStatus, HealthUpdater, ReactiveHealthCheck}; use zksync_shared_metrics::EN_METRICS; use 
zksync_types::{ - aggregated_operations::AggregatedActionType, api, L1BatchNumber, L2BlockNumber, H256, + aggregated_operations::AggregatedActionType, api, L1BatchNumber, SLChainId, H256, }; use zksync_web3_decl::{ client::{DynClient, L2}, @@ -41,6 +41,7 @@ struct BatchStatusChange { number: L1BatchNumber, l1_tx_hash: H256, happened_at: DateTime, + sl_chain_id: Option, } #[derive(Debug, Default)] @@ -73,42 +74,21 @@ impl From for UpdaterError { #[async_trait] trait MainNodeClient: fmt::Debug + Send + Sync { - /// Returns any L2 block in the specified L1 batch. - async fn resolve_l1_batch_to_l2_block( + async fn batch_details( &self, number: L1BatchNumber, - ) -> EnrichedClientResult>; - - async fn block_details( - &self, - number: L2BlockNumber, - ) -> EnrichedClientResult>; + ) -> EnrichedClientResult>; } #[async_trait] impl MainNodeClient for Box> { - async fn resolve_l1_batch_to_l2_block( + async fn batch_details( &self, number: L1BatchNumber, - ) -> EnrichedClientResult> { - let request_latency = FETCHER_METRICS.requests[&FetchStage::GetL2BlockRange].start(); - let number = self - .get_l2_block_range(number) - .rpc_context("resolve_l1_batch_to_l2_block") - .with_arg("number", &number) - .await? - .map(|(start, _)| L2BlockNumber(start.as_u32())); - request_latency.observe(); - Ok(number) - } - - async fn block_details( - &self, - number: L2BlockNumber, - ) -> EnrichedClientResult> { - let request_latency = FETCHER_METRICS.requests[&FetchStage::GetBlockDetails].start(); + ) -> EnrichedClientResult> { + let request_latency = FETCHER_METRICS.requests[&FetchStage::GetL1BatchDetails].start(); let details = self - .get_block_details(number) + .get_l1_batch_details(number) .rpc_context("block_details") .with_arg("number", &number) .await?; @@ -155,27 +135,34 @@ impl UpdaterCursor { }) } - fn extract_tx_hash_and_timestamp( - batch_info: &api::BlockDetails, + /// Extracts tx hash, timestamp and chain id of the operation. 
+ fn extract_op_data( + batch_info: &api::L1BatchDetails, stage: AggregatedActionType, - ) -> (Option, Option>) { + ) -> (Option, Option>, Option) { match stage { - AggregatedActionType::Commit => { - (batch_info.base.commit_tx_hash, batch_info.base.committed_at) - } - AggregatedActionType::PublishProofOnchain => { - (batch_info.base.prove_tx_hash, batch_info.base.proven_at) - } - AggregatedActionType::Execute => { - (batch_info.base.execute_tx_hash, batch_info.base.executed_at) - } + AggregatedActionType::Commit => ( + batch_info.base.commit_tx_hash, + batch_info.base.committed_at, + batch_info.base.commit_chain_id, + ), + AggregatedActionType::PublishProofOnchain => ( + batch_info.base.prove_tx_hash, + batch_info.base.proven_at, + batch_info.base.prove_chain_id, + ), + AggregatedActionType::Execute => ( + batch_info.base.execute_tx_hash, + batch_info.base.executed_at, + batch_info.base.execute_chain_id, + ), } } fn update( &mut self, status_changes: &mut StatusChanges, - batch_info: &api::BlockDetails, + batch_info: &api::L1BatchDetails, ) -> anyhow::Result<()> { for stage in [ AggregatedActionType::Commit, @@ -190,10 +177,10 @@ impl UpdaterCursor { fn update_stage( &mut self, status_changes: &mut StatusChanges, - batch_info: &api::BlockDetails, + batch_info: &api::L1BatchDetails, stage: AggregatedActionType, ) -> anyhow::Result<()> { - let (l1_tx_hash, happened_at) = Self::extract_tx_hash_and_timestamp(batch_info, stage); + let (l1_tx_hash, happened_at, sl_chain_id) = Self::extract_op_data(batch_info, stage); let (last_l1_batch, changes_to_update) = match stage { AggregatedActionType::Commit => ( &mut self.last_committed_l1_batch, @@ -212,7 +199,7 @@ impl UpdaterCursor { let Some(l1_tx_hash) = l1_tx_hash else { return Ok(()); }; - if batch_info.l1_batch_number != last_l1_batch.next() { + if batch_info.number != last_l1_batch.next() { return Ok(()); } @@ -221,12 +208,13 @@ impl UpdaterCursor { format!("Malformed API response: batch is {action_str}, but has no 
relevant timestamp") })?; changes_to_update.push(BatchStatusChange { - number: batch_info.l1_batch_number, + number: batch_info.number, l1_tx_hash, happened_at, + sl_chain_id, }); - tracing::info!("Batch {}: {action_str}", batch_info.l1_batch_number); - FETCHER_METRICS.l1_batch[&stage.into()].set(batch_info.l1_batch_number.0.into()); + tracing::info!("Batch {}: {action_str}", batch_info.number); + FETCHER_METRICS.l1_batch[&stage.into()].set(batch_info.number.0.into()); *last_l1_batch += 1; Ok(()) } @@ -348,22 +336,11 @@ impl BatchStatusUpdater { // update all three statuses (e.g. if the node is still syncing), but also skipping the gaps in the statuses // (e.g. if the last executed batch is 10, but the last proven is 20, we don't need to check the batches 11-19). while batch <= last_sealed_batch { - // While we may receive `None` for the `self.current_l1_batch`, it's OK: open batch is guaranteed to not - // be sent to L1. - let l2_block_number = self.client.resolve_l1_batch_to_l2_block(batch).await?; - let Some(l2_block_number) = l2_block_number else { + let Some(batch_info) = self.client.batch_details(batch).await? else { + // Batch is not ready yet return Ok(()); }; - let Some(batch_info) = self.client.block_details(l2_block_number).await? else { - // We cannot recover from an external API inconsistency. - let err = anyhow::anyhow!( - "Node API is inconsistent: L2 block {l2_block_number} was reported to be a part of {batch} L1 batch, \ - but API has no information about this L2 block", - ); - return Err(err.into()); - }; - cursor.update(status_changes, &batch_info)?; // Check whether we can skip a part of the range. 
@@ -407,10 +384,11 @@ impl BatchStatusUpdater { for change in &changes.commit { tracing::info!( - "Commit status change: number {}, hash {}, happened at {}", + "Commit status change: number {}, hash {}, happened at {}, on chainID {:?}", change.number, change.l1_tx_hash, - change.happened_at + change.happened_at, + change.sl_chain_id ); anyhow::ensure!( change.number <= last_sealed_batch, @@ -424,6 +402,7 @@ impl BatchStatusUpdater { AggregatedActionType::Commit, change.l1_tx_hash, change.happened_at, + change.sl_chain_id, ) .await?; cursor.last_committed_l1_batch = change.number; @@ -431,10 +410,11 @@ impl BatchStatusUpdater { for change in &changes.prove { tracing::info!( - "Prove status change: number {}, hash {}, happened at {}", + "Prove status change: number {}, hash {}, happened at {}, on chainID {:?}", change.number, change.l1_tx_hash, - change.happened_at + change.happened_at, + change.sl_chain_id ); anyhow::ensure!( change.number <= cursor.last_committed_l1_batch, @@ -448,6 +428,7 @@ impl BatchStatusUpdater { AggregatedActionType::PublishProofOnchain, change.l1_tx_hash, change.happened_at, + change.sl_chain_id, ) .await?; cursor.last_proven_l1_batch = change.number; @@ -455,10 +436,11 @@ impl BatchStatusUpdater { for change in &changes.execute { tracing::info!( - "Execute status change: number {}, hash {}, happened at {}", + "Execute status change: number {}, hash {}, happened at {}, on chainID {:?}", change.number, change.l1_tx_hash, - change.happened_at + change.happened_at, + change.sl_chain_id ); anyhow::ensure!( change.number <= cursor.last_proven_l1_batch, @@ -472,6 +454,7 @@ impl BatchStatusUpdater { AggregatedActionType::Execute, change.l1_tx_hash, change.happened_at, + change.sl_chain_id, ) .await?; cursor.last_executed_l1_batch = change.number; diff --git a/core/node/node_sync/src/batch_status_updater/tests.rs b/core/node/node_sync/src/batch_status_updater/tests.rs index 28b89f86f6a7..4ea1a5937075 100644 --- 
a/core/node/node_sync/src/batch_status_updater/tests.rs +++ b/core/node/node_sync/src/batch_status_updater/tests.rs @@ -8,7 +8,7 @@ use tokio::sync::{watch, Mutex}; use zksync_contracts::BaseSystemContractsHashes; use zksync_node_genesis::{insert_genesis_batch, GenesisParams}; use zksync_node_test_utils::{create_l1_batch, create_l2_block, prepare_recovery_snapshot}; -use zksync_types::{Address, ProtocolVersionId}; +use zksync_types::L2BlockNumber; use super::*; use crate::metrics::L1BatchStage; @@ -104,11 +104,11 @@ impl L1BatchStagesMap { for (number, stage) in self.iter() { let local_details = storage .blocks_web3_dal() - .get_block_details(L2BlockNumber(number.0)) + .get_l1_batch_details(L1BatchNumber(number.0)) .await .unwrap() .unwrap_or_else(|| panic!("no details for block #{number}")); - let expected_details = mock_block_details(number.0, stage); + let expected_details = mock_batch_details(number.0, stage); assert_eq!( local_details.base.commit_tx_hash, @@ -118,6 +118,10 @@ impl L1BatchStagesMap { local_details.base.committed_at, expected_details.base.committed_at ); + assert_eq!( + local_details.base.commit_chain_id, + expected_details.base.commit_chain_id, + ); assert_eq!( local_details.base.prove_tx_hash, expected_details.base.prove_tx_hash @@ -126,6 +130,10 @@ impl L1BatchStagesMap { local_details.base.proven_at, expected_details.base.proven_at ); + assert_eq!( + local_details.base.prove_chain_id, + expected_details.base.prove_chain_id, + ); assert_eq!( local_details.base.execute_tx_hash, expected_details.base.execute_tx_hash @@ -134,14 +142,17 @@ impl L1BatchStagesMap { local_details.base.executed_at, expected_details.base.executed_at ); + assert_eq!( + local_details.base.execute_chain_id, + expected_details.base.execute_chain_id, + ); } } } -fn mock_block_details(number: u32, stage: L1BatchStage) -> api::BlockDetails { - api::BlockDetails { - number: L2BlockNumber(number), - l1_batch_number: L1BatchNumber(number), +fn mock_batch_details(number: u32, 
stage: L1BatchStage) -> api::L1BatchDetails { + api::L1BatchDetails { + number: L1BatchNumber(number), base: api::BlockDetailsBase { timestamp: number.into(), l1_tx_count: 0, @@ -151,18 +162,19 @@ fn mock_block_details(number: u32, stage: L1BatchStage) -> api::BlockDetails { commit_tx_hash: (stage >= L1BatchStage::Committed).then(|| H256::repeat_byte(1)), committed_at: (stage >= L1BatchStage::Committed) .then(|| Utc.timestamp_opt(100, 0).unwrap()), + commit_chain_id: (stage >= L1BatchStage::Committed).then_some(SLChainId(11)), prove_tx_hash: (stage >= L1BatchStage::Proven).then(|| H256::repeat_byte(2)), proven_at: (stage >= L1BatchStage::Proven).then(|| Utc.timestamp_opt(200, 0).unwrap()), + prove_chain_id: (stage >= L1BatchStage::Proven).then_some(SLChainId(22)), execute_tx_hash: (stage >= L1BatchStage::Executed).then(|| H256::repeat_byte(3)), executed_at: (stage >= L1BatchStage::Executed) .then(|| Utc.timestamp_opt(300, 0).unwrap()), + execute_chain_id: (stage >= L1BatchStage::Executed).then_some(SLChainId(33)), l1_gas_price: 1, l2_fair_gas_price: 2, fair_pubdata_price: None, base_system_contracts_hashes: BaseSystemContractsHashes::default(), }, - operator_address: Address::zero(), - protocol_version: Some(ProtocolVersionId::default()), } } @@ -177,23 +189,15 @@ impl From for MockMainNodeClient { #[async_trait] impl MainNodeClient for MockMainNodeClient { - async fn resolve_l1_batch_to_l2_block( + async fn batch_details( &self, number: L1BatchNumber, - ) -> EnrichedClientResult> { - let map = self.0.lock().await; - Ok(map.get(number).is_some().then_some(L2BlockNumber(number.0))) - } - - async fn block_details( - &self, - number: L2BlockNumber, - ) -> EnrichedClientResult> { + ) -> EnrichedClientResult> { let map = self.0.lock().await; let Some(stage) = map.get(L1BatchNumber(number.0)) else { return Ok(None); }; - Ok(Some(mock_block_details(number.0, stage))) + Ok(Some(mock_batch_details(number.0, stage))) } } @@ -202,6 +206,7 @@ fn mock_change(number: 
L1BatchNumber) -> BatchStatusChange { number, l1_tx_hash: H256::zero(), happened_at: DateTime::default(), + sl_chain_id: Some(SLChainId(0)), } } diff --git a/core/node/node_sync/src/genesis.rs b/core/node/node_sync/src/genesis.rs index 7401bdd9c9d4..8d60599ff7ca 100644 --- a/core/node/node_sync/src/genesis.rs +++ b/core/node/node_sync/src/genesis.rs @@ -1,6 +1,6 @@ use anyhow::Context as _; use zksync_contracts::{BaseSystemContracts, BaseSystemContractsHashes, SystemContractCode}; -use zksync_dal::{Connection, Core, CoreDal}; +use zksync_dal::{custom_genesis_export_dal::GenesisState, Connection, Core, CoreDal}; use zksync_node_genesis::{ensure_genesis_state, GenesisParams}; use zksync_types::{ block::DeployedContract, system_contracts::get_system_smart_contracts, AccountTreeId, L2ChainId, @@ -16,13 +16,14 @@ pub async fn perform_genesis_if_needed( storage: &mut Connection<'_, Core>, zksync_chain_id: L2ChainId, client: &dyn MainNodeClient, + custom_genesis_state: Option, ) -> anyhow::Result<()> { let mut transaction = storage.start_transaction().await?; // We want to check whether the genesis is needed before we create genesis params to not // make the node startup slower. if transaction.blocks_dal().is_genesis_needed().await? 
{ let genesis_params = create_genesis_params(client, zksync_chain_id).await?; - ensure_genesis_state(&mut transaction, &genesis_params) + ensure_genesis_state(&mut transaction, &genesis_params, custom_genesis_state) .await .context("ensure_genesis_state")?; } diff --git a/core/node/node_sync/src/metrics.rs b/core/node/node_sync/src/metrics.rs index 805c6f913df2..02ac1b3837af 100644 --- a/core/node/node_sync/src/metrics.rs +++ b/core/node/node_sync/src/metrics.rs @@ -8,10 +8,7 @@ use zksync_types::aggregated_operations::AggregatedActionType; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, EncodeLabelValue, EncodeLabelSet)] #[metrics(label = "stage", rename_all = "snake_case")] pub(super) enum FetchStage { - // uses legacy naming for L2 blocks for compatibility reasons - #[metrics(name = "get_miniblock_range")] - GetL2BlockRange, - GetBlockDetails, + GetL1BatchDetails, } #[derive( diff --git a/core/node/node_sync/src/tests.rs b/core/node/node_sync/src/tests.rs index 172a00e8c14c..21058144f778 100644 --- a/core/node/node_sync/src/tests.rs +++ b/core/node/node_sync/src/tests.rs @@ -132,7 +132,6 @@ impl StateKeeperHandles { } let state_keeper = ZkSyncStateKeeper::new( - stop_receiver, Box::new(io), Box::new(batch_executor), output_handler, @@ -143,7 +142,7 @@ impl StateKeeperHandles { Self { stop_sender, sync_state, - task: tokio::spawn(state_keeper.run()), + task: tokio::spawn(state_keeper.run(stop_receiver)), } } diff --git a/core/node/node_sync/src/tree_data_fetcher/mod.rs b/core/node/node_sync/src/tree_data_fetcher/mod.rs index 52037dac4edc..9f8ac18c39bd 100644 --- a/core/node/node_sync/src/tree_data_fetcher/mod.rs +++ b/core/node/node_sync/src/tree_data_fetcher/mod.rs @@ -11,7 +11,7 @@ use zksync_dal::{Connection, ConnectionPool, Core, CoreDal, DalError}; use zksync_health_check::{Health, HealthStatus, HealthUpdater, ReactiveHealthCheck}; use zksync_types::{ block::{L1BatchTreeData, L2BlockHeader}, - Address, L1BatchNumber, + Address, L1BatchNumber, L2ChainId, }; 
use zksync_web3_decl::{ client::{DynClient, L1, L2}, @@ -127,10 +127,12 @@ impl TreeDataFetcher { /// Attempts to fetch root hashes from L1 (namely, `BlockCommit` events emitted by the diamond proxy) if possible. /// The main node will still be used as a fallback in case communicating with L1 fails, or for newer batches, /// which may not be committed on L1. - pub fn with_l1_data( + pub async fn with_l1_data( mut self, - eth_client: Box>, - diamond_proxy_address: Address, + l1_client: Box>, + l1_diamond_proxy_addr: Address, + gateway_client: Option>>, + l2_chain_id: L2ChainId, ) -> anyhow::Result { anyhow::ensure!( self.diamond_proxy_address.is_none(), @@ -138,11 +140,15 @@ impl TreeDataFetcher { ); let l1_provider = L1DataProvider::new( - eth_client.for_component("tree_data_fetcher"), - diamond_proxy_address, - )?; + l1_client.for_component("tree_data_fetcher"), + l1_diamond_proxy_addr, + gateway_client.map(|c| c.for_component("tree_data_fetcher")), + self.pool.clone(), + l2_chain_id, + ) + .await?; self.data_provider.set_l1(l1_provider); - self.diamond_proxy_address = Some(diamond_proxy_address); + self.diamond_proxy_address = Some(l1_diamond_proxy_addr); Ok(self) } diff --git a/core/node/node_sync/src/tree_data_fetcher/provider/mod.rs b/core/node/node_sync/src/tree_data_fetcher/provider/mod.rs index e4f68cade6a4..432808422632 100644 --- a/core/node/node_sync/src/tree_data_fetcher/provider/mod.rs +++ b/core/node/node_sync/src/tree_data_fetcher/provider/mod.rs @@ -2,8 +2,13 @@ use std::fmt; use anyhow::Context; use async_trait::async_trait; -use zksync_eth_client::EthInterface; -use zksync_types::{block::L2BlockHeader, web3, Address, L1BatchNumber, H256, U256, U64}; +use zksync_contracts::bridgehub_contract; +use zksync_dal::{ConnectionPool, Core, CoreDal}; +use zksync_eth_client::{CallFunctionArgs, EthInterface}; +use zksync_system_constants::L2_BRIDGEHUB_ADDRESS; +use zksync_types::{ + block::L2BlockHeader, web3, Address, L1BatchNumber, L2ChainId, SLChainId, 
H256, U256, U64, +}; use zksync_web3_decl::{ client::{DynClient, L1, L2}, error::{ClientRpcContext, EnrichedClientError, EnrichedClientResult}, @@ -13,7 +18,7 @@ use zksync_web3_decl::{ use super::{ metrics::{ProcessingStage, TreeDataProviderSource, METRICS}, - TreeDataFetcherResult, + TreeDataFetcherError, TreeDataFetcherResult, }; #[cfg(test)] @@ -89,6 +94,14 @@ struct PastL1BatchInfo { number: L1BatchNumber, l1_commit_block_number: U64, l1_commit_block_timestamp: U256, + chain_id: SLChainId, +} + +#[derive(Debug)] +struct SLChainAccess { + client: Box>, + chain_id: SLChainId, + diamond_proxy_addr: Address, } /// Provider of tree data loading it from L1 `BlockCommit` events emitted by the diamond proxy contract. @@ -103,10 +116,11 @@ struct PastL1BatchInfo { /// (provided it's not too far behind the seal timestamp of the batch). #[derive(Debug)] pub(super) struct L1DataProvider { - eth_client: Box>, - diamond_proxy_address: Address, + l1_chain_data: SLChainAccess, + gateway_chain_data: Option, block_commit_signature: H256, past_l1_batch: Option, + pool: ConnectionPool, } impl L1DataProvider { @@ -116,19 +130,52 @@ impl L1DataProvider { /// `L1_BLOCK_ACCURACY`, but not large enough to trigger request limiting on the L1 RPC provider. 
const L1_BLOCK_RANGE: U64 = U64([20_000]); - pub fn new( - eth_client: Box>, - diamond_proxy_address: Address, + pub async fn new( + l1_client: Box>, + l1_diamond_proxy_addr: Address, + gateway_client: Option>>, + pool: ConnectionPool, + l2_chain_id: L2ChainId, ) -> anyhow::Result { + let l1_chain_id = l1_client.fetch_chain_id().await?; + let l1_chain_data = SLChainAccess { + client: l1_client, + chain_id: l1_chain_id, + diamond_proxy_addr: l1_diamond_proxy_addr, + }; + let gateway_chain_data = if let Some(client) = gateway_client { + let contract = bridgehub_contract(); + let function_name = if contract.function("getZKChain").is_ok() { + "getZKChain" + } else { + "getHyperchain" + }; + let gateway_diamond_proxy = CallFunctionArgs::new( + function_name, + zksync_types::ethabi::Token::Uint(l2_chain_id.as_u64().into()), + ) + .for_contract(L2_BRIDGEHUB_ADDRESS, &contract) + .call(&client) + .await?; + let chain_id = client.fetch_chain_id().await?; + Some(SLChainAccess { + client, + chain_id, + diamond_proxy_addr: gateway_diamond_proxy, + }) + } else { + None + }; let block_commit_signature = zksync_contracts::hyperchain_contract() .event("BlockCommit") .context("missing `BlockCommit` event")? 
.signature(); Ok(Self { - eth_client, - diamond_proxy_address, + l1_chain_data, + gateway_chain_data, block_commit_signature, past_l1_batch: None, + pool, }) } @@ -186,6 +233,16 @@ impl L1DataProvider { })?; Ok((number, block.timestamp)) } + + fn chain_data_by_id(&self, searched_chain_id: SLChainId) -> Option<&SLChainAccess> { + if searched_chain_id == self.l1_chain_data.chain_id { + Some(&self.l1_chain_data) + } else if Some(searched_chain_id) == self.gateway_chain_data.as_ref().map(|d| d.chain_id) { + self.gateway_chain_data.as_ref() + } else { + None + } + } } #[async_trait] @@ -195,12 +252,36 @@ impl TreeDataProvider for L1DataProvider { number: L1BatchNumber, last_l2_block: &L2BlockHeader, ) -> TreeDataProviderResult { + let sl_chain_id = self + .pool + .connection_tagged("tree_data_fetcher") + .await + .map_err(|err| TreeDataFetcherError::Internal(err.into()))? + .eth_sender_dal() + .get_batch_commit_chain_id(number) + .await + .map_err(|err| TreeDataFetcherError::Internal(err.into()))?; + let chain_data = match sl_chain_id { + Some(chain_id) => { + let Some(chain_data) = self.chain_data_by_id(chain_id) else { + return Err(TreeDataFetcherError::Internal(anyhow::anyhow!( + "failed to find client for chain id {chain_id}" + ))); + }; + chain_data + } + None => &self.l1_chain_data, + }; + let l1_batch_seal_timestamp = last_l2_block.timestamp; let from_block = self.past_l1_batch.and_then(|info| { assert!( info.number < number, "`batch_details()` must be called with monotonically increasing numbers" ); + if info.chain_id != chain_data.chain_id { + return None; + } let threshold_timestamp = info.l1_commit_block_timestamp + Self::L1_BLOCK_RANGE.as_u64() / 2; if U256::from(l1_batch_seal_timestamp) > threshold_timestamp { tracing::debug!( @@ -219,7 +300,7 @@ impl TreeDataProvider for L1DataProvider { Some(number) => number, None => { let (approximate_block, steps) = - Self::guess_l1_commit_block_number(&self.eth_client, l1_batch_seal_timestamp) + 
Self::guess_l1_commit_block_number(&chain_data.client, l1_batch_seal_timestamp) .await?; tracing::debug!( number = number.0, @@ -235,7 +316,7 @@ impl TreeDataProvider for L1DataProvider { let number_topic = H256::from_low_u64_be(number.0.into()); let filter = web3::FilterBuilder::default() - .address(vec![self.diamond_proxy_address]) + .address(vec![chain_data.diamond_proxy_addr]) .from_block(web3::BlockNumber::Number(from_block)) .to_block(web3::BlockNumber::Number(from_block + Self::L1_BLOCK_RANGE)) .topics( @@ -245,7 +326,7 @@ impl TreeDataProvider for L1DataProvider { None, ) .build(); - let mut logs = self.eth_client.logs(&filter).await?; + let mut logs = chain_data.client.logs(&filter).await?; logs.retain(|log| !log.is_removed() && log.block_number.is_some()); match logs.as_slice() { @@ -266,7 +347,10 @@ impl TreeDataProvider for L1DataProvider { {diff} block(s) after the `from` block from the filter" ); - let l1_commit_block = self.eth_client.block(l1_commit_block_number.into()).await?; + let l1_commit_block = chain_data + .client + .block(l1_commit_block_number.into()) + .await?; let l1_commit_block = l1_commit_block.ok_or_else(|| { let err = "Block disappeared from L1 RPC provider"; EnrichedClientError::new(ClientError::Custom(err.into()), "batch_details") @@ -276,6 +360,7 @@ impl TreeDataProvider for L1DataProvider { number, l1_commit_block_number, l1_commit_block_timestamp: l1_commit_block.timestamp, + chain_id: chain_data.chain_id, }); Ok(Ok(root_hash)) } diff --git a/core/node/node_sync/src/tree_data_fetcher/provider/tests.rs b/core/node/node_sync/src/tree_data_fetcher/provider/tests.rs index 09fa16f16077..75bf96092335 100644 --- a/core/node/node_sync/src/tree_data_fetcher/provider/tests.rs +++ b/core/node/node_sync/src/tree_data_fetcher/provider/tests.rs @@ -3,10 +3,15 @@ use assert_matches::assert_matches; use once_cell::sync::Lazy; use test_casing::test_casing; -use zksync_dal::{ConnectionPool, Core}; +use zksync_dal::{Connection, ConnectionPool, 
Core}; use zksync_node_genesis::{insert_genesis_batch, GenesisParams}; use zksync_node_test_utils::create_l2_block; -use zksync_types::{api, L2BlockNumber, ProtocolVersionId}; +use zksync_types::{ + aggregated_operations::AggregatedActionType, + api, ethabi, + web3::{BlockId, CallRequest}, + L2BlockNumber, ProtocolVersionId, +}; use zksync_web3_decl::client::MockClient; use super::*; @@ -14,7 +19,11 @@ use crate::tree_data_fetcher::tests::{ get_last_l2_block, seal_l1_batch_with_timestamp, MockMainNodeClient, }; -const DIAMOND_PROXY_ADDRESS: Address = Address::repeat_byte(0x22); +const L1_DIAMOND_PROXY_ADDRESS: Address = Address::repeat_byte(0x22); +const GATEWAY_DIAMOND_PROXY_ADDRESS: Address = Address::repeat_byte(0x33); +const L1_CHAIN_ID: u64 = 9; +const GATEWAY_CHAIN_ID: u64 = 505; +const ERA_CHAIN_ID: u64 = 270; static BLOCK_COMMIT_SIGNATURE: Lazy = Lazy::new(|| { zksync_contracts::hyperchain_contract() @@ -33,10 +42,13 @@ fn mock_block_details_base(number: u32, hash: Option) -> api::BlockDetails status: api::BlockStatus::Sealed, commit_tx_hash: None, committed_at: None, + commit_chain_id: None, prove_tx_hash: None, proven_at: None, + prove_chain_id: None, execute_tx_hash: None, executed_at: None, + execute_chain_id: None, l1_gas_price: 10, l2_fair_gas_price: 100, fair_pubdata_price: None, @@ -117,53 +129,66 @@ async fn rpc_data_provider_with_block_hash_divergence() { assert_matches!(output, Err(MissingData::PossibleReorg)); } +#[derive(Debug)] struct EthereumParameters { block_number: U64, - // L1 block numbers in which L1 batches are committed starting from L1 batch #1 - l1_blocks_for_commits: Vec, + // L1 batch numbers and SL block numbers in which they are committed. 
+ batches_and_sl_blocks_for_commits: Vec<(L1BatchNumber, U64)>, + chain_id: SLChainId, + diamond_proxy: Address, } impl EthereumParameters { - fn new(block_number: u64) -> Self { + fn new_l1(block_number: u64) -> Self { + Self::new(block_number, L1_CHAIN_ID, L1_DIAMOND_PROXY_ADDRESS) + } + + fn new(block_number: u64, chain_id: u64, diamond_proxy: Address) -> Self { Self { block_number: block_number.into(), - l1_blocks_for_commits: vec![], + batches_and_sl_blocks_for_commits: vec![], + chain_id: SLChainId(chain_id), + diamond_proxy, } } - fn push_commit(&mut self, l1_block_number: u64) { + fn push_commit(&mut self, l1_batch_number: L1BatchNumber, l1_block_number: u64) { assert!(l1_block_number <= self.block_number.as_u64()); let l1_block_number = U64::from(l1_block_number); - let last_commit = self.l1_blocks_for_commits.last().copied(); - let is_increasing = last_commit.map_or(true, |last_number| last_number <= l1_block_number); - assert!(is_increasing, "Invalid L1 block number for commit"); + let last_commit = self.batches_and_sl_blocks_for_commits.last().copied(); + let is_increasing = last_commit.map_or(true, |last| { + last.0 <= l1_batch_number && last.1 <= l1_block_number + }); + assert!( + is_increasing, + "Invalid batch number or L1 block number for commit" + ); - self.l1_blocks_for_commits.push(l1_block_number); + self.batches_and_sl_blocks_for_commits + .push((l1_batch_number, l1_block_number)); } fn client(&self) -> MockClient { - let logs = self - .l1_blocks_for_commits - .iter() - .enumerate() - .map(|(i, &l1_block_number)| { - let l1_batch_number = H256::from_low_u64_be(i as u64 + 1); - let root_hash = H256::repeat_byte(i as u8 + 1); + let logs = self.batches_and_sl_blocks_for_commits.iter().map( + |&(l1_batch_number, l1_block_number)| { + let l1_batch_number_topic = H256::from_low_u64_be(l1_batch_number.0 as u64); + let root_hash = H256::repeat_byte(l1_batch_number.0 as u8); web3::Log { - address: DIAMOND_PROXY_ADDRESS, + address: self.diamond_proxy, 
topics: vec![ *BLOCK_COMMIT_SIGNATURE, - l1_batch_number, + l1_batch_number_topic, root_hash, H256::zero(), // commitment hash; not used ], block_number: Some(l1_block_number), ..web3::Log::default() } - }); + }, + ); let logs: Vec<_> = logs.collect(); - mock_l1_client(self.block_number, logs) + mock_l1_client(self.block_number, logs, self.chain_id) } } @@ -201,7 +226,7 @@ fn filter_logs(logs: &[web3::Log], filter: web3::Filter) -> Vec { filtered_logs.cloned().collect() } -fn mock_l1_client(block_number: U64, logs: Vec) -> MockClient { +fn mock_l1_client(block_number: U64, logs: Vec, chain_id: SLChainId) -> MockClient { MockClient::builder(L1::default()) .method("eth_blockNumber", move || Ok(block_number)) .method( @@ -228,12 +253,49 @@ fn mock_l1_client(block_number: U64, logs: Vec) -> MockClient { .method("eth_getLogs", move |filter: web3::Filter| { Ok(filter_logs(&logs, filter)) }) + .method("eth_chainId", move || Ok(U64::from(chain_id.0))) + .method("eth_call", move |req: CallRequest, _block_id: BlockId| { + let contract = bridgehub_contract(); + let function_name = if contract.function("getZKChain").is_ok() { + "getZKChain" + } else { + "getHyperchain" + }; + let expected_input = contract + .function(function_name) + .unwrap() + .encode_input(&[ethabi::Token::Uint(ERA_CHAIN_ID.into())]) + .unwrap(); + assert_eq!(req.to, Some(L2_BRIDGEHUB_ADDRESS)); + assert_eq!(req.data, Some(expected_input.into())); + Ok(web3::Bytes(ethabi::encode(&[ethabi::Token::Address( + GATEWAY_DIAMOND_PROXY_ADDRESS, + )]))) + }) .build() } +pub(super) async fn insert_l1_batch_commit_chain_id( + storage: &mut Connection<'_, Core>, + number: L1BatchNumber, + chain_id: SLChainId, +) { + storage + .eth_sender_dal() + .insert_bogus_confirmed_eth_tx( + number, + AggregatedActionType::Commit, + H256::from_low_u64_be(number.0 as u64), + chrono::Utc::now(), + Some(chain_id), + ) + .await + .unwrap(); +} + #[tokio::test] async fn guessing_l1_commit_block_number() { - let eth_params = 
EthereumParameters::new(100_000); + let eth_params = EthereumParameters::new_l1(100_000); let eth_client = eth_params.client(); for timestamp in [0, 100, 1_000, 5_000, 10_000, 100_000] { @@ -251,6 +313,21 @@ async fn guessing_l1_commit_block_number() { } } +async fn create_l1_data_provider( + l1_client: Box>, + pool: ConnectionPool, +) -> L1DataProvider { + L1DataProvider::new( + l1_client, + L1_DIAMOND_PROXY_ADDRESS, + None, + pool, + L2ChainId::new(ERA_CHAIN_ID).unwrap(), + ) + .await + .unwrap() +} + async fn test_using_l1_data_provider(l1_batch_timestamps: &[u64]) { let pool = ConnectionPool::::test_pool().await; let mut storage = pool.connection().await.unwrap(); @@ -258,15 +335,14 @@ async fn test_using_l1_data_provider(l1_batch_timestamps: &[u64]) { .await .unwrap(); - let mut eth_params = EthereumParameters::new(1_000_000); + let mut eth_params = EthereumParameters::new_l1(1_000_000); for (number, &ts) in l1_batch_timestamps.iter().enumerate() { let number = L1BatchNumber(number as u32 + 1); seal_l1_batch_with_timestamp(&mut storage, number, ts).await; - eth_params.push_commit(ts + 1_000); // have a reasonable small diff between batch generation and commitment + eth_params.push_commit(number, ts + 1_000); // have a reasonable small diff between batch generation and commitment } - let mut provider = - L1DataProvider::new(Box::new(eth_params.client()), DIAMOND_PROXY_ADDRESS).unwrap(); + let mut provider = create_l1_data_provider(Box::new(eth_params.client()), pool.clone()).await; for i in 0..l1_batch_timestamps.len() { let number = L1BatchNumber(i as u32 + 1); let root_hash = provider @@ -278,7 +354,7 @@ async fn test_using_l1_data_provider(l1_batch_timestamps: &[u64]) { let past_l1_batch = provider.past_l1_batch.unwrap(); assert_eq!(past_l1_batch.number, number); - let expected_l1_block_number = eth_params.l1_blocks_for_commits[i]; + let expected_l1_block_number = eth_params.batches_and_sl_blocks_for_commits[i].1; assert_eq!( 
past_l1_batch.l1_commit_block_number, expected_l1_block_number @@ -297,13 +373,78 @@ async fn using_l1_data_provider(batch_spacing: u64) { test_using_l1_data_provider(&l1_batch_timestamps).await; } +#[tokio::test] +async fn using_different_settlement_layers() { + let pool = ConnectionPool::::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + insert_genesis_batch(&mut storage, &GenesisParams::mock()) + .await + .unwrap(); + + let l1_eth_params = EthereumParameters::new_l1(1_000_000); + let gateway_eth_params = + EthereumParameters::new(1_000_000, GATEWAY_CHAIN_ID, GATEWAY_DIAMOND_PROXY_ADDRESS); + let mut params_array = [l1_eth_params, gateway_eth_params]; + + // (index of sl: 0 for l1, 1 for gw; sl block number) + let batch_commit_info = [ + (0, 50_000), + (0, 50_500), + (1, 30_000), + (1, 32_000), + (0, 51_000), + (1, 60_000), + ]; + let chain_ids = [SLChainId(L1_CHAIN_ID), SLChainId(GATEWAY_CHAIN_ID)]; + for (i, &(sl_idx, ts)) in batch_commit_info.iter().enumerate() { + let number = L1BatchNumber(i as u32 + 1); + seal_l1_batch_with_timestamp(&mut storage, number, ts).await; + insert_l1_batch_commit_chain_id(&mut storage, number, chain_ids[sl_idx]).await; + params_array[sl_idx].push_commit(number, ts + 1_000); // have a reasonable small diff between batch generation and commitment + } + + let mut provider = L1DataProvider::new( + Box::new(params_array[0].client()), + L1_DIAMOND_PROXY_ADDRESS, + Some(Box::new(params_array[1].client())), + pool, + L2ChainId::new(ERA_CHAIN_ID).unwrap(), + ) + .await + .unwrap(); + for i in 0..batch_commit_info.len() { + let number = L1BatchNumber(i as u32 + 1); + let root_hash = provider + .batch_details(number, &get_last_l2_block(&mut storage, number).await) + .await + .unwrap() + .expect(&format!("no root hash for batch #{number}")); + assert_eq!(root_hash, H256::repeat_byte(number.0 as u8)); + + let past_l1_batch = provider.past_l1_batch.unwrap(); + assert_eq!(past_l1_batch.number, number); + let 
expected_l1_block_number = batch_commit_info[i].1 + 1_000; + assert_eq!( + past_l1_batch.l1_commit_block_number, + expected_l1_block_number.into() + ); + assert_eq!( + past_l1_batch.l1_commit_block_timestamp, + expected_l1_block_number.into() + ); + let expected_chain_id = chain_ids[batch_commit_info[i].0]; + assert_eq!(past_l1_batch.chain_id, expected_chain_id); + } +} + #[tokio::test] async fn detecting_reorg_in_l1_data_provider() { let l1_batch_number = H256::from_low_u64_be(1); + let pool = ConnectionPool::::test_pool().await; // Generate two logs for the same L1 batch #1 let logs = vec![ web3::Log { - address: DIAMOND_PROXY_ADDRESS, + address: L1_DIAMOND_PROXY_ADDRESS, topics: vec![ *BLOCK_COMMIT_SIGNATURE, l1_batch_number, @@ -314,7 +455,7 @@ async fn detecting_reorg_in_l1_data_provider() { ..web3::Log::default() }, web3::Log { - address: DIAMOND_PROXY_ADDRESS, + address: L1_DIAMOND_PROXY_ADDRESS, topics: vec![ *BLOCK_COMMIT_SIGNATURE, l1_batch_number, @@ -325,9 +466,9 @@ async fn detecting_reorg_in_l1_data_provider() { ..web3::Log::default() }, ]; - let l1_client = mock_l1_client(200.into(), logs); + let l1_client = mock_l1_client(200.into(), logs, SLChainId(9)); - let mut provider = L1DataProvider::new(Box::new(l1_client), DIAMOND_PROXY_ADDRESS).unwrap(); + let mut provider = create_l1_data_provider(Box::new(l1_client), pool.clone()).await; let output = provider .batch_details(L1BatchNumber(1), &create_l2_block(1)) .await @@ -343,16 +484,15 @@ async fn combined_data_provider_errors() { .await .unwrap(); - let mut eth_params = EthereumParameters::new(1_000_000); + let mut eth_params = EthereumParameters::new_l1(1_000_000); seal_l1_batch_with_timestamp(&mut storage, L1BatchNumber(1), 50_000).await; - eth_params.push_commit(51_000); + eth_params.push_commit(L1BatchNumber(1), 51_000); seal_l1_batch_with_timestamp(&mut storage, L1BatchNumber(2), 52_000).await; let mut main_node_client = MockMainNodeClient::default(); 
main_node_client.insert_batch(L1BatchNumber(2), H256::repeat_byte(2)); let mut provider = CombinedDataProvider::new(main_node_client); - let l1_provider = - L1DataProvider::new(Box::new(eth_params.client()), DIAMOND_PROXY_ADDRESS).unwrap(); + let l1_provider = create_l1_data_provider(Box::new(eth_params.client()), pool.clone()).await; provider.set_l1(l1_provider); // L1 batch #1 should be obtained from L1 diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index dae2ef8cd0c0..f02baeda0b16 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -112,7 +112,7 @@ async fn submit_tee_proof() { .await .expect("Failed to save attestation"); - // resend the same request; this time, it should be successful. + // resend the same request; this time, it should be successful let response = send_submit_tee_proof_request(&app, &uri, &tee_proof_request).await; assert_eq!(response.status(), StatusCode::OK); diff --git a/core/node/shared_metrics/Cargo.toml b/core/node/shared_metrics/Cargo.toml index f30a2ba35334..618888ffddc0 100644 --- a/core/node/shared_metrics/Cargo.toml +++ b/core/node/shared_metrics/Cargo.toml @@ -11,6 +11,7 @@ keywords.workspace = true categories.workspace = true [dependencies] +serde.workspace = true vise.workspace = true tracing.workspace = true zksync_types.workspace = true diff --git a/core/node/shared_metrics/build.rs b/core/node/shared_metrics/build.rs index d37fef0b1b0c..ff339ae5589c 100644 --- a/core/node/shared_metrics/build.rs +++ b/core/node/shared_metrics/build.rs @@ -4,43 +4,72 @@ use std::{ env, fs, io::{self, Write}, path::Path, + process::Command, }; use rustc_version::{Channel, LlvmVersion}; -fn print_rust_meta(out: &mut impl Write, meta: &rustc_version::VersionMeta) -> io::Result<()> { +fn print_binary_meta(out: &mut impl Write) -> io::Result<()> { + let rustc_meta = rustc_version::version_meta().expect("Failed obtaining rustc metadata"); + 
writeln!( out, - "pub(crate) const RUSTC_METADATA: RustcMetadata = RustcMetadata {{ \ + "pub const RUST_METADATA: RustMetadata = RustMetadata {{ \ version: {semver:?}, \ commit_hash: {commit_hash:?}, \ commit_date: {commit_date:?}, \ channel: {channel:?}, \ host: {host:?}, \ - llvm: {llvm:?} \ + llvm: {llvm:?}, \ + }}; + + pub const GIT_METADATA: GitMetadata = GitMetadata {{ \ + branch: {git_branch:?}, \ + revision: {git_revision:?} \ }};", - semver = meta.semver.to_string(), - commit_hash = meta.commit_hash, - commit_date = meta.commit_date, - channel = match meta.channel { + semver = rustc_meta.semver.to_string(), + commit_hash = rustc_meta.commit_hash, + commit_date = rustc_meta.commit_date, + channel = match rustc_meta.channel { Channel::Dev => "dev", Channel::Beta => "beta", Channel::Nightly => "nightly", Channel::Stable => "stable", }, - host = meta.host, - llvm = meta.llvm_version.as_ref().map(LlvmVersion::to_string), + host = rustc_meta.host, + llvm = rustc_meta.llvm_version.as_ref().map(LlvmVersion::to_string), + git_branch = git_branch(), + git_revision = git_revision() ) } +/// Outputs the current git branch as a string literal. +pub fn git_branch() -> Option { + run_cmd_opt("git", &["rev-parse", "--abbrev-ref", "HEAD"]) +} + +/// Outputs the current git commit hash as a string literal. 
+pub fn git_revision() -> Option { + run_cmd_opt("git", &["rev-parse", "--short", "HEAD"]) +} + +fn run_cmd_opt(cmd: &str, args: &[&str]) -> Option { + let output = Command::new(cmd).args(args).output().ok()?; + if output.status.success() { + String::from_utf8(output.stdout) + .ok() + .map(|s| s.trim().to_string()) + } else { + None + } +} + fn main() { let out_dir = env::var("OUT_DIR").expect("`OUT_DIR` env var not set for build script"); - let rustc_meta = rustc_version::version_meta().expect("Failed obtaining rustc metadata"); - let metadata_module_path = Path::new(&out_dir).join("metadata_values.rs"); let metadata_module = fs::File::create(metadata_module_path).expect("cannot create metadata module"); let mut metadata_module = io::BufWriter::new(metadata_module); - print_rust_meta(&mut metadata_module, &rustc_meta).expect("failed printing rustc metadata"); + print_binary_meta(&mut metadata_module).expect("failed printing binary metadata"); } diff --git a/core/node/shared_metrics/src/lib.rs b/core/node/shared_metrics/src/lib.rs index 2c41ec9293a0..001293a72bc2 100644 --- a/core/node/shared_metrics/src/lib.rs +++ b/core/node/shared_metrics/src/lib.rs @@ -8,7 +8,7 @@ use vise::{ use zksync_dal::transactions_dal::L2TxSubmissionResult; use zksync_types::aggregated_operations::AggregatedActionType; -pub mod rustc; +pub mod metadata; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, EncodeLabelValue, EncodeLabelSet)] #[metrics(label = "stage", rename_all = "snake_case")] diff --git a/core/node/shared_metrics/src/metadata.rs b/core/node/shared_metrics/src/metadata.rs new file mode 100644 index 000000000000..bc7e52ae1e97 --- /dev/null +++ b/core/node/shared_metrics/src/metadata.rs @@ -0,0 +1,66 @@ +use serde::Serialize; +use vise::{EncodeLabelSet, Info, Metrics}; + +use self::values::{GIT_METADATA, RUST_METADATA}; + +mod values { + use super::{GitMetadata, RustMetadata}; + + include!(concat!(env!("OUT_DIR"), "/metadata_values.rs")); +} + +/// Rust metadata of the 
compiled binary. +#[derive(Debug, EncodeLabelSet, Serialize)] +pub struct RustMetadata { + pub version: &'static str, + pub commit_hash: Option<&'static str>, + pub commit_date: Option<&'static str>, + pub channel: &'static str, + pub host: &'static str, + pub llvm: Option<&'static str>, +} + +/// Git metadata of the compiled binary. +#[derive(Debug, EncodeLabelSet, Serialize)] +pub struct GitMetadata { + pub branch: Option<&'static str>, + pub revision: Option<&'static str>, +} + +#[derive(Debug, Metrics)] +#[metrics(prefix = "rust")] +pub struct RustMetrics { + /// General information about the compiled binary. + info: Info, +} + +impl RustMetrics { + pub fn initialize(&self) -> &RustMetadata { + tracing::info!("Rust metadata for this binary: {RUST_METADATA:?}"); + self.info.set(RUST_METADATA).ok(); + // `unwrap` is safe due to setting the value above + self.info.get().unwrap() + } +} + +#[derive(Debug, Metrics)] +#[metrics(prefix = "git")] +pub struct GitMetrics { + /// General information about the compiled binary. + info: Info, +} + +impl GitMetrics { + pub fn initialize(&self) -> &GitMetadata { + tracing::info!("Git metadata for this binary: {GIT_METADATA:?}"); + self.info.set(GIT_METADATA).ok(); + // `unwrap` is safe due to setting the value above + self.info.get().unwrap() + } +} + +#[vise::register] +pub static RUST_METRICS: vise::Global = vise::Global::new(); + +#[vise::register] +pub static GIT_METRICS: vise::Global = vise::Global::new(); diff --git a/core/node/shared_metrics/src/rustc.rs b/core/node/shared_metrics/src/rustc.rs deleted file mode 100644 index 11165dbf51b0..000000000000 --- a/core/node/shared_metrics/src/rustc.rs +++ /dev/null @@ -1,36 +0,0 @@ -use vise::{EncodeLabelSet, Info, Metrics}; - -mod values { - use super::RustcMetadata; - include!(concat!(env!("OUT_DIR"), "/metadata_values.rs")); -} - -use values::RUSTC_METADATA; - -/// Metadata of Rust compiler used to compile the crate. 
-#[derive(Debug, EncodeLabelSet)] -pub struct RustcMetadata { - pub version: &'static str, - pub commit_hash: Option<&'static str>, - pub commit_date: Option<&'static str>, - pub channel: &'static str, - pub host: &'static str, - pub llvm: Option<&'static str>, -} - -#[derive(Debug, Metrics)] -#[metrics(prefix = "rust")] -pub struct RustMetrics { - /// General information about the Rust compiler. - info: Info, -} - -impl RustMetrics { - pub fn initialize(&self) { - tracing::info!("Metadata for rustc that this binary was compiled with: {RUSTC_METADATA:?}"); - self.info.set(RUSTC_METADATA).ok(); - } -} - -#[vise::register] -pub static RUST_METRICS: vise::Global = vise::Global::new(); diff --git a/core/node/state_keeper/Cargo.toml b/core/node/state_keeper/Cargo.toml index 49d4209a4c4f..979a11dcbb4a 100644 --- a/core/node/state_keeper/Cargo.toml +++ b/core/node/state_keeper/Cargo.toml @@ -16,6 +16,7 @@ vise.workspace = true zksync_multivm.workspace = true zksync_types.workspace = true zksync_dal.workspace = true +zksync_health_check.workspace = true zksync_state.workspace = true zksync_storage.workspace = true zksync_mempool.workspace = true @@ -38,6 +39,7 @@ tracing.workspace = true futures.workspace = true once_cell.workspace = true itertools.workspace = true +serde.workspace = true hex.workspace = true [dev-dependencies] diff --git a/core/node/state_keeper/src/executor/mod.rs b/core/node/state_keeper/src/executor/mod.rs index 903dae2f1cad..a6c7b92fa249 100644 --- a/core/node/state_keeper/src/executor/mod.rs +++ b/core/node/state_keeper/src/executor/mod.rs @@ -1,25 +1,23 @@ use zksync_multivm::interface::{ BatchTransactionExecutionResult, Call, CompressedBytecodeInfo, ExecutionResult, Halt, - VmExecutionResultAndLogs, + VmExecutionMetrics, VmExecutionResultAndLogs, }; use zksync_types::Transaction; pub use zksync_vm_executor::batch::MainBatchExecutorFactory; -use crate::ExecutionMetricsForCriteria; - #[cfg(test)] mod tests; /// State keeper representation of a 
transaction executed in the virtual machine. /// /// A separate type allows to be more typesafe when dealing with halted transactions. It also simplifies testing seal criteria -/// (i.e., without picking transactions that actually produce appropriate `ExecutionMetricsForCriteria`). +/// (i.e., without picking transactions that actually produce appropriate `VmExecutionMetrics`). #[derive(Debug, Clone)] pub enum TxExecutionResult { /// Successful execution of the tx and the block tip dry run. Success { tx_result: Box, - tx_metrics: Box, + tx_metrics: Box, compressed_bytecodes: Vec, call_tracer_result: Vec, gas_remaining: u32, @@ -38,7 +36,7 @@ impl TxExecutionResult { } => Self::BootloaderOutOfGasForTx, ExecutionResult::Halt { reason } => Self::RejectedByVm { reason }, _ => Self::Success { - tx_metrics: Box::new(ExecutionMetricsForCriteria::new(Some(tx), &res.tx_result)), + tx_metrics: Box::new(res.tx_result.get_execution_metrics(Some(tx))), gas_remaining: res.tx_result.statistics.gas_remaining, tx_result: res.tx_result.clone(), compressed_bytecodes: res.compressed_bytecodes, diff --git a/core/node/state_keeper/src/health.rs b/core/node/state_keeper/src/health.rs new file mode 100644 index 000000000000..4fc86263e439 --- /dev/null +++ b/core/node/state_keeper/src/health.rs @@ -0,0 +1,30 @@ +use serde::{Deserialize, Serialize}; +use zksync_health_check::{Health, HealthStatus}; +use zksync_types::{L1BatchNumber, L2BlockNumber, H256}; + +use crate::io::IoCursor; + +#[derive(Debug, Serialize, Deserialize)] +pub struct StateKeeperHealthDetails { + pub next_l2_block: L2BlockNumber, + pub prev_l2_block_hash: H256, + pub prev_l2_block_timestamp: u64, + pub l1_batch: L1BatchNumber, +} + +impl From for Health { + fn from(details: StateKeeperHealthDetails) -> Self { + Self::from(HealthStatus::Ready).with_details(details) + } +} + +impl From<&IoCursor> for StateKeeperHealthDetails { + fn from(details: &IoCursor) -> Self { + Self { + next_l2_block: details.next_l2_block, + 
prev_l2_block_hash: details.prev_l2_block_hash, + prev_l2_block_timestamp: details.prev_l2_block_timestamp, + l1_batch: details.l1_batch, + } + } +} diff --git a/core/node/state_keeper/src/io/mempool.rs b/core/node/state_keeper/src/io/mempool.rs index 991ecee699c3..cf354891236b 100644 --- a/core/node/state_keeper/src/io/mempool.rs +++ b/core/node/state_keeper/src/io/mempool.rs @@ -206,6 +206,21 @@ impl StateKeeperIO for MempoolIO { .protocol_version_id_by_timestamp(timestamp) .await .context("Failed loading protocol version")?; + let previous_protocol_version = storage + .blocks_dal() + .pending_protocol_version() + .await + .context("Failed loading previous protocol version")?; + let batch_with_upgrade_tx = if previous_protocol_version != protocol_version { + storage + .protocol_versions_dal() + .get_protocol_upgrade_tx(protocol_version) + .await + .context("Failed loading protocol upgrade tx")? + .is_some() + } else { + false + }; drop(storage); // We create a new filter each time, since parameters may change and a previously @@ -217,7 +232,8 @@ impl StateKeeperIO for MempoolIO { .await .context("failed creating L2 transaction filter")?; - if !self.mempool.has_next(&self.filter) { + // We do not populate mempool with upgrade tx so it should be checked separately. 
+ if !batch_with_upgrade_tx && !self.mempool.has_next(&self.filter) { tokio::time::sleep(self.delay_interval).await; continue; } diff --git a/core/node/state_keeper/src/io/persistence.rs b/core/node/state_keeper/src/io/persistence.rs index d8fd99bfc95d..8db7fe4120ed 100644 --- a/core/node/state_keeper/src/io/persistence.rs +++ b/core/node/state_keeper/src/io/persistence.rs @@ -386,8 +386,8 @@ mod tests { use zksync_multivm::interface::{FinishedL1Batch, VmExecutionMetrics}; use zksync_node_genesis::{insert_genesis_batch, GenesisParams}; use zksync_types::{ - api::TransactionStatus, block::BlockGasCount, h256_to_u256, writes::StateDiffRecord, - L1BatchNumber, L2BlockNumber, StorageLogKind, H256, U256, + api::TransactionStatus, h256_to_u256, writes::StateDiffRecord, L1BatchNumber, + L2BlockNumber, StorageLogKind, H256, U256, }; use super::*; @@ -508,7 +508,6 @@ mod tests { tx, tx_result, vec![], - BlockGasCount::default(), VmExecutionMetrics::default(), vec![], ); diff --git a/core/node/state_keeper/src/io/seal_logic/l2_block_seal_subtasks.rs b/core/node/state_keeper/src/io/seal_logic/l2_block_seal_subtasks.rs index a6356a838602..701e121285d4 100644 --- a/core/node/state_keeper/src/io/seal_logic/l2_block_seal_subtasks.rs +++ b/core/node/state_keeper/src/io/seal_logic/l2_block_seal_subtasks.rs @@ -538,10 +538,10 @@ mod tests { user_l2_to_l1_logs, system_l2_to_l1_logs: Default::default(), new_factory_deps, - l1_gas_count: Default::default(), block_execution_metrics: Default::default(), txs_encoding_size: Default::default(), payload_encoding_size: Default::default(), + l1_tx_count: 0, timestamp: 1, number: L2BlockNumber(1), prev_block_hash: Default::default(), diff --git a/core/node/state_keeper/src/io/seal_logic/mod.rs b/core/node/state_keeper/src/io/seal_logic/mod.rs index 419413e127d3..655bf182ba8f 100644 --- a/core/node/state_keeper/src/io/seal_logic/mod.rs +++ b/core/node/state_keeper/src/io/seal_logic/mod.rs @@ -145,7 +145,6 @@ impl UpdatesManager { 
.mark_l1_batch_as_sealed( &l1_batch, &final_bootloader_memory, - self.pending_l1_gas_count(), &finished_batch.final_execution_state.storage_refunds, &finished_batch.final_execution_state.pubdata_costs, self.pending_execution_metrics().circuit_statistic, diff --git a/core/node/state_keeper/src/io/tests/mod.rs b/core/node/state_keeper/src/io/tests/mod.rs index 5a44bf71ad39..536efe82804a 100644 --- a/core/node/state_keeper/src/io/tests/mod.rs +++ b/core/node/state_keeper/src/io/tests/mod.rs @@ -13,11 +13,13 @@ use zksync_multivm::{ use zksync_node_test_utils::prepare_recovery_snapshot; use zksync_system_constants::KNOWN_CODES_STORAGE_ADDRESS; use zksync_types::{ - block::{BlockGasCount, L2BlockHasher}, + block::L2BlockHasher, bytecode::BytecodeHash, commitment::{L1BatchCommitmentMode, PubdataParams}, fee_model::{BatchFeeInput, PubdataIndependentBatchFeeModelInput}, l2::L2Tx, + protocol_upgrade::ProtocolUpgradeTx, + protocol_version::ProtocolSemanticVersion, AccountTreeId, Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersion, ProtocolVersionId, StorageKey, TransactionTimeRangeConstraint, H256, U256, }; @@ -279,7 +281,6 @@ async fn processing_storage_logs_when_sealing_l2_block() { l2_block.extend_from_executed_transaction( tx, execution_result, - BlockGasCount::default(), VmExecutionMetrics::default(), vec![], vec![], @@ -297,7 +298,6 @@ async fn processing_storage_logs_when_sealing_l2_block() { l2_block.extend_from_executed_transaction( tx, execution_result, - BlockGasCount::default(), VmExecutionMetrics::default(), vec![], vec![], @@ -371,7 +371,6 @@ async fn processing_events_when_sealing_l2_block() { l2_block.extend_from_executed_transaction( tx, execution_result, - BlockGasCount::default(), VmExecutionMetrics::default(), vec![], vec![], @@ -476,7 +475,6 @@ async fn processing_dynamic_factory_deps_when_sealing_l2_block() { l2_block.extend_from_executed_transaction( tx, execution_result, - BlockGasCount::default(), VmExecutionMetrics::default(), vec![], 
vec![], @@ -571,7 +569,6 @@ async fn l2_block_processing_after_snapshot_recovery(commitment_mode: L1BatchCom tx.into(), create_execution_result([]), vec![], - BlockGasCount::default(), VmExecutionMetrics::default(), vec![], ); @@ -853,6 +850,52 @@ async fn test_mempool_with_timestamp_assertion() { ); } +#[tokio::test] +async fn test_batch_params_with_protocol_upgrade_tx() { + let connection_pool = ConnectionPool::::constrained_test_pool(2).await; + let tester = Tester::new(L1BatchCommitmentMode::Rollup); + // Genesis is needed for proper mempool initialization. + tester.genesis(&connection_pool).await; + + let (mut mempool, _) = tester.create_test_mempool_io(connection_pool.clone()).await; + let (cursor, _) = mempool.initialize().await.unwrap(); + + // Check that new batch params are not returned when there is no tx to process. + let new_batch_params = mempool + .wait_for_new_batch_params(&cursor, Duration::from_millis(100)) + .await + .unwrap(); + assert!(new_batch_params.is_none()); + + // Insert protocol version with upgrade tx. 
+ let protocol_upgrade_tx = ProtocolUpgradeTx { + execute: Default::default(), + common_data: Default::default(), + received_timestamp_ms: 0, + }; + let version = ProtocolVersion { + version: ProtocolSemanticVersion { + minor: ProtocolVersionId::next(), + patch: 0.into(), + }, + tx: Some(protocol_upgrade_tx), + ..Default::default() + }; + connection_pool + .connection() + .await + .unwrap() + .protocol_versions_dal() + .save_protocol_version_with_tx(&version) + .await + .unwrap(); + let new_batch_params = mempool + .wait_for_new_batch_params(&cursor, Duration::from_millis(100)) + .await + .unwrap(); + assert!(new_batch_params.is_some()); +} + async fn insert_l2_transaction(storage: &mut Connection<'_, Core>, tx: &L2Tx) { storage .transactions_dal() diff --git a/core/node/state_keeper/src/keeper.rs b/core/node/state_keeper/src/keeper.rs index fe37ee8d8dd6..c892fd8534ec 100644 --- a/core/node/state_keeper/src/keeper.rs +++ b/core/node/state_keeper/src/keeper.rs @@ -7,6 +7,7 @@ use std::{ use anyhow::Context as _; use tokio::sync::watch; use tracing::{info_span, Instrument}; +use zksync_health_check::{HealthUpdater, ReactiveHealthCheck}; use zksync_multivm::{ interface::{ executor::{BatchExecutor, BatchExecutorFactory}, @@ -24,12 +25,12 @@ use zksync_types::{ use crate::{ executor::TxExecutionResult, + health::StateKeeperHealthDetails, io::{IoCursor, L1BatchParams, L2BlockParams, OutputHandler, PendingBatchData, StateKeeperIO}, metrics::{AGGREGATION_METRICS, KEEPER_METRICS, L1_BATCH_METRICS}, seal_criteria::{ConditionalSealer, SealData, SealResolution, UnexecutableReason}, - types::ExecutionMetricsForCriteria, updates::UpdatesManager, - utils::gas_count_from_writes, + utils::is_canceled, }; /// Amount of time to block on waiting for some resource. The exact value is not really important, @@ -65,17 +66,16 @@ impl Error { /// a sequence of executed L2 blocks and batches. 
#[derive(Debug)] pub struct ZkSyncStateKeeper { - stop_receiver: watch::Receiver, io: Box, output_handler: OutputHandler, batch_executor: Box>, sealer: Arc, storage_factory: Arc, + health_updater: HealthUpdater, } impl ZkSyncStateKeeper { pub fn new( - stop_receiver: watch::Receiver, sequencer: Box, batch_executor: Box>, output_handler: OutputHandler, @@ -83,17 +83,17 @@ impl ZkSyncStateKeeper { storage_factory: Arc, ) -> Self { Self { - stop_receiver, io: sequencer, batch_executor, output_handler, sealer, storage_factory, + health_updater: ReactiveHealthCheck::new("state_keeper").1, } } - pub async fn run(mut self) -> anyhow::Result<()> { - match self.run_inner().await { + pub async fn run(mut self, stop_receiver: watch::Receiver) -> anyhow::Result<()> { + match self.run_inner(stop_receiver).await { Ok(_) => unreachable!(), Err(Error::Fatal(err)) => Err(err).context("state_keeper failed"), Err(Error::Canceled) => { @@ -104,9 +104,14 @@ impl ZkSyncStateKeeper { } /// Fallible version of `run` routine that allows to easily exit upon cancellation. - async fn run_inner(&mut self) -> Result { + async fn run_inner( + &mut self, + mut stop_receiver: watch::Receiver, + ) -> Result { let (cursor, pending_batch_params) = self.io.initialize().await?; self.output_handler.initialize(&cursor).await?; + self.health_updater + .update(StateKeeperHealthDetails::from(&cursor).into()); tracing::info!( "Starting state keeper. 
Next l1 batch to seal: {}, next L2 block to seal: {}", cursor.l1_batch, @@ -135,7 +140,7 @@ impl ZkSyncStateKeeper { None => { tracing::info!("There is no open pending batch, starting a new empty batch"); let (system_env, l1_batch_env, pubdata_params) = self - .wait_for_new_batch_env(&cursor) + .wait_for_new_batch_env(&cursor, &mut stop_receiver) .await .map_err(|e| e.context("wait_for_new_batch_params()"))?; PendingBatchData { @@ -154,22 +159,29 @@ impl ZkSyncStateKeeper { .await?; let mut batch_executor = self - .create_batch_executor(l1_batch_env.clone(), system_env.clone(), pubdata_params) + .create_batch_executor( + l1_batch_env.clone(), + system_env.clone(), + pubdata_params, + &stop_receiver, + ) .await?; self.restore_state( &mut *batch_executor, &mut updates_manager, pending_l2_blocks, + &stop_receiver, ) .await?; let mut l1_batch_seal_delta: Option = None; - while !self.is_canceled() { + while !is_canceled(&stop_receiver) { // This function will run until the batch can be sealed. self.process_l1_batch( &mut *batch_executor, &mut updates_manager, protocol_upgrade_tx, + &stop_receiver, ) .await?; @@ -178,8 +190,9 @@ impl ZkSyncStateKeeper { self.seal_l2_block(&updates_manager).await?; // We've sealed the L2 block that we had, but we still need to set up the timestamp // for the fictive L2 block. - let new_l2_block_params = - self.wait_for_new_l2_block_params(&updates_manager).await?; + let new_l2_block_params = self + .wait_for_new_l2_block_params(&updates_manager, &stop_receiver) + .await?; Self::start_next_l2_block( new_l2_block_params, &mut updates_manager, @@ -204,11 +217,17 @@ impl ZkSyncStateKeeper { // Start the new batch. 
next_cursor.l1_batch += 1; - (system_env, l1_batch_env, pubdata_params) = - self.wait_for_new_batch_env(&next_cursor).await?; + (system_env, l1_batch_env, pubdata_params) = self + .wait_for_new_batch_env(&next_cursor, &mut stop_receiver) + .await?; updates_manager = UpdatesManager::new(&l1_batch_env, &system_env, pubdata_params); batch_executor = self - .create_batch_executor(l1_batch_env.clone(), system_env.clone(), pubdata_params) + .create_batch_executor( + l1_batch_env.clone(), + system_env.clone(), + pubdata_params, + &stop_receiver, + ) .await?; let version_changed = system_env.version != sealed_batch_protocol_version; @@ -226,10 +245,11 @@ impl ZkSyncStateKeeper { l1_batch_env: L1BatchEnv, system_env: SystemEnv, pubdata_params: PubdataParams, + stop_receiver: &watch::Receiver, ) -> Result>, Error> { let storage = self .storage_factory - .access_storage(&self.stop_receiver, l1_batch_env.number - 1) + .access_storage(stop_receiver, l1_batch_env.number - 1) .await .context("failed creating VM storage")? .ok_or(Error::Canceled)?; @@ -287,10 +307,6 @@ impl ZkSyncStateKeeper { Ok(protocol_upgrade_tx) } - fn is_canceled(&self) -> bool { - *self.stop_receiver.borrow() - } - async fn load_upgrade_tx( &mut self, protocol_version: ProtocolVersionId, @@ -310,8 +326,9 @@ impl ZkSyncStateKeeper { async fn wait_for_new_batch_params( &mut self, cursor: &IoCursor, + stop_receiver: &watch::Receiver, ) -> Result { - while !self.is_canceled() { + while !is_canceled(stop_receiver) { if let Some(params) = self .io .wait_for_new_batch_params(cursor, POLL_WAIT_DURATION) @@ -332,10 +349,13 @@ impl ZkSyncStateKeeper { async fn wait_for_new_batch_env( &mut self, cursor: &IoCursor, + stop_receiver: &mut watch::Receiver, ) -> Result<(SystemEnv, L1BatchEnv, PubdataParams), Error> { // `io.wait_for_new_batch_params(..)` is not cancel-safe; once we get new batch params, we must hold onto them // until we get the rest of parameters from I/O or receive a stop signal. 
- let params = self.wait_for_new_batch_params(cursor).await?; + let params = self + .wait_for_new_batch_params(cursor, stop_receiver) + .await?; let contracts = self .io .load_base_system_contracts(params.protocol_version, cursor) @@ -353,7 +373,7 @@ impl ZkSyncStateKeeper { let previous_batch_hash = hash_result.context("cannot load state hash for previous L1 batch")?; Ok(params.into_env(self.io.chain_id(), contracts, cursor, previous_batch_hash)) } - _ = self.stop_receiver.changed() => Err(Error::Canceled), + _ = stop_receiver.changed() => Err(Error::Canceled), } } @@ -367,16 +387,20 @@ impl ZkSyncStateKeeper { async fn wait_for_new_l2_block_params( &mut self, updates: &UpdatesManager, + stop_receiver: &watch::Receiver, ) -> Result { let latency = KEEPER_METRICS.wait_for_l2_block_params.start(); let cursor = updates.io_cursor(); - while !self.is_canceled() { + while !is_canceled(stop_receiver) { if let Some(params) = self .io .wait_for_new_l2_block_params(&cursor, POLL_WAIT_DURATION) .await .context("error waiting for new L2 block params")? { + self.health_updater + .update(StateKeeperHealthDetails::from(&cursor).into()); + latency.observe(); return Ok(params); } @@ -439,6 +463,7 @@ impl ZkSyncStateKeeper { batch_executor: &mut dyn BatchExecutor, updates_manager: &mut UpdatesManager, l2_blocks_to_reexecute: Vec, + stop_receiver: &watch::Receiver, ) -> Result<(), Error> { if l2_blocks_to_reexecute.is_empty() { return Ok(()); @@ -474,7 +499,7 @@ impl ZkSyncStateKeeper { let TxExecutionResult::Success { tx_result, - tx_metrics, + tx_metrics: tx_execution_metrics, compressed_bytecodes, call_tracer_result, .. 
@@ -491,11 +516,6 @@ impl ZkSyncStateKeeper { .into()); }; - let ExecutionMetricsForCriteria { - l1_gas: tx_l1_gas_this_tx, - execution_metrics: tx_execution_metrics, - } = *tx_metrics; - let tx_hash = tx.hash(); let is_l1 = tx.is_l1(); let exec_result_status = tx_result.result.clone(); @@ -505,20 +525,17 @@ impl ZkSyncStateKeeper { tx, *tx_result, compressed_bytecodes, - tx_l1_gas_this_tx, - tx_execution_metrics, + *tx_execution_metrics, call_tracer_result, ); tracing::debug!( "Finished re-executing tx {tx_hash} by {initiator_account} (is_l1: {is_l1}, \ #{idx_in_l1_batch} in L1 batch #{l1_batch_number}, #{idx_in_l2_block} in L2 block #{l2_block_number}); \ - status: {exec_result_status:?}. L1 gas spent: {tx_l1_gas_this_tx:?}, total in L1 batch: {pending_l1_gas:?}, \ - tx execution metrics: {tx_execution_metrics:?}, block execution metrics: {block_execution_metrics:?}", + status: {exec_result_status:?}. Tx execution metrics: {tx_execution_metrics:?}, block execution metrics: {block_execution_metrics:?}", idx_in_l1_batch = updates_manager.pending_executed_transactions_len(), l1_batch_number = updates_manager.l1_batch.number, idx_in_l2_block = updates_manager.l2_block.executed_transactions.len(), - pending_l1_gas = updates_manager.pending_l1_gas_count(), block_execution_metrics = updates_manager.pending_execution_metrics() ); } @@ -530,7 +547,7 @@ impl ZkSyncStateKeeper { // We've processed all the L2 blocks, and right now we're initializing the next *actual* L2 block. 
let new_l2_block_params = self - .wait_for_new_l2_block_params(updates_manager) + .wait_for_new_l2_block_params(updates_manager, stop_receiver) .await .map_err(|e| e.context("wait_for_new_l2_block_params"))?; Self::start_next_l2_block(new_l2_block_params, updates_manager, batch_executor).await?; @@ -547,13 +564,14 @@ impl ZkSyncStateKeeper { batch_executor: &mut dyn BatchExecutor, updates_manager: &mut UpdatesManager, protocol_upgrade_tx: Option, + stop_receiver: &watch::Receiver, ) -> Result<(), Error> { if let Some(protocol_upgrade_tx) = protocol_upgrade_tx { self.process_upgrade_tx(batch_executor, updates_manager, protocol_upgrade_tx) .await?; } - while !self.is_canceled() { + while !is_canceled(stop_receiver) { let full_latency = KEEPER_METRICS.process_l1_batch_loop_iteration.start(); if self @@ -576,7 +594,7 @@ impl ZkSyncStateKeeper { self.seal_l2_block(updates_manager).await?; let new_l2_block_params = self - .wait_for_new_l2_block_params(updates_manager) + .wait_for_new_l2_block_params(updates_manager, stop_receiver) .await .map_err(|e| e.context("wait_for_new_l2_block_params"))?; tracing::debug!( @@ -612,7 +630,7 @@ impl ZkSyncStateKeeper { SealResolution::NoSeal | SealResolution::IncludeAndSeal => { let TxExecutionResult::Success { tx_result, - tx_metrics, + tx_metrics: tx_execution_metrics, call_tracer_result, compressed_bytecodes, .. 
@@ -622,16 +640,11 @@ impl ZkSyncStateKeeper { "Tx inclusion seal resolution must be a result of a successful tx execution", ); }; - let ExecutionMetricsForCriteria { - l1_gas: tx_l1_gas_this_tx, - execution_metrics: tx_execution_metrics, - } = *tx_metrics; updates_manager.extend_from_executed_transaction( tx, *tx_result, compressed_bytecodes, - tx_l1_gas_this_tx, - tx_execution_metrics, + *tx_execution_metrics, call_tracer_result, ); } @@ -687,7 +700,7 @@ impl ZkSyncStateKeeper { SealResolution::NoSeal | SealResolution::IncludeAndSeal => { let TxExecutionResult::Success { tx_result, - tx_metrics, + tx_metrics: tx_execution_metrics, compressed_bytecodes, call_tracer_result, .. @@ -702,17 +715,11 @@ impl ZkSyncStateKeeper { anyhow::bail!("Failed upgrade tx {:?}", tx.hash()); } - let ExecutionMetricsForCriteria { - l1_gas: tx_l1_gas_this_tx, - execution_metrics: tx_execution_metrics, - .. - } = *tx_metrics; updates_manager.extend_from_executed_transaction( tx, *tx_result, compressed_bytecodes, - tx_l1_gas_this_tx, - tx_execution_metrics, + *tx_execution_metrics, call_tracer_result, ); Ok(()) @@ -796,20 +803,15 @@ impl ZkSyncStateKeeper { } TxExecutionResult::Success { tx_result, - tx_metrics, + tx_metrics: tx_execution_metrics, gas_remaining, .. } => { let tx_execution_status = &tx_result.result; - let ExecutionMetricsForCriteria { - l1_gas: tx_l1_gas_this_tx, - execution_metrics: tx_execution_metrics, - } = **tx_metrics; tracing::trace!( "finished tx {:?} by {:?} (is_l1: {}) (#{} in l1 batch {}) (#{} in L2 block {}) \ - status: {:?}. L1 gas spent: {:?}, total in l1 batch: {:?}, \ - tx execution metrics: {:?}, block execution metrics: {:?}", + status: {:?}. 
Tx execution metrics: {:?}, block execution metrics: {:?}", tx.hash(), tx.initiator_account(), tx.is_l1(), @@ -818,10 +820,8 @@ impl ZkSyncStateKeeper { updates_manager.l2_block.executed_transactions.len() + 1, updates_manager.l2_block.number, tx_execution_status, - tx_l1_gas_this_tx, - updates_manager.pending_l1_gas_count() + tx_l1_gas_this_tx, &tx_execution_metrics, - updates_manager.pending_execution_metrics() + tx_execution_metrics, + updates_manager.pending_execution_metrics() + **tx_execution_metrics, ); let encoding_len = tx.encoding_len(); @@ -831,20 +831,11 @@ impl ZkSyncStateKeeper { .storage_writes_deduplicator .apply_and_rollback(logs_to_apply_iter.clone()); - let block_writes_l1_gas = gas_count_from_writes( - &block_writes_metrics, - updates_manager.protocol_version(), - ); - let tx_writes_metrics = StorageWritesDeduplicator::apply_on_empty_state(logs_to_apply_iter); - let tx_writes_l1_gas = - gas_count_from_writes(&tx_writes_metrics, updates_manager.protocol_version()); - let tx_gas_excluding_writes = tx_l1_gas_this_tx; let tx_data = SealData { - execution_metrics: tx_execution_metrics, - gas_count: tx_gas_excluding_writes + tx_writes_l1_gas, + execution_metrics: **tx_execution_metrics, cumulative_size: encoding_len, writes_metrics: tx_writes_metrics, gas_remaining: *gas_remaining, @@ -852,19 +843,18 @@ impl ZkSyncStateKeeper { let block_data = SealData { execution_metrics: tx_data.execution_metrics + updates_manager.pending_execution_metrics(), - gas_count: tx_gas_excluding_writes - + block_writes_l1_gas - + updates_manager.pending_l1_gas_count(), cumulative_size: tx_data.cumulative_size + updates_manager.pending_txs_encoding_size(), writes_metrics: block_writes_metrics, gas_remaining: *gas_remaining, }; + let is_tx_l1 = tx.is_l1() as usize; self.sealer.should_seal_l1_batch( updates_manager.l1_batch.number.0, updates_manager.batch_timestamp() as u128 * 1_000, updates_manager.pending_executed_transactions_len() + 1, + 
updates_manager.pending_l1_transactions_len() + is_tx_l1, &block_data, &tx_data, updates_manager.protocol_version(), @@ -874,4 +864,9 @@ impl ZkSyncStateKeeper { latency.observe(); Ok((resolution, exec_result)) } + + /// Returns the health check for state keeper. + pub fn health_check(&self) -> ReactiveHealthCheck { + self.health_updater.subscribe() + } } diff --git a/core/node/state_keeper/src/lib.rs b/core/node/state_keeper/src/lib.rs index c12e4163fdd4..001d474cb246 100644 --- a/core/node/state_keeper/src/lib.rs +++ b/core/node/state_keeper/src/lib.rs @@ -7,11 +7,12 @@ pub use self::{ mempool_actor::MempoolFetcher, seal_criteria::SequencerSealer, state_keeper_storage::AsyncRocksdbCache, - types::{ExecutionMetricsForCriteria, MempoolGuard}, + types::MempoolGuard, updates::UpdatesManager, }; pub mod executor; +mod health; pub mod io; mod keeper; mod mempool_actor; diff --git a/core/node/state_keeper/src/seal_criteria/conditional_sealer.rs b/core/node/state_keeper/src/seal_criteria/conditional_sealer.rs index cd00d4f89360..4da7dd463104 100644 --- a/core/node/state_keeper/src/seal_criteria/conditional_sealer.rs +++ b/core/node/state_keeper/src/seal_criteria/conditional_sealer.rs @@ -23,11 +23,13 @@ pub trait ConditionalSealer: 'static + fmt::Debug + Send + Sync { ) -> Option<&'static str>; /// Returns the action that should be taken by the state keeper after executing a transaction. 
+ #[allow(clippy::too_many_arguments)] fn should_seal_l1_batch( &self, l1_batch_number: u32, block_open_timestamp_ms: u128, tx_count: usize, + l1_tx_count: usize, block_data: &SealData, tx_data: &SealData, protocol_version: ProtocolVersionId, @@ -59,6 +61,7 @@ impl ConditionalSealer for SequencerSealer { &self.config, MOCK_BLOCK_TIMESTAMP, TX_COUNT, + TX_COUNT, data, data, protocol_version, @@ -75,6 +78,7 @@ impl ConditionalSealer for SequencerSealer { l1_batch_number: u32, block_open_timestamp_ms: u128, tx_count: usize, + l1_tx_count: usize, block_data: &SealData, tx_data: &SealData, protocol_version: ProtocolVersionId, @@ -91,6 +95,7 @@ impl ConditionalSealer for SequencerSealer { &self.config, block_open_timestamp_ms, tx_count, + l1_tx_count, block_data, tx_data, protocol_version, @@ -132,13 +137,14 @@ impl SequencerSealer { fn default_sealers(config: &StateKeeperConfig) -> Vec> { vec![ Box::new(criteria::SlotsCriterion), - Box::new(criteria::GasCriterion), Box::new(criteria::PubDataBytesCriterion { max_pubdata_per_batch: config.max_pubdata_per_batch, }), Box::new(criteria::CircuitsCriterion), Box::new(criteria::TxEncodingSizeCriterion), Box::new(criteria::GasForBatchTipCriterion), + Box::new(criteria::L1L2TxsCriterion), + Box::new(criteria::L2L1LogsCriterion), ] } } @@ -163,6 +169,7 @@ impl ConditionalSealer for NoopSealer { _l1_batch_number: u32, _block_open_timestamp_ms: u128, _tx_count: usize, + _l1_tx_count: usize, _block_data: &SealData, _tx_data: &SealData, _protocol_version: ProtocolVersionId, diff --git a/core/node/state_keeper/src/seal_criteria/criteria/gas.rs b/core/node/state_keeper/src/seal_criteria/criteria/gas.rs deleted file mode 100644 index a97ac6ede353..000000000000 --- a/core/node/state_keeper/src/seal_criteria/criteria/gas.rs +++ /dev/null @@ -1,180 +0,0 @@ -use zksync_types::ProtocolVersionId; - -use crate::{ - seal_criteria::{ - SealCriterion, SealData, SealResolution, StateKeeperConfig, UnexecutableReason, - }, - 
utils::new_block_gas_count, -}; - -/// This is a temporary solution. -/// Instead of checking for gas it simply checks that the contracts' -/// bytecode is large enough. -/// Among all the data which will be published on-chain the contracts' -/// bytecode is by far the largest one and with high probability -/// the slots will run out before the other pubdata becomes too big -#[derive(Debug)] -pub(crate) struct GasCriterion; - -impl SealCriterion for GasCriterion { - fn should_seal( - &self, - config: &StateKeeperConfig, - _block_open_timestamp_ms: u128, - _tx_count: usize, - block_data: &SealData, - tx_data: &SealData, - _protocol_version_id: ProtocolVersionId, - ) -> SealResolution { - let tx_bound = - (config.max_single_tx_gas as f64 * config.reject_tx_at_gas_percentage).round() as u32; - let block_bound = - (config.max_single_tx_gas as f64 * config.close_block_at_gas_percentage).round() as u32; - - if (tx_data.gas_count + new_block_gas_count()).any_field_greater_than(tx_bound) { - UnexecutableReason::TooMuchGas.into() - } else if block_data - .gas_count - .any_field_greater_than(config.max_single_tx_gas) - { - SealResolution::ExcludeAndSeal - } else if block_data.gas_count.any_field_greater_than(block_bound) { - SealResolution::IncludeAndSeal - } else { - SealResolution::NoSeal - } - } - - fn prom_criterion_name(&self) -> &'static str { - "gas" - } -} - -#[cfg(test)] -mod tests { - use zksync_types::block::BlockGasCount; - - use super::*; - - #[test] - fn test_gas_seal_criterion() { - // Create an empty config and only setup fields relevant for the test. - let config = StateKeeperConfig { - max_single_tx_gas: 6000000, - reject_tx_at_gas_percentage: 0.95, - close_block_at_gas_percentage: 0.95, - ..Default::default() - }; - - let criterion = GasCriterion; - - // Empty block should fit into gas criterion. 
- let empty_block_gas = new_block_gas_count(); - let empty_block_resolution = criterion.should_seal( - &config, - 0, - 0, - &SealData { - gas_count: empty_block_gas, - ..SealData::default() - }, - &SealData::default(), - ProtocolVersionId::latest(), - ); - assert_eq!(empty_block_resolution, SealResolution::NoSeal); - - let tx_gas = BlockGasCount { - commit: config.max_single_tx_gas + 1, - prove: 0, - execute: 0, - }; - // Transaction that needs more gas than a block limit should be unexecutable. - let huge_transaction_resolution = criterion.should_seal( - &config, - 0, - 1, - &SealData { - gas_count: empty_block_gas + tx_gas, - ..SealData::default() - }, - &SealData { - gas_count: tx_gas, - ..SealData::default() - }, - ProtocolVersionId::latest(), - ); - assert_eq!( - huge_transaction_resolution, - UnexecutableReason::TooMuchGas.into() - ); - - // Check criterion workflow - let reject_tx_bound = - (config.max_single_tx_gas as f64 * config.reject_tx_at_gas_percentage).round() as u32; - let tx_gas = BlockGasCount { - commit: reject_tx_bound - empty_block_gas.commit, - prove: reject_tx_bound - empty_block_gas.prove, - execute: reject_tx_bound - empty_block_gas.execute, - }; - let resolution_after_first_tx = criterion.should_seal( - &config, - 0, - 1, - &SealData { - gas_count: empty_block_gas + tx_gas, - ..SealData::default() - }, - &SealData { - gas_count: tx_gas, - ..SealData::default() - }, - ProtocolVersionId::latest(), - ); - assert_eq!(resolution_after_first_tx, SealResolution::NoSeal); - - let resolution_after_second_tx = criterion.should_seal( - &config, - 0, - 2, - &SealData { - gas_count: empty_block_gas + tx_gas + tx_gas, - ..SealData::default() - }, - &SealData { - gas_count: tx_gas, - ..SealData::default() - }, - ProtocolVersionId::latest(), - ); - assert_eq!(resolution_after_second_tx, SealResolution::ExcludeAndSeal); - - // Check criterion workflow - let tx_gas = BlockGasCount { - commit: reject_tx_bound - empty_block_gas.commit - 1, - prove: 
reject_tx_bound - empty_block_gas.prove - 1, - execute: reject_tx_bound - empty_block_gas.execute - 1, - }; - let close_bound = - (config.max_single_tx_gas as f64 * config.close_block_at_gas_percentage).round() as u32; - let block_gas = BlockGasCount { - commit: close_bound + 1, - prove: close_bound + 1, - execute: close_bound + 1, - }; - let resolution_after_first_tx = criterion.should_seal( - &config, - 0, - 1, - &SealData { - gas_count: block_gas, - ..SealData::default() - }, - &SealData { - gas_count: tx_gas, - ..SealData::default() - }, - ProtocolVersionId::latest(), - ); - assert_eq!(resolution_after_first_tx, SealResolution::IncludeAndSeal); - } -} diff --git a/core/node/state_keeper/src/seal_criteria/criteria/gas_for_batch_tip.rs b/core/node/state_keeper/src/seal_criteria/criteria/gas_for_batch_tip.rs index 69214406bea5..263123cdcff2 100644 --- a/core/node/state_keeper/src/seal_criteria/criteria/gas_for_batch_tip.rs +++ b/core/node/state_keeper/src/seal_criteria/criteria/gas_for_batch_tip.rs @@ -15,6 +15,7 @@ impl SealCriterion for GasForBatchTipCriterion { _config: &StateKeeperConfig, _block_open_timestamp_ms: u128, tx_count: usize, + _l1_tx_count: usize, _block_data: &SealData, tx_data: &SealData, protocol_version: ProtocolVersionId, @@ -60,6 +61,7 @@ mod tests { &config, Default::default(), 1, + 0, &seal_data, &seal_data, protocol_version, @@ -74,6 +76,7 @@ mod tests { &config, Default::default(), 1, + 0, &seal_data, &seal_data, protocol_version, @@ -87,6 +90,7 @@ mod tests { &config, Default::default(), 2, + 0, &seal_data, &seal_data, protocol_version, diff --git a/core/node/state_keeper/src/seal_criteria/criteria/geometry_seal_criteria.rs b/core/node/state_keeper/src/seal_criteria/criteria/geometry_seal_criteria.rs index 1f3e8d104ce5..657fa1f71800 100644 --- a/core/node/state_keeper/src/seal_criteria/criteria/geometry_seal_criteria.rs +++ b/core/node/state_keeper/src/seal_criteria/criteria/geometry_seal_criteria.rs @@ -20,6 +20,7 @@ impl SealCriterion 
for CircuitsCriterion { config: &StateKeeperConfig, _block_open_timestamp_ms: u128, _tx_count: usize, + _l1_tx_count: usize, block_data: &SealData, tx_data: &SealData, protocol_version: ProtocolVersionId, @@ -94,6 +95,7 @@ mod tests { &config, Default::default(), 0, + 0, &SealData { execution_metrics: block_execution_metrics, ..SealData::default() @@ -114,6 +116,7 @@ mod tests { &config, Default::default(), 0, + 0, &SealData { execution_metrics: block_execution_metrics, ..SealData::default() @@ -134,6 +137,7 @@ mod tests { &config, Default::default(), 0, + 0, &SealData { execution_metrics: block_execution_metrics, ..SealData::default() @@ -154,6 +158,7 @@ mod tests { &config, Default::default(), 0, + 0, &SealData::default(), &SealData { execution_metrics: tx_execution_metrics, diff --git a/core/node/state_keeper/src/seal_criteria/criteria/l1_l2_txs.rs b/core/node/state_keeper/src/seal_criteria/criteria/l1_l2_txs.rs new file mode 100644 index 000000000000..fa679a19bab6 --- /dev/null +++ b/core/node/state_keeper/src/seal_criteria/criteria/l1_l2_txs.rs @@ -0,0 +1,104 @@ +use zksync_types::{ + aggregated_operations::{L1_BATCH_EXECUTE_BASE_COST, L1_OPERATION_EXECUTE_COST}, + ProtocolVersionId, +}; + +use crate::seal_criteria::{SealCriterion, SealData, SealResolution, StateKeeperConfig}; + +#[derive(Debug)] +pub(crate) struct L1L2TxsCriterion; + +impl SealCriterion for L1L2TxsCriterion { + fn should_seal( + &self, + config: &StateKeeperConfig, + _block_open_timestamp_ms: u128, + _tx_count: usize, + l1_tx_count: usize, + _block_data: &SealData, + _tx_data: &SealData, + _protocol_version_id: ProtocolVersionId, + ) -> SealResolution { + // With current gas consumption it's possible to execute 600 L1->L2 txs with 7500000 L1 gas. 
+ const L1_L2_TX_COUNT_LIMIT: usize = 600; + + let block_l1_gas_bound = + (config.max_single_tx_gas as f64 * config.close_block_at_gas_percentage).round() as u32; + let l1_gas = L1_BATCH_EXECUTE_BASE_COST + (l1_tx_count as u32) * L1_OPERATION_EXECUTE_COST; + + // We check not only gas against `block_l1_gas_bound` but also count against `L1_L2_TX_COUNT_LIMIT`. + // It's required in case `max_single_tx_gas` is set to some high value for gateway, + // then chain migrates to L1 and there is some batch with large number of L1->L2 txs that is not yet executed. + // This check guarantees that it will be possible to execute such batch with reasonable gas limit on L1. + if l1_gas >= block_l1_gas_bound || l1_tx_count >= L1_L2_TX_COUNT_LIMIT { + SealResolution::IncludeAndSeal + } else { + SealResolution::NoSeal + } + } + + fn prom_criterion_name(&self) -> &'static str { + "gas" + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_l1_l2_txs_seal_criterion() { + let max_single_tx_gas = 15_000_000; + let close_block_at_gas_percentage = 0.95; + + let gas_bound = (max_single_tx_gas as f64 * close_block_at_gas_percentage).round() as u32; + let l1_tx_count_bound = + (gas_bound - L1_BATCH_EXECUTE_BASE_COST - 1) / L1_OPERATION_EXECUTE_COST; + let l1_tx_count_bound = l1_tx_count_bound.min(599); + + // Create an empty config and only setup fields relevant for the test. + let config = StateKeeperConfig { + max_single_tx_gas, + close_block_at_gas_percentage, + ..Default::default() + }; + + let criterion = L1L2TxsCriterion; + + // Empty block should fit into gas criterion. + let empty_block_resolution = criterion.should_seal( + &config, + 0, + 0, + 0, + &SealData::default(), + &SealData::default(), + ProtocolVersionId::latest(), + ); + assert_eq!(empty_block_resolution, SealResolution::NoSeal); + + // `l1_tx_count_bound` should return `NoSeal`. 
+ let block_resolution = criterion.should_seal( + &config, + 0, + 0, + l1_tx_count_bound as usize, + &SealData::default(), + &SealData::default(), + ProtocolVersionId::latest(), + ); + assert_eq!(block_resolution, SealResolution::NoSeal); + + // `l1_tx_count_bound + 1` should return `IncludeAndSeal`. + let block_resolution = criterion.should_seal( + &config, + 0, + 0, + l1_tx_count_bound as usize + 1, + &SealData::default(), + &SealData::default(), + ProtocolVersionId::latest(), + ); + assert_eq!(block_resolution, SealResolution::IncludeAndSeal); + } +} diff --git a/core/node/state_keeper/src/seal_criteria/criteria/l2_l1_logs.rs b/core/node/state_keeper/src/seal_criteria/criteria/l2_l1_logs.rs new file mode 100644 index 000000000000..e33b94cbda2a --- /dev/null +++ b/core/node/state_keeper/src/seal_criteria/criteria/l2_l1_logs.rs @@ -0,0 +1,120 @@ +use zksync_types::{l2_to_l1_log::l2_to_l1_logs_tree_size, ProtocolVersionId}; + +use crate::seal_criteria::{ + SealCriterion, SealData, SealResolution, StateKeeperConfig, UnexecutableReason, +}; + +#[derive(Debug)] +pub(crate) struct L2L1LogsCriterion; + +impl SealCriterion for L2L1LogsCriterion { + fn should_seal( + &self, + config: &StateKeeperConfig, + _block_open_timestamp_ms: u128, + _tx_count: usize, + _l1_tx_count: usize, + block_data: &SealData, + tx_data: &SealData, + protocol_version_id: ProtocolVersionId, + ) -> SealResolution { + let max_allowed_logs = l2_to_l1_logs_tree_size(protocol_version_id); + + let reject_bound = + (max_allowed_logs as f64 * config.reject_tx_at_geometry_percentage).round() as usize; + let include_and_seal_bound = + (max_allowed_logs as f64 * config.close_block_at_geometry_percentage).round() as usize; + + // Only user logs are included in the tree, so we consider only their number. 
+ if tx_data.execution_metrics.user_l2_to_l1_logs >= reject_bound { + UnexecutableReason::TooMuchUserL2L1Logs.into() + } else if block_data.execution_metrics.user_l2_to_l1_logs > max_allowed_logs { + SealResolution::ExcludeAndSeal + } else if block_data.execution_metrics.user_l2_to_l1_logs >= include_and_seal_bound { + SealResolution::IncludeAndSeal + } else { + SealResolution::NoSeal + } + } + + fn prom_criterion_name(&self) -> &'static str { + "gas" + } +} + +#[cfg(test)] +mod tests { + use test_casing::test_casing; + use zksync_multivm::interface::VmExecutionMetrics; + + use super::*; + + fn query_criterion( + config: &StateKeeperConfig, + block_data_logs: usize, + tx_data_logs: usize, + protocol_version_id: ProtocolVersionId, + ) -> SealResolution { + L2L1LogsCriterion.should_seal( + config, + 0, + 0, + 0, + &SealData { + execution_metrics: VmExecutionMetrics { + user_l2_to_l1_logs: block_data_logs, + ..VmExecutionMetrics::default() + }, + ..SealData::default() + }, + &SealData { + execution_metrics: VmExecutionMetrics { + user_l2_to_l1_logs: tx_data_logs, + ..VmExecutionMetrics::default() + }, + ..SealData::default() + }, + protocol_version_id, + ) + } + + #[test_casing(2, [ProtocolVersionId::Version25, ProtocolVersionId::Version27])] + fn test_l2_l1_logs_seal_criterion(protocol_version: ProtocolVersionId) { + let max_allowed_logs = l2_to_l1_logs_tree_size(protocol_version); + let config = StateKeeperConfig { + close_block_at_geometry_percentage: 0.95, + reject_tx_at_geometry_percentage: 0.9, + ..Default::default() + }; + + let reject_bound = + (max_allowed_logs as f64 * config.reject_tx_at_geometry_percentage).round() as usize; + let include_and_seal_bound = + (max_allowed_logs as f64 * config.close_block_at_geometry_percentage).round() as usize; + + // not enough logs to seal + let resolution = query_criterion( + &config, + reject_bound - 1, + reject_bound - 1, + protocol_version, + ); + assert_eq!(resolution, SealResolution::NoSeal); + + // reject tx with 
huge number of logs + let resolution = query_criterion(&config, reject_bound, reject_bound, protocol_version); + assert_eq!(resolution, UnexecutableReason::TooMuchUserL2L1Logs.into()); + + // enough logs to include and seal + let resolution = query_criterion(&config, include_and_seal_bound, 1, protocol_version); + assert_eq!(resolution, SealResolution::IncludeAndSeal); + + // not enough logs to exclude and seal + let resolution = query_criterion(&config, max_allowed_logs, 1, protocol_version); + assert_eq!(resolution, SealResolution::IncludeAndSeal); + + // enough logs to exclude and seal + let resolution = query_criterion(&config, max_allowed_logs + 1, 1, protocol_version); + assert_eq!(resolution, SealResolution::ExcludeAndSeal); + } +} diff --git a/core/node/state_keeper/src/seal_criteria/criteria/mod.rs b/core/node/state_keeper/src/seal_criteria/criteria/mod.rs index 6732a8cd2717..d9522a39fad7 100644 --- a/core/node/state_keeper/src/seal_criteria/criteria/mod.rs +++ b/core/node/state_keeper/src/seal_criteria/criteria/mod.rs @@ -1,12 +1,14 @@ -mod gas; mod gas_for_batch_tip; mod geometry_seal_criteria; +mod l1_l2_txs; +mod l2_l1_logs; mod pubdata_bytes; mod slots; mod tx_encoding_size; pub(crate) use self::{ - gas::GasCriterion, gas_for_batch_tip::GasForBatchTipCriterion, - geometry_seal_criteria::CircuitsCriterion, pubdata_bytes::PubDataBytesCriterion, - slots::SlotsCriterion, tx_encoding_size::TxEncodingSizeCriterion, + gas_for_batch_tip::GasForBatchTipCriterion, geometry_seal_criteria::CircuitsCriterion, + l1_l2_txs::L1L2TxsCriterion, l2_l1_logs::L2L1LogsCriterion, + pubdata_bytes::PubDataBytesCriterion, slots::SlotsCriterion, + tx_encoding_size::TxEncodingSizeCriterion, }; diff --git a/core/node/state_keeper/src/seal_criteria/criteria/pubdata_bytes.rs b/core/node/state_keeper/src/seal_criteria/criteria/pubdata_bytes.rs index 09fcf2f0fc1b..5cddfffd6899 100644 --- a/core/node/state_keeper/src/seal_criteria/criteria/pubdata_bytes.rs +++ 
b/core/node/state_keeper/src/seal_criteria/criteria/pubdata_bytes.rs @@ -20,6 +20,7 @@ impl SealCriterion for PubDataBytesCriterion { config: &StateKeeperConfig, _block_open_timestamp_ms: u128, _tx_count: usize, + _l1_tx_count: usize, block_data: &SealData, tx_data: &SealData, protocol_version: ProtocolVersionId, @@ -99,6 +100,7 @@ mod tests { &config, 0, 0, + 0, &SealData { execution_metrics: block_execution_metrics, ..SealData::default() @@ -120,6 +122,7 @@ mod tests { &config, 0, 0, + 0, &SealData { execution_metrics: block_execution_metrics, ..SealData::default() @@ -137,6 +140,7 @@ mod tests { &config, 0, 0, + 0, &SealData { execution_metrics: block_execution_metrics, ..SealData::default() diff --git a/core/node/state_keeper/src/seal_criteria/criteria/slots.rs b/core/node/state_keeper/src/seal_criteria/criteria/slots.rs index 81b3a0933801..ad07bf8f8a00 100644 --- a/core/node/state_keeper/src/seal_criteria/criteria/slots.rs +++ b/core/node/state_keeper/src/seal_criteria/criteria/slots.rs @@ -13,6 +13,7 @@ impl SealCriterion for SlotsCriterion { config: &StateKeeperConfig, _block_open_timestamp_ms: u128, tx_count: usize, + _l1_tx_count: usize, _block_data: &SealData, _tx_data: &SealData, protocol_version: ProtocolVersionId, @@ -54,6 +55,7 @@ mod tests { &config, Default::default(), config.transaction_slots - 1, + 0, &SealData::default(), &SealData::default(), ProtocolVersionId::latest(), @@ -64,6 +66,7 @@ mod tests { &config, Default::default(), config.transaction_slots, + 0, &SealData::default(), &SealData::default(), ProtocolVersionId::latest(), diff --git a/core/node/state_keeper/src/seal_criteria/criteria/tx_encoding_size.rs b/core/node/state_keeper/src/seal_criteria/criteria/tx_encoding_size.rs index 409673d6cac8..54076dc8ccc0 100644 --- a/core/node/state_keeper/src/seal_criteria/criteria/tx_encoding_size.rs +++ b/core/node/state_keeper/src/seal_criteria/criteria/tx_encoding_size.rs @@ -14,6 +14,7 @@ impl SealCriterion for TxEncodingSizeCriterion { config: 
&StateKeeperConfig, _block_open_timestamp_ms: u128, _tx_count: usize, + _l1_tx_count: usize, block_data: &SealData, tx_data: &SealData, protocol_version_id: ProtocolVersionId, @@ -65,6 +66,7 @@ mod tests { &config, 0, 0, + 0, &SealData::default(), &SealData::default(), ProtocolVersionId::latest(), @@ -75,6 +77,7 @@ mod tests { &config, 0, 0, + 0, &SealData::default(), &SealData { cumulative_size: bootloader_tx_encoding_space as usize + 1, @@ -91,6 +94,7 @@ mod tests { &config, 0, 0, + 0, &SealData { cumulative_size: bootloader_tx_encoding_space as usize + 1, ..SealData::default() @@ -107,6 +111,7 @@ mod tests { &config, 0, 0, + 0, &SealData { cumulative_size: bootloader_tx_encoding_space as usize, ..SealData::default() diff --git a/core/node/state_keeper/src/seal_criteria/mod.rs b/core/node/state_keeper/src/seal_criteria/mod.rs index 4c6f56a6f5b7..2ea039da57bc 100644 --- a/core/node/state_keeper/src/seal_criteria/mod.rs +++ b/core/node/state_keeper/src/seal_criteria/mod.rs @@ -17,16 +17,10 @@ use zksync_multivm::{ interface::{DeduplicatedWritesMetrics, Halt, TransactionExecutionMetrics, VmExecutionMetrics}, vm_latest::TransactionVmExt, }; -use zksync_types::{ - block::BlockGasCount, utils::display_timestamp, ProtocolVersionId, Transaction, -}; +use zksync_types::{utils::display_timestamp, ProtocolVersionId, Transaction}; pub use self::conditional_sealer::{ConditionalSealer, NoopSealer, SequencerSealer}; -use crate::{ - metrics::AGGREGATION_METRICS, - updates::UpdatesManager, - utils::{gas_count_from_tx_and_metrics, gas_count_from_writes, millis_since}, -}; +use crate::{metrics::AGGREGATION_METRICS, updates::UpdatesManager, utils::millis_since}; mod conditional_sealer; pub(super) mod criteria; @@ -68,6 +62,7 @@ pub enum UnexecutableReason { OutOfGasForBatchTip, BootloaderOutOfGas, NotEnoughGasProvided, + TooMuchUserL2L1Logs, } impl UnexecutableReason { @@ -82,6 +77,7 @@ impl UnexecutableReason { UnexecutableReason::OutOfGasForBatchTip => "OutOfGasForBatchTip", 
UnexecutableReason::BootloaderOutOfGas => "BootloaderOutOfGas", UnexecutableReason::NotEnoughGasProvided => "NotEnoughGasProvided", + UnexecutableReason::TooMuchUserL2L1Logs => "TooMuchUserL2L1Logs", } } } @@ -106,6 +102,7 @@ impl fmt::Display for UnexecutableReason { UnexecutableReason::OutOfGasForBatchTip => write!(f, "Out of gas for batch tip"), UnexecutableReason::BootloaderOutOfGas => write!(f, "Bootloader out of gas"), UnexecutableReason::NotEnoughGasProvided => write!(f, "Not enough gas provided"), + UnexecutableReason::TooMuchUserL2L1Logs => write!(f, "Too much user l2 l1 logs"), } } } @@ -159,7 +156,6 @@ impl SealResolution { #[derive(Debug, Default)] pub struct SealData { pub(super) execution_metrics: VmExecutionMetrics, - pub(super) gas_count: BlockGasCount, pub(super) cumulative_size: usize, pub(super) writes_metrics: DeduplicatedWritesMetrics, pub(super) gas_remaining: u32, @@ -171,15 +167,11 @@ impl SealData { pub fn for_transaction( transaction: &Transaction, tx_metrics: &TransactionExecutionMetrics, - protocol_version: ProtocolVersionId, ) -> Self { let execution_metrics = VmExecutionMetrics::from_tx_metrics(tx_metrics); let writes_metrics = DeduplicatedWritesMetrics::from_tx_metrics(tx_metrics); - let gas_count = gas_count_from_tx_and_metrics(transaction, &execution_metrics) - + gas_count_from_writes(&writes_metrics, protocol_version); Self { execution_metrics, - gas_count, cumulative_size: transaction.bootloader_encoding_size(), writes_metrics, gas_remaining: tx_metrics.gas_remaining, @@ -188,11 +180,13 @@ impl SealData { } pub(super) trait SealCriterion: fmt::Debug + Send + Sync + 'static { + #[allow(clippy::too_many_arguments)] fn should_seal( &self, config: &StateKeeperConfig, block_open_timestamp_ms: u128, tx_count: usize, + l1_tx_count: usize, block_data: &SealData, tx_data: &SealData, protocol_version: ProtocolVersionId, @@ -287,7 +281,6 @@ mod tests { tx, create_execution_result([]), vec![], - BlockGasCount::default(), 
VmExecutionMetrics::default(), vec![], ); diff --git a/core/node/state_keeper/src/testonly/test_batch_executor.rs b/core/node/state_keeper/src/testonly/test_batch_executor.rs index 5625add021bf..9a675c7e97e8 100644 --- a/core/node/state_keeper/src/testonly/test_batch_executor.rs +++ b/core/node/state_keeper/src/testonly/test_batch_executor.rs @@ -204,14 +204,13 @@ impl TestScenario { let (stop_sender, stop_receiver) = watch::channel(false); let (io, output_handler) = TestIO::new(stop_sender, self); let state_keeper = ZkSyncStateKeeper::new( - stop_receiver, Box::new(io), Box::new(batch_executor), output_handler, Arc::new(sealer), Arc::new(MockReadStorageFactory), ); - let sk_thread = tokio::spawn(state_keeper.run()); + let sk_thread = tokio::spawn(state_keeper.run(stop_receiver)); // We must assume that *theoretically* state keeper may ignore the stop signal from IO once scenario is // completed, so we spawn it in a separate thread to not get test stuck. diff --git a/core/node/state_keeper/src/tests/mod.rs b/core/node/state_keeper/src/tests/mod.rs index ca078354c896..b73741998a03 100644 --- a/core/node/state_keeper/src/tests/mod.rs +++ b/core/node/state_keeper/src/tests/mod.rs @@ -17,8 +17,7 @@ use zksync_multivm::{ }; use zksync_node_test_utils::create_l2_transaction; use zksync_types::{ - aggregated_operations::AggregatedActionType, - block::{BlockGasCount, L2BlockExecutionData, L2BlockHasher}, + block::{L2BlockExecutionData, L2BlockHasher}, fee_model::{BatchFeeInput, PubdataIndependentBatchFeeModelInput}, u256_to_h256, AccountTreeId, Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, StorageKey, StorageLog, StorageLogKind, StorageLogWithPreviousValue, @@ -28,20 +27,16 @@ use zksync_types::{ use crate::{ io::PendingBatchData, keeper::POLL_WAIT_DURATION, - seal_criteria::{ - criteria::{GasCriterion, SlotsCriterion}, - SequencerSealer, UnexecutableReason, - }, + seal_criteria::{criteria::SlotsCriterion, SequencerSealer, UnexecutableReason}, 
testonly::{ successful_exec, test_batch_executor::{ - random_tx, random_upgrade_tx, rejected_exec, successful_exec_with_log, - MockReadStorageFactory, TestBatchExecutorBuilder, TestIO, TestScenario, FEE_ACCOUNT, + random_tx, random_upgrade_tx, rejected_exec, MockReadStorageFactory, + TestBatchExecutorBuilder, TestIO, TestScenario, FEE_ACCOUNT, }, BASE_SYSTEM_CONTRACTS, }, updates::UpdatesManager, - utils::{gas_count_from_tx_and_metrics, l1_batch_base_cost}, ZkSyncStateKeeper, }; @@ -188,86 +183,6 @@ async fn sealed_by_number_of_txs() { .await; } -#[tokio::test] -async fn sealed_by_gas() { - let first_tx = random_tx(1); - let execution_result = successful_exec_with_log(); - let exec_metrics = execution_result - .tx_result - .get_execution_metrics(Some(&first_tx)); - assert!(exec_metrics.size() > 0); - let l1_gas_per_tx = gas_count_from_tx_and_metrics(&first_tx, &exec_metrics); - assert!(l1_gas_per_tx.commit > 0); - - let config = StateKeeperConfig { - max_single_tx_gas: 62_000 + l1_gas_per_tx.commit * 2, - reject_tx_at_gas_percentage: 1.0, - close_block_at_gas_percentage: 0.5, - ..StateKeeperConfig::default() - }; - let sealer = SequencerSealer::with_sealers(config, vec![Box::new(GasCriterion)]); - - TestScenario::new() - .seal_l2_block_when(|updates| { - updates.l2_block.executed_transactions.len() == 1 - }) - .next_tx("First tx", first_tx, execution_result.clone()) - .l2_block_sealed_with("L2 block with a single tx", move |updates| { - assert_eq!( - updates.l2_block.l1_gas_count, - l1_gas_per_tx, - "L1 gas used by a L2 block should consist of the gas used by its txs" - ); - }) - .next_tx("Second tx", random_tx(1), execution_result) - .l2_block_sealed("L2 block 2") - .batch_sealed_with("Batch sealed with both txs", move |updates| { - assert_eq!( - updates.l1_batch.l1_gas_count, - BlockGasCount { - commit: l1_batch_base_cost(AggregatedActionType::Commit) + l1_gas_per_tx.commit * 2, - prove: l1_batch_base_cost(AggregatedActionType::PublishProofOnchain), - execute: 
l1_batch_base_cost(AggregatedActionType::Execute), - }, - "L1 gas used by a batch should consist of gas used by its txs + basic block gas cost" - ); - }) - .run(sealer).await; -} - -#[tokio::test] -async fn sealed_by_gas_then_by_num_tx() { - let config = StateKeeperConfig { - max_single_tx_gas: 62_000, - reject_tx_at_gas_percentage: 1.0, - close_block_at_gas_percentage: 0.5, - transaction_slots: 3, - ..StateKeeperConfig::default() - }; - let sealer = SequencerSealer::with_sealers( - config, - vec![Box::new(GasCriterion), Box::new(SlotsCriterion)], - ); - - let execution_result = successful_exec_with_log(); - - // 1st tx is sealed by gas sealer; 2nd, 3rd, & 4th are sealed by slots sealer. - TestScenario::new() - .seal_l2_block_when(|updates| updates.l2_block.executed_transactions.len() == 1) - .next_tx("First tx", random_tx(1), execution_result) - .l2_block_sealed("L2 block 1") - .batch_sealed("Batch 1") - .next_tx("Second tx", random_tx(2), successful_exec()) - .l2_block_sealed("L2 block 2") - .next_tx("Third tx", random_tx(3), successful_exec()) - .l2_block_sealed("L2 block 3") - .next_tx("Fourth tx", random_tx(4), successful_exec()) - .l2_block_sealed("L2 block 4") - .batch_sealed("Batch 2") - .run(sealer) - .await; -} - #[tokio::test] async fn batch_sealed_before_l2_block_does() { let config = StateKeeperConfig { @@ -417,14 +332,13 @@ async fn load_upgrade_tx() { let sealer = SequencerSealer::default(); let scenario = TestScenario::new(); let batch_executor = TestBatchExecutorBuilder::new(&scenario); - let (stop_sender, stop_receiver) = watch::channel(false); + let (stop_sender, _stop_receiver) = watch::channel(false); let (mut io, output_handler) = TestIO::new(stop_sender, scenario); io.add_upgrade_tx(ProtocolVersionId::latest(), random_upgrade_tx(1)); io.add_upgrade_tx(ProtocolVersionId::next(), random_upgrade_tx(2)); let mut sk = ZkSyncStateKeeper::new( - stop_receiver, Box::new(io), Box::new(batch_executor), output_handler, diff --git 
a/core/node/state_keeper/src/types.rs b/core/node/state_keeper/src/types.rs index db18e32e0963..4861916fc227 100644 --- a/core/node/state_keeper/src/types.rs +++ b/core/node/state_keeper/src/types.rs @@ -5,15 +5,9 @@ use std::{ use zksync_dal::{Connection, Core, CoreDal}; use zksync_mempool::{L2TxFilter, MempoolInfo, MempoolStore}; -use zksync_multivm::interface::{VmExecutionMetrics, VmExecutionResultAndLogs}; -use zksync_types::{ - block::BlockGasCount, Address, Nonce, PriorityOpId, Transaction, TransactionTimeRangeConstraint, -}; +use zksync_types::{Address, Nonce, PriorityOpId, Transaction, TransactionTimeRangeConstraint}; -use super::{ - metrics::StateKeeperGauges, - utils::{gas_count_from_metrics, gas_count_from_tx_and_metrics}, -}; +use super::metrics::StateKeeperGauges; #[derive(Debug, Clone)] pub struct MempoolGuard(Arc>); @@ -101,27 +95,3 @@ impl MempoolGuard { StateKeeperGauges::register(Arc::downgrade(&self.0)); } } - -#[derive(Debug, Clone, Copy, PartialEq)] -pub struct ExecutionMetricsForCriteria { - pub l1_gas: BlockGasCount, - pub execution_metrics: VmExecutionMetrics, -} - -impl ExecutionMetricsForCriteria { - pub fn new( - tx: Option<&Transaction>, - execution_result: &VmExecutionResultAndLogs, - ) -> ExecutionMetricsForCriteria { - let execution_metrics = execution_result.get_execution_metrics(tx); - let l1_gas = match tx { - Some(tx) => gas_count_from_tx_and_metrics(tx, &execution_metrics), - None => gas_count_from_metrics(&execution_metrics), - }; - - ExecutionMetricsForCriteria { - l1_gas, - execution_metrics, - } - } -} diff --git a/core/node/state_keeper/src/updates/l1_batch_updates.rs b/core/node/state_keeper/src/updates/l1_batch_updates.rs index aa2e22cac483..f7a93b4870b9 100644 --- a/core/node/state_keeper/src/updates/l1_batch_updates.rs +++ b/core/node/state_keeper/src/updates/l1_batch_updates.rs @@ -1,10 +1,9 @@ use zksync_multivm::interface::{FinishedL1Batch, TransactionExecutionResult, VmExecutionMetrics}; use zksync_types::{ - 
block::BlockGasCount, priority_op_onchain_data::PriorityOpOnchainData, - ExecuteTransactionCommon, L1BatchNumber, + priority_op_onchain_data::PriorityOpOnchainData, ExecuteTransactionCommon, L1BatchNumber, }; -use crate::{updates::l2_block_updates::L2BlockUpdates, utils::new_block_gas_count}; +use crate::updates::l2_block_updates::L2BlockUpdates; #[derive(Debug)] pub struct L1BatchUpdates { @@ -12,9 +11,8 @@ pub struct L1BatchUpdates { pub executed_transactions: Vec, pub priority_ops_onchain_data: Vec, pub block_execution_metrics: VmExecutionMetrics, - // how much L1 gas will it take to submit this block? - pub l1_gas_count: BlockGasCount, pub txs_encoding_size: usize, + pub l1_tx_count: usize, pub finished: Option, } @@ -25,8 +23,8 @@ impl L1BatchUpdates { executed_transactions: Default::default(), priority_ops_onchain_data: Default::default(), block_execution_metrics: Default::default(), - l1_gas_count: new_block_gas_count(), txs_encoding_size: 0, + l1_tx_count: 0, finished: None, } } @@ -41,9 +39,9 @@ impl L1BatchUpdates { self.executed_transactions .extend(l2_block_updates.executed_transactions); - self.l1_gas_count += l2_block_updates.l1_gas_count; self.block_execution_metrics += l2_block_updates.block_execution_metrics; self.txs_encoding_size += l2_block_updates.txs_encoding_size; + self.l1_tx_count += l2_block_updates.l1_tx_count; } } @@ -53,10 +51,7 @@ mod tests { use zksync_types::{L2BlockNumber, ProtocolVersionId, H256}; use super::*; - use crate::{ - tests::{create_execution_result, create_transaction}, - utils::new_block_gas_count, - }; + use crate::tests::{create_execution_result, create_transaction}; #[test] fn apply_l2_block_with_empty_tx() { @@ -73,7 +68,6 @@ mod tests { l2_block_accumulator.extend_from_executed_transaction( tx, create_execution_result([]), - BlockGasCount::default(), VmExecutionMetrics::default(), vec![], vec![], @@ -83,12 +77,12 @@ mod tests { l1_batch_accumulator.extend_from_sealed_l2_block(l2_block_accumulator); 
assert_eq!(l1_batch_accumulator.executed_transactions.len(), 1); - assert_eq!(l1_batch_accumulator.l1_gas_count, new_block_gas_count()); assert_eq!(l1_batch_accumulator.priority_ops_onchain_data.len(), 0); assert_eq!( l1_batch_accumulator.block_execution_metrics.l2_to_l1_logs, 0 ); assert_eq!(l1_batch_accumulator.txs_encoding_size, expected_tx_size); + assert_eq!(l1_batch_accumulator.l1_tx_count, 0); } } diff --git a/core/node/state_keeper/src/updates/l2_block_updates.rs b/core/node/state_keeper/src/updates/l2_block_updates.rs index d258f8eeac0b..628f9e4a2910 100644 --- a/core/node/state_keeper/src/updates/l2_block_updates.rs +++ b/core/node/state_keeper/src/updates/l2_block_updates.rs @@ -8,7 +8,7 @@ use zksync_multivm::{ vm_latest::TransactionVmExt, }; use zksync_types::{ - block::{BlockGasCount, L2BlockHasher}, + block::L2BlockHasher, bytecode::BytecodeHash, l2_to_l1_log::{SystemL2ToL1Log, UserL2ToL1Log}, L2BlockNumber, ProtocolVersionId, StorageLogWithPreviousValue, Transaction, H256, @@ -24,11 +24,10 @@ pub struct L2BlockUpdates { pub user_l2_to_l1_logs: Vec, pub system_l2_to_l1_logs: Vec, pub new_factory_deps: HashMap>, - /// How much L1 gas will it take to submit this block? 
- pub l1_gas_count: BlockGasCount, pub block_execution_metrics: VmExecutionMetrics, pub txs_encoding_size: usize, pub payload_encoding_size: usize, + pub l1_tx_count: usize, pub timestamp: u64, pub number: L2BlockNumber, pub prev_block_hash: H256, @@ -51,10 +50,10 @@ impl L2BlockUpdates { user_l2_to_l1_logs: vec![], system_l2_to_l1_logs: vec![], new_factory_deps: HashMap::new(), - l1_gas_count: BlockGasCount::default(), block_execution_metrics: VmExecutionMetrics::default(), txs_encoding_size: 0, payload_encoding_size: 0, + l1_tx_count: 0, timestamp, number, prev_block_hash, @@ -66,7 +65,6 @@ impl L2BlockUpdates { pub(crate) fn extend_from_fictive_transaction( &mut self, result: VmExecutionResultAndLogs, - l1_gas_count: BlockGasCount, execution_metrics: VmExecutionMetrics, ) { self.events.extend(result.logs.events); @@ -76,7 +74,6 @@ impl L2BlockUpdates { self.system_l2_to_l1_logs .extend(result.logs.system_l2_to_l1_logs); - self.l1_gas_count += l1_gas_count; self.block_execution_metrics += execution_metrics; } @@ -85,7 +82,6 @@ impl L2BlockUpdates { &mut self, tx: Transaction, tx_execution_result: VmExecutionResultAndLogs, - tx_l1_gas_this_tx: BlockGasCount, execution_metrics: VmExecutionMetrics, compressed_bytecodes: Vec, call_traces: Vec, @@ -143,7 +139,6 @@ impl L2BlockUpdates { }); self.new_factory_deps.extend(known_bytecodes); - self.l1_gas_count += tx_l1_gas_this_tx; self.block_execution_metrics += execution_metrics; self.txs_encoding_size += tx.bootloader_encoding_size(); self.payload_encoding_size += @@ -155,6 +150,9 @@ impl L2BlockUpdates { .extend(tx_execution_result.logs.system_l2_to_l1_logs); self.storage_logs .extend(tx_execution_result.logs.storage_logs); + if tx.is_l1() { + self.l1_tx_count += 1; + } self.executed_transactions.push(TransactionExecutionResult { hash: tx.hash(), @@ -212,7 +210,6 @@ mod tests { accumulator.extend_from_executed_transaction( tx, create_execution_result([]), - BlockGasCount::default(), VmExecutionMetrics::default(), 
vec![], vec![], @@ -223,10 +220,10 @@ mod tests { assert_eq!(accumulator.storage_logs.len(), 0); assert_eq!(accumulator.user_l2_to_l1_logs.len(), 0); assert_eq!(accumulator.system_l2_to_l1_logs.len(), 0); - assert_eq!(accumulator.l1_gas_count, Default::default()); assert_eq!(accumulator.new_factory_deps.len(), 0); assert_eq!(accumulator.block_execution_metrics.l2_to_l1_logs, 0); assert_eq!(accumulator.txs_encoding_size, bootloader_encoding_size); assert_eq!(accumulator.payload_encoding_size, payload_encoding_size); + assert_eq!(accumulator.l1_tx_count, 0); } } diff --git a/core/node/state_keeper/src/updates/mod.rs b/core/node/state_keeper/src/updates/mod.rs index 752963580e37..06ac4bcd5de0 100644 --- a/core/node/state_keeper/src/updates/mod.rs +++ b/core/node/state_keeper/src/updates/mod.rs @@ -7,8 +7,8 @@ use zksync_multivm::{ utils::{get_batch_base_fee, StorageWritesDeduplicator}, }; use zksync_types::{ - block::BlockGasCount, commitment::PubdataParams, fee_model::BatchFeeInput, Address, - L1BatchNumber, L2BlockNumber, ProtocolVersionId, Transaction, + commitment::PubdataParams, fee_model::BatchFeeInput, Address, L1BatchNumber, L2BlockNumber, + ProtocolVersionId, Transaction, }; pub(crate) use self::{l1_batch_updates::L1BatchUpdates, l2_block_updates::L2BlockUpdates}; @@ -16,7 +16,6 @@ use super::{ io::{IoCursor, L2BlockParams}, metrics::{BATCH_TIP_METRICS, UPDATES_MANAGER_METRICS}, }; -use crate::types::ExecutionMetricsForCriteria; pub mod l1_batch_updates; pub mod l2_block_updates; @@ -117,7 +116,6 @@ impl UpdatesManager { tx: Transaction, tx_execution_result: VmExecutionResultAndLogs, compressed_bytecodes: Vec, - tx_l1_gas_this_tx: BlockGasCount, execution_metrics: VmExecutionMetrics, call_traces: Vec, ) { @@ -129,7 +127,6 @@ impl UpdatesManager { self.l2_block.extend_from_executed_transaction( tx, tx_execution_result, - tx_l1_gas_this_tx, execution_metrics, compressed_bytecodes, call_traces, @@ -145,7 +142,7 @@ impl UpdatesManager { ); let result = 
&finished_batch.block_tip_execution_result; - let batch_tip_metrics = ExecutionMetricsForCriteria::new(None, result); + let batch_tip_execution_metrics = result.get_execution_metrics(None); let before = self.storage_writes_deduplicator.metrics(); self.storage_writes_deduplicator @@ -153,11 +150,8 @@ impl UpdatesManager { let after = self.storage_writes_deduplicator.metrics(); BATCH_TIP_METRICS.observe_writes_metrics(&before, &after, self.protocol_version()); - self.l2_block.extend_from_fictive_transaction( - result.clone(), - batch_tip_metrics.l1_gas, - batch_tip_metrics.execution_metrics, - ); + self.l2_block + .extend_from_fictive_transaction(result.clone(), batch_tip_execution_metrics); self.l1_batch.finished = Some(finished_batch); latency.observe(); @@ -190,8 +184,8 @@ impl UpdatesManager { self.l1_batch.executed_transactions.len() + self.l2_block.executed_transactions.len() } - pub(crate) fn pending_l1_gas_count(&self) -> BlockGasCount { - self.l1_batch.l1_gas_count + self.l2_block.l1_gas_count + pub(crate) fn pending_l1_transactions_len(&self) -> usize { + self.l1_batch.l1_tx_count + self.l2_block.l1_tx_count } pub(crate) fn pending_execution_metrics(&self) -> VmExecutionMetrics { @@ -225,10 +219,7 @@ pub struct L2BlockSealCommand { #[cfg(test)] mod tests { use super::*; - use crate::{ - tests::{create_execution_result, create_transaction, create_updates_manager}, - utils::new_block_gas_count, - }; + use crate::tests::{create_execution_result, create_transaction, create_updates_manager}; #[test] fn apply_l2_block() { @@ -242,7 +233,6 @@ mod tests { tx, create_execution_result([]), vec![], - new_block_gas_count(), VmExecutionMetrics::default(), vec![], ); diff --git a/core/node/state_keeper/src/utils.rs b/core/node/state_keeper/src/utils.rs index 320dd49583ed..eca73b6e3800 100644 --- a/core/node/state_keeper/src/utils.rs +++ b/core/node/state_keeper/src/utils.rs @@ -1,92 +1,9 @@ use std::time::{SystemTime, UNIX_EPOCH}; -use 
zksync_multivm::interface::{DeduplicatedWritesMetrics, VmExecutionMetrics}; -use zksync_types::{ - aggregated_operations::AggregatedActionType, block::BlockGasCount, ExecuteTransactionCommon, - ProtocolVersionId, Transaction, -}; +use tokio::sync::watch; -// TODO(QIT-32): Remove constants(except `L1_OPERATION_EXECUTE_COST`) and logic that use them -const L1_BATCH_COMMIT_BASE_COST: u32 = 31_000; -const L1_BATCH_PROVE_BASE_COST: u32 = 7_000; -const L1_BATCH_EXECUTE_BASE_COST: u32 = 30_000; - -const EXECUTE_COMMIT_COST: u32 = 0; -const EXECUTE_EXECUTE_COST: u32 = 0; - -const L1_OPERATION_EXECUTE_COST: u32 = 12_500; - -const GAS_PER_BYTE: u32 = 18; - -pub(super) fn l1_batch_base_cost(op: AggregatedActionType) -> u32 { - match op { - AggregatedActionType::Commit => L1_BATCH_COMMIT_BASE_COST, - AggregatedActionType::PublishProofOnchain => L1_BATCH_PROVE_BASE_COST, - AggregatedActionType::Execute => L1_BATCH_EXECUTE_BASE_COST, - } -} - -fn base_tx_cost(tx: &Transaction, op: AggregatedActionType) -> u32 { - match op { - AggregatedActionType::Commit => EXECUTE_COMMIT_COST, - AggregatedActionType::PublishProofOnchain => 0, - AggregatedActionType::Execute => match tx.common_data { - ExecuteTransactionCommon::L1(_) => L1_OPERATION_EXECUTE_COST, - ExecuteTransactionCommon::L2(_) => EXECUTE_EXECUTE_COST, - ExecuteTransactionCommon::ProtocolUpgrade(_) => EXECUTE_EXECUTE_COST, - }, - } -} - -fn additional_pubdata_commit_cost(execution_metrics: &VmExecutionMetrics) -> u32 { - (execution_metrics.size() as u32) * GAS_PER_BYTE -} - -fn additional_writes_commit_cost( - writes_metrics: &DeduplicatedWritesMetrics, - protocol_version: ProtocolVersionId, -) -> u32 { - (writes_metrics.size(protocol_version) as u32) * GAS_PER_BYTE -} - -pub(super) fn new_block_gas_count() -> BlockGasCount { - BlockGasCount { - commit: l1_batch_base_cost(AggregatedActionType::Commit), - prove: l1_batch_base_cost(AggregatedActionType::PublishProofOnchain), - execute: 
l1_batch_base_cost(AggregatedActionType::Execute), - } -} - -pub(super) fn gas_count_from_tx_and_metrics( - tx: &Transaction, - execution_metrics: &VmExecutionMetrics, -) -> BlockGasCount { - let commit = base_tx_cost(tx, AggregatedActionType::Commit) - + additional_pubdata_commit_cost(execution_metrics); - BlockGasCount { - commit, - prove: base_tx_cost(tx, AggregatedActionType::PublishProofOnchain), - execute: base_tx_cost(tx, AggregatedActionType::Execute), - } -} - -pub(super) fn gas_count_from_metrics(execution_metrics: &VmExecutionMetrics) -> BlockGasCount { - BlockGasCount { - commit: additional_pubdata_commit_cost(execution_metrics), - prove: 0, - execute: 0, - } -} - -pub(super) fn gas_count_from_writes( - writes_metrics: &DeduplicatedWritesMetrics, - protocol_version: ProtocolVersionId, -) -> BlockGasCount { - BlockGasCount { - commit: additional_writes_commit_cost(writes_metrics, protocol_version), - prove: 0, - execute: 0, - } +pub(super) fn is_canceled(stop_receiver: &watch::Receiver) -> bool { + *stop_receiver.borrow() } // TODO (SMA-1206): use seconds instead of milliseconds. 
diff --git a/core/node/test_utils/src/lib.rs b/core/node/test_utils/src/lib.rs index 9a02c18cd235..ac900e72bb6b 100644 --- a/core/node/test_utils/src/lib.rs +++ b/core/node/test_utils/src/lib.rs @@ -382,16 +382,18 @@ pub async fn recover( storage .pruning_dal() - .soft_prune_batches_range(snapshot.l1_batch.number, snapshot.l2_block.number) + .insert_soft_pruning_log(snapshot.l1_batch.number, snapshot.l2_block.number) .await .unwrap(); - storage .pruning_dal() - .hard_prune_batches_range(snapshot.l1_batch.number, snapshot.l2_block.number) + .insert_hard_pruning_log( + snapshot.l1_batch.number, + snapshot.l2_block.number, + snapshot_recovery.l1_batch_root_hash, + ) .await .unwrap(); - storage.commit().await.unwrap(); snapshot_recovery } diff --git a/core/tests/vm-benchmark/benches/batch.rs b/core/tests/vm-benchmark/benches/batch.rs index 608f6be6d089..f4151c39a6f8 100644 --- a/core/tests/vm-benchmark/benches/batch.rs +++ b/core/tests/vm-benchmark/benches/batch.rs @@ -18,9 +18,9 @@ use criterion::{black_box, criterion_group, criterion_main, Criterion, Throughpu use rand::{rngs::StdRng, Rng, SeedableRng}; use vm_benchmark::{ criterion::{is_test_mode, BenchmarkGroup, BenchmarkId, CriterionExt, MeteredTime}, - get_deploy_tx_with_gas_limit, get_heavy_load_test_tx, get_load_test_deploy_tx, - get_load_test_tx, get_realistic_load_test_tx, get_transfer_tx, BenchmarkingVm, - BenchmarkingVmFactory, Bytecode, Fast, Legacy, LoadTestParams, + get_deploy_tx_with_gas_limit, get_erc20_deploy_tx, get_erc20_transfer_tx, + get_heavy_load_test_tx, get_load_test_deploy_tx, get_load_test_tx, get_realistic_load_test_tx, + get_transfer_tx, BenchmarkingVm, BenchmarkingVmFactory, Bytecode, Fast, Legacy, LoadTestParams, }; use zksync_types::Transaction; @@ -146,6 +146,12 @@ fn bench_fill_bootloader( run_vm::(&mut group, "load_test_heavy", &txs); drop(txs); + // ERC-20 token transfers + let txs = (1..=max_txs).map(get_erc20_transfer_tx); + let txs: Vec<_> = 
iter::once(get_erc20_deploy_tx()).chain(txs).collect(); + run_vm::(&mut group, "erc20_transfer", &txs); + drop(txs); + // Base token transfers let txs: Vec<_> = (0..max_txs).map(get_transfer_tx).collect(); run_vm::(&mut group, "transfer", &txs); diff --git a/core/tests/vm-benchmark/src/lib.rs b/core/tests/vm-benchmark/src/lib.rs index dbe2fdb808db..8f43f61b28b6 100644 --- a/core/tests/vm-benchmark/src/lib.rs +++ b/core/tests/vm-benchmark/src/lib.rs @@ -2,9 +2,9 @@ use zksync_types::Transaction; pub use crate::{ transaction::{ - get_deploy_tx, get_deploy_tx_with_gas_limit, get_heavy_load_test_tx, - get_load_test_deploy_tx, get_load_test_tx, get_realistic_load_test_tx, get_transfer_tx, - LoadTestParams, + get_deploy_tx, get_deploy_tx_with_gas_limit, get_erc20_deploy_tx, get_erc20_transfer_tx, + get_heavy_load_test_tx, get_load_test_deploy_tx, get_load_test_tx, + get_realistic_load_test_tx, get_transfer_tx, LoadTestParams, }, vm::{BenchmarkingVm, BenchmarkingVmFactory, CountInstructions, Fast, Legacy, VmLabel}, }; diff --git a/core/tests/vm-benchmark/src/transaction.rs b/core/tests/vm-benchmark/src/transaction.rs index 5c1824e6ffa2..e50f40a06ef1 100644 --- a/core/tests/vm-benchmark/src/transaction.rs +++ b/core/tests/vm-benchmark/src/transaction.rs @@ -56,6 +56,38 @@ pub fn get_transfer_tx(nonce: u32) -> Transaction { signed.into() } +pub fn get_erc20_transfer_tx(nonce: u32) -> Transaction { + let transfer_fn = TestContract::test_erc20().function("transfer"); + let calldata = transfer_fn + .encode_input(&[ + Token::Address(Address::from_low_u64_be(nonce.into())), // send tokens to unique addresses + Token::Uint(1.into()), + ]) + .unwrap(); + + let mut signed = L2Tx::new_signed( + Some(*LOAD_TEST_CONTRACT_ADDRESS), + calldata, + Nonce(nonce), + tx_fee(1_000_000), + 0.into(), // value + L2ChainId::from(270), + &PRIVATE_KEY, + vec![], // factory deps + Default::default(), // paymaster params + ) + .expect("should create a signed execute transaction"); + + 
signed.set_input(H256::random().as_bytes().to_vec(), H256::random()); + signed.into() +} + +pub fn get_erc20_deploy_tx() -> Transaction { + let calldata = [Token::Uint(U256::one() << 128)]; // initial token amount minted to the deployer + let execute = TestContract::test_erc20().deploy_payload(&calldata); + Account::new(PRIVATE_KEY.clone()).get_l2_tx_for_execute(execute, Some(tx_fee(500_000_000))) +} + pub fn get_load_test_deploy_tx() -> Transaction { let calldata = [Token::Uint(LOAD_TEST_MAX_READS.into())]; let execute = TestContract::load_test().deploy_payload(&calldata); diff --git a/core/tests/vm-benchmark/src/vm.rs b/core/tests/vm-benchmark/src/vm.rs index 4bd7d7eb1aa6..e69e7ca1e909 100644 --- a/core/tests/vm-benchmark/src/vm.rs +++ b/core/tests/vm-benchmark/src/vm.rs @@ -240,7 +240,9 @@ mod tests { use super::*; use crate::{ get_deploy_tx, get_heavy_load_test_tx, get_load_test_deploy_tx, get_load_test_tx, - get_realistic_load_test_tx, get_transfer_tx, LoadTestParams, BYTECODES, + get_realistic_load_test_tx, get_transfer_tx, + transaction::{get_erc20_deploy_tx, get_erc20_transfer_tx}, + LoadTestParams, BYTECODES, }; #[test] @@ -259,6 +261,18 @@ mod tests { assert_matches!(res.result, ExecutionResult::Success { .. }); } + #[test] + fn can_erc20_transfer() { + let mut vm = BenchmarkingVm::new(); + let res = vm.run_transaction(&get_erc20_deploy_tx()); + assert_matches!(res.result, ExecutionResult::Success { .. }); + + for nonce in 1..=5 { + let res = vm.run_transaction(&get_erc20_transfer_tx(nonce)); + assert_matches!(res.result, ExecutionResult::Success { .. 
}); + } + } + #[test] fn can_load_test() { let mut vm = BenchmarkingVm::new(); diff --git a/deny.toml b/deny.toml index 13ce6504107f..954017750ab2 100644 --- a/deny.toml +++ b/deny.toml @@ -9,10 +9,9 @@ feature-depth = 1 [advisories] ignore = [ "RUSTSEC-2024-0375", # atty dependency being unmaintained, dependency of clap and criterion, we would need to update to newer major of dependencies - "RUSTSEC-2020-0168", # mach dependency being unmaintained, dependency in consensus, we should consider moving to mach2 fork + "RUSTSEC-2020-0168", # mach dependency being unmaintained, dependency in api server, we should consider moving to mach2 fork "RUSTSEC-2024-0370", # `cs_derive` needs to be updated to not rely on `proc-macro-error` # all below caused by StructOpt which we still use and we should move to clap v4 instead - "RUSTSEC-2024-0375", "RUSTSEC-2021-0145", "RUSTSEC-2021-0139", "RUSTSEC-2024-0388", # `derivative` is unmaintained, crypto dependenicies (boojum, circuit_encodings and others) rely on it @@ -25,7 +24,6 @@ allow = [ "ISC", "Unlicense", "MPL-2.0", - "Unicode-DFS-2016", "CC0-1.0", "BSD-2-Clause", "BSD-3-Clause", @@ -34,6 +32,7 @@ allow = [ "Apache-2.0 WITH LLVM-exception", "0BSD", "BSL-1.0", + "Unicode-3.0" ] confidence-threshold = 0.8 diff --git a/docker/circuit-prover-gpu-gar/Dockerfile b/docker/circuit-prover-gpu-gar/Dockerfile index 3dfc6bdf9ada..e1bc0a7804d9 100644 --- a/docker/circuit-prover-gpu-gar/Dockerfile +++ b/docker/circuit-prover-gpu-gar/Dockerfile @@ -1,7 +1,7 @@ ARG PROVER_IMAGE FROM us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/circuit-prover-gpu:2.0-$PROVER_IMAGE as prover -FROM nvidia/cuda:12.2.0-runtime-ubuntu22.04 as app +FROM nvidia/cuda:12.4.0-runtime-ubuntu22.04 as app # HACK copying to root is the only way to make Docker layer caching work for these files for some reason COPY *.bin / diff --git a/docker/circuit-prover-gpu/Dockerfile b/docker/circuit-prover-gpu/Dockerfile index 8e193e205897..852f42d705ec 100644 --- 
a/docker/circuit-prover-gpu/Dockerfile +++ b/docker/circuit-prover-gpu/Dockerfile @@ -1,4 +1,4 @@ -FROM nvidia/cuda:12.2.0-devel-ubuntu22.04 AS builder +FROM nvidia/cuda:12.4.0-devel-ubuntu22.04 AS builder ARG DEBIAN_FRONTEND=noninteractive @@ -45,7 +45,10 @@ COPY . . RUN cd prover && cargo build --release --bin zksync_circuit_prover -FROM nvidia/cuda:12.2.0-runtime-ubuntu22.04 +# Output build in CUDA architectures for debugging purposes. +RUN cuobjdump /usr/src/zksync/prover/target/release/zksync_circuit_prover + +FROM nvidia/cuda:12.4.0-runtime-ubuntu22.04 RUN apt-get update && apt-get install -y curl libpq5 ca-certificates && rm -rf /var/lib/apt/lists/* diff --git a/docker/proof-fri-gpu-compressor/Dockerfile b/docker/proof-fri-gpu-compressor/Dockerfile index e744787c8259..6157bca0ac13 100644 --- a/docker/proof-fri-gpu-compressor/Dockerfile +++ b/docker/proof-fri-gpu-compressor/Dockerfile @@ -50,6 +50,9 @@ RUN cd prover && \ RUN cd prover && BELLMAN_CUDA_DIR=$PWD/bellman-cuda cargo build --features "gpu" --release --bin zksync_proof_fri_compressor +# Output build in CUDA architectures for debugging purposes. +RUN cuobjdump /usr/src/zksync/prover/target/release/zksync_proof_fri_compressor + FROM nvidia/cuda:12.2.0-runtime-ubuntu22.04 RUN apt-get update && apt-get install -y curl libpq5 ca-certificates && rm -rf /var/lib/apt/lists/* diff --git a/docker/prover-gpu-fri/Dockerfile b/docker/prover-gpu-fri/Dockerfile index 2a680a49c5de..db497fc5d500 100644 --- a/docker/prover-gpu-fri/Dockerfile +++ b/docker/prover-gpu-fri/Dockerfile @@ -45,6 +45,9 @@ COPY . . RUN cd prover && cargo build --release --features "gpu" --bin zksync_prover_fri +# Output build in CUDA architectures for debugging purposes. 
+RUN cuobjdump /usr/src/zksync/prover/target/release/zksync_prover_fri + FROM nvidia/cuda:12.2.0-runtime-ubuntu22.04 RUN apt-get update && apt-get install -y curl libpq5 ca-certificates && rm -rf /var/lib/apt/lists/* diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index c0dd8638c8d9..ca34a5190d3a 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -79,6 +79,10 @@ - [Fee Model](specs/zk_evm/fee_model.md) - [Precompiles](specs/zk_evm/precompiles.md) - [System Contracts](specs/zk_evm/system_contracts.md) +- [Interop](specs/interop/overview.md) + - [Interop Messages](specs/interop/interopmessages.md) + - [Bundles and Calls](specs/interop/bundlesandcalls.md) + - [Interop Transactions](specs/interop/interoptransactions.md) # Announcements diff --git a/docs/src/guides/advanced/16_decentralization.md b/docs/src/guides/advanced/16_decentralization.md index a5f889a813d0..ce7d0e219efb 100644 --- a/docs/src/guides/advanced/16_decentralization.md +++ b/docs/src/guides/advanced/16_decentralization.md @@ -8,17 +8,71 @@ and enabled as follows: Run the following to generate consensus secrets: ``` -docker run --entrypoint /usr/bin/zksync_external_node "matterlabs/external-node:2.0-v25.0.0" generate-secrets > consensus_secrets.yaml -chmod 600 consensus_secrets.yaml +docker run --entrypoint /usr/bin/zksync_external_node "matterlabs/external-node:2.0-v25.0.0" generate-secrets +``` + +That will output something like this (but with different keys obviously): + +``` +#validator:public:bls12_381:84fe19a96b6443ca7ce...98dec0870f6d8aa95c8164102f0d62e4c47e3566c4e5c32354d +validator_key: validator:secret:bls12_381:1de85683e6decbfcf6c12aa42a5c8bfa98d7ae796dee068ae73dc784a58f5213 +# attester:public:secp256k1:02e262af8c97536b9e479c6d60f213920e759faf4086d8352e98bc25d06b4142e3 +attester_key: attester:secret:secp256k1:1111eb31c2389613f3ceb4288eadda35780e98df4cabb2b7663882262f72e422 +# node:public:ed25519:acb7e350cf53e3b4c2042e2c8044734384cee51f58a0fa052fd7e0c9c3f4b20d +node_key: 
node:secret:ed25519:0effb1d7c335d23606f656ca1ba87566144d5af2984bd7486379d4f83a204ba2 +``` + +You then have two different paths depending if your main node is using file-based or env-based configuration. + +## Configuring consensus + +### File-based configuration + +If you are using the recommended file-based configuration then you'll need to add the following information to your +`general.yaml` config file (see [Ecosystem Configuration](../launch.md#ecosystem-configuration)): + +```yaml +consensus: + server_addr: '0.0.0.0:3054' + public_addr: + '???' + # Address under which the node is accessible to the other nodes. + # It can be a public domain, like `example.com:3054`, in case the main node is accessible from the internet, + # or it can be a kubernetes cluster domain, like `server-v2-core..svc.cluster.local:3054` in + # case the main node should be only accessible within the cluster. + debug_page_addr: '0.0.0.0:5000' + max_payload_size: 3200000 + gossip_dynamic_inbound_limit: 10 + genesis_spec: + chain_id: ??? # chain id + protocol_version: 1 # consensus protocol version + validators: + - key: validator:public:??? # validator public key of the main node (copy this PUBLIC key from the secrets you generated) + weight: 1 + leader: validator:public:??? # same as above - main node will be the only validator and the only leader. + seed_peers: + - key: 'node:public:ed25519:...' # node public key of the main node (copy this PUBLIC key from the secrets you generated) + addr: '???' # same as public_addr above ``` -## Preparing the consensus config +And the secrets you generated to your `secrets.yaml` config file: -Create `consensus_config.yaml` file with the following content (remember to replace the placeholders): +```yaml +consensus: + validator_key: validator:secret:??? + attester_key: attester:secret:??? + node_key: node:secret:??? 
+``` + +### Env-based configuration + +If you are using the env-based configuration you'll need to create a `consensus_config.yaml` file with the following +content: ```yaml server_addr: '0.0.0.0:3054' public_addr: + '???' # Address under which the node is accessible to the other nodes. # It can be a public domain, like `example.com:3054`, in case the main node is accessible from the internet, # or it can be a kubernetes cluster domain, like `server-v2-core..svc.cluster.local:3054` in @@ -27,52 +81,83 @@ debug_page_addr: '0.0.0.0:5000' max_payload_size: 3200000 gossip_dynamic_inbound_limit: 10 genesis_spec: - chain_id: # chain id + chain_id: ??? # chain id protocol_version: 1 # consensus protocol version validators: - - key: validator:public:??? # public key of the main node (copy this PUBLIC key from consensus_secrets.yaml) + - key: validator:public:??? # validator public key of the main node (copy this PUBLIC key from the secrets you generated) weight: 1 leader: validator:public:??? # same as above - main node will be the only validator and the only leader. + seed_peers: + - key: 'node:public:ed25519:...' # node public key of the main node (copy this PUBLIC key from the secrets you generated) + addr: '???' # same as public_addr above ``` -## Providing the configuration to the `zksync_server` +And a `consensus_secrets.yaml` file with the with the secrets you generated previously: -To enable consensus component for the main node you need to append -`--components=,consensus` to the `zksync_server` command line arguments. -In addition to that, you need to provide the configuration (from the files `consensus_config.yaml` and -`consensus_secrets.yaml` that we have just prepared) to the `zksync_server` binary. There are 2 ways (hopefully not for -long) to achieve that: +```yaml +validator_key: validator:secret:??? +attester_key: attester:secret:??? +node_key: node:secret:??? 
+``` + +Don't forget to set secure permissions to it: -- In file-based configuration system, the consensus config is embedded in the - [general config](https://github.com/matter-labs/zksync-era/blob/1edcabe0c6a02d5b6700c29c0d9f6220ec6fb03c/core/lib/config/src/configs/general.rs#L58), - and the consensus secrets are embedded in the - [secrets config](https://github.com/matter-labs/zksync-era/blob/main/core/bin/zksync_server/src/main.rs). Paste the - content of the generated `consensus_secrets.yaml` file to the `secrets` config, and prepared config to the `general` - config. +``` +chmod 600 consensus_secrets.yaml +``` -- In env-var-based configuration system, the consensus config and consensus secrets files are passed as standalone - files. The paths to these files need to be passed as env vars `CONSENSUS_CONFIG_PATH` and `CONSENSUS_SECRETS_PATH`. +Then you'll need to pass the paths to these files as env vars `CONSENSUS_CONFIG_PATH` and `CONSENSUS_SECRETS_PATH`. + +## Running the `zksync_server` + +Finally, to enable the consensus component for the main node you just need to append +`--components=,consensus` to the `zksync_server` command line arguments. ## Gitops repo config -If you are using the matterlabs gitops repo to configure the main node, it is even more complicated because the -`consensus_config.yaml` file is rendered from a helm chart. See the -[example](https://github.com/matter-labs/gitops-kubernetes/blob/main/apps/environments/mainnet2/server-v2/server-v2-core.yaml), -to see where you have to paste the content of the `consensus_config.yaml` file. 
+If you are using the matterlabs gitops repo to configure the main node, you'll need to add this information to your +kubernetes config for the core server, `server-v2-core.yaml` file (see +[example](https://github.com/matter-labs/gitops-kubernetes/blob/177dcd575c6ab446e70b9a9ced8024766095b516/apps/environments/era-stage-proofs/server-v2/server-v2-core.yaml#L23-L35)): + +```yaml +spec: + values: + args: + - --components=state_keeper,consensus + service: + main: + ports: + consensus: + enabled: true + port: 3054 +``` + +Then again you have two paths depending if the deployment is using file-based or env-based configuration. Although by +default you should be using file-based configuration. -You need to embed the `consensus_secrets.yaml` file into a kubernetes config: +### File-based configuration + +Just like before you'll add the consensus config information to the `general.yaml` config file (see +[example](https://github.com/matter-labs/gitops-kubernetes/blob/177dcd575c6ab446e70b9a9ced8024766095b516/apps/environments/era-stage-proofs/server-v2-config/general.yaml#L353-L368)). + +And the secrets you generated to your whatever secrets managing system you are using (see an example +[here](https://github.com/matter-labs/gitops-kubernetes/blob/177dcd575c6ab446e70b9a9ced8024766095b516/apps/clusters/era-stage-proofs/stage2/secrets/server-v2-secrets.yaml) +using SOPS). ```yaml -apiVersion: v1 -kind: Secret -metadata: - name: consensus-secrets -type: Opaque -stringData: - .consensus_secrets.yaml: +consensus: + validator_key: validator:secret:??? + attester_key: attester:secret:??? + node_key: node:secret:??? ``` -You need to add the following sections to your kubernetes config for the core server: +### Env-based configuration + +It is even more complicated because the `consensus_config.yaml` file is rendered from a helm chart. 
See the +[example](https://github.com/matter-labs/gitops-kubernetes/blob/177dcd575c6ab446e70b9a9ced8024766095b516/apps/environments/mainnet2/server-v2/server-v2-core.yaml#L37-L92), +to see where you have to paste the content of the `consensus_config.yaml` file. + +You also need to add the following sections to your `server-v2-core.yaml` file: ```yaml spec: @@ -83,14 +168,6 @@ spec: enabled: true type: secret mountPath: '/etc/consensus_secrets/' - args: - - --components=state_keeper,consensus - service: - main: - ports: - consensus: - enabled: true - port: 3054 configMap: consensus: enabled: true @@ -102,3 +179,18 @@ spec: - name: CONSENSUS_SECRETS_PATH value: /etc/consensus_secrets/.consensus_secrets.yaml ``` + +You need to embed the `consensus_secrets.yaml` file into a kubernetes config (see how to do it +[here](https://github.com/matter-labs/gitops-kubernetes/blob/177dcd575c6ab446e70b9a9ced8024766095b516/apps/environments/mainnet2/zksync-v2-secret/kustomization.yaml#L3-L4) +and +[here](https://github.com/matter-labs/gitops-kubernetes/blob/177dcd575c6ab446e70b9a9ced8024766095b516/apps/environments/mainnet2/zksync-v2-secret/consensus_secrets.yaml)): + +```yaml +apiVersion: v1 +kind: Secret +metadata: + name: consensus-secrets +type: Opaque +stringData: + .consensus_secrets.yaml: +``` diff --git a/docs/src/guides/build-docker.md b/docs/src/guides/build-docker.md index 5dd9cff022b9..6b0608275d8b 100644 --- a/docs/src/guides/build-docker.md +++ b/docs/src/guides/build-docker.md @@ -10,7 +10,8 @@ Install prerequisites: see ## Build docker files -You may build all images with [Makefile](../../docker/Makefile) located in [docker](../../docker) directory in this repository +You may build all images with [Makefile](../../docker/Makefile) located in [docker](../../docker) directory in this +repository > All commands should be run from the root directory of the repository diff --git a/docs/src/guides/external-node/10_decentralization.md 
b/docs/src/guides/external-node/10_decentralization.md index f2b1782c2d72..951538e6ab86 100644 --- a/docs/src/guides/external-node/10_decentralization.md +++ b/docs/src/guides/external-node/10_decentralization.md @@ -9,14 +9,6 @@ On the gossipnet, the data integrity will be protected by the BFT (byzantine fau ## Enabling gossipnet on your node -> [!NOTE] -> -> Because the data transmitted over the gossipnet is signed by the main node (and eventually by the consensus quorum), -> the signatures need to be backfilled to the node's local storage the first time you switch from centralized (ZKsync -> API based) synchronization to the decentralized (gossipnet based) synchronization (this is a one-time thing). With the -> current implementation it may take a couple of hours and gets faster the more nodes you add to the -> `gossip_static_outbound` list (see below). We are working to remove this inconvenience. - > [!NOTE] > > The minimal supported server version for this is @@ -65,9 +57,6 @@ for more details): your node from getting DoS`ed by too large network messages. Use the value from the template. - `gossip_dynamic_inbound_limit` - maximal number of unauthenticated concurrent inbound connections that can be established to your node. This is a DDoS protection measure. -- `gossip_static_outbound` - list of trusted peers that your node should always try to connect to. The template contains - the nodes maintained by Matterlabs, but you can add more if you know any. Note that the list contains both the network - address AND the public key of the node - this prevents spoofing attacks. 
### Setting environment variables diff --git a/docs/src/specs/img/autoexecution.png b/docs/src/specs/img/autoexecution.png new file mode 100644 index 000000000000..bef85e282012 Binary files /dev/null and b/docs/src/specs/img/autoexecution.png differ diff --git a/docs/src/specs/img/callride.png b/docs/src/specs/img/callride.png new file mode 100644 index 000000000000..568aab9c7242 Binary files /dev/null and b/docs/src/specs/img/callride.png differ diff --git a/docs/src/specs/img/chainop.png b/docs/src/specs/img/chainop.png new file mode 100644 index 000000000000..bd18e98b0e1a Binary files /dev/null and b/docs/src/specs/img/chainop.png differ diff --git a/docs/src/specs/img/finaltx.png b/docs/src/specs/img/finaltx.png new file mode 100644 index 000000000000..c3bd287c0465 Binary files /dev/null and b/docs/src/specs/img/finaltx.png differ diff --git a/docs/src/specs/img/gateway.png b/docs/src/specs/img/gateway.png new file mode 100644 index 000000000000..4d18d38ba338 Binary files /dev/null and b/docs/src/specs/img/gateway.png differ diff --git a/docs/src/specs/img/globalroot.png b/docs/src/specs/img/globalroot.png new file mode 100644 index 000000000000..3152cbec4e81 Binary files /dev/null and b/docs/src/specs/img/globalroot.png differ diff --git a/docs/src/specs/img/interopcall.png b/docs/src/specs/img/interopcall.png new file mode 100644 index 000000000000..b683122f1363 Binary files /dev/null and b/docs/src/specs/img/interopcall.png differ diff --git a/docs/src/specs/img/interopcallbundle.png b/docs/src/specs/img/interopcallbundle.png new file mode 100644 index 000000000000..fc82f02bc69c Binary files /dev/null and b/docs/src/specs/img/interopcallbundle.png differ diff --git a/docs/src/specs/img/interopmsg.png b/docs/src/specs/img/interopmsg.png new file mode 100644 index 000000000000..5469b1a4e3ff Binary files /dev/null and b/docs/src/specs/img/interopmsg.png differ diff --git a/docs/src/specs/img/interoptx.png b/docs/src/specs/img/interoptx.png new file mode 100644 
index 000000000000..3d9fe295fe4e Binary files /dev/null and b/docs/src/specs/img/interoptx.png differ diff --git a/docs/src/specs/img/ipointers.png b/docs/src/specs/img/ipointers.png new file mode 100644 index 000000000000..60c29e601237 Binary files /dev/null and b/docs/src/specs/img/ipointers.png differ diff --git a/docs/src/specs/img/levelsofinterop.png b/docs/src/specs/img/levelsofinterop.png new file mode 100644 index 000000000000..5ee15946935d Binary files /dev/null and b/docs/src/specs/img/levelsofinterop.png differ diff --git a/docs/src/specs/img/msgdotsender.png b/docs/src/specs/img/msgdotsender.png new file mode 100644 index 000000000000..b411690785db Binary files /dev/null and b/docs/src/specs/img/msgdotsender.png differ diff --git a/docs/src/specs/img/paymastertx.png b/docs/src/specs/img/paymastertx.png new file mode 100644 index 000000000000..b3f25936b8dd Binary files /dev/null and b/docs/src/specs/img/paymastertx.png differ diff --git a/docs/src/specs/img/proofmerklepath.png b/docs/src/specs/img/proofmerklepath.png new file mode 100644 index 000000000000..beaaa175e1d0 Binary files /dev/null and b/docs/src/specs/img/proofmerklepath.png differ diff --git a/docs/src/specs/img/retryexample.png b/docs/src/specs/img/retryexample.png new file mode 100644 index 000000000000..6c79ef637340 Binary files /dev/null and b/docs/src/specs/img/retryexample.png differ diff --git a/docs/src/specs/img/sendtol1.png b/docs/src/specs/img/sendtol1.png new file mode 100644 index 000000000000..b98e51879f34 Binary files /dev/null and b/docs/src/specs/img/sendtol1.png differ diff --git a/docs/src/specs/img/verifyinteropmsg.png b/docs/src/specs/img/verifyinteropmsg.png new file mode 100644 index 000000000000..031fe811207b Binary files /dev/null and b/docs/src/specs/img/verifyinteropmsg.png differ diff --git a/docs/src/specs/interop/README.md b/docs/src/specs/interop/README.md new file mode 100644 index 000000000000..88e3d514b562 --- /dev/null +++ b/docs/src/specs/interop/README.md 
@@ -0,0 +1,6 @@ +# Interop + +- [Overview](./overview.md) +- [Interop Messages](./interopmessages.md) +- [Bundles and Calls](./bundlesandcalls.md) +- [Interop Transactions](./interoptransactions.md) diff --git a/docs/src/specs/interop/bundlesandcalls.md b/docs/src/specs/interop/bundlesandcalls.md new file mode 100644 index 000000000000..e49e10e8eed2 --- /dev/null +++ b/docs/src/specs/interop/bundlesandcalls.md @@ -0,0 +1,278 @@ +# Bundles and Calls + +## Basics Calls + +Interop Calls are the next level of interfaces, built on top of Interop Messages, enabling you to call contracts on +other chains. + +![interopcall.png](../img/interopcall.png) + +At this level, the system handles replay protection—once a call is successfully executed, it cannot be executed again +(eliminating the need for your own nullifiers or similar mechanisms). + +Additionally, these calls originate from aliased accounts, simplifying permission management (more details on this +below). + +Cancellations and retries are managed at the next level (Bundles), which are covered in the following section. + +### Interface + +On the sending side, the interface provides the option to send this "call" to the destination contract. + +```solidity +struct InteropCall { + address sourceSender, + address destinationAddress, + uint256 destinationChainId, + calldata data, + uint256 value +} +contract InteropCenter { + // On source chain. + // Sends a 'single' basic internal call to destination chain & address. + // Internally, it starts a bundle, adds this call and sends it over. + function sendCall(destinationChain, destinationAddress, calldata, msgValue) returns bytes32 bundleId; +} +``` + +In return, you receive a `bundleId` (we’ll explain bundles later, but for now, think of it as a unique identifier for +your call). + +On the destination chain, you can execute the call using the execute method: + +```solidity +contract InteropCenter { + // Executes a given bundle. 
+  // interopMessage is the message that contains your bundle as payload.
+  // If it fails, it can be called again.
+  function executeInteropBundle(interopMessage, proof);
+
+  // If the bundle didn't execute successfully yet, it can be marked as cancelled.
+  // See details below.
+  function cancelInteropBundle(interopMessage, proof);
+}
+
+```
+
+You can retrieve the `interopMessage` (which contains your entire payload) from the Gateway, or you can construct it
+yourself using L1 data.
+
+Under the hood, this process calls the `destinationAddress` with the specified calldata.
+
+This leads to an important question: **Who is the msg.sender for this call?**
+
+## `msg.sender` of the Destination Call
+
+The `msg.sender` on the destination chain will be the **AliasedAccount** — an address created as a hash of the original
+sender and the original source chain.
+
+(Normally, we’d like to use `sourceAccount@sourceChain`, but since Ethereum limits the size of addresses to 20 bytes, we
+compute the Keccak hash of the string above and use this as the address.)
+
+One way to think about it is this: You (as account `0x5bFF1...` on chain A) can send a call to a contract on a
+destination chain, and for that contract, it will appear as if the call came locally from the address
+`keccak(0x5bFF1 || A)`. This means you are effectively "controlling" such an account address on **every ZK Chain** by
+sending interop messages from the `0x5bFF1...` account on chain A.
+
+![msgdotsender.png](../img/msgdotsender.png)
+
+## Simple Example
+
+Imagine you have contracts on chains B, C, and D, and you’d like them to send "reports" to the Headquarters (HQ)
+contract on chain A every time a customer makes a purchase.
+
+```solidity
+// Deployed on chains B, C, D.
+contract Shop {
+  /// Called by the customers when they buy something.
+  function buy(uint256 itemPrice) {
+    // handle payment etc.
+    ...
+ // report to HQ + InteropCenter(INTEROP_ADDRESS).sendCall( + 324, // chain id of chain A, + 0xc425.., // HQ contract on chain A, + createCalldata("reportSales(uint256)", itemPrice), // calldata + 0, // no value + ); + } +} + +// Deployed on chain A +contract HQ { + // List of shops + mapping (address => bool) shops; + mapping (address => uint256) sales; + function addShop(address addressOnChain, uint256 chainId) onlyOwner { + // Adding aliased accounts. + shops[address(keccak(addressOnChain || chainId))] = true; + } + + function reportSales(uint256 itemPrice) { + // only allow calls from our shops (their aliased accounts). + require(shops[msg.sender]); + sales[msg.sender] += itemPrice; + } +} +``` + +#### Who is paying for gas? How does this Call get to the destination chain + +At this level, the **InteropCall** acts like a hitchhiker — it relies on someone (anyone) to pick it up, execute it, and +pay for the gas! + +![callride.png](../img/callride.png) + +While any transaction on the destination chain can simply call `InteropCenter.executeInteropBundle`, if you don’t want +to rely on hitchhiking, you can create one yourself. We’ll discuss this in the section about **Interop Transactions**. + +## Bundles + +Before we proceed to discuss **InteropTransactions**, there is one more layer in between: **InteropBundles**. + +![interopcallbundle.png](../img/interopcallbundle.png) + +**Bundles Offer:** + +- **Shared Fate**: All calls in the bundle either succeed or fail together. +- **Retries**: If a bundle fails, it can be retried (e.g., with more gas). +- **Cancellations**: If a bundle has not been successfully executed yet, it can be cancelled. + +If you look closely at the interface we used earlier, you’ll notice that we were already discussing the execution of +**Bundles** rather than single calls. So, let’s dive into what bundles are and the role they fulfill. 
+ +The primary purpose of a bundle is to ensure that a given list of calls is executed in a specific order and has a shared +fate (i.e., either all succeed or all fail). + +In this sense, you can think of a bundle as a **"multicall"**, but with two key differences: + +1. You cannot "unbundle" items—an individual `InteropCall` cannot be run independently; it is tightly tied to the + bundle. + +2. Each `InteropCall` within a bundle can use a different aliased account, enabling separate permissions for each call. + +```solidity +contract InteropCenter { + struct InteropBundle { + // Calls have to be done in this order. + InteropCall calls[]; + uint256 destinationChain; + + // If not set - anyone can execute it. + address executionAddresses[]; + // Who can 'cancel' this bundle. + address cancellationAddress; + } + + // Starts a new bundle. + // All the calls that will be added to this bundle (potentially by different contracts) + // will have a 'shared fate'. + // The whole bundle must be going to a single destination chain. + function startBundle(destinationChain) returns bundleId; + // Adds a new call to the opened bundle. + // Returns the messageId of this single message in the bundle. + function addToBundle(bundleId, destinationAddress, calldata, msgValue) return msgHash; + // Finishes a given bundle, and sends it. + function finishAndSendBundle(bundleId) return msgHash; +} +``` + +### Cross Chain Swap Example + +Imagine you want to perform a swap on chain B, exchanging USDC for PEPE, but all your assets are currently on chain A. + +This process would typically involve four steps: + +1. Transfer USDC from chain A to chain B. +2. Set allowance for the swap. +3. Execute the swap. +4. Transfer PEPE back to chain A. + +Each of these steps is a separate "call," but you need them to execute in exactly this order and, ideally, atomically. +If the swap fails, you wouldn’t want the allowance to remain set on the destination chain. 
+ +Below is an example of how this process could look (note that the code is pseudocode; we’ll explain the helper methods +required to make it work in a later section). + +```solidity +bundleId = InteropCenter(INTEROP_CENTER).startBundle(chainD); +// This will 'burn' the 1k USDC, create the special interopCall +// when this call is executed on chainD, it will mint 1k USDC there. +// BUT - this interopCall is tied to this bundle id. +USDCBridge.transferWithBundle( + bundleId, + chainD, + aliasedAccount(this(account), block.chain_id), + 1000); + + +// This will create interopCall to set allowance. +InteropCenter.addToBundle(bundleId, + USDCOnDestinationChain, + createCalldata("approve", 1000, poolOnDestinationChain), + 0); +// This will create interopCall to do the swap. +InteropCenter.addToBundle(bundleId, + poolOnDestinationChain, + createCalldata("swap", "USDC_PEPE", 1000, ...), + 0) +// And this will be the interopcall to transfer all the assets back. +InteropCenter.addToBundle(bundleId, + pepeBridgeOnDestinationChain, + createCalldata("transferAll", block.chain_id, this(account)), + 0) + + +bundleHash = interopCenter.finishAndSendBundle(bundleId); +``` + +In the code above, we created a bundle that anyone can execute on the destination chain. This bundle will handle the +entire process: minting, approving, swapping, and transferring back. + +### Bundle Restrictions + +When starting a bundle, if you specify the `executionAddress`, only that account will be able to execute the bundle on +the destination chain. If no `executionAddress` is specified, anyone can trigger the execution. + +## Retries and Cancellations + +If bundle execution fails — whether due to a contract error or running out of gas—none of its calls will be applied. The +bundle can be re-run on the **destination chain** without requiring any updates or notifications to the source chain. +More details about retries and gas will be covered in the next level, **Interop Transactions**. 
+
+This process can be likened to a "hitchhiker" (or in the case of a bundle, a group of hitchhikers) — if the car they’re
+traveling in doesn’t reach the destination, they simply find another ride rather than returning home.
+
+However, there are cases where the bundle should be cancelled. Cancellation can be performed by the
+`cancellationAddress` specified in the bundle itself.
+
+#### For our cross chain swap example
+
+1. Call `cancelInteropBundle(interopMessage, proof)` on the destination chain.
+   - A helper method for this will be introduced in the later section.
+2. When cancellation occurs, the destination chain will generate an `InteropMessage` containing cancellation
+   information.
+3. Using the proof from this method, the user can call the USDC bridge to recover their assets:
+
+```solidity
+USDCBridge.recoverFailedTransfer(bundleId, cancellationMessage, proof);
+```
+
+### Some details on our approach
+
+#### Destination Contract
+
+- On ElasticChain, the destination contract does not need to know it is being called via an interop call. Requests
+  arrive from `aliased accounts`.
+
+#### Batching
+
+- ElasticChain supports bundling of messages, ensuring shared fate and strict order.
+
+#### Execution Permissions
+
+- ElasticChain allows restricting who can execute the call or bundle on the destination chain.
+
+#### Cancellations
+
+- ElasticChain supports restricting who can cancel. Cancellation can happen at any time.
diff --git a/docs/src/specs/interop/interopmessages.md b/docs/src/specs/interop/interopmessages.md
new file mode 100644
index 000000000000..f32724e0013d
--- /dev/null
+++ b/docs/src/specs/interop/interopmessages.md
@@ -0,0 +1,176 @@
+# Interop Messages
+
+In this section, we’re going to cover the lowest level of the interop stack: **Interop Messages** — the interface that
+forms the foundation for everything else.
+
+We’ll explore the details of the interface, its use cases, and how it compares to similar interfaces from
+Superchain/Optimism.
+
+This is an advanced document. While most users and app developers typically interact with higher levels of interop, it’s
+still valuable to understand how the internals work.
+
+## Basics
+
+![interopmsg.png](../img/interopmsg.png)
+
+Interop Messages are the lowest level of our stack.
+
+An **InteropMessage** contains data and offers two methods:
+
+- Send a message
+- Verify that a given message was sent on some chain
+
+Notice that the message itself doesn’t have any ‘destination chain’ or address—it is simply a payload that a user (or
+contract) is creating. Think of it as a broadcast.
+
+The `InteropCenter` is a contract that is pre-deployed on all chains at a fixed address `0x00..1234`.
+
+```solidity
+contract InteropCenter {
+  // Sends interop message. Can be called by anyone.
+  // Returns the unique interopHash.
+  function sendInteropMessage(bytes data) returns interopHash;
+
+  // Interop message - uniquely identified by the hash of the payload.
+  struct InteropMessage {
+    bytes data;
+    address sender; // filled by InteropCenter
+    uint256 sourceChainId; // filled by InteropCenter
+    uint256 messageNum; // a 'nonce' to guarantee different hashes.
+  }
+
+  // Verifies if such interop message was ever produced.
+  function verifyInteropMessage(bytes32 interopHash, Proof merkleProof) return bool;
+}
+```
+
+When you call `sendInteropMessage`, the `InteropCenter` adds additional fields, such as your sender address, source
+chain ID, and messageNum (a nonce ensuring the hash of this structure is globally unique). It then returns the
+`interopHash`.
+
+This `interopHash` serves as a globally unique identifier that can be used on any chain in the network to call
+`verifyInteropMessage`.
+
+![A message created on one chain can be verified on any other chain.](../img/verifyinteropmsg.png)
+
+#### How do I get the proof
+
+You’ll notice that **verifyInteropMessage** has a second argument — a proof that you need to provide. This proof is a
+Merkle tree proof (more details below). You can obtain it by querying the
+[chain](https://docs.zksync.io/build/api-reference/zks-rpc#zks_getl2tol1msgproof), or generate it off-chain by
+looking at the chain's state on L1.
+
+#### How does the interop message differ from other layers (InteropTransactions, InteropCalls)
+
+As the most basic layer, an interop message doesn’t include any advanced features — it lacks support for selecting
+destination chains, nullifiers/replay, cancellation, and more.
+
+If you need these capabilities, consider integrating with a higher layer of interop, such as Call or Bundle, which
+provide these additional functionalities.
+
+## Simple Use Case
+
+Before we dive into the details of how the system works, let’s look at a simple use case for a DApp that decides to use
+InteropMessage.
+
+For this example, imagine a basic cross-chain contract where the `signup()` method can be called on chains B, C, and D
+only if someone has first called `signup_open()` on chain A.
+
+```solidity
+// Contract deployed on chain A.
+contract SignupManager {
+  public bytes32 signup_open_msg_hash;
+  function signup_open() onlyOwner {
+    // We are open for business
+    signup_open_msg_hash = InteropCenter(INTEROP_CENTER_ADDRESS).sendInteropMessage("We are open");
+  }
+}
+
+// Contract deployed on all other chains.
+contract SignupContract {
+  public bool signupIsOpen;
+  // Anyone can call it.
+ function openSignup(InteropMessage message, InteropProof proof) { + InteropCenter(INTEROP_CENTER_ADDRESS).verifyInteropMessage(keccak(message), proof); + require(message.sourceChainId == CHAIN_A_ID); + require(message.sender == SIGNUP_MANAGER_ON_CHAIN_A); + require(message.data == "We are open"); + signupIsOpen = true; + } + + function signup() { + require(signupIsOpen); + signedUpUser[msg.sender] = true; + } +} +``` + +In the example above, the `signupManager` on chain A calls the `signup_open` method. After that, any user on other +chains can retrieve the `signup_open_msg_hash`, obtain the necessary proof from the Gateway (or another source), and +call the `openSignup` function on any destination chain. + +## Deeper Technical Dive + +Let’s break down what happens inside the InteropCenter when a new interop message is created: + +```solidity +function sendInteropMessage(bytes data) { + messageNum += 1; + msg = InteropMessage({data, msg.sender, block.chain_id, messageNum}); + // Does L2->L1 Messaging. + sendToL1(abi.encode(msg)); + return keccak(msg); +} +``` + +As you can see, it populates the necessary data and then calls the `sendToL1` method. + +The `sendToL1` method is part of a system contract that gathers all messages during a batch, constructs a Merkle tree +from them at the end of the batch, and sends this tree to the SettlementLayer (Gateway) when the batch is committed. + +![sendtol1.png](../img/sendtol1.png) + +The settlement layer receives the messages and once the proof for the batch is submitted (or more accurately, during the +"execute" step), it will add the root of the Merkle tree to its `messageRoot` (sometimes called `globalRoot`). + +![globalroot.png](../img/globalroot.png) + +The `messageRoot` is the root of the Merkle tree that includes all messages from all chains. Each chain regularly reads +the messageRoot value from the Gateway to stay synchronized. 
+ +![gateway.png](../img/gateway.png) + +If a user wants to call `verifyInteropMessage` on a chain, they first need to query the Gateway for the Merkle path from +the batch they are interested in up to the `messageRoot`. Once they have this path, they can provide it as an argument +when calling a method on the destination chain (such as the `openSignup` method in our example). + +![proofmerklepath.png](../img/proofmerklepath.png) + +#### What if Chain doesn’t provide the proof + +If the chain doesn’t respond, users can manually re-create the Merkle proof using data available on L1. Every +interopMessage is also sent to L1. + +#### Message roots change frequently + +Yes, message roots update continuously as new chains prove their blocks. However, chains retain historical message roots +for a reasonable period (around 24 hours) to ensure that recently generated Merkle paths remain valid. + +#### Is this secure? Could a chain operator, like Chain D, use a different message root + +Yes, it’s secure. If a malicious operator on Chain D attempted to use a different message root, they wouldn’t be able to +submit the proof for their new batch to the Gateway. This is because the proof’s public inputs must include the valid +message root. + +### Other Features + +#### Dependency Set + +- In ElasticChain, this is implicitly handled by the Gateway. Any chain that is part of the message root can exchange + messages with any other chain, effectively forming an undirected graph. + +#### Timestamps and Expiration + +- In ElasticChain, older messages become increasingly difficult to validate as it becomes harder to gather the data + required to construct a Merkle proof. Expiration is also being considered for this reason, but the specifics are yet + to be determined. 
diff --git a/docs/src/specs/interop/interoptransactions.md b/docs/src/specs/interop/interoptransactions.md new file mode 100644 index 000000000000..feccc49ea91a --- /dev/null +++ b/docs/src/specs/interop/interoptransactions.md @@ -0,0 +1,196 @@ +# Interop Transactions + +## Basics + +The **InteropTransaction** sits at the top of our interop stack, acting as the “delivery” mechanism for **Interop +Bundles**. + +Think of it like a car that picks up our "hitchhiker" bundles and carries them to their destination. + +![interoptx.png](../img/interoptx.png) + +**Note:** Interop Transactions aren’t the only way to execute a bundle. Once an interop bundle is created on the source +chain, users can simply send a regular transaction on the destination chain to execute it. + +However, this approach can be inconvenient as it requires users to have funds on the destination chain to cover gas fees +and to configure the necessary network settings (like the RPC address). + +**InteropTransactions** simplify this process by handling everything from the source chain. They allow you to select +which **interopBundle** to execute, specify gas details (such as gas amount and gas price), and determine who will cover +the gas costs. This can be achieved using tokens on the source chain or through a paymaster. + +Once configured, the transaction will automatically execute, either by the chain operator, the gateway, or off-chain +tools. + +An **InteropTransaction** contains two pointers to bundles: + +- **feesBundle**: Holds interop calls to cover fees. +- **bundleHash**: Contains the main execution. + +![ipointers.png](../img/ipointers.png) + +## Interface + +The function `sendInteropTransaction` provides all the options. For simpler use cases, refer to the helper methods +defined later in the article. + +```solidity +contract InteropCenter { + /// Creates a transaction that will attempt to execute a given Bundle on the destination chain. 
+  /// Such transaction can be 'picked up' by the destination chain automatically.
+  /// This function covers all the cases - we expect most users to use the helper
+  /// functions defined later.
+  function sendInteropTransaction(
+    destinationChain,
+    bundleHash, // the main bundle that you want to execute on destination chain
+    gasLimit, // gasLimit & price for execution
+    gasPrice,
+    feesBundleHash, // this is the bundle that contains the calls to pay for gas
+    destinationPaymaster, // optionally - you can use a paymaster on destination chain
+    destinationPaymasterInput); // with specific params
+
+
+  struct InteropTransaction {
+    address sourceChainSender;
+    uint256 destinationChain;
+    uint256 gasLimit;
+    uint256 gasPrice;
+    uint256 value;
+    bytes32 bundleHash;
+    bytes32 feesBundleHash;
+    address destinationPaymaster;
+    bytes destinationPaymasterInput;
+  }
+}
+```
+
+After creating the **InteropBundle**, you can simply call `sendInteropTransaction` to create the complete transaction
+that will execute the bundle.
+
+## Retries
+
+If your transaction fails to execute the bundle (e.g., due to a low gas limit) or isn’t included at all (e.g., due to
+too low gasPrice), you can send another transaction to **attempt to execute the same bundle again**.
+
+Simply call `sendInteropTransaction` again with updated gas settings.
+
+### Example of Retrying
+
+Here’s a concrete example: Suppose you created a bundle to perform a swap that includes transferring 100 ETH, executing
+the swap, and transferring some tokens back.
+
+You attempted to send the interop transaction with a low gas limit (e.g., 100). Since you didn’t have any base tokens on
+the destination chain, you created a separate bundle to transfer a small fee (e.g., 0.0001) to cover the gas.
+
+You sent your first interop transaction to the destination chain, but it failed due to insufficient gas. However, your
+“fee bundle” was successfully executed, as it covered the gas cost for the failed attempt.
+ +Now, you have two options: either cancel the execution bundle (the one with 100 ETH) or retry. + +To retry, you decide to set a higher gas limit (e.g., 10,000) and create another fee transfer (e.g., 0.01) but use **the +same execution bundle** as before. + +This time, the transaction succeeds — the swap completes on the destination chain, and the resulting tokens are +successfully transferred back to the source chain. + +![retryexample.png](../img/retryexample.png) + +## Fees & Restrictions + +Using an **InteropBundle** for fee payments offers flexibility, allowing users to transfer a small amount to cover the +fees while keeping the main assets in the execution bundle itself. + +### Restrictions + +This flexibility comes with trade-offs, similar to the validation phases in **Account Abstraction** or **ERC4337**, +primarily designed to prevent DoS attacks. Key restrictions include: + +- **Lower gas limits** +- **Limited access to specific slots** + +Additionally, when the `INTEROP_CENTER` constructs an **InteropTransaction**, it enforces extra restrictions on +**feePaymentBundles**: + +- **Restricted Executors**: + Only your **AliasedAccount** on the receiving side can execute the `feePaymentBundle`. + +This restriction is crucial for security, preventing others from executing your **fee bundle**, which could cause your +transaction to fail and prevent the **execution bundle** from processing. + +### **Types of Fees** + +#### Using the Destination Chain’s Base Token + +The simplest scenario is when you (as the sender) already have the destination chain’s base token available on the +source chain. + +For example: + +- If you are sending a transaction from **Era** (base token: ETH) to **Sophon** (base token: SOPH) and already have SOPH + on ERA, you can use it for the fee. + +To make this easier, we’ll provide a helper function: + +```solidity +contract InteropCenter { + // Creates InteropTransaction to the destination chain with payment with base token. 
+ // Before calling, you have to 'approve' InteropCenter to the ERC20/Bridge that holds the destination chain's base tokens. + // or if the destination chain's tokens are the same as yours, just attach value to this call. + function sendInteropTxMinimal( + destinationChain, + bundleHash, // the main bundle that you want to execute on destination chain + gasLimit, // gasLimit & price for execution + gasPrice, + ); + } +``` + +#### Using paymaster on the destination chain + +If you don’t have the base token from the destination chain (e.g., SOPH in our example) on your source chain, you’ll +need to use a paymaster on the destination chain instead. + +In this case, you’ll send the token you do have (e.g., USDC) to the destination chain as part of the **feeBundleHash**. +Once there, you’ll use it to pay the paymaster on the destination chain to cover your gas fees. + +Your **InteropTransaction** would look like this: + +![paymastertx.png](../img/paymastertx.png) + +## **Automatic Execution** + +One of the main advantages of **InteropTransactions** is that they execute automatically. As the sender on the source +chain, you don’t need to worry about technical details like RPC addresses or obtaining proofs — it’s all handled for +you. + +After creating an **InteropTransaction**, it can be relayed to the destination chain by anyone. The transaction already +includes a signature (also known as an interop message proof), making it fully self-contained and ready to send without +requiring additional permissions. + +Typically, the destination chain’s operator will handle and include incoming **InteropTransactions**. However, if they +don’t, the **Gateway** or other participants can step in to prepare and send them. + +You can also use the available tools to create and send the destination transaction yourself. Since the transaction is +self-contained, it doesn’t require additional funds or signatures to execute. 
+ +![Usually destination chain operator will keep querying gateway to see if there are any messages for their chain.](../img/autoexecution.png) + +Once they see the message, they can request the proof from the **Gateway** and also fetch the **InteropBundles** +contained within the message (along with their respective proofs). + +![Operator getting necessary data from Gateway.](../img/chainop.png) + +As the final step, the operator can use the received data to create a regular transaction, which can then be sent to +their chain. + +![Creating the final transaction to send to the destination chain](../img/finaltx.png) + +The steps above don’t require any special permissions and can be executed by anyone. + +While the **Gateway** was used above for tasks like providing proofs, if the Gateway becomes malicious, all this +information can still be constructed off-chain using data available on L1. + +### How it Works Under the hood + +We’ll modify the default account to accept interop proofs as signatures, seamlessly integrating with the existing ZKSync +native **Account Abstraction** model. diff --git a/docs/src/specs/interop/overview.md b/docs/src/specs/interop/overview.md new file mode 100644 index 000000000000..4b6b7417083d --- /dev/null +++ b/docs/src/specs/interop/overview.md @@ -0,0 +1,166 @@ +# Intro Guide to Interop + +## What is Interop + +Interop is a way to communicate and transact between two ZK Stack chains. It allows you to: + +**1. Observe messages:** Track when an interop message (think of it as a special event) is created on the source chain. + +**2. Send assets:** Transfer ERC20 tokens and other assets between chains. + +**3. Execute calls:** Call a contract on a remote chain with specific calldata and value. + +With interop, you automatically get an account (a.k.a. aliasedAccount) on each chain, which you can control from the +source chain. + +**4. 
Execute bundles of calls:** Group multiple remote calls into a single bundle, ensuring all of them execute at once. + +**5. Execute transactions:** Create transactions on the source chain, which will automatically get executed on the +destination chain, with options to choose from various cross-chain Paymaster solutions to handle gas fees. + +## How to Use Interop + +Here’s a simple example of calling a contract on a destination chain: + +```solidity +cast send source-chain-rpc.com INTEROP_CENTER_ADDRESS sendInteropWithSingleCall( + 0x1fa72e78 // destination_chain_id, + 0xb4AB2FF34fa... // destination_contract, + 0x29723511000000... // destination_calldata, + 0, // value + 100_000, // gasLimit + 250_000_000, // gasPrice + ) +``` + +While this looks very similar to a 'regular' call, there are some nuances, especially around handling failures and +errors. + +Let’s explore these key details together. + +## Common Questions and Considerations + +#### 1. Who pays for gas + +When using this method, your account must hold `gasLimit * gasPrice` worth of destination chain tokens on the source +chain. + +For example, if you’re sending the request from Era and the destination chain is Sophon (with SOPH tokens), you’ll need +SOPH tokens available on Era. + +Additional payment options are available, which will be covered in later sections. + +#### 2. How does the destination contract know it’s from me + +The destination contract will see `msg.sender` as `keccak(source_account, source_chain)[:20]`. + +Ideally, we would use something like `source_account@source_chain` (similar to an email format), but since Ethereum +addresses are limited to 20 bytes, we use a Keccak hash to fit this constraint. + +#### 3. Who executes it on the destination chain + +The call is auto-executed on the destination chain. As a user, you don’t need to take any additional actions. + +#### 4. 
What if it runs out of gas or the gasPrice is set too low + +In either scenario, you can retry the transaction using the `retryInteropTransaction` method: + +```solidity + cast send source-chain.com INTEROP_CENTER_ADDRESS retryInteropTransaction( + 0x2654.. // previous interop transaction hash from above + 200_000, // new gasLimit + 300_000_000 // new gasPrice + ) +``` + +**Important** : Depending on your use case, it’s crucial to retry the transaction rather than creating a new one with +`sendInteropWithSingleCall`. + +For example, if your call involves transferring a large amount of assets, initiating a new `sendInteropWithSingleCall` +could result in freezing or burning those assets again. + +#### 5. What if my assets were burned during the transaction, but it failed on the destination chain? How do I get them back + +If your transaction fails on the destination chain, you can either: + +1. Retry the transaction with more gas or a higher gas limit (refer to the retry method above). + +2. Cancel the transaction using the following method: + +```solidity +cast send source-chain INTEROP_CENTER_ADDRESS cancelInteropTransaction( + 0x2654.., // previous interop transaction + 100_000, // gasLimit (cancellation also requires gas, but only to mark it as cancelled) + 300_000_000 // gasPrice +) +``` + +After cancellation, call the claimFailedDeposit method on the source chain contracts to recover the burned assets. Note +that the details for this step may vary depending on the contract specifics. + +## Complex Scenario + +#### 6. What if I want to transfer USDC to the Sophon chain, swap it for PEPE coin, and transfer the results back + +To accomplish this, you’ll need to: + +- Create multiple **InteropCalls** (e.g., transferring USDC, executing the swap). +- Combine these calls into a single **InteropBundle**. +- Execute the **InteropTransaction** on the destination chain. + +The step-by-step process and exact details will be covered in the next section. 
+ +## Technical Details + +### How does native bridging differ from a third party bridging + +Bridges generally fall into two categories: Native and Third-Party. + +#### 1. Native Bridges + +Native bridges enable asset transfers “up and down” (from L2 to L1 and vice versa), but interop (which is also a form of +native bridging) allows you to move them between different L2s. + +Instead of doing a "round trip" (L2 → L1 → another L2), interop lets you move assets directly between two L2s, saving +both time and cost. + +#### 2. Third-Party Bridging + +Third-party bridges enable transfers between two L2s, but they rely on their own liquidity. While you, as the user, +receive assets on the destination chain instantly, these assets come from the bridge’s liquidity pool. + +Bridge operators then rebalance using native bridging, which requires maintaining token reserves on both sides. Without +interop this adds costs for the bridge operators, often resulting in higher fees for users. + +The good news is that third-party bridges can use interop to improve their token transfers by utilizing the +**InteropMessage** layer. + +More details on this will follow below. + +### How Fast is It + +Interop speed is determined by its lowest level: **InteropMessage** propagation speed. This essentially depends on how +quickly the destination chain can confirm that the message created by the source chain is valid. + +- **Default Mode:** To prioritize security, the default interop mode waits for a ZK proof to validate the message, which + typically takes around 10 minutes. + +- **Fast Mode (Planned):** We are developing an alternative **INTEROP_CENTER** contract (using a different address but + the same interface) that will operate within 1 second. However, this faster mode comes with additional risks, similar + to the approach used by optimistic chains. 
+ +### 4 Levels of Interop + +When analyzing interop, it can be broken into four levels, allowing you to choose the appropriate level for integration: + +- **InteropMessages:** The lowest level, directly used by third-party bridges and other protocols. + +- **InteropCall:** A medium level, designed for use by "library" contracts. + +- **InteropCallBundle:** A higher level, intended for use by "user-visible" contracts. + +- **InteropTransaction:** The highest level, designed for use in UX and frontends. + +![levelsofinterop.png](../img/levelsofinterop.png) + +We will be covering the details of each layer in the next section. diff --git a/etc/env/ecosystems/sepolia.yaml b/etc/env/ecosystems/sepolia.yaml index 1c0d9ef135cb..772ea98c3cdf 100644 --- a/etc/env/ecosystems/sepolia.yaml +++ b/etc/env/ecosystems/sepolia.yaml @@ -5,7 +5,7 @@ ecosystem_contracts: state_transition_proxy_addr: 0x4e39E90746A9ee410A8Ce173C7B96D3AfEd444a5 transparent_proxy_admin_addr: 0x0358BACa94dcD7931B7BA7aAf8a5Ac6090E143a5 validator_timelock_addr: 0xD3876643180A79d0A56d0900C060528395f34453 - diamond_cut_data: 
0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000006000000000000000000000000027a7f18106281fe53d371958e8bc3f833694d24a0000000000000000000000000000000000000000000000000000000000000c400000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000002c000000000000000000000000000000000000000000000000000000000000008800000000000000000000000000000000000000000000000000000000000000a2000000000000000000000000096b40174102c93155cdb46a5e4691eeb6c4e1b7b000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000d0e18b6810000000000000000000000000000000000000000000000000000000064bf8d6600000000000000000000000000000000000000000000000000000000a9f6d9410000000000000000000000000000000000000000000000000000000027ae4c16000000000000000000000000000000000000000000000000000000004dd18bf5000000000000000000000000000000000000000000000000000000001cc5d10300000000000000000000000000000000000000000000000000000000be6f11cf00000000000000000000000000000000000000000000000000000000e76db86500000000000000000000000000000000000000000000000000000000235d9eb50000000000000000000000000000000000000000000000000000000021f603d7000000000000000000000000000000000000000000000000000000004623c91d000000000000000000000000000000000000000000000000000000001733894500000000000000000000000000000000000000000000000000000000fc57565f00000000000000000000000000000000000000000000000000000000000000000000000000000000183a8459e2a4440f364bec5040d8327bbb619be300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000
000000000000000000000000000000000000000000000000291de72e3400000000000000000000000000000000000000000000000000000000ea6c029c00000000000000000000000000000000000000000000000000000000cdffacc60000000000000000000000000000000000000000000000000000000052ef6b2c00000000000000000000000000000000000000000000000000000000adfca15e000000000000000000000000000000000000000000000000000000007a0ed627000000000000000000000000000000000000000000000000000000006e9960c30000000000000000000000000000000000000000000000000000000098acd7a600000000000000000000000000000000000000000000000000000000086a56f8000000000000000000000000000000000000000000000000000000003591c1a00000000000000000000000000000000000000000000000000000000079823c9a00000000000000000000000000000000000000000000000000000000d86970d800000000000000000000000000000000000000000000000000000000fd791f3c00000000000000000000000000000000000000000000000000000000e5355c75000000000000000000000000000000000000000000000000000000009d1b5a81000000000000000000000000000000000000000000000000000000007b30c8da00000000000000000000000000000000000000000000000000000000d046815600000000000000000000000000000000000000000000000000000000631f4bac000000000000000000000000000000000000000000000000000000000ec6b0b70000000000000000000000000000000000000000000000000000000033ce93fe0000000000000000000000000000000000000000000000000000000006d49e5b00000000000000000000000000000000000000000000000000000000f5c1182c000000000000000000000000000000000000000000000000000000005518c73b00000000000000000000000000000000000000000000000000000000db1f0bf900000000000000000000000000000000000000000000000000000000b8c2f66f00000000000000000000000000000000000000000000000000000000ef3f0bae00000000000000000000000000000000000000000000000000000000fe26699e000000000000000000000000000000000000000000000000000000003960738200000000000000000000000000000000000000000000000000000000af6a2dcd00000000000000000000000000000000000000000000000000000000a1954fc50000000000000000000000000000000000000000000000000000000046657fe90000000000000000000000
000000000000000000000000000000000018e3a9410000000000000000000000000000000000000000000000000000000029b98c6700000000000000000000000000000000000000000000000000000000bd7c541200000000000000000000000000000000000000000000000000000000c3bbd2d700000000000000000000000000000000000000000000000000000000e81e0ba100000000000000000000000000000000000000000000000000000000facd743b000000000000000000000000000000000000000000000000000000009cd939e40000000000000000000000000000000000000000000000000000000056142d7a00000000000000000000000000000000000000000000000000000000b22dd78e0000000000000000000000000000000000000000000000000000000074f4d30d00000000000000000000000000000000000000000000000000000000000000000000000000000000550cf73f4b50aa0df0257f2d07630d48fa00f73a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000812f43dab000000000000000000000000000000000000000000000000000000006c0960f900000000000000000000000000000000000000000000000000000000b473318e00000000000000000000000000000000000000000000000000000000042901c700000000000000000000000000000000000000000000000000000000263b7f8e00000000000000000000000000000000000000000000000000000000e4948f4300000000000000000000000000000000000000000000000000000000eb67241900000000000000000000000000000000000000000000000000000000c924de3500000000000000000000000000000000000000000000000000000000000000000000000000000000200caf816bcdd94123d3c18488741d4e4fa40ba60000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000008701f58c5000000000000000000000000000000000000000000000000000000006edd4f1200000000000000000000000000000000000000000000000000000000c3d93e7c00000000000000000000000000000000000000
0000000000000000006f497ac6000000000000000000000000000000000000000000000000000000007f61885c00000000000000000000000000000000000000000000000000000000c37533bb0000000000000000000000000000000000000000000000000000000097c09d34000000000000000000000000000000000000000000000000000000000f23da430000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001c0000000000000000000000000ac3a2dc46cea843f0a9d6554f8804aed18ff0795f520cd5b37e74e19fdb369c8d676a04dce8a19457497ac6686d2bb95d94109c8f9664f4324c1400fa5c3822d667f30e873f53f1b8033180cd15fe41c1e2355c60000000000000000000000000000000000000000000000000000000000000000010008e742608b21bf7eb23c1a9d0602047e3618b464c9b59c0fba3b3d7ab66e01000563374c277a2c1e34659a2a1e87371bb6d852ce142022d497bfb50b9e3200000000000000000000000000000000000000000000000000000000044aa200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f4240000000000000000000000000000000000000000000000000000000000001d4c00000000000000000000000000000000000000000000000000000000004c4b40000000000000000000000000000000000000000000000000000000000000182b8000000000000000000000000000000000000000000000000000000000ee6b280000000000000000000000000273bdccdd979510adf4fb801d92f64b243c01fe2 + diamond_cut_data: 
0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000060000000000000000000000000d43b0b525e2cb6005f39ced7b69d3437f0fb840f0000000000000000000000000000000000000000000000000000000000000c400000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000002c000000000000000000000000000000000000000000000000000000000000008800000000000000000000000000000000000000000000000000000000000000a2000000000000000000000000090c0a0a63d7ff47bfaa1e9f8fa554dabc986504a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000d0e18b6810000000000000000000000000000000000000000000000000000000064bf8d6600000000000000000000000000000000000000000000000000000000a9f6d9410000000000000000000000000000000000000000000000000000000027ae4c16000000000000000000000000000000000000000000000000000000004dd18bf5000000000000000000000000000000000000000000000000000000001cc5d10300000000000000000000000000000000000000000000000000000000be6f11cf00000000000000000000000000000000000000000000000000000000e76db86500000000000000000000000000000000000000000000000000000000235d9eb50000000000000000000000000000000000000000000000000000000021f603d7000000000000000000000000000000000000000000000000000000004623c91d000000000000000000000000000000000000000000000000000000001733894500000000000000000000000000000000000000000000000000000000fc57565f0000000000000000000000000000000000000000000000000000000000000000000000000000000081754d2e48e3e553ba6dfd193fc72b3a0c6076d900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000
000000000000000000000000000000000000000000000000291de72e3400000000000000000000000000000000000000000000000000000000ea6c029c00000000000000000000000000000000000000000000000000000000cdffacc60000000000000000000000000000000000000000000000000000000052ef6b2c00000000000000000000000000000000000000000000000000000000adfca15e000000000000000000000000000000000000000000000000000000007a0ed627000000000000000000000000000000000000000000000000000000006e9960c30000000000000000000000000000000000000000000000000000000098acd7a600000000000000000000000000000000000000000000000000000000086a56f8000000000000000000000000000000000000000000000000000000003591c1a00000000000000000000000000000000000000000000000000000000079823c9a00000000000000000000000000000000000000000000000000000000d86970d800000000000000000000000000000000000000000000000000000000fd791f3c00000000000000000000000000000000000000000000000000000000e5355c75000000000000000000000000000000000000000000000000000000009d1b5a81000000000000000000000000000000000000000000000000000000007b30c8da00000000000000000000000000000000000000000000000000000000d046815600000000000000000000000000000000000000000000000000000000631f4bac000000000000000000000000000000000000000000000000000000000ec6b0b70000000000000000000000000000000000000000000000000000000033ce93fe0000000000000000000000000000000000000000000000000000000006d49e5b00000000000000000000000000000000000000000000000000000000f5c1182c000000000000000000000000000000000000000000000000000000005518c73b00000000000000000000000000000000000000000000000000000000db1f0bf900000000000000000000000000000000000000000000000000000000b8c2f66f00000000000000000000000000000000000000000000000000000000ef3f0bae00000000000000000000000000000000000000000000000000000000fe26699e000000000000000000000000000000000000000000000000000000003960738200000000000000000000000000000000000000000000000000000000af6a2dcd00000000000000000000000000000000000000000000000000000000a1954fc50000000000000000000000000000000000000000000000000000000046657fe90000000000000000000000
000000000000000000000000000000000018e3a9410000000000000000000000000000000000000000000000000000000029b98c6700000000000000000000000000000000000000000000000000000000bd7c541200000000000000000000000000000000000000000000000000000000c3bbd2d700000000000000000000000000000000000000000000000000000000e81e0ba100000000000000000000000000000000000000000000000000000000facd743b000000000000000000000000000000000000000000000000000000009cd939e40000000000000000000000000000000000000000000000000000000056142d7a00000000000000000000000000000000000000000000000000000000b22dd78e0000000000000000000000000000000000000000000000000000000074f4d30d00000000000000000000000000000000000000000000000000000000000000000000000000000000b2d1ca55203e96b1d1e6f034805431b7ac983185000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000812f43dab000000000000000000000000000000000000000000000000000000006c0960f900000000000000000000000000000000000000000000000000000000b473318e00000000000000000000000000000000000000000000000000000000042901c700000000000000000000000000000000000000000000000000000000263b7f8e00000000000000000000000000000000000000000000000000000000e4948f4300000000000000000000000000000000000000000000000000000000eb67241900000000000000000000000000000000000000000000000000000000c924de3500000000000000000000000000000000000000000000000000000000000000000000000000000000bb13642f795014e0eac2b0d52ecd5162ecb667120000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000008701f58c5000000000000000000000000000000000000000000000000000000006edd4f1200000000000000000000000000000000000000000000000000000000c3d93e7c00000000000000000000000000000000000000
0000000000000000006f497ac6000000000000000000000000000000000000000000000000000000007f61885c00000000000000000000000000000000000000000000000000000000c37533bb0000000000000000000000000000000000000000000000000000000097c09d34000000000000000000000000000000000000000000000000000000000f23da430000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001c00000000000000000000000009a2ebeb3676d4c593443fd0bb1bb9977c73b4118f520cd5b37e74e19fdb369c8d676a04dce8a19457497ac6686d2bb95d94109c8f9664f4324c1400fa5c3822d667f30e873f53f1b8033180cd15fe41c1e2355c60000000000000000000000000000000000000000000000000000000000000000010008c3be57ae5800e077b6c2056d9d75ad1a7b4f0ce583407961cc6fe0b6780100055dba11508480be023137563caec69debc85f826cb3a4b68246a7cabe3000000000000000000000000000000000000000000000000000000000044aa200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f4240000000000000000000000000000000000000000000000000000000000001d4c000000000000000000000000000000000000000000000000000000000000182b80000000000000000000000000000000000000000000000000000000004c4b400000000000000000000000000000000000000000000000000000000000ee6b2800000000000000000000000009c30b772c02c1008efcf47cb282c5e4cdde4c2f1 bridges: erc20: l1_address: 0x2Ae09702F77a4940621572fBcDAe2382D44a2cbA @@ -14,11 +14,11 @@ bridges: weth: l1_address: 0x7b79995e5f793A07Bc00c21412e50Ecae098E7f9 l1: - default_upgrade_addr: 0x27A7F18106281fE53d371958E8bC3f833694D24a + default_upgrade_addr: 0xA7Cc1Bf4d8404d14caB00d2b2F0b2F4198CddDfF diamond_proxy_addr: 0x9A6DE0f62Aa270A8bCB1e2610078650D539B1Ef9 governance_addr: 0x62e77441531b4B045a6B6f4891be4AdBA7eD4d88 multicall3_addr: 0xca11bde05977b3631167028862be2a173976ca11 - verifier_addr: 0xAC3a2Dc46ceA843F0A9d6554f8804AeD18ff0795 + verifier_addr: 0x9A2eBeb3676d4c593443Fd0bb1bb9977C73b4118 validator_timelock_addr: 0xD3876643180A79d0A56d0900C060528395f34453 base_token_addr: 
'0x0000000000000000000000000000000000000000' l2: diff --git a/etc/env/file_based/external_node.yaml b/etc/env/file_based/external_node.yaml index e97b04fb3900..675baf739686 100644 --- a/etc/env/file_based/external_node.yaml +++ b/etc/env/file_based/external_node.yaml @@ -4,5 +4,3 @@ l1_batch_commit_data_generator_mode: Rollup main_node_url: http://localhost:3050 main_node_rate_limit_rps: 1000 - -gateway_url: http://localhost:3052 diff --git a/etc/env/file_based/general.yaml b/etc/env/file_based/general.yaml index 23e8b3ee420c..62c264a376db 100644 --- a/etc/env/file_based/general.yaml +++ b/etc/env/file_based/general.yaml @@ -101,7 +101,6 @@ eth: aggregated_block_execute_deadline: 10 timestamp_criteria_max_allowed_lag: 30 max_eth_tx_data_size: 120000 - aggregated_proof_sizes: [ 1 ] max_aggregated_tx_gas: 15000000 max_acceptable_priority_fee_in_gwei: 100000000000 # typo: value is in wei (100 gwei) pubdata_sending_mode: BLOBS diff --git a/etc/env/file_based/genesis.yaml b/etc/env/file_based/genesis.yaml index 9617b011d2c7..cbc29b9d0312 100644 --- a/etc/env/file_based/genesis.yaml +++ b/etc/env/file_based/genesis.yaml @@ -8,8 +8,8 @@ l1_chain_id: 9 l2_chain_id: 270 fee_account: '0x0000000000000000000000000000000000000001' prover: - recursion_scheduler_level_vk_hash: 0x14f97b81e54b35fe673d8708cc1a19e1ea5b5e348e12d31e39824ed4f42bbca2 dummy_verifier: true + snark_wrapper_vk_hash: 0x14f97b81e54b35fe673d8708cc1a19e1ea5b5e348e12d31e39824ed4f42bbca2 genesis_protocol_semantic_version: 0.25.0 l1_batch_commit_data_generator_mode: Rollup # TODO: uncomment once EVM emulator is present in the `contracts` submodule diff --git a/package.json b/package.json index 9e3428e614cc..b293bedd8f69 100644 --- a/package.json +++ b/package.json @@ -39,12 +39,12 @@ "@typescript-eslint/eslint-plugin": "^6.7.4", "@typescript-eslint/parser": "^4.10.0", "babel-eslint": "^10.1.0", - "eslint-config-alloy": "^3.8.2", "eslint": "^7.16.0", + "eslint-config-alloy": "^3.8.2", "markdownlint-cli": "^0.24.0", 
"npm-run-all": "^4.1.5", + "prettier": "^3.3.3", "prettier-plugin-solidity": "=1.0.0-dev.22", - "prettier": "^2.3.2", "solhint": "^3.3.2", "sql-formatter": "^13.1.0" } diff --git a/prover/CHANGELOG.md b/prover/CHANGELOG.md index d30076cddcf1..e99c20193eb5 100644 --- a/prover/CHANGELOG.md +++ b/prover/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [17.1.1](https://github.com/matter-labs/zksync-era/compare/prover-v17.1.0...prover-v17.1.1) (2024-11-26) + + +### Bug Fixes + +* Add prometheus exporter to circut prover ([#3311](https://github.com/matter-labs/zksync-era/issues/3311)) ([920eba1](https://github.com/matter-labs/zksync-era/commit/920eba15fc64b40f10fcc9f6910d5e7607f3d313)) + ## [17.1.0](https://github.com/matter-labs/zksync-era/compare/prover-v17.0.0...prover-v17.1.0) (2024-11-18) diff --git a/prover/Cargo.lock b/prover/Cargo.lock index 2fb9b5f10cfa..c73554f6d7a8 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -15,18 +15,18 @@ dependencies = [ [[package]] name = "addr2line" -version = "0.22.0" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] [[package]] -name = "adler" -version = "1.0.2" +name = "adler2" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "ahash" @@ -63,9 +63,9 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.18" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "android-tzdata" @@ 
-93,9 +93,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.14" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" dependencies = [ "anstyle", "anstyle-parse", @@ -108,43 +108,43 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.7" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" [[package]] name = "anstyle-parse" -version = "0.2.4" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.0" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.3" +version = "3.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" +checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" dependencies = [ "anstyle", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anyhow" -version = "1.0.86" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +checksum = 
"c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" [[package]] name = "arr_macro" @@ -163,15 +163,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0609c78bd572f4edc74310dfb63a01f5609d53fa8b4dd7c4d98aef3b3e8d72d1" dependencies = [ "proc-macro-hack", - "quote 1.0.36", + "quote 1.0.37", "syn 1.0.109", ] [[package]] name = "arrayref" -version = "0.3.7" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" +checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" [[package]] name = "arrayvec" @@ -190,19 +190,20 @@ checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" [[package]] name = "arrayvec" -version = "0.7.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "assert_cmd" -version = "2.0.14" +version = "2.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed72493ac66d5804837f480ab3766c72bdfab91a65e565fc54fa9e42db0073a8" +checksum = "dc1835b7f27878de8525dc71410b5a31cdcc5f230aed5ba5df968e09c201b23d" dependencies = [ "anstyle", "bstr", "doc-comment", + "libc", "predicates", "predicates-core", "predicates-tree", @@ -223,9 +224,9 @@ dependencies = [ [[package]] name = "async-stream" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" dependencies = [ "async-stream-impl", "futures-core", @@ -234,24 +235,24 @@ dependencies = [ [[package]] name = "async-stream-impl" -version = "0.3.5" +version = "0.3.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] name = "async-trait" -version = "0.1.80" +version = "0.1.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" +checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -282,27 +283,26 @@ dependencies = [ [[package]] name = "autocfg" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "aws-lc-rs" -version = "1.8.1" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae74d9bd0a7530e8afd1770739ad34b36838829d6ad61818f9230f683f5ad77" +checksum = "f47bb8cc16b669d267eeccf585aea077d0882f4777b1c1f740217885d6e6e5a3" dependencies = [ "aws-lc-sys", - "mirai-annotations", "paste", "zeroize", ] [[package]] name = "aws-lc-sys" -version = "0.20.1" +version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f0e249228c6ad2d240c2dc94b714d711629d52bad946075d8e9b2f5391f0703" +checksum = "a2101df3813227bbaaaa0b04cd61c534c7954b22bd68d399b440be937dc63ff7" dependencies = [ "bindgen", "cc", @@ -315,18 +315,18 @@ dependencies = [ [[package]] name = "axum" -version = "0.7.5" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" +checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" dependencies = [ "async-trait", "axum-core", "bytes", "futures-util", - "http 1.1.0", - "http-body 1.0.0", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", - "hyper 1.5.0", + "hyper 1.5.2", "hyper-util", "itoa", "matchit", @@ -339,9 +339,9 @@ dependencies = [ "serde_json", "serde_path_to_error", "serde_urlencoded", - "sync_wrapper 1.0.1", + "sync_wrapper 1.0.2", "tokio", - "tower 0.4.13", + "tower 0.5.2", "tower-layer", "tower-service", "tracing", @@ -349,20 +349,20 @@ dependencies = [ [[package]] name = "axum-core" -version = "0.4.3" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3" +checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" dependencies = [ "async-trait", "bytes", "futures-util", - "http 1.1.0", - "http-body 1.0.0", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", "mime", "pin-project-lite", "rustversion", - "sync_wrapper 0.1.2", + "sync_wrapper 1.0.2", "tower-layer", "tower-service", "tracing", @@ -381,17 +381,17 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.72" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17c6a35df3749d2e8bb1b7b21a976d82b15548788d2735b9d82f329268f71a11" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", "miniz_oxide", "object", "rustc-demangle", + "windows-targets 0.52.6", ] [[package]] @@ -441,13 +441,13 @@ dependencies = [ [[package]] name = "bigdecimal" -version = "0.4.5" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d712318a27c7150326677b321a5fa91b55f6d9034ffd67f20319e147d40cee" +checksum = 
"7f31f3af01c5c65a07985c804d3366560e6fa7883d640a122819b14ec327482c" dependencies = [ "autocfg", "libm", - "num-bigint 0.4.5", + "num-bigint 0.4.6", "num-integer", "num-traits", ] @@ -463,9 +463,9 @@ dependencies = [ [[package]] name = "bindgen" -version = "0.69.4" +version = "0.69.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0" +checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" dependencies = [ "bitflags 2.6.0", "cexpr", @@ -475,22 +475,22 @@ dependencies = [ "lazycell", "log", "prettyplease", - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "regex", "rustc-hash", "shlex", - "syn 2.0.66", + "syn 2.0.90", "which", ] [[package]] name = "bit-set" -version = "0.5.3" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" dependencies = [ - "bit-vec", + "bit-vec 0.8.0", ] [[package]] @@ -502,6 +502,12 @@ dependencies = [ "serde", ] +[[package]] +name = "bit-vec" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" + [[package]] name = "bitflags" version = "1.3.2" @@ -623,7 +629,7 @@ version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68ec2f007ff8f90cc459f03e9f30ca1065440170f013c868823646e2e48d0234" dependencies = [ - "arrayvec 0.7.4", + "arrayvec 0.7.6", "bincode", "blake2 0.10.6", "const_format", @@ -665,9 +671,9 @@ dependencies = [ [[package]] name = "borsh" -version = "1.3.0" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d4d6dafc1a3bb54687538972158f07b2c948bc57d5890df22c0739098b3028" +checksum = 
"2506947f73ad44e344215ccd6403ac2ae18cd8e046e581a441bf8d199f257f03" dependencies = [ "borsh-derive", "cfg_aliases", @@ -675,26 +681,25 @@ dependencies = [ [[package]] name = "borsh-derive" -version = "1.3.0" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf4918709cc4dd777ad2b6303ed03cb37f3ca0ccede8c1b0d28ac6db8f4710e0" +checksum = "c2593a3b8b938bd68373196c9832f516be11fa487ef4ae745eb282e6a56a7244" dependencies = [ "once_cell", - "proc-macro-crate 2.0.0", - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", - "syn_derive", + "proc-macro-crate 3.2.0", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] name = "bstr" -version = "1.9.1" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05efc5cfd9110c8416e471df0e96702d58690178e206e61b7173706673c93706" +checksum = "786a307d683a5bf92e6fd5fd69a7eb613751668d1d8d67d802846dfe367c62c8" dependencies = [ "memchr", - "regex-automata 0.4.8", + "regex-automata 0.4.9", "serde", ] @@ -727,8 +732,8 @@ version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3db406d29fbcd95542e92559bed4d8ad92636d1ca8b3b72ede10b4bcc010e659" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "syn 1.0.109", ] @@ -740,15 +745,15 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.7.2" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" [[package]] name = "cc" -version = "1.1.14" +version = "1.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d2eb3cd3d1bf4529e31c215ee6f93ec5a3d536d9f578f93d9d33ee19562932" +checksum = 
"9157bbaa6b165880c27a4293a474c91cdcf265cc68cc829bf10be0964a391caf" dependencies = [ "jobserver", "libc", @@ -778,15 +783,15 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "cfg_aliases" -version = "0.1.1" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" dependencies = [ "android-tzdata", "iana-time-zone", @@ -794,7 +799,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -813,38 +818,38 @@ dependencies = [ [[package]] name = "circuit_encodings" -version = "0.140.1" +version = "0.140.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8438d7af992b730143b679e2c6938cb9e0193897ecaf668c59189af8ac296b7" +checksum = "cf6b7cc842eadb4c250cdc6a8bc1dd97624d9f08bbe54db3e11fb23c3a72be07" dependencies = [ "derivative", "serde", "zk_evm 0.140.0", - "zkevm_circuits 0.140.2", + "zkevm_circuits 0.140.3", ] [[package]] name = "circuit_encodings" -version = "0.141.1" +version = "0.141.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01a2fcc80e97682104f355dd819cb4972583828a6c0f65ec26889a78a84b0c56" +checksum = "7898ffbf3cd413576b4b674fe1545a35488c67eb16bd5a4148425e42c2a2b65b" dependencies = [ "derivative", "serde", "zk_evm 0.141.0", - "zkevm_circuits 0.141.1", + "zkevm_circuits 0.141.2", ] [[package]] name = "circuit_encodings" -version = "0.142.1" +version = "0.142.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "94be7afb5ace6024d6e3c105d521b4b9b563bac14a92c2f59c4683e9169a25d8" +checksum = "8364ecafcc4b2c896023f8d3af952c52a500aa55f14fd268bb5d9ab07f837369" dependencies = [ "derivative", "serde", "zk_evm 0.141.0", - "zkevm_circuits 0.141.1", + "zkevm_circuits 0.141.2", ] [[package]] @@ -878,7 +883,7 @@ version = "0.140.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa5f22311ce609d852d7d9f4943535ea4610aeb785129ae6ff83d5201c4fb387" dependencies = [ - "circuit_encodings 0.140.1", + "circuit_encodings 0.140.3", "derivative", "rayon", "serde", @@ -892,7 +897,7 @@ version = "0.141.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c47c71d6ba83a8beb0af13af70beffd627f5497caf3d44c6f96363e788b07ea" dependencies = [ - "circuit_encodings 0.141.1", + "circuit_encodings 0.141.2", "derivative", "rayon", "serde", @@ -906,7 +911,7 @@ version = "0.142.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e264723359e6a1aad98110bdccf1ae3ad596e93e7d31da9e40f6adc07e4add54" dependencies = [ - "circuit_encodings 0.142.1", + "circuit_encodings 0.142.2", "derivative", "rayon", "serde", @@ -949,15 +954,15 @@ dependencies = [ "bitflags 1.3.2", "strsim 0.8.0", "textwrap", - "unicode-width", + "unicode-width 0.1.14", "vec_map", ] [[package]] name = "clap" -version = "4.5.4" +version = "4.5.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" +checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84" dependencies = [ "clap_builder", "clap_derive", @@ -965,9 +970,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.2" +version = "4.5.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" +checksum = 
"30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838" dependencies = [ "anstream", "anstyle", @@ -977,27 +982,27 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.4" +version = "4.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" +checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" dependencies = [ "heck 0.5.0", - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] name = "clap_lex" -version = "0.7.0" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "cmake" -version = "0.1.50" +version = "0.1.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31c789563b815f77f4250caee12365734369f942439b7defd71e18a48197130" +checksum = "c682c223677e0e5b6b7f63a64b9351844c3f1b1678a68b7ee617e30fb082620e" dependencies = [ "cc", ] @@ -1013,18 +1018,18 @@ dependencies = [ [[package]] name = "colorchoice" -version = "1.0.1" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "colored" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" +checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" dependencies = [ "lazy_static", - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] @@ -1054,15 +1059,15 @@ dependencies = [ [[package]] name = "console" -version = 
"0.15.8" +version = "0.15.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" +checksum = "ea3c6ecd8059b57859df5c69830340ed3c41d30e3da0c1cbed90a96ac853041b" dependencies = [ "encode_unicode", - "lazy_static", "libc", - "unicode-width", - "windows-sys 0.52.0", + "once_cell", + "unicode-width 0.2.0", + "windows-sys 0.59.0", ] [[package]] @@ -1088,22 +1093,22 @@ checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" [[package]] name = "const_format" -version = "0.2.32" +version = "0.2.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3a214c7af3d04997541b18d432afaff4c455e79e2029079647e72fc2bd27673" +checksum = "126f97965c8ad46d6d9163268ff28432e8f6a1196a55578867832e3049df63dd" dependencies = [ "const_format_proc_macros", ] [[package]] name = "const_format_proc_macros" -version = "0.2.32" +version = "0.2.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7f6ff08fd20f4f299298a28e2dfa8a8ba1036e6cd2460ac1de7b425d76f2500" +checksum = "1d57c2eccfb16dbac1f4e61e206105db5820c9d26c3c472bc17c774259ef7744" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "unicode-xid 0.2.4", + "proc-macro2 1.0.92", + "quote 1.0.37", + "unicode-xid 0.2.6", ] [[package]] @@ -1131,17 +1136,27 @@ dependencies = [ "libc", ] +[[package]] +name = "core-foundation" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" -version = "0.8.6" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" 
-version = "0.2.12" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" dependencies = [ "libc", ] @@ -1185,18 +1200,18 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.13" +version = "0.5.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" +checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-deque" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" dependencies = [ "crossbeam-epoch", "crossbeam-utils", @@ -1213,18 +1228,18 @@ dependencies = [ [[package]] name = "crossbeam-queue" -version = "0.3.11" +version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" +checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-utils" -version = "0.8.20" +version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crunchy" @@ -1276,36 +1291,24 @@ dependencies = [ "subtle", ] -[[package]] -name = "cs_derive" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24cf603ca4299c6e20e644da88897f7b81d688510f4887e818b0bfe0b792081b" -dependencies = [ 
- "proc-macro-error", - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 1.0.109", -] - [[package]] name = "ctor" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f" +checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501" dependencies = [ - "quote 1.0.36", - "syn 2.0.66", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] name = "ctrlc" -version = "3.4.4" +version = "3.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "672465ae37dc1bc6380a6547a8883d5dd397b0f1faaad4f265726cc7042a5345" +checksum = "90eeab0aa92f3f9b4e87f258c72b139c207d251f9cbc1080a0086b86a8870dd3" dependencies = [ "nix", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1330,9 +1333,9 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -1363,8 +1366,8 @@ checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "strsim 0.10.0", "syn 1.0.109", ] @@ -1377,10 +1380,10 @@ checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "strsim 0.11.1", - "syn 2.0.66", + "syn 2.0.90", ] [[package]] @@ -1390,7 +1393,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" dependencies = [ "darling_core 0.13.4", - "quote 1.0.36", + "quote 1.0.37", "syn 1.0.109", ] @@ -1401,8 +1404,8 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core 0.20.10", - "quote 1.0.36", - "syn 2.0.66", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -1461,8 +1464,8 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "syn 1.0.109", ] @@ -1481,10 +1484,10 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", - "unicode-xid 0.2.4", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", + "unicode-xid 0.2.6", ] [[package]] @@ -1496,7 +1499,7 @@ dependencies = [ "console", "shell-words", "tempfile", - "thiserror", + "thiserror 1.0.69", "zeroize", ] @@ -1533,6 +1536,17 @@ dependencies = [ "subtle", ] +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", +] + [[package]] name = "doc-comment" version = "0.3.3" @@ -1553,9 +1567,9 @@ checksum = "dcbb2bf8e87535c23f7a8a321e364ce21462d0ff10cb6407820e8e96dfff6653" [[package]] name = "dunce" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" [[package]] name = "dyn-clone" @@ -1621,16 +1635,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"1d7bc049e1bd8cdeb31b68bbd586a9464ecf9f3944af3958a7a9d0f8b9799417" dependencies = [ "enum-ordinalize", - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] name = "either" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" dependencies = [ "serde", ] @@ -1686,15 +1700,15 @@ dependencies = [ [[package]] name = "encode_unicode" -version = "0.3.6" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" [[package]] name = "encoding_rs" -version = "0.8.34" +version = "0.8.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" dependencies = [ "cfg-if", ] @@ -1714,9 +1728,9 @@ version = "4.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d28318a75d4aead5c4db25382e8ef717932d0346600cacae6357eb5941bc5ff" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -1726,16 +1740,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa18ce2bc66555b3218614519ac839ddb759a7d6720732f979ef8d13be147ecd" dependencies = [ "once_cell", - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] name = "env_filter" -version = "0.1.0" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a009aa4810eb158359dda09d0c87378e4bbb89b5a801f016885a4707ba24f7ea" +checksum = "4f2c92ceda6ceec50f43169f9ee8424fe2db276791afde7b2cd8bc084cb376ab" dependencies = [ "log", ] @@ -1755,9 +1769,9 @@ dependencies = [ [[package]] name = "env_logger" -version = "0.11.3" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b35839ba51819680ba087cd351788c9a3c476841207e0b8cee0b04722343b9" +checksum = "e13fa619b91fb2381732789fc5de83b45675e882f66623b7d8cb4f643017018d" dependencies = [ "anstream", "anstyle", @@ -1802,12 +1816,12 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1834,7 +1848,7 @@ dependencies = [ "serde", "serde_json", "sha3 0.10.8", - "thiserror", + "thiserror 1.0.69", "uint", ] @@ -1878,9 +1892,9 @@ dependencies = [ [[package]] name = "event-listener-strategy" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" +checksum = "3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2" dependencies = [ "event-listener", "pin-project-lite", @@ -1888,9 +1902,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.1.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "ff" @@ -1956,9 +1970,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.30" +version = 
"1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" +checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" dependencies = [ "crc32fast", "miniz_oxide", @@ -1975,9 +1989,9 @@ dependencies = [ [[package]] name = "flume" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55ac459de2512911e4b674ce33cf20befaba382d05b62b008afc1c8b57cbf181" +checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095" dependencies = [ "futures-core", "futures-sink", @@ -2021,7 +2035,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "971289216ea5c91872e5e0bb6989214b537bbce375d09fabea5c3ccfe031b204" dependencies = [ "arr_macro", - "bit-vec", + "bit-vec 0.6.3", "blake2 0.9.2", "blake2-rfc_bellman_edition", "blake2s_simd", @@ -2033,7 +2047,7 @@ dependencies = [ "indexmap 1.9.3", "itertools 0.10.5", "lazy_static", - "num-bigint 0.4.5", + "num-bigint 0.4.6", "num-derive", "num-integer", "num-traits", @@ -2073,9 +2087,9 @@ checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" [[package]] name = "futures" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", @@ -2088,9 +2102,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", "futures-sink", @@ -2098,15 +2112,15 @@ dependencies = [ 
[[package]] name = "futures-core" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = "futures-executor" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" dependencies = [ "futures-core", "futures-task", @@ -2127,9 +2141,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-locks" @@ -2144,26 +2158,26 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] name = "futures-sink" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] name = "futures-task" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-timer" @@ -2177,9 +2191,9 @@ dependencies = [ [[package]] name = "futures-util" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures 0.1.31", "futures-channel", @@ -2220,9 +2234,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.29.0" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "glob" @@ -2245,7 +2259,7 @@ dependencies = [ "pin-project", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -2288,10 +2302,10 @@ dependencies = [ "google-cloud-token", "home", "jsonwebtoken", - "reqwest 0.12.5", + "reqwest 0.12.9", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "time", "tokio", "tracing", @@ -2304,8 +2318,8 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04f945a208886a13d07636f38fb978da371d0abc3e34bad338124b9f8c135a8f" dependencies = [ - "reqwest 0.12.5", - "thiserror", + "reqwest 0.12.9", + "thiserror 1.0.69", "tokio", ] @@ -2329,13 +2343,13 @@ dependencies = [ "percent-encoding", "pkcs8 0.10.2", "regex", - "reqwest 0.12.5", + "reqwest 0.12.9", "reqwest-middleware", "ring", "serde", "serde_json", "sha2 0.10.8", - "thiserror", + "thiserror 1.0.69", "time", "tokio", "tracing", @@ -2385,7 +2399,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.2.6", + "indexmap 2.7.0", "slab", "tokio", 
"tokio-util", @@ -2394,17 +2408,17 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.5" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa82e28a107a8cc405f0839610bdc9b15f1e25ec7d696aa5cf173edbcb1486ab" +checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" dependencies = [ "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", - "http 1.1.0", - "indexmap 2.2.6", + "http 1.2.0", + "indexmap 2.7.0", "slab", "tokio", "tokio-util", @@ -2444,6 +2458,12 @@ dependencies = [ "allocator-api2", ] +[[package]] +name = "hashbrown" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" + [[package]] name = "hashlink" version = "0.9.1" @@ -2462,7 +2482,7 @@ dependencies = [ "base64 0.21.7", "bytes", "headers-core", - "http 1.1.0", + "http 1.2.0", "httpdate", "mime", "sha1", @@ -2474,7 +2494,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4" dependencies = [ - "http 1.1.0", + "http 1.2.0", ] [[package]] @@ -2533,11 +2553,11 @@ dependencies = [ [[package]] name = "home" -version = "0.5.9" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -2564,9 +2584,9 @@ dependencies = [ [[package]] name = "http" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" dependencies = [ "bytes", "fnv", @@ -2586,12 
+2606,12 @@ dependencies = [ [[package]] name = "http-body" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.1.0", + "http 1.2.0", ] [[package]] @@ -2602,16 +2622,16 @@ checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", "futures-util", - "http 1.1.0", - "http-body 1.0.0", + "http 1.2.0", + "http-body 1.0.1", "pin-project-lite", ] [[package]] name = "httparse" -version = "1.8.0" +version = "1.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" +checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" [[package]] name = "httpdate" @@ -2637,9 +2657,9 @@ dependencies = [ [[package]] name = "hyper" -version = "0.14.29" +version = "0.14.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f361cde2f109281a220d4307746cdfd5ee3f410da58a70377762396775634b33" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" dependencies = [ "bytes", "futures-channel", @@ -2661,16 +2681,16 @@ dependencies = [ [[package]] name = "hyper" -version = "1.5.0" +version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbbff0a806a4728c99295b254c8838933b5b082d75e3cb70c8dab21fdfbcfa9a" +checksum = "256fb8d4bd6413123cc9d91832d78325c48ff41677595be797d90f42969beae0" dependencies = [ "bytes", "futures-channel", "futures-util", - "h2 0.4.5", - "http 1.1.0", - "http-body 1.0.0", + "h2 0.4.7", + "http 1.2.0", + "http-body 1.0.1", "httparse", "httpdate", "itoa", @@ -2689,12 +2709,12 @@ dependencies = [ "bytes", "futures-util", "headers", - "http 1.1.0", - "hyper 1.5.0", + "http 1.2.0", + "hyper 1.5.2", 
"hyper-rustls", "hyper-util", "pin-project-lite", - "rustls-native-certs", + "rustls-native-certs 0.7.3", "tokio", "tokio-rustls", "tower-service", @@ -2702,17 +2722,17 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.27.2" +version = "0.27.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ee4be2c948921a1a5320b629c4193916ed787a7f7f293fd3f7f5a6c9de74155" +checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" dependencies = [ "futures-util", - "http 1.1.0", - "hyper 1.5.0", + "http 1.2.0", + "hyper 1.5.2", "hyper-util", "log", "rustls", - "rustls-native-certs", + "rustls-native-certs 0.8.1", "rustls-pki-types", "tokio", "tokio-rustls", @@ -2721,11 +2741,11 @@ dependencies = [ [[package]] name = "hyper-timeout" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3203a961e5c83b6f5498933e78b6b263e208c197b63e9c6c53cc82ffd3f63793" +checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" dependencies = [ - "hyper 1.5.0", + "hyper 1.5.2", "hyper-util", "pin-project-lite", "tokio", @@ -2739,7 +2759,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper 0.14.29", + "hyper 0.14.32", "native-tls", "tokio", "tokio-native-tls", @@ -2753,7 +2773,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper 1.5.0", + "hyper 1.5.2", "hyper-util", "native-tls", "tokio", @@ -2763,16 +2783,16 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" dependencies = [ "bytes", 
"futures-channel", "futures-util", - "http 1.1.0", - "http-body 1.0.0", - "hyper 1.5.0", + "http 1.2.0", + "http-body 1.0.1", + "hyper 1.5.2", "pin-project-lite", "socket2", "tokio", @@ -2782,9 +2802,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.60" +version = "0.1.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -2803,6 +2823,124 @@ dependencies = [ "cc", ] +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + 
"icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", +] + [[package]] name = "ident_case" version = "1.0.1" @@ -2811,12 +2949,23 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.5.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +checksum = 
"686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" dependencies = [ - "unicode-bidi", - "unicode-normalization", + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", ] [[package]] @@ -2848,13 +2997,13 @@ dependencies = [ [[package]] name = "impl-trait-for-tuples" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 1.0.109", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -2869,12 +3018,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.2.6" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" dependencies = [ "equivalent", - "hashbrown 0.14.5", + "hashbrown 0.15.2", ] [[package]] @@ -2891,18 +3040,18 @@ dependencies = [ [[package]] name = "instant" -version = "0.1.12" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" dependencies = [ "cfg-if", ] [[package]] name = "ipnet" -version = "2.9.0" +version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" +checksum = 
"ddc24109865250148c2e0f3d25d4f0f479571723792d3802153c60922a4fb708" [[package]] name = "ipnetwork" @@ -2915,9 +3064,9 @@ dependencies = [ [[package]] name = "is_terminal_polyfill" -version = "1.70.0" +version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itertools" @@ -2948,9 +3097,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "jemalloc-sys" @@ -2982,7 +3131,7 @@ dependencies = [ "combine", "jni-sys", "log", - "thiserror", + "thiserror 1.0.69", "walkdir", ] @@ -2994,19 +3143,20 @@ checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" [[package]] name = "jobserver" -version = "0.1.31" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" +checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" dependencies = [ "libc", ] [[package]] name = "js-sys" -version = "0.3.69" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7" dependencies = [ + "once_cell", "wasm-bindgen", ] @@ -3019,7 +3169,7 @@ dependencies = [ "jsonptr", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -3034,7 +3184,7 @@ dependencies = [ "pest_derive", "regex", "serde_json", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -3074,14 +3224,14 @@ 
dependencies = [ "futures-channel", "futures-util", "gloo-net", - "http 1.1.0", + "http 1.2.0", "jsonrpsee-core", "pin-project", "rustls", "rustls-pki-types", "rustls-platform-verifier", "soketto", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-rustls", "tokio-util", @@ -3101,15 +3251,15 @@ dependencies = [ "bytes", "futures-timer", "futures-util", - "http 1.1.0", - "http-body 1.0.0", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", "jsonrpsee-types", "pin-project", "rustc-hash", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-stream", "tracing", @@ -3124,8 +3274,8 @@ checksum = "2d90064e04fb9d7282b1c71044ea94d0bbc6eff5621c66f1a0bce9e9de7cf3ac" dependencies = [ "async-trait", "base64 0.22.1", - "http-body 1.0.0", - "hyper 1.5.0", + "http-body 1.0.1", + "hyper 1.5.2", "hyper-rustls", "hyper-util", "jsonrpsee-core", @@ -3134,7 +3284,7 @@ dependencies = [ "rustls-platform-verifier", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "tokio", "tower 0.4.13", "tracing", @@ -3148,10 +3298,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7895f186d5921065d96e16bd795e5ca89ac8356ec423fafc6e3d7cf8ec11aee4" dependencies = [ "heck 0.5.0", - "proc-macro-crate 3.1.0", - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro-crate 3.2.0", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -3161,10 +3311,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c465fbe385238e861fdc4d1c85e04ada6c1fd246161d26385c1b311724d2af" dependencies = [ "beef", - "http 1.1.0", + "http 1.2.0", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -3184,7 +3334,7 @@ version = "0.23.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c28759775f5cb2f1ea9667672d3fe2b0e701d1f4b7b67954e60afe7fd058b5e" dependencies = [ - "http 1.1.0", + "http 1.2.0", "jsonrpsee-client-transport", "jsonrpsee-core", 
"jsonrpsee-types", @@ -3220,9 +3370,9 @@ dependencies = [ [[package]] name = "k256" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "956ff9b67e26e1a6a866cb758f12c6f8746208489e3e4a4b5580802f2f0a587b" +checksum = "f6e3919bbaa2945715f0bb6d3934a173d1e9a59ac23767fbaaef277265a7411b" dependencies = [ "cfg-if", "ecdsa 0.16.9", @@ -3277,12 +3427,12 @@ dependencies = [ "bytes", "chrono", "either", - "futures 0.3.30", + "futures 0.3.31", "home", - "http 1.1.0", - "http-body 1.0.0", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", - "hyper 1.5.0", + "hyper 1.5.2", "hyper-http-proxy", "hyper-rustls", "hyper-timeout", @@ -3292,15 +3442,15 @@ dependencies = [ "kube-core", "pem", "rustls", - "rustls-pemfile 2.1.2", + "rustls-pemfile 2.2.0", "secrecy 0.10.3", "serde", "serde_json", "serde_yaml", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-util", - "tower 0.5.1", + "tower 0.5.2", "tower-http", "tracing", ] @@ -3313,14 +3463,14 @@ checksum = "f42346d30bb34d1d7adc5c549b691bce7aa3a1e60254e68fab7e2d7b26fe3d77" dependencies = [ "chrono", "form_urlencoded", - "http 1.1.0", + "http 1.2.0", "json-patch", "k8s-openapi", "schemars", "serde", "serde-value", "serde_json", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -3330,10 +3480,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9364e04cc5e0482136c6ee8b7fb7551812da25802249f35b3def7aaa31e82ad" dependencies = [ "darling 0.20.10", - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "serde_json", - "syn 2.0.66", + "syn 2.0.90", ] [[package]] @@ -3348,7 +3498,7 @@ dependencies = [ "async-trait", "backoff", "educe", - "futures 0.3.30", + "futures 0.3.31", "hashbrown 0.14.5", "json-patch", "jsonptr", @@ -3358,7 +3508,7 @@ dependencies = [ "pin-project", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-util", "tracing", @@ -3381,25 +3531,25 @@ checksum = 
"830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" -version = "0.2.155" +version = "0.2.168" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" +checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d" [[package]] name = "libloading" -version = "0.8.3" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19" +checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ "cfg-if", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] name = "libm" -version = "0.2.8" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" +checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" [[package]] name = "libsqlite3-sys" @@ -3418,6 +3568,12 @@ version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +[[package]] +name = "litemap" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" + [[package]] name = "local-ip-address" version = "0.5.7" @@ -3426,7 +3582,7 @@ checksum = "612ed4ea9ce5acfb5d26339302528a5e1e59dfed95e9e11af3c083236ff1d15d" dependencies = [ "libc", "neli", - "thiserror", + "thiserror 1.0.69", "windows-sys 0.48.0", ] @@ -3442,9 +3598,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.21" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" +checksum = 
"a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" [[package]] name = "logos" @@ -3463,10 +3619,10 @@ checksum = "dc487311295e0002e452025d6b580b77bb17286de87b57138f3b5db711cded68" dependencies = [ "beef", "fnv", - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "regex-syntax 0.6.29", - "syn 2.0.66", + "syn 2.0.90", ] [[package]] @@ -3517,9 +3673,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "memchr" -version = "2.7.2" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "miette" @@ -3529,8 +3685,8 @@ checksum = "59bb584eaeeab6bd0226ccf3509a69d7936d148cf3d036ad350abe35e8c6856e" dependencies = [ "miette-derive", "once_cell", - "thiserror", - "unicode-width", + "thiserror 1.0.69", + "unicode-width 0.1.14", ] [[package]] @@ -3539,9 +3695,9 @@ version = "5.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49e7bc1560b95a3c4a25d03de42fe76ca718ab92d1a22a55b9b4cf67b3ae635c" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -3552,9 +3708,9 @@ checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "mime_guess" -version = "2.0.4" +version = "2.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" dependencies = [ "mime", "unicase", @@ -3568,30 +3724,24 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.7.3" +version = "0.8.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "87dfd01fe195c66b572b37921ad8803d010623c0aca821bea2302239d155cdae" +checksum = "4ffbe83022cedc1d264172192511ae958937694cd57ce297164951b8b3568394" dependencies = [ - "adler", + "adler2", ] [[package]] name = "mio" -version = "0.8.11" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ "libc", "wasi", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] -[[package]] -name = "mirai-annotations" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9be0862c1b3f26a88803c4a49de6889c10e608b3ee9344e6ef5b45fb37ad3d1" - [[package]] name = "multimap" version = "0.10.0" @@ -3610,7 +3760,7 @@ dependencies = [ "openssl-probe", "openssl-sys", "schannel", - "security-framework", + "security-framework 2.11.1", "security-framework-sys", "tempfile", ] @@ -3634,17 +3784,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c168194d373b1e134786274020dae7fc5513d565ea2ebb9bc9ff17ffb69106d4" dependencies = [ "either", - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "serde", "syn 1.0.109", ] [[package]] name = "nix" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" +checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" dependencies = [ "bitflags 2.6.0", "cfg-if", @@ -3684,7 +3834,7 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" dependencies = [ - "num-bigint 0.4.5", + "num-bigint 0.4.6", "num-complex", "num-integer", "num-iter", @@ -3705,9 
+3855,9 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c165a9ab64cf766f73521c0dd2cfdff64f488b8f0b3e621face3462d3db536d7" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ "num-integer", "num-traits", @@ -3794,7 +3944,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" dependencies = [ - "num-bigint 0.4.5", + "num-bigint 0.4.6", "num-integer", "num-traits", "serde", @@ -3831,11 +3981,11 @@ dependencies = [ [[package]] name = "num_enum" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02339744ee7253741199f897151b38e72257d13802d4ee837285cc2990a90845" +checksum = "4e613fc340b2220f734a8595782c551f1250e969d87d3be1ae0579e8d4065179" dependencies = [ - "num_enum_derive 0.7.2", + "num_enum_derive 0.7.3", ] [[package]] @@ -3845,21 +3995,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" dependencies = [ "proc-macro-crate 1.3.1", - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] name = "num_enum_derive" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "681030a937600a36906c185595136d26abfebb4aa9c65701cefcaf8578bb982b" +checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" dependencies = [ - "proc-macro-crate 3.1.0", - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro-crate 3.2.0", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -3870,18 +4020,18 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" 
[[package]] name = "object" -version = "0.35.0" +version = "0.36.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8ec7ab813848ba4522158d5517a6093db1ded27575b070f4177b8d12b41db5e" +checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.19.0" +version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" [[package]] name = "opaque-debug" @@ -3891,9 +4041,9 @@ checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" [[package]] name = "openssl" -version = "0.10.66" +version = "0.10.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" +checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5" dependencies = [ "bitflags 2.6.0", "cfg-if", @@ -3910,9 +4060,9 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -3923,9 +4073,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.103" +version = "0.9.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" +checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741" dependencies = [ "cc", "libc", @@ -3944,7 +4094,7 @@ dependencies = [ "js-sys", "once_cell", "pin-project-lite", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -3967,9 +4117,9 
@@ checksum = "ad31e9de44ee3538fb9d64fe3376c1362f406162434609e79aea2a41a0af78ab" dependencies = [ "async-trait", "bytes", - "http 1.1.0", + "http 1.2.0", "opentelemetry", - "reqwest 0.12.5", + "reqwest 0.12.9", ] [[package]] @@ -3980,14 +4130,14 @@ checksum = "6b925a602ffb916fb7421276b86756027b37ee708f9dce2dbdcc51739f07e727" dependencies = [ "async-trait", "futures-core", - "http 1.1.0", + "http 1.2.0", "opentelemetry", "opentelemetry-http", "opentelemetry-proto", "opentelemetry_sdk", - "prost 0.13.1", - "reqwest 0.12.5", - "thiserror", + "prost 0.13.4", + "reqwest 0.12.9", + "thiserror 1.0.69", "tokio", "tonic", ] @@ -4000,7 +4150,7 @@ checksum = "30ee9f20bff9c984511a02f082dc8ede839e4a9bf15cc2487c8d6fea5ad850d9" dependencies = [ "opentelemetry", "opentelemetry_sdk", - "prost 0.13.1", + "prost 0.13.4", "tonic", ] @@ -4026,7 +4176,7 @@ dependencies = [ "percent-encoding", "rand 0.8.5", "serde_json", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-stream", ] @@ -4042,9 +4192,9 @@ dependencies = [ [[package]] name = "os_info" -version = "3.8.2" +version = "3.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae99c7fa6dd38c7cafe1ec085e804f8f555a2f8659b0dbe03f1f9963a9b51092" +checksum = "e5ca711d8b83edbb00b44d504503cd247c9c0bd8b0fa2694f2a1a3d8165379ce" dependencies = [ "log", "serde", @@ -4071,11 +4221,11 @@ dependencies = [ [[package]] name = "parity-scale-codec" -version = "3.6.11" +version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1b5927e4a9ae8d6cdb6a69e4e04a0ec73381a358e21b8a576f44769f34e7c24" +checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" dependencies = [ - "arrayvec 0.7.4", + "arrayvec 0.7.6", "bitvec", "byte-slice-cast", "impl-trait-for-tuples", @@ -4085,21 +4235,21 @@ dependencies = [ [[package]] name = "parity-scale-codec-derive" -version = "3.6.9" +version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"be30eaf4b0a9fba5336683b38de57bb86d179a35862ba6bfcf57625d006bde5b" +checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" dependencies = [ - "proc-macro-crate 2.0.0", - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro-crate 3.2.0", + "proc-macro2 1.0.92", + "quote 1.0.37", "syn 1.0.109", ] [[package]] name = "parking" -version = "2.2.0" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" @@ -4119,9 +4269,9 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.1", + "redox_syscall", "smallvec", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -4157,20 +4307,20 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.11" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd53dff83f26735fdc1ca837098ccf133605d794cdae66acfc2bfac3ec809d95" +checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" dependencies = [ "memchr", - "thiserror", + "thiserror 2.0.7", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.7.11" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a548d2beca6773b1c244554d36fcf8548a8a58e74156968211567250e48e49a" +checksum = "816518421cfc6887a0d62bf441b6ffb4536fcc926395a69e1a85852d4363f57e" dependencies = [ "pest", "pest_generator", @@ -4178,22 +4328,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.11" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c93a82e8d145725dcbaf44e5ea887c8a869efdcc28706df2d08c69e17077183" +checksum = 
"7d1396fd3a870fc7838768d171b4616d5c91f6cc25e377b673d714567d99377b" dependencies = [ "pest", "pest_meta", - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] name = "pest_meta" -version = "2.7.11" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a941429fea7e08bedec25e4f6785b6ffaacc6b755da98df5ef3e7dcf4a124c4f" +checksum = "e1e58089ea25d717bfd31fb534e4f3afcc2cc569c70de3e239778991ea3b7dea" dependencies = [ "once_cell", "pest", @@ -4207,34 +4357,34 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.2.6", + "indexmap 2.7.0", ] [[package]] name = "pin-project" -version = "1.1.5" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.5" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] name = "pin-project-lite" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" [[package]] name = "pin-utils" @@ -4275,9 +4425,9 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.30" 
+version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" +checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" [[package]] name = "powerfmt" @@ -4287,9 +4437,12 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.17" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] [[package]] name = "predicates" @@ -4320,9 +4473,9 @@ dependencies = [ [[package]] name = "pretty_assertions" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" +checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" dependencies = [ "diff", "yansi", @@ -4330,12 +4483,12 @@ dependencies = [ [[package]] name = "prettyplease" -version = "0.2.20" +version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f12335488a2f3b0a83b14edad48dca9879ce89b2edd10e80237e4e852dd645e" +checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" dependencies = [ - "proc-macro2 1.0.85", - "syn 2.0.66", + "proc-macro2 1.0.92", + "syn 2.0.90", ] [[package]] @@ -4372,20 +4525,11 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8" -dependencies = [ - "toml_edit 0.20.2", -] - -[[package]] -name = "proc-macro-crate" -version = "3.1.0" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" +checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" dependencies = [ - "toml_edit 0.21.1", + "toml_edit 0.22.22", ] [[package]] @@ -4395,8 +4539,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "syn 1.0.109", "version_check", ] @@ -4407,8 +4551,8 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "version_check", ] @@ -4429,18 +4573,18 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.85" +version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23" +checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" dependencies = [ "unicode-ident", ] [[package]] name = "prometheus-client" -version = "0.22.2" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1ca959da22a332509f2a73ae9e5f23f9dcfc31fd3a54d71f159495bd5909baa" +checksum = "504ee9ff529add891127c4827eb481bd69dc0ebc72e9a682e187db4caa60c3ca" dependencies = [ "dtoa", "itoa", @@ -4454,19 +4598,19 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] name = "proptest" -version = "1.4.0" +version = "1.6.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf" +checksum = "14cae93065090804185d3b75f0bf93b8eeda30c7a9b4a33d3bdb3988d6229e50" dependencies = [ "bit-set", - "bit-vec", + "bit-vec 0.8.0", "bitflags 2.6.0", "lazy_static", "num-traits", @@ -4491,12 +4635,12 @@ dependencies = [ [[package]] name = "prost" -version = "0.13.1" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13db3d3fde688c61e2446b4d843bc27a7e8af269a69440c0308021dc92333cc" +checksum = "2c0fef6c4230e4ccf618a35c59d7ede15dea37de8427500f50aff708806e42ec" dependencies = [ "bytes", - "prost-derive 0.13.1", + "prost-derive 0.13.4", ] [[package]] @@ -4516,7 +4660,7 @@ dependencies = [ "prost 0.12.6", "prost-types", "regex", - "syn 2.0.66", + "syn 2.0.90", "tempfile", ] @@ -4528,22 +4672,22 @@ checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" dependencies = [ "anyhow", "itertools 0.12.1", - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] name = "prost-derive" -version = "0.13.1" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18bec9b0adc4eba778b33684b7ba3e7137789434769ee3ce3930463ef904cfca" +checksum = "157c5a9d7ea5c2ed2d9fb8f495b64759f7816c7eaea54ba3978f0d63000162e3" dependencies = [ "anyhow", "itertools 0.13.0", - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -4583,7 +4727,7 @@ dependencies = [ "prost-reflect", "prost-types", "protox-parse", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -4595,7 +4739,7 @@ dependencies = [ "logos", "miette", "prost-types", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -4607,7 +4751,7 @@ dependencies = [ "bincode", "chrono", "circuit_definitions", - "clap 4.5.4", + "clap 4.5.23", "colored", 
"dialoguer", "hex", @@ -4653,8 +4797,8 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "syn 1.0.109", ] @@ -4690,11 +4834,11 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.36" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" dependencies = [ - "proc-macro2 1.0.85", + "proc-macro2 1.0.92", ] [[package]] @@ -4801,18 +4945,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" -dependencies = [ - "bitflags 1.3.2", -] - -[[package]] -name = "redox_syscall" -version = "0.5.1" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e" +checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" dependencies = [ "bitflags 2.6.0", ] @@ -4825,7 +4960,7 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.8", + "regex-automata 0.4.9", "regex-syntax 0.8.5", ] @@ -4840,9 +4975,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", @@ -4884,7 +5019,7 @@ dependencies = [ "h2 0.3.26", 
"http 0.2.12", "http-body 0.4.6", - "hyper 0.14.29", + "hyper 0.14.32", "hyper-tls 0.5.0", "ipnet", "js-sys", @@ -4899,7 +5034,7 @@ dependencies = [ "serde_json", "serde_urlencoded", "sync_wrapper 0.1.2", - "system-configuration", + "system-configuration 0.5.1", "tokio", "tokio-native-tls", "tower-service", @@ -4907,14 +5042,14 @@ dependencies = [ "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "winreg 0.50.0", + "winreg", ] [[package]] name = "reqwest" -version = "0.12.5" +version = "0.12.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7d6d2a27d57148378eb5e111173f4276ad26340ecc5c49a4a2152167a2d6a37" +checksum = "a77c62af46e79de0a562e1a9849205ffcb7fc1238876e9bd743357570e04046f" dependencies = [ "base64 0.22.1", "bytes", @@ -4922,11 +5057,11 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2 0.4.5", - "http 1.1.0", - "http-body 1.0.0", + "h2 0.4.7", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", - "hyper 1.5.0", + "hyper 1.5.2", "hyper-rustls", "hyper-tls 0.6.0", "hyper-util", @@ -4939,12 +5074,12 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls-pemfile 2.1.2", + "rustls-pemfile 2.2.0", "serde", "serde_json", "serde_urlencoded", - "sync_wrapper 1.0.1", - "system-configuration", + "sync_wrapper 1.0.2", + "system-configuration 0.6.1", "tokio", "tokio-native-tls", "tokio-util", @@ -4954,21 +5089,21 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "winreg 0.52.0", + "windows-registry", ] [[package]] name = "reqwest-middleware" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39346a33ddfe6be00cbc17a34ce996818b97b230b87229f10114693becca1268" +checksum = "562ceb5a604d3f7c885a792d42c199fd8af239d0a51b2fa6a78aafa092452b04" dependencies = [ "anyhow", "async-trait", - "http 1.1.0", - "reqwest 0.12.5", + "http 1.2.0", + "reqwest 0.12.9", "serde", - "thiserror", + "thiserror 1.0.69", 
"tower-service", ] @@ -4979,7 +5114,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82900c877a0ba5362ac5756efbd82c5b795dc509011c1253e2389d8708f1389d" dependencies = [ "addchain", - "arrayvec 0.7.4", + "arrayvec 0.7.6", "blake2 0.10.6", "byteorder", "derivative", @@ -5035,9 +5170,9 @@ dependencies = [ [[package]] name = "rkyv" -version = "0.7.44" +version = "0.7.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cba464629b3394fc4dbc6f940ff8f5b4ff5c7aef40f29166fd4ad12acbc99c0" +checksum = "9008cd6385b9e161d8229e1f6549dd23c3d022f132a2ea37ac3a10ac4935779b" dependencies = [ "bitvec", "bytecheck", @@ -5053,12 +5188,12 @@ dependencies = [ [[package]] name = "rkyv_derive" -version = "0.7.44" +version = "0.7.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7dddfff8de25e6f62b9d64e6e432bf1c6736c57d20323e15ee10435fbda7c65" +checksum = "503d1d27590a2b0a3a4ca4c94755aa2875657196ecbf401a42eff41d7de532c0" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "syn 1.0.109", ] @@ -5074,9 +5209,9 @@ dependencies = [ [[package]] name = "rsa" -version = "0.9.6" +version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" +checksum = "47c75d7c5c6b673e58bf54d8544a9f432e3a925b0e80f7cd3602ab5c50c55519" dependencies = [ "const-oid", "digest 0.10.7", @@ -5094,11 +5229,11 @@ dependencies = [ [[package]] name = "rust_decimal" -version = "1.35.0" +version = "1.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1790d1c4c0ca81211399e0e0af16333276f375209e71a37b67698a373db5b47a" +checksum = "b082d80e3e3cc52b2ed634388d436fe1f4de6af5786cc2de9ba9737527bdf555" dependencies = [ - "arrayvec 0.7.4", + "arrayvec 0.7.6", "borsh", "bytes", "num-traits", @@ -5128,31 +5263,31 @@ checksum = 
"3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" [[package]] name = "rustc_version" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ "semver", ] [[package]] name = "rustix" -version = "0.38.34" +version = "0.38.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +checksum = "f93dc38ecbab2eb790ff964bb77fa94faf256fd3e73285fd7ba0903b76bedb85" dependencies = [ "bitflags 2.6.0", "errno", "libc", "linux-raw-sys", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "rustls" -version = "0.23.12" +version = "0.23.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c58f8c84392efc0a126acce10fa59ff7b3d2ac06ab451a33f2741989b806b044" +checksum = "5065c3f250cbd332cd894be57c40fa52387247659b14a2d6041d121547903b1b" dependencies = [ "aws-lc-rs", "log", @@ -5166,15 +5301,27 @@ dependencies = [ [[package]] name = "rustls-native-certs" -version = "0.7.0" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" +dependencies = [ + "openssl-probe", + "rustls-pemfile 2.2.0", + "rustls-pki-types", + "schannel", + "security-framework 2.11.1", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f1fb85efa936c42c6d5fc28d2629bb51e4b2f4b8a5211e297d599cc5a093792" +checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" dependencies = [ "openssl-probe", - "rustls-pemfile 2.1.2", "rustls-pki-types", "schannel", - "security-framework", + "security-framework 3.0.1", ] [[package]] @@ -5188,36 +5335,35 
@@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "2.1.2" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" dependencies = [ - "base64 0.22.1", "rustls-pki-types", ] [[package]] name = "rustls-pki-types" -version = "1.7.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d" +checksum = "d2bf47e6ff922db3825eb750c4e2ff784c6ff8fb9e13046ef6a1d1c5401b0b37" [[package]] name = "rustls-platform-verifier" -version = "0.3.2" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e3beb939bcd33c269f4bf946cc829fcd336370267c4a927ac0399c84a3151a1" +checksum = "afbb878bdfdf63a336a5e63561b1835e7a8c91524f51621db870169eac84b490" dependencies = [ - "core-foundation", + "core-foundation 0.9.4", "core-foundation-sys", "jni", "log", "once_cell", "rustls", - "rustls-native-certs", + "rustls-native-certs 0.7.3", "rustls-platform-verifier-android", "rustls-webpki", - "security-framework", + "security-framework 2.11.1", "security-framework-sys", "webpki-roots", "winapi", @@ -5225,15 +5371,15 @@ dependencies = [ [[package]] name = "rustls-platform-verifier-android" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84e217e7fdc8466b5b35d30f8c0a30febd29173df4a3a0c2115d306b9c4117ad" +checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" [[package]] name = "rustls-webpki" -version = "0.102.7" +version = "0.102.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84678086bd54edf2b415183ed7a94d0efb049f1b646a33e22a36f3794be6ae56" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" dependencies = [ 
"aws-lc-rs", "ring", @@ -5243,9 +5389,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" +checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" [[package]] name = "rusty-fork" @@ -5276,11 +5422,11 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.23" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -5301,10 +5447,10 @@ version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b1eee588578aff73f856ab961cd2f79e36bc45d7ded33a7562adba4667aecc0e" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "serde_derive_internals", - "syn 2.0.66", + "syn 2.0.90", ] [[package]] @@ -5385,23 +5531,36 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.11.0" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags 2.6.0", + "core-foundation 0.9.4", + "core-foundation-sys", + "libc", + "num-bigint 0.4.6", + "security-framework-sys", +] + +[[package]] +name = "security-framework" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" +checksum = "e1415a607e92bec364ea2cf9264646dcce0f91e6d65281bd6f2819cca3bf39c8" dependencies = [ "bitflags 2.6.0", - "core-foundation", + "core-foundation 0.10.0", "core-foundation-sys", "libc", - "num-bigint 
0.4.5", "security-framework-sys", ] [[package]] name = "security-framework-sys" -version = "2.11.0" +version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317936bbbd05227752583946b9e66d7ce3b489f84e11a94a510b4437fef407d7" +checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2" dependencies = [ "core-foundation-sys", "libc", @@ -5409,9 +5568,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.23" +version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" +checksum = "3cb6eb87a131f756572d7fb904f6e7b68633f09cca868c5df1c4b8d1a694bbba" [[package]] name = "send_wrapper" @@ -5521,7 +5680,7 @@ dependencies = [ "rand 0.8.5", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "time", "url", "uuid", @@ -5535,9 +5694,9 @@ checksum = "a3f0bf26fd526d2a95683cd0f87bf103b8539e2ca1ef48ce002d67aad59aa0b4" [[package]] name = "serde" -version = "1.0.210" +version = "1.0.216" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" +checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e" dependencies = [ "serde_derive", ] @@ -5554,13 +5713,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.210" +version = "1.0.216" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" +checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -5569,16 +5728,16 @@ version = "0.29.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" 
dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] name = "serde_json" -version = "1.0.128" +version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" +checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" dependencies = [ "itoa", "memchr", @@ -5627,8 +5786,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" dependencies = [ "darling 0.13.4", - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "syn 1.0.109", ] @@ -5638,7 +5797,7 @@ version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap 2.2.6", + "indexmap 2.7.0", "itoa", "ryu", "serde", @@ -5796,9 +5955,9 @@ dependencies = [ [[package]] name = "simdutf8" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a" +checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" [[package]] name = "simple_asn1" @@ -5806,9 +5965,9 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" dependencies = [ - "num-bigint 0.4.5", + "num-bigint 0.4.6", "num-traits", - "thiserror", + "thiserror 1.0.69", "time", ] @@ -5843,9 +6002,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +checksum = 
"c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ "libc", "windows-sys 0.52.0", @@ -5853,13 +6012,13 @@ dependencies = [ [[package]] name = "soketto" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37468c595637c10857701c990f93a40ce0e357cedb0953d1c26c8d8027f9bb53" +checksum = "2e859df029d160cb88608f5d7df7fb4753fd20fdfb4de5644f3d8b8440841721" dependencies = [ "base64 0.22.1", "bytes", - "futures 0.3.30", + "futures 0.3.31", "httparse", "log", "rand 0.8.5", @@ -5903,20 +6062,19 @@ checksum = "c85070f382340e8b23a75808e83573ddf65f9ad9143df9573ca37c1ed2ee956a" [[package]] name = "sqlformat" -version = "0.2.3" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce81b7bd7c4493975347ef60d8c7e8b742d4694f4c49f93e0a12ea263938176c" +checksum = "7bba3a93db0cc4f7bdece8bb09e77e2e785c20bfebf79eb8340ed80708048790" dependencies = [ - "itertools 0.12.1", "nom", "unicode_categories", ] [[package]] name = "sqlx" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcfa89bea9500db4a0d038513d7a060566bfc51d46d1c014847049a45cce85e8" +checksum = "93334716a037193fac19df402f8571269c84a00852f6a7066b5d2616dcd64d3e" dependencies = [ "sqlx-core", "sqlx-macros", @@ -5927,9 +6085,9 @@ dependencies = [ [[package]] name = "sqlx-core" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d06e2f2bd861719b1f3f0c7dbe1d80c30bf59e76cf019f07d9014ed7eefb8e08" +checksum = "d4d8060b456358185f7d50c55d9b5066ad956956fddec42ee2e8567134a8936e" dependencies = [ "atoi", "bigdecimal", @@ -5948,7 +6106,7 @@ dependencies = [ "hashbrown 0.14.5", "hashlink", "hex", - "indexmap 2.2.6", + "indexmap 2.7.0", "ipnetwork", "log", "memchr", @@ -5962,7 +6120,7 @@ dependencies = [ "sha2 0.10.8", "smallvec", "sqlformat", - "thiserror", + "thiserror 1.0.69", "tokio", 
"tokio-stream", "tracing", @@ -5971,30 +6129,30 @@ dependencies = [ [[package]] name = "sqlx-macros" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f998a9defdbd48ed005a89362bd40dd2117502f15294f61c8d47034107dbbdc" +checksum = "cac0692bcc9de3b073e8d747391827297e075c7710ff6276d9f7a1f3d58c6657" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "sqlx-core", "sqlx-macros-core", - "syn 2.0.66", + "syn 2.0.90", ] [[package]] name = "sqlx-macros-core" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d100558134176a2629d46cec0c8891ba0be8910f7896abfdb75ef4ab6f4e7ce" +checksum = "1804e8a7c7865599c9c79be146dc8a9fd8cc86935fa641d3ea58e5f0688abaa5" dependencies = [ "dotenvy", "either", "heck 0.5.0", "hex", "once_cell", - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "serde", "serde_json", "sha2 0.10.8", @@ -6002,7 +6160,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 2.0.66", + "syn 2.0.90", "tempfile", "tokio", "url", @@ -6010,9 +6168,9 @@ dependencies = [ [[package]] name = "sqlx-mysql" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "936cac0ab331b14cb3921c62156d913e4c15b74fb6ec0f3146bd4ef6e4fb3c12" +checksum = "64bb4714269afa44aef2755150a0fc19d756fb580a67db8885608cf02f47d06a" dependencies = [ "atoi", "base64 0.22.1", @@ -6048,16 +6206,16 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror", + "thiserror 1.0.69", "tracing", "whoami", ] [[package]] name = "sqlx-postgres" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9734dbce698c67ecf67c442f768a5e90a49b2a4d61a9f1d59f73874bd4cf0710" +checksum = "6fa91a732d854c5d7726349bb4bb879bb9478993ceb764247660aee25f67c2f8" dependencies = [ 
"atoi", "base64 0.22.1", @@ -6081,7 +6239,7 @@ dependencies = [ "log", "md-5", "memchr", - "num-bigint 0.4.5", + "num-bigint 0.4.6", "once_cell", "rand 0.8.5", "rust_decimal", @@ -6091,16 +6249,16 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror", + "thiserror 1.0.69", "tracing", "whoami", ] [[package]] name = "sqlx-sqlite" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75b419c3c1b1697833dd927bdc4c6545a620bc1bbafabd44e1efbe9afcd337e" +checksum = "d5b2cf34a45953bfd3daaf3db0f7a7878ab9b7a6b91b422d24a7a9e4c857b680" dependencies = [ "atoi", "chrono", @@ -6180,8 +6338,8 @@ checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" dependencies = [ "heck 0.3.3", "proc-macro-error", - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "syn 1.0.109", ] @@ -6201,17 +6359,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" dependencies = [ "heck 0.5.0", - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "rustversion", - "syn 2.0.66", + "syn 2.0.90", ] [[package]] name = "subtle" -version = "2.5.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" @@ -6230,34 +6388,22 @@ version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.66" +version = "2.0.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" +checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "unicode-ident", ] -[[package]] -name = "syn_derive" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b" -dependencies = [ - "proc-macro-error", - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", -] - [[package]] name = "sync_wrapper" version = "0.1.2" @@ -6266,9 +6412,23 @@ checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" [[package]] name = "sync_wrapper" -version = "1.0.1" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", +] [[package]] name = "system-configuration" @@ -6277,8 +6437,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" dependencies = [ "bitflags 1.3.2", - "core-foundation", - "system-configuration-sys", + "core-foundation 0.9.4", + "system-configuration-sys 0.5.0", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags 2.6.0", + "core-foundation 0.9.4", + "system-configuration-sys 
0.6.0", ] [[package]] @@ -6291,6 +6462,16 @@ dependencies = [ "libc", ] +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "tap" version = "1.0.1" @@ -6299,14 +6480,15 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.10.1" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" dependencies = [ "cfg-if", "fastrand", + "once_cell", "rustix", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -6330,7 +6512,7 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3dffced63c2b5c7be278154d76b479f9f9920ed34e7574201407f0b14e2bbb93" dependencies = [ - "env_logger 0.11.3", + "env_logger 0.11.5", "test-log-macros", "tracing-subscriber", ] @@ -6341,9 +6523,9 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5999e24eaa32083191ba4e425deb75cdf25efefabe5aaccb7446dd0d4122a3f5" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -6352,27 +6534,47 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" dependencies = [ - "unicode-width", + "unicode-width 0.1.14", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + 
"thiserror-impl 1.0.69", ] [[package]] name = "thiserror" -version = "1.0.61" +version = "2.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +checksum = "93605438cbd668185516ab499d589afb7ee1859ea3d5fc8f6b0755e1c7443767" dependencies = [ - "thiserror-impl", + "thiserror-impl 2.0.7", ] [[package]] name = "thiserror-impl" -version = "1.0.61" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1d8749b4531af2117677a5fcd12b1348a3fe2b81e36e61ffeac5c4aa3273e36" +dependencies = [ + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -6396,9 +6598,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.36" +version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" dependencies = [ "deranged", "itoa", @@ -6417,9 +6619,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" dependencies = [ "num-conv", "time-core", @@ -6443,11 +6645,21 @@ dependencies = [ "crunchy", ] +[[package]] +name = 
"tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tinyvec" -version = "1.6.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" dependencies = [ "tinyvec_macros", ] @@ -6460,32 +6672,31 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.38.0" +version = "1.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a" +checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551" dependencies = [ "backtrace", "bytes", "libc", "mio", - "num_cpus", "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "tokio-macros" -version = "2.3.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -6500,20 +6711,19 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.26.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" dependencies = [ "rustls", - "rustls-pki-types", "tokio", ] 
[[package]] name = "tokio-stream" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" dependencies = [ "futures-core", "pin-project-lite", @@ -6522,9 +6732,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.11" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" dependencies = [ "bytes", "futures-core", @@ -6537,9 +6747,9 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.6" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" [[package]] name = "toml_edit" @@ -6558,54 +6768,43 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.2.6", + "indexmap 2.7.0", "toml_datetime", - "winnow", + "winnow 0.5.40", ] [[package]] name = "toml_edit" -version = "0.20.2" +version = "0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "396e4d48bbb2b7554c944bde63101b5ae446cff6ec4a24227428f15eb72ef338" +checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ - "indexmap 2.2.6", + "indexmap 2.7.0", "toml_datetime", - "winnow", -] - -[[package]] -name = "toml_edit" -version = "0.21.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" -dependencies = [ - "indexmap 
2.2.6", - "toml_datetime", - "winnow", + "winnow 0.6.20", ] [[package]] name = "tonic" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38659f4a91aba8598d27821589f5db7dddd94601e7a01b1e485a50e5484c7401" +checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" dependencies = [ "async-stream", "async-trait", "axum", "base64 0.22.1", "bytes", - "h2 0.4.5", - "http 1.1.0", - "http-body 1.0.0", + "h2 0.4.7", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", - "hyper 1.5.0", + "hyper 1.5.2", "hyper-timeout", "hyper-util", "percent-encoding", "pin-project", - "prost 0.13.1", + "prost 0.13.4", "socket2", "tokio", "tokio-stream", @@ -6637,14 +6836,14 @@ dependencies = [ [[package]] name = "tower" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2873938d487c3cfb9aed7546dc9f2711d867c9f90c46b889989a2cb84eba6b4f" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies = [ "futures-core", "futures-util", "pin-project-lite", - "sync_wrapper 0.1.2", + "sync_wrapper 1.0.2", "tokio", "tokio-util", "tower-layer", @@ -6654,15 +6853,15 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8437150ab6bbc8c5f0f519e3d5ed4aa883a83dd4cdd3d1b21f9482936046cb97" +checksum = "403fa3b783d4b626a8ad51d766ab03cb6d2dbfc46b1c5d4448395e6628dc9697" dependencies = [ "base64 0.22.1", "bitflags 2.6.0", "bytes", - "http 1.1.0", - "http-body 1.0.0", + "http 1.2.0", + "http-body 1.0.1", "mime", "pin-project-lite", "tower-layer", @@ -6684,9 +6883,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "log", "pin-project-lite", @@ -6696,20 +6895,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", @@ -6746,9 +6945,9 @@ dependencies = [ [[package]] name = "tracing-serde" -version = "0.1.3" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1" dependencies = [ "serde", "tracing-core", @@ -6756,9 +6955,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" dependencies = [ "matchers", "nu-ansi-term", @@ -6793,8 +6992,8 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568" dependencies = [ - "quote 1.0.36", - "syn 
2.0.66", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -6817,9 +7016,9 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "ucd-trie" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" [[package]] name = "uint" @@ -6850,51 +7049,54 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unicase" -version = "2.7.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" -dependencies = [ - "version_check", -] +checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" [[package]] name = "unicode-bidi" -version = "0.3.15" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" [[package]] name = "unicode-normalization" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" dependencies = [ "tinyvec", ] [[package]] name = "unicode-properties" -version = "0.1.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e4259d9d4425d9f0661581b804cb85fe66a4c631cadd8f490d1c13a35d5d9291" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" [[package]] name = "unicode-segmentation" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" [[package]] name = "unicode-width" -version = "0.1.13" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" + +[[package]] +name = "unicode-width" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" [[package]] name = "unicode-xid" @@ -6904,9 +7106,9 @@ checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" [[package]] name = "unicode-xid" -version = "0.2.4" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" [[package]] name = "unicode_categories" @@ -6920,7 +7122,7 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ad948c1cb799b1a70f836077721a92a35ac177d4daddf4c20a633786d4cf618" dependencies = [ - "quote 1.0.36", + "quote 1.0.37", "syn 1.0.109", ] @@ -6938,9 +7140,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "ureq" -version = "2.9.7" +version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d11a831e3c0b56e438a28308e7c810799e3c118417f342d30ecec080105395cd" +checksum = 
"02d1a66277ed75f640d608235660df48c8e3c19f3b4edb6a263315626cc3c01d" dependencies = [ "base64 0.22.1", "log", @@ -6951,9 +7153,9 @@ dependencies = [ [[package]] name = "url" -version = "2.5.2" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", @@ -6967,17 +7169,29 @@ version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.8.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a183cf7feeba97b4dd1c0d46788634f6221d87fa961b305bed08c851829efcc0" +checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" dependencies = [ "serde", ] @@ -7002,9 +7216,9 @@ checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "version_check" -version = "0.9.4" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +checksum = 
"0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "vise" @@ -7026,7 +7240,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "671d3b894d5d0849f0a597f56bf071f42d4f2a1cbcf2f78ca21f870ab7c0cc2b" dependencies = [ - "hyper 0.14.29", + "hyper 0.14.32", "once_cell", "tokio", "tracing", @@ -7039,9 +7253,9 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a511871dc5de990a3b2a0e715facfbc5da848c0c0395597a1415029fb7c250a" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -7086,75 +7300,76 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.92" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396" dependencies = [ "cfg-if", + "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.92" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79" dependencies = [ "bumpalo", "log", - "once_cell", - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.42" +version = "0.4.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" +checksum = "38176d9b44ea84e9184eff0bc34cc167ed044f816accfe5922e54d84cf48eca2" dependencies = [ "cfg-if", "js-sys", + 
"once_cell", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.92" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe" dependencies = [ - "quote 1.0.36", + "quote 1.0.37", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.92" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.92" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" +checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6" [[package]] name = "wasm-streams" -version = "0.4.0" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" dependencies = [ "futures-util", "js-sys", @@ -7165,9 +7380,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.69" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +checksum = "04dd7223427d52553d3702c004d3b2fe07c148165faa56313cb00211e31c12bc" dependencies = [ "js-sys", "wasm-bindgen", @@ -7185,9 +7400,9 @@ dependencies = [ [[package]] name = 
"webpki-roots" -version = "0.26.2" +version = "0.26.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c452ad30530b54a4d8e71952716a212b08efd0f3562baa66c29a618b07da7c3" +checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" dependencies = [ "rustls-pki-types", ] @@ -7206,11 +7421,11 @@ dependencies = [ [[package]] name = "whoami" -version = "1.5.1" +version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44ab49fad634e88f55bf8f9bb3abd2f27d7204172a112c7c9987e01c1c94ea9" +checksum = "372d5b87f58ec45c384ba03563b03544dc5fadc3983e434b286913f5b4a9bb6d" dependencies = [ - "redox_syscall 0.4.1", + "redox_syscall", "wasite", ] @@ -7232,11 +7447,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -7251,7 +7466,37 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 0.52.5", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-registry" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" +dependencies = [ + "windows-result", + "windows-strings", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-result" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +dependencies = [ + "windows-targets 
0.52.6", +] + +[[package]] +name = "windows-strings" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" +dependencies = [ + "windows-result", + "windows-targets 0.52.6", ] [[package]] @@ -7269,7 +7514,16 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.5", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", ] [[package]] @@ -7289,18 +7543,18 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.5", - "windows_aarch64_msvc 0.52.5", - "windows_i686_gnu 0.52.5", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", "windows_i686_gnullvm", - "windows_i686_msvc 0.52.5", - "windows_x86_64_gnu 0.52.5", - "windows_x86_64_gnullvm 0.52.5", - "windows_x86_64_msvc 0.52.5", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", ] [[package]] @@ -7311,9 +7565,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" 
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" @@ -7323,9 +7577,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" @@ -7335,15 +7589,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" @@ -7353,9 +7607,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" @@ -7365,9 +7619,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" @@ -7377,9 +7631,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" @@ -7389,9 +7643,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" @@ -7403,25 +7657,36 @@ dependencies = [ ] [[package]] -name = "winreg" -version = "0.50.0" +name = "winnow" +version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" dependencies = [ - "cfg-if", - "windows-sys 0.48.0", + "memchr", ] [[package]] name = "winreg" -version = "0.52.0" +version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ "cfg-if", "windows-sys 0.48.0", ] +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + [[package]] name = "wyz" version = "0.5.1" @@ -7433,28 +7698,74 @@ dependencies = [ [[package]] name = "yansi" -version = "0.5.1" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", + "synstructure", +] [[package]] name = "zerocopy" -version = "0.7.34" +version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae87e3fcd617500e5d106f0380cf7b77f3c6092aae37191433159dda23cfb087" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ + "byteorder", "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.34" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", +] + +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", + "synstructure", ] [[package]] @@ -7472,9 +7783,31 @@ version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ - "proc-macro2 1.0.85", - "quote 1.0.36", - "syn 2.0.66", + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2 1.0.92", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -7608,25 +7941,24 @@ dependencies = [ "lazy_static", "log", "nom", - "num-bigint 0.4.5", + "num-bigint 0.4.6", "num-traits", "sha3 0.10.8", "smallvec", "structopt", - "thiserror", + "thiserror 1.0.69", "zkevm_opcode_defs 0.150.7", ] [[package]] name = "zkevm_circuits" -version = "0.140.2" +version = "0.140.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8beed4cc1ab1f9d99a694506d18705e10059534b30742832be49637c4775e1f8" +checksum = "e3c365c801e0c6eda83fbd153df45575172beb406bfb663d386f9154b4325eda" dependencies = [ - 
"arrayvec 0.7.4", + "arrayvec 0.7.6", "bincode", "boojum", - "cs_derive", "derivative", "hex", "itertools 0.10.5", @@ -7637,18 +7969,18 @@ dependencies = [ "serde_json", "smallvec", "zkevm_opcode_defs 0.132.0", + "zksync_cs_derive", ] [[package]] name = "zkevm_circuits" -version = "0.141.1" +version = "0.141.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20f1a64d256cc5f5c58d19cf976cb45973df54e4e3010ca4a3e6fafe9f06075e" +checksum = "2ccd0352e122a4e6f0046d2163b7e692e627b23fc3264faa77331a21b65833fd" dependencies = [ - "arrayvec 0.7.4", + "arrayvec 0.7.6", "bincode", "boojum", - "cs_derive", "derivative", "hex", "itertools 0.10.5", @@ -7659,6 +7991,7 @@ dependencies = [ "serde_json", "smallvec", "zkevm_opcode_defs 0.141.0", + "zksync_cs_derive", ] [[package]] @@ -7667,7 +8000,7 @@ version = "0.150.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d06fb35b00699d25175a2ad447f86a9088af8b0bc698bb57086fb04c13e52eab" dependencies = [ - "arrayvec 0.7.4", + "arrayvec 0.7.6", "boojum", "derivative", "hex", @@ -7717,7 +8050,7 @@ dependencies = [ "bitflags 2.6.0", "blake2 0.10.6", "ethereum-types", - "k256 0.13.3", + "k256 0.13.4", "lazy_static", "sha2 0.10.8", "sha3 0.10.8", @@ -7732,7 +8065,7 @@ dependencies = [ "bitflags 2.6.0", "blake2 0.10.6", "ethereum-types", - "k256 0.13.3", + "k256 0.13.4", "lazy_static", "p256", "serde", @@ -7777,7 +8110,7 @@ dependencies = [ "crossbeam", "derivative", "era_cudart_sys", - "futures 0.3.30", + "futures 0.3.31", "futures-locks", "num_cpus", ] @@ -7788,7 +8121,7 @@ version = "0.151.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5714848e6f8361820346483246dd68b4e7fb05ec41dd6610a8b53fb5c3ca7f3a" dependencies = [ - "bit-vec", + "bit-vec 0.6.3", "cfg-if", "crossbeam", "franklin-crypto", @@ -7819,14 +8152,14 @@ dependencies = [ "const-decoder 0.4.0", "ethabi", "hex", - "num_enum 0.7.2", + "num_enum 0.7.3", "secrecy 0.8.0", "serde", "serde_json", 
"serde_with", "sha2 0.10.8", "strum", - "thiserror", + "thiserror 1.0.69", "tiny-keccak 2.0.2", "url", ] @@ -7837,13 +8170,13 @@ version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ffa03efe9bdb137a4b36b97d1a74237e18c9ae42b755163d903a9d48c1a5d80" dependencies = [ - "arrayvec 0.7.4", - "bit-vec", + "arrayvec 0.7.6", + "bit-vec 0.6.3", "blake2s_simd", "byteorder", "cfg-if", "crossbeam", - "futures 0.3.30", + "futures 0.3.31", "hex", "lazy_static", "num_cpus", @@ -7861,7 +8194,7 @@ dependencies = [ "anyhow", "async-trait", "bincode", - "clap 4.5.4", + "clap 4.5.23", "shivini", "tokio", "tokio-util", @@ -7906,16 +8239,16 @@ dependencies = [ [[package]] name = "zksync_concurrency" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "035269d811b3770debca372141ab64cad067dce8e58cb39a48cb7617d30c626b" +checksum = "e8312ab73d3caa55775bd531795b507fa8f76bd9dabfaeb0954fe43e8fc1323b" dependencies = [ "anyhow", "once_cell", "pin-project", "rand 0.8.5", "sha3 0.10.8", - "thiserror", + "thiserror 1.0.69", "time", "tokio", "tracing", @@ -7941,39 +8274,39 @@ dependencies = [ [[package]] name = "zksync_consensus_crypto" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49e38d1b5ed28c66e785caff53ea4863375555d818aafa03290397192dd3e665" +checksum = "86b539960de98df3c3bd27d2d9b97de862027686bbb3bdfc5aaad5b74bb929a1" dependencies = [ "anyhow", "blst", "ed25519-dalek", "elliptic-curve 0.13.8", "hex", - "k256 0.13.3", - "num-bigint 0.4.5", + "k256 0.13.4", + "num-bigint 0.4.6", "num-traits", "rand 0.8.5", "sha3 0.10.8", - "thiserror", + "thiserror 1.0.69", "tracing", "zeroize", ] [[package]] name = "zksync_consensus_roles" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e49fbd4e69b276058f3dfc06cf6ada0e8caa6ed826e81289e4d596da95a0f17a" +checksum = 
"c49949546895a10431b9daec6ec4208ef0917ace006446d304b51f5b234ba462" dependencies = [ "anyhow", - "bit-vec", + "bit-vec 0.6.3", "hex", - "num-bigint 0.4.5", + "num-bigint 0.4.6", "prost 0.12.6", "rand 0.8.5", "serde", - "thiserror", + "thiserror 1.0.69", "tracing", "zksync_concurrency", "zksync_consensus_crypto", @@ -7984,15 +8317,15 @@ dependencies = [ [[package]] name = "zksync_consensus_storage" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2b2aab4ed18b13cd584f4edcc2546c8da82f89ac62e525063e12935ff28c9be" +checksum = "feb0d6a54e7d8d2adeee4ba38662161e9309180ad497299092e5641db9fb1c1e" dependencies = [ "anyhow", "async-trait", "prost 0.12.6", "rand 0.8.5", - "thiserror", + "thiserror 1.0.69", "tracing", "vise", "zksync_concurrency", @@ -8004,13 +8337,13 @@ dependencies = [ [[package]] name = "zksync_consensus_utils" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10bac8f471b182d4fa3d40cf158aac3624fe636a1ff0b4cf3fe26a0e20c68a42" +checksum = "723e2a4b056cc5af192a83163c89a6951ee75c098cc5c4a4cdc435f4232d88bd" dependencies = [ "anyhow", "rand 0.8.5", - "thiserror", + "thiserror 1.0.69", "zksync_concurrency", ] @@ -8053,7 +8386,7 @@ dependencies = [ "serde", "serde_json", "sha2 0.10.8", - "thiserror", + "thiserror 1.0.69", "zksync_basic_types", ] @@ -8064,8 +8397,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5939e2df4288c263c706ff18ac718e984149223ad4289d6d957d767dcfc04c81" dependencies = [ "proc-macro-error", - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "syn 1.0.109", ] @@ -8085,7 +8418,7 @@ dependencies = [ "serde_json", "sqlx", "strum", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", @@ -8113,7 +8446,7 @@ dependencies = [ "serde", "serde_json", "sqlx", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", @@ -8138,7 +8471,7 @@ 
dependencies = [ "async-trait", "jsonrpsee", "rlp", - "thiserror", + "thiserror 1.0.69", "tracing", "vise", "zksync_config", @@ -8154,7 +8487,7 @@ version = "0.1.0" dependencies = [ "async-trait", "rlp", - "thiserror", + "thiserror 1.0.69", "zksync_basic_types", "zksync_crypto_primitives", ] @@ -8178,11 +8511,11 @@ version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f91e58e75d65877f09f83bc3dca8f054847ae7ec4f3e64bfa610a557edd8e8e" dependencies = [ - "num-bigint 0.4.5", + "num-bigint 0.4.6", "num-integer", "num-traits", - "proc-macro2 1.0.85", - "quote 1.0.36", + "proc-macro2 1.0.92", + "quote 1.0.37", "serde", "syn 1.0.109", ] @@ -8243,7 +8576,7 @@ dependencies = [ "hex", "itertools 0.10.5", "once_cell", - "thiserror", + "thiserror 1.0.69", "tracing", "vise", "zk_evm 0.131.0-rc.2", @@ -8269,10 +8602,10 @@ dependencies = [ "flate2", "google-cloud-auth", "google-cloud-storage", - "http 1.1.0", + "http 1.2.0", "prost 0.12.6", "rand 0.8.5", - "reqwest 0.12.5", + "reqwest 0.12.9", "serde_json", "tokio", "tracing", @@ -8303,10 +8636,10 @@ dependencies = [ "async-trait", "bincode", "circuit_sequencer_api 0.150.7", - "clap 4.5.4", + "clap 4.5.23", "ctrlc", - "futures 0.3.30", - "reqwest 0.12.5", + "futures 0.3.31", + "reqwest 0.12.9", "serde", "serde_json", "structopt", @@ -8331,12 +8664,12 @@ dependencies = [ [[package]] name = "zksync_protobuf" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abd55c64f54cb10967a435422f66ff5880ae14a232b245517c7ce38da32e0cab" +checksum = "e8986ad796f8e00d8999fee72effba1a21bce40f5f877d681ac9cd89a94834d8" dependencies = [ "anyhow", - "bit-vec", + "bit-vec 0.6.3", "once_cell", "prost 0.12.6", "prost-reflect", @@ -8352,19 +8685,19 @@ dependencies = [ [[package]] name = "zksync_protobuf_build" -version = "0.5.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4121952bcaf711005dd554612fc6e2de9b30cb58088508df87f1d38046ce8ac8" +checksum = "8d870b31995e3acb8e47afeb68ebeeffcf6121e70020e65b3d5d31692115d236" dependencies = [ "anyhow", "heck 0.5.0", "prettyplease", - "proc-macro2 1.0.85", + "proc-macro2 1.0.92", "prost-build", "prost-reflect", "protox", - "quote 1.0.36", - "syn 2.0.66", + "quote 1.0.37", + "syn 2.0.90", ] [[package]] @@ -8380,6 +8713,7 @@ dependencies = [ "serde_yaml", "tracing", "zksync_basic_types", + "zksync_concurrency", "zksync_config", "zksync_protobuf", "zksync_protobuf_build", @@ -8396,13 +8730,13 @@ dependencies = [ "chrono", "ctrlc", "debug-map-sorted", - "futures 0.3.30", + "futures 0.3.31", "humantime-serde", "k8s-openapi", "kube", "once_cell", "regex", - "reqwest 0.12.5", + "reqwest 0.12.9", "ring", "rustls", "serde", @@ -8440,12 +8774,12 @@ dependencies = [ "anyhow", "async-trait", "circuit_definitions", - "clap 4.5.4", + "clap 4.5.23", "ctrlc", - "futures 0.3.30", + "futures 0.3.31", "local-ip-address", "regex", - "reqwest 0.12.5", + "reqwest 0.12.9", "serde", "shivini", "tokio", @@ -8473,11 +8807,11 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", - "clap 4.5.4", + "clap 4.5.23", "ctrlc", - "futures 0.3.30", + "futures 0.3.31", "log", - "reqwest 0.12.5", + "reqwest 0.12.9", "serde", "tokio", "tracing", @@ -8509,7 +8843,7 @@ version = "0.1.0" dependencies = [ "anyhow", "regex", - "reqwest 0.12.5", + "reqwest 0.12.9", "serde", "tracing", "vise", @@ -8542,7 +8876,7 @@ dependencies = [ "anyhow", "async-trait", "axum", - "clap 4.5.4", + "clap 4.5.23", "ctrlc", "serde", "tokio", @@ -8563,7 +8897,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", - "futures 0.3.30", + "futures 0.3.31", "strum", "tokio", "tokio-stream", @@ -8579,7 +8913,7 @@ dependencies = [ "anyhow", "bincode", "circuit_definitions", - "futures 0.3.30", + "futures 0.3.31", "hex", "md5", "once_cell", @@ -8644,7 +8978,7 @@ dependencies = [ "hex", "itertools 0.10.5", "num", - "num_enum 0.7.2", + "num_enum 
0.7.3", "once_cell", "prost 0.12.6", "rlp", @@ -8652,7 +8986,7 @@ dependencies = [ "serde_json", "serde_with", "strum", - "thiserror", + "thiserror 1.0.69", "tracing", "zksync_basic_types", "zksync_contracts", @@ -8668,9 +9002,9 @@ name = "zksync_utils" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.30", + "futures 0.3.31", "once_cell", - "reqwest 0.12.5", + "reqwest 0.12.9", "serde_json", "tokio", "tracing", @@ -8683,7 +9017,7 @@ version = "0.1.0" dependencies = [ "anyhow", "circuit_definitions", - "clap 4.5.4", + "clap 4.5.23", "indicatif", "proptest", "toml_edit 0.14.4", @@ -8711,7 +9045,7 @@ dependencies = [ "sentry", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "time", "tokio", "tracing", @@ -8751,7 +9085,7 @@ dependencies = [ "hex", "pretty_assertions", "serde", - "thiserror", + "thiserror 1.0.69", "tracing", "zksync_contracts", "zksync_system_constants", @@ -8764,14 +9098,14 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", - "futures 0.3.30", + "futures 0.3.31", "jsonrpsee", "pin-project-lite", "rlp", "rustls", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", "vise", @@ -8789,7 +9123,7 @@ dependencies = [ "circuit_definitions", "const-decoder 0.3.0", "ctrlc", - "futures 0.3.30", + "futures 0.3.31", "jemallocator", "once_cell", "rand 0.8.5", @@ -8824,7 +9158,7 @@ dependencies = [ "anyhow", "async-trait", "bincode", - "clap 4.5.4", + "clap 4.5.23", "ctrlc", "tokio", "tracing", diff --git a/prover/crates/bin/prover_autoscaler/src/global/queuer.rs b/prover/crates/bin/prover_autoscaler/src/global/queuer.rs index baeb5b70a4ef..bc781e793408 100644 --- a/prover/crates/bin/prover_autoscaler/src/global/queuer.rs +++ b/prover/crates/bin/prover_autoscaler/src/global/queuer.rs @@ -3,14 +3,8 @@ use std::{collections::HashMap, ops::Deref}; use anyhow::{Context, Ok}; use reqwest::Method; use zksync_prover_job_monitor::autoscaler_queue_reporter::{QueueReport, VersionedQueueReport}; -use 
zksync_utils::http_with_retries::send_request_with_retries; -use crate::{ - config::QueueReportFields, - metrics::{AUTOSCALER_METRICS, DEFAULT_ERROR_CODE}, -}; - -const MAX_RETRIES: usize = 5; +use crate::{config::QueueReportFields, http_client::HttpClient}; pub struct Queue(HashMap<(String, QueueReportFields), u64>); @@ -23,6 +17,7 @@ impl Deref for Queue { #[derive(Default)] pub struct Queuer { + http_client: HttpClient, pub prover_job_monitor_url: String, } @@ -40,8 +35,9 @@ fn target_to_queue(target: QueueReportFields, report: &QueueReport) -> u64 { } impl Queuer { - pub fn new(pjm_url: String) -> Self { + pub fn new(http_client: HttpClient, pjm_url: String) -> Self { Self { + http_client, prover_job_monitor_url: pjm_url, } } @@ -50,13 +46,13 @@ impl Queuer { /// list of jobs. pub async fn get_queue(&self, jobs: &[QueueReportFields]) -> anyhow::Result { let url = &self.prover_job_monitor_url; - let response = send_request_with_retries(url, MAX_RETRIES, Method::GET, None, None).await; - let response = response.map_err(|err| { - AUTOSCALER_METRICS.calls[&(url.clone(), DEFAULT_ERROR_CODE)].inc(); - anyhow::anyhow!("Failed fetching queue from URL: {url}: {err:?}") - })?; + let response = self + .http_client + .send_request_with_retries(url, Method::GET, None, None) + .await; + let response = response + .map_err(|err| anyhow::anyhow!("Failed fetching queue from URL: {url}: {err:?}"))?; - AUTOSCALER_METRICS.calls[&(url.clone(), response.status().as_u16())].inc(); let response = response .json::>() .await diff --git a/prover/crates/bin/prover_autoscaler/src/global/watcher.rs b/prover/crates/bin/prover_autoscaler/src/global/watcher.rs index 95b9e32cac5b..9a56471b72d5 100644 --- a/prover/crates/bin/prover_autoscaler/src/global/watcher.rs +++ b/prover/crates/bin/prover_autoscaler/src/global/watcher.rs @@ -8,17 +8,14 @@ use reqwest::{ }; use tokio::sync::Mutex; use url::Url; -use zksync_utils::http_with_retries::send_request_with_retries; use crate::{ 
agent::{ScaleRequest, ScaleResponse}, cluster_types::{Cluster, Clusters}, - metrics::{AUTOSCALER_METRICS, DEFAULT_ERROR_CODE}, + http_client::HttpClient, task_wiring::Task, }; -const MAX_RETRIES: usize = 5; - #[derive(Default)] pub struct WatchedData { pub clusters: Clusters, @@ -36,6 +33,7 @@ pub fn check_is_ready(v: &Vec) -> Result<()> { #[derive(Default, Clone)] pub struct Watcher { + http_client: HttpClient, /// List of base URLs of all agents. pub cluster_agents: Vec>, pub dry_run: bool, @@ -43,9 +41,10 @@ pub struct Watcher { } impl Watcher { - pub fn new(agent_urls: Vec, dry_run: bool) -> Self { + pub fn new(http_client: HttpClient, agent_urls: Vec, dry_run: bool) -> Self { let size = agent_urls.len(); Self { + http_client, cluster_agents: agent_urls .into_iter() .map(|u| { @@ -92,6 +91,7 @@ impl Watcher { .unwrap() .to_string(); tracing::debug!("Sending scale request to {}, data: {:?}.", url, sr); + let http_client = self.http_client.clone(); tokio::spawn(async move { let mut headers = HeaderMap::new(); headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/json")); @@ -99,19 +99,17 @@ impl Watcher { tracing::info!("Dry-run mode, not sending the request."); return Ok((id, Ok(ScaleResponse::default()))); } - let response = send_request_with_retries( - &url, - MAX_RETRIES, - Method::POST, - Some(headers), - Some(serde_json::to_vec(&sr)?), - ) - .await; + let response = http_client + .send_request_with_retries( + &url, + Method::POST, + Some(headers), + Some(serde_json::to_vec(&sr)?), + ) + .await; let response = response.map_err(|err| { - AUTOSCALER_METRICS.calls[&(url.clone(), DEFAULT_ERROR_CODE)].inc(); anyhow::anyhow!("Failed fetching cluster from url: {url}: {err:?}") })?; - AUTOSCALER_METRICS.calls[&(url, response.status().as_u16())].inc(); let response = response .json::() .await @@ -164,21 +162,20 @@ impl Task for Watcher { .enumerate() .map(|(i, a)| { tracing::debug!("Getting cluster data from agent {}.", a); + let http_client = 
self.http_client.clone(); tokio::spawn(async move { let url: String = a .clone() .join("/cluster") .context("Failed to join URL with /cluster")? .to_string(); - let response = - send_request_with_retries(&url, MAX_RETRIES, Method::GET, None, None).await; + let response = http_client + .send_request_with_retries(&url, Method::GET, None, None) + .await; let response = response.map_err(|err| { - // TODO: refactor send_request_with_retries to return status. - AUTOSCALER_METRICS.calls[&(url.clone(), DEFAULT_ERROR_CODE)].inc(); anyhow::anyhow!("Failed fetching cluster from url: {url}: {err:?}") })?; - AUTOSCALER_METRICS.calls[&(url, response.status().as_u16())].inc(); let response = response .json::() .await diff --git a/prover/crates/bin/prover_autoscaler/src/http_client.rs b/prover/crates/bin/prover_autoscaler/src/http_client.rs new file mode 100644 index 000000000000..6710ea53a26d --- /dev/null +++ b/prover/crates/bin/prover_autoscaler/src/http_client.rs @@ -0,0 +1,94 @@ +use reqwest::{header::HeaderMap, Client, Error, Method, Response, StatusCode}; +use tokio::time::{sleep, Duration}; + +use crate::metrics::AUTOSCALER_METRICS; + +#[derive(Clone)] +pub struct HttpClient { + client: Client, + max_retries: usize, +} + +impl Default for HttpClient { + fn default() -> Self { + Self { + client: Client::new(), + max_retries: 5, + } + } +} + +#[derive(Debug)] +pub enum HttpError { + ReqwestError(Error), + RetryExhausted(String), +} + +impl HttpClient { + /// Method to send HTTP request with fixed number of retires with exponential back-offs. 
+ pub async fn send_request_with_retries( + &self, + url: &str, + method: Method, + headers: Option, + body: Option>, + ) -> Result { + let mut retries = 0usize; + let mut delay = Duration::from_secs(1); + loop { + let result = self + .send_request(url, method.clone(), headers.clone(), body.clone()) + .await; + AUTOSCALER_METRICS.calls[&( + url.into(), + match result { + Ok(ref response) => response.status().as_u16(), + Err(ref err) => err + .status() + .unwrap_or(StatusCode::INTERNAL_SERVER_ERROR) + .as_u16(), + }, + )] + .inc(); + match result { + Ok(response) if response.status().is_success() => return Ok(response), + Ok(response) => { + tracing::error!("Received non OK http response {:?}", response.status()) + } + Err(err) => tracing::error!("Error while sending http request {:?}", err), + } + + if retries >= self.max_retries { + return Err(HttpError::RetryExhausted(format!( + "All {} http retires failed", + self.max_retries + ))); + } + retries += 1; + sleep(delay).await; + delay = delay.checked_mul(2).unwrap_or(Duration::MAX); + } + } + + async fn send_request( + &self, + url: &str, + method: Method, + headers: Option, + body: Option>, + ) -> Result { + let mut request = self.client.request(method, url); + + if let Some(headers) = headers { + request = request.headers(headers); + } + + if let Some(body) = body { + request = request.body(body); + } + + let request = request.build()?; + let response = self.client.execute(request).await?; + Ok(response) + } +} diff --git a/prover/crates/bin/prover_autoscaler/src/k8s/watcher.rs b/prover/crates/bin/prover_autoscaler/src/k8s/watcher.rs index b8476ab475ab..4730a0259e4c 100644 --- a/prover/crates/bin/prover_autoscaler/src/k8s/watcher.rs +++ b/prover/crates/bin/prover_autoscaler/src/k8s/watcher.rs @@ -13,9 +13,11 @@ use reqwest::{ Method, }; use tokio::sync::Mutex; -use zksync_utils::http_with_retries::send_request_with_retries; -use crate::cluster_types::{Cluster, Deployment, Namespace, Pod, ScaleEvent}; +use 
crate::{ + cluster_types::{Cluster, Deployment, Namespace, Pod, ScaleEvent}, + http_client::HttpClient, +}; #[derive(Clone)] pub struct Watcher { @@ -23,11 +25,13 @@ pub struct Watcher { pub cluster: Arc>, } -async fn get_cluster_name() -> anyhow::Result { +async fn get_cluster_name(http_client: HttpClient) -> anyhow::Result { let mut headers = HeaderMap::new(); headers.insert("Metadata-Flavor", HeaderValue::from_static("Google")); let url = "http://metadata.google.internal/computeMetadata/v1/instance/attributes/cluster-name"; - let response = send_request_with_retries(url, 5, Method::GET, Some(headers), None).await; + let response = http_client + .send_request_with_retries(url, Method::GET, Some(headers), None) + .await; response .map_err(|err| anyhow::anyhow!("Failed fetching response from url: {url}: {err:?}"))? .text() @@ -37,6 +41,7 @@ async fn get_cluster_name() -> anyhow::Result { impl Watcher { pub async fn new( + http_client: HttpClient, client: kube::Client, cluster_name: Option, namespaces: Vec, @@ -48,7 +53,7 @@ impl Watcher { let cluster_name = match cluster_name { Some(c) => c, - None => get_cluster_name() + None => get_cluster_name(http_client) .await .expect("Load cluster_name from GCP"), }; diff --git a/prover/crates/bin/prover_autoscaler/src/lib.rs b/prover/crates/bin/prover_autoscaler/src/lib.rs index 019fe2b7fb4d..1861f3af10da 100644 --- a/prover/crates/bin/prover_autoscaler/src/lib.rs +++ b/prover/crates/bin/prover_autoscaler/src/lib.rs @@ -2,6 +2,7 @@ pub mod agent; pub(crate) mod cluster_types; pub mod config; pub mod global; +pub mod http_client; pub mod k8s; pub(crate) mod metrics; pub mod task_wiring; diff --git a/prover/crates/bin/prover_autoscaler/src/main.rs b/prover/crates/bin/prover_autoscaler/src/main.rs index 98ffdb49d824..3baf3d13b2d6 100644 --- a/prover/crates/bin/prover_autoscaler/src/main.rs +++ b/prover/crates/bin/prover_autoscaler/src/main.rs @@ -10,6 +10,7 @@ use zksync_prover_autoscaler::{ agent, config::{config_from_yaml, 
ProverAutoscalerConfig}, global::{self}, + http_client::HttpClient, k8s::{Scaler, Watcher}, task_wiring::TaskRunner, }; @@ -74,6 +75,8 @@ async fn main() -> anyhow::Result<()> { let mut tasks = vec![]; + let http_client = HttpClient::default(); + match opt.job { AutoscalerType::Agent => { tracing::info!("Starting ProverAutoscaler Agent"); @@ -84,8 +87,13 @@ async fn main() -> anyhow::Result<()> { let _ = rustls::crypto::ring::default_provider().install_default(); let client = kube::Client::try_default().await?; - let watcher = - Watcher::new(client.clone(), opt.cluster_name, agent_config.namespaces).await; + let watcher = Watcher::new( + http_client, + client.clone(), + opt.cluster_name, + agent_config.namespaces, + ) + .await; let scaler = Scaler::new(client, agent_config.dry_run); tasks.push(tokio::spawn(watcher.clone().run())); tasks.push(tokio::spawn(agent::run_server( @@ -101,9 +109,15 @@ async fn main() -> anyhow::Result<()> { let interval = scaler_config.scaler_run_interval; let exporter_config = PrometheusExporterConfig::pull(scaler_config.prometheus_port); tasks.push(tokio::spawn(exporter_config.run(stop_receiver.clone()))); - let watcher = - global::watcher::Watcher::new(scaler_config.agents.clone(), scaler_config.dry_run); - let queuer = global::queuer::Queuer::new(scaler_config.prover_job_monitor_url.clone()); + let watcher = global::watcher::Watcher::new( + http_client.clone(), + scaler_config.agents.clone(), + scaler_config.dry_run, + ); + let queuer = global::queuer::Queuer::new( + http_client, + scaler_config.prover_job_monitor_url.clone(), + ); let scaler = global::scaler::Scaler::new(watcher.clone(), queuer, scaler_config); tasks.extend(get_tasks(watcher, scaler, interval, stop_receiver)?); } diff --git a/prover/crates/bin/prover_autoscaler/src/metrics.rs b/prover/crates/bin/prover_autoscaler/src/metrics.rs index 115ae3b74259..775f7ec22abd 100644 --- a/prover/crates/bin/prover_autoscaler/src/metrics.rs +++ 
b/prover/crates/bin/prover_autoscaler/src/metrics.rs @@ -2,8 +2,6 @@ use vise::{Counter, Gauge, LabeledFamily, Metrics}; use crate::config::Gpu; -pub const DEFAULT_ERROR_CODE: u16 = 500; - #[derive(Debug, Metrics)] #[metrics(prefix = "autoscaler")] pub(crate) struct AutoscalerMetrics { diff --git a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs index cd96edc21b09..ba2d9fdc6415 100644 --- a/prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/crates/lib/prover_dal/src/fri_witness_generator_dal.rs @@ -1237,7 +1237,7 @@ impl FriWitnessGeneratorDal<'_, '_> { updated_at = NOW() WHERE {job_id_column} = {job_id} - AND status != 'successful + AND status != 'successful' "#, ); diff --git a/yarn.lock b/yarn.lock index 15fb8bb7d967..5df8cb570e0f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -9131,6 +9131,11 @@ prettier@^3.0.3: resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.2.5.tgz#e52bc3090586e824964a8813b09aba6233b28368" integrity sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A== +prettier@^3.3.3: + version "3.3.3" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.3.3.tgz#30c54fe0be0d8d12e6ae61dbb10109ea00d53105" + integrity sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew== + pretty-format@^29.0.0, pretty-format@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-29.7.0.tgz#ca42c758310f365bfa71a0bda0a807160b776812" @@ -10191,7 +10196,7 @@ string-length@^4.0.1: char-regex "^1.0.2" strip-ansi "^6.0.0" -"string-width-cjs@npm:string-width@^4.2.0": +"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3: version "4.2.3" resolved 
"https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -10208,15 +10213,6 @@ string-width@^2.1.0, string-width@^2.1.1: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" -string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3: - version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - string-width@^5.0.1, string-width@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" @@ -10283,7 +10279,7 @@ string_decoder@~1.1.1: dependencies: safe-buffer "~5.1.0" -"strip-ansi-cjs@npm:strip-ansi@^6.0.1": +"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== @@ -10304,13 +10300,6 @@ strip-ansi@^5.1.0: dependencies: ansi-regex "^4.1.0" -strip-ansi@^6.0.0, strip-ansi@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - strip-ansi@^7.0.1: version "7.1.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" @@ -11161,16 +11150,7 @@ workerpool@6.2.1: 
resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.2.1.tgz#46fc150c17d826b86a008e5a4508656777e9c343" integrity sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw== -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" - integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - -wrap-ansi@^7.0.0: +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== diff --git a/zkstack_cli/Cargo.lock b/zkstack_cli/Cargo.lock index 2206a1052f59..900ac677fd61 100644 --- a/zkstack_cli/Cargo.lock +++ b/zkstack_cli/Cargo.lock @@ -64,9 +64,9 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.18" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" +checksum = "45862d1c77f2228b9e10bc609d5bc203d86ebc9b87ad8d5d5167a6c9abf739d9" [[package]] name = "android-tzdata" @@ -85,9 +85,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.15" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" dependencies = [ "anstyle", "anstyle-parse", @@ -100,49 +100,49 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.8" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" [[package]] name = "anstyle-parse" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.4" +version = "3.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" +checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" dependencies = [ "anstyle", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anyhow" -version = "1.0.89" +version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" +checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" [[package]] name = "append-only-vec" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bb8633986ce6db557d8a568b0b874d840db9946e589a3de4cf84fb02130745f" +checksum = "7992085ec035cfe96992dd31bfd495a2ebd31969bb95f624471cb6c0b349e571" [[package]] name = "arrayvec" @@ -178,7 +178,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 
2.0.89", ] [[package]] @@ -189,7 +189,7 @@ checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -226,7 +226,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -237,9 +237,9 @@ checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "axum" -version = "0.7.7" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "504e3947307ac8326a5437504c517c4b56716c9d98fac0028c2acc7ca47d70ae" +checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" dependencies = [ "async-trait", "axum-core", @@ -256,7 +256,7 @@ dependencies = [ "pin-project-lite", "rustversion", "serde", - "sync_wrapper 1.0.1", + "sync_wrapper 1.0.2", "tower 0.5.1", "tower-layer", "tower-service", @@ -277,7 +277,7 @@ dependencies = [ "mime", "pin-project-lite", "rustversion", - "sync_wrapper 1.0.1", + "sync_wrapper 1.0.2", "tower-layer", "tower-service", ] @@ -341,9 +341,9 @@ checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" [[package]] name = "bigdecimal" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d712318a27c7150326677b321a5fa91b55f6d9034ffd67f20319e147d40cee" +checksum = "8f850665a0385e070b64c38d2354e6c104c8479c59868d1e48a0c13ee2c7a1c1" dependencies = [ "autocfg", "libm", @@ -426,9 +426,9 @@ dependencies = [ [[package]] name = "borsh" -version = "1.5.1" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6362ed55def622cddc70a4746a68554d7b687713770de539e59a739b249f8ed" +checksum = "2506947f73ad44e344215ccd6403ac2ae18cd8e046e581a441bf8d199f257f03" dependencies = [ "cfg_aliases", ] @@ -463,9 +463,9 @@ checksum = 
"1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.7.2" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" +checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" dependencies = [ "serde", ] @@ -525,9 +525,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.1.28" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e80e3b6a3ab07840e1cae9b0666a63970dc28e8ed5ffbcdacbfc760c281bfc1" +checksum = "fd9de9f2205d5ef3fd67e685b0df337994ddd4495e2a28d185500d0e1edfea47" dependencies = [ "jobserver", "libc", @@ -573,9 +573,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.20" +version = "4.5.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97f376d85a664d5837dbae44bf546e6477a679ff6610010f17276f686d867e8" +checksum = "fb3b4b9e5a7c7514dfa52869339ee98b3156b0bfb4e8a77c4ff4babb64b1604f" dependencies = [ "clap_builder", "clap_derive", @@ -592,9 +592,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.20" +version = "4.5.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19bc80abd44e4bed93ca373a0704ccbd1b710dc5749406201bb018272808dc54" +checksum = "b17a95aa67cc7b5ebd32aa5370189aa0d79069ef1c64ce893bd30fb24bff20ec" dependencies = [ "anstream", "anstyle", @@ -605,9 +605,9 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.5.33" +version = "4.5.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9646e2e245bf62f45d39a0f3f36f1171ad1ea0d6967fd114bca72cb02a8fcdfb" +checksum = "d9647a559c112175f17cf724dc72d3645680a883c58481332779192b0d8e7a01" dependencies = [ "clap", ] @@ -621,14 +621,14 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] name = "clap_lex" -version = 
"0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" +checksum = "afb84c814227b90d6895e01398aee0d8033c00e7466aca416fb6a8e0eb19d8a7" [[package]] name = "cliclack" @@ -697,9 +697,9 @@ dependencies = [ [[package]] name = "colorchoice" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "common" @@ -785,7 +785,7 @@ dependencies = [ "encode_unicode", "lazy_static", "libc", - "unicode-width", + "unicode-width 0.1.14", "windows-sys 0.52.0", ] @@ -800,9 +800,9 @@ dependencies = [ [[package]] name = "const-hex" -version = "1.13.1" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0121754e84117e65f9d90648ee6aa4882a6e63110307ab73967a4c5e7e69e586" +checksum = "487981fa1af147182687064d0a2c336586d337a606595ced9ffb0c685c250c73" dependencies = [ "cfg-if", "cpufeatures", @@ -861,9 +861,9 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.14" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" dependencies = [ "libc", ] @@ -956,12 +956,12 @@ dependencies = [ [[package]] name = "ctor" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f" +checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501" dependencies = [ "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -997,7 +997,7 @@ 
checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -1048,7 +1048,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d" dependencies = [ "serde", - "uuid 1.10.0", + "uuid 1.11.0", ] [[package]] @@ -1071,17 +1071,6 @@ dependencies = [ "powerfmt", ] -[[package]] -name = "derive_more" -version = "0.99.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.79", -] - [[package]] name = "derive_more" version = "1.0.0" @@ -1099,7 +1088,7 @@ checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", "unicode-xid", ] @@ -1169,6 +1158,17 @@ dependencies = [ "winapi", ] +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.89", +] + [[package]] name = "dotenvy" version = "0.15.7" @@ -1286,9 +1286,9 @@ checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" [[package]] name = "encoding_rs" -version = "0.8.34" +version = "0.8.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" dependencies = [ "cfg-if", ] @@ -1320,7 +1320,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -1495,7 +1495,7 @@ dependencies = [ "reqwest 0.11.27", "serde", "serde_json", - "syn 2.0.79", + "syn 2.0.89", "toml", 
"walkdir", ] @@ -1513,7 +1513,7 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -1539,7 +1539,7 @@ dependencies = [ "serde", "serde_json", "strum", - "syn 2.0.79", + "syn 2.0.89", "tempfile", "thiserror", "tiny-keccak", @@ -1705,15 +1705,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "531e46835a22af56d1e3b66f04844bed63158bc094a628bec1d321d9b4c44bf2" dependencies = [ "bit-set", - "regex-automata 0.4.8", + "regex-automata 0.4.9", "regex-syntax 0.8.5", ] [[package]] name = "fastrand" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" +checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4" [[package]] name = "ff" @@ -1763,9 +1763,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.34" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0" +checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" dependencies = [ "crc32fast", "miniz_oxide", @@ -1773,9 +1773,9 @@ dependencies = [ [[package]] name = "flume" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55ac459de2512911e4b674ce33cf20befaba382d05b62b008afc1c8b57cbf181" +checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095" dependencies = [ "futures-core", "futures-sink", @@ -1905,7 +1905,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -2044,9 +2044,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.6" +version = "0.4.7" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" +checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" dependencies = [ "atomic-waker", "bytes", @@ -2079,9 +2079,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.15.0" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" [[package]] name = "hashers" @@ -2227,9 +2227,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "human-panic" -version = "2.0.1" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c5a08ed290eac04006e21e63d32e90086b6182c7cd0452d10f4264def1fec9a" +checksum = "80b84a66a325082740043a6c28bbea400c129eac0d3a27673a1de971e44bf1f7" dependencies = [ "anstream", "anstyle", @@ -2238,14 +2238,14 @@ dependencies = [ "serde", "serde_derive", "toml", - "uuid 1.10.0", + "uuid 1.11.0", ] [[package]] name = "hyper" -version = "0.14.30" +version = "0.14.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a152ddd61dfaec7273fe8419ab357f33aee0d914c5f4efbf0d96fa749eea5ec9" +checksum = "8c08302e8fa335b151b788c775ff56e7a03ae64ff85c548ee820fecb70356e85" dependencies = [ "bytes", "futures-channel", @@ -2267,14 +2267,14 @@ dependencies = [ [[package]] name = "hyper" -version = "1.4.1" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" dependencies = [ "bytes", "futures-channel", "futures-util", - "h2 0.4.6", + "h2 0.4.7", "http 1.1.0", "http-body 1.0.1", "httparse", @@ -2294,7 +2294,7 @@ checksum = 
"ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http 0.2.12", - "hyper 0.14.30", + "hyper 0.14.31", "rustls 0.21.12", "tokio", "tokio-rustls 0.24.1", @@ -2308,9 +2308,9 @@ checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" dependencies = [ "futures-util", "http 1.1.0", - "hyper 1.4.1", + "hyper 1.5.1", "hyper-util", - "rustls 0.23.14", + "rustls 0.23.19", "rustls-pki-types", "tokio", "tokio-rustls 0.26.0", @@ -2319,11 +2319,11 @@ dependencies = [ [[package]] name = "hyper-timeout" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3203a961e5c83b6f5498933e78b6b263e208c197b63e9c6c53cc82ffd3f63793" +checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" dependencies = [ - "hyper 1.4.1", + "hyper 1.5.1", "hyper-util", "pin-project-lite", "tokio", @@ -2337,7 +2337,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper 0.14.30", + "hyper 0.14.31", "native-tls", "tokio", "tokio-native-tls", @@ -2351,7 +2351,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper 1.4.1", + "hyper 1.5.1", "hyper-util", "native-tls", "tokio", @@ -2361,16 +2361,16 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" dependencies = [ "bytes", "futures-channel", "futures-util", "http 1.1.0", "http-body 1.0.1", - "hyper 1.4.1", + "hyper 1.5.1", "pin-project-lite", "socket2", "tokio", @@ -2401,6 +2401,124 @@ dependencies = [ "cc", ] +[[package]] +name = "icu_collections" 
+version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + 
"icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.89", +] + [[package]] name = "ident_case" version = "1.0.1" @@ -2409,12 +2527,23 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.5.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" dependencies = [ - "unicode-bidi", - "unicode-normalization", + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", ] [[package]] @@ -2446,13 +2575,13 @@ dependencies = [ [[package]] name = "impl-trait-for-tuples" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +checksum 
= "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.89", ] [[package]] @@ -2478,20 +2607,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" dependencies = [ "equivalent", - "hashbrown 0.15.0", + "hashbrown 0.15.2", ] [[package]] name = "indicatif" -version = "0.17.8" +version = "0.17.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3" +checksum = "cbf675b85ed934d3c67b5c5469701eec7db22689d0a2139d856e0925fa28b281" dependencies = [ "console", - "instant", "number_prefix", "portable-atomic", - "unicode-width", + "unicode-width 0.2.0", + "web-time", ] [[package]] @@ -2562,9 +2691,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "jobserver" @@ -2577,9 +2706,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.71" +version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cb94a0ffd3f3ee755c20f7d8752f45cac88605a4dcf808abcff72873296ec7b" +checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" dependencies = [ "wasm-bindgen", ] @@ -2648,7 +2777,7 @@ version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "507460a910eb7b32ee961886ff48539633b788a36b65692b95f225b844c82553" dependencies = [ - "regex-automata 0.4.8", + "regex-automata 0.4.9", ] [[package]] @@ -2671,7 +2800,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -2685,15 +2814,15 @@ dependencies = [ 
[[package]] name = "libc" -version = "0.2.159" +version = "0.2.166" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5" +checksum = "c2ccc108bbc0b1331bd061864e7cd823c0cab660bbe6970e66e2c0614decde36" [[package]] name = "libm" -version = "0.2.8" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" +checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" [[package]] name = "libredox" @@ -2722,6 +2851,12 @@ version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +[[package]] +name = "litemap" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" + [[package]] name = "lock_api" version = "0.4.12" @@ -2758,7 +2893,7 @@ dependencies = [ "proc-macro2", "quote", "regex-syntax 0.6.29", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -2816,7 +2951,7 @@ dependencies = [ "miette-derive", "once_cell", "thiserror", - "unicode-width", + "unicode-width 0.1.14", ] [[package]] @@ -2827,7 +2962,7 @@ checksum = "49e7bc1560b95a3c4a25d03de42fe76ca718ab92d1a22a55b9b4cf67b3ae635c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -3055,7 +3190,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -3106,9 +3241,9 @@ dependencies = [ [[package]] name = "openssl" -version = "0.10.66" +version = "0.10.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" +checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5" dependencies = [ "bitflags 2.6.0", 
"cfg-if", @@ -3127,7 +3262,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -3138,9 +3273,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.103" +version = "0.9.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" +checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741" dependencies = [ "cc", "libc", @@ -3184,7 +3319,7 @@ dependencies = [ "bytes", "http 1.1.0", "opentelemetry", - "reqwest 0.12.8", + "reqwest 0.12.9", ] [[package]] @@ -3201,7 +3336,7 @@ dependencies = [ "opentelemetry-proto", "opentelemetry_sdk", "prost 0.13.3", - "reqwest 0.12.8", + "reqwest 0.12.9", "thiserror", "tokio", "tonic", @@ -3280,28 +3415,29 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "parity-scale-codec" -version = "3.6.12" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" +checksum = "8be4817d39f3272f69c59fe05d0535ae6456c2dc2fa1ba02910296c7e0a5c590" dependencies = [ "arrayvec", "bitvec", "byte-slice-cast", "impl-trait-for-tuples", "parity-scale-codec-derive", + "rustversion", "serde", ] [[package]] name = "parity-scale-codec-derive" -version = "3.6.12" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" +checksum = "8781a75c6205af67215f382092b6e0a4ff3734798523e69073d4bcd294ec767b" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.89", ] [[package]] @@ -3452,7 +3588,7 @@ dependencies = [ "phf_shared 0.11.2", "proc-macro2", "quote", - "syn 2.0.79", + "syn 
2.0.89", "unicase", ] @@ -3477,29 +3613,29 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf123a161dde1e524adf36f90bc5d8d3462824a9c43553ad07a8183161189ec" +checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4502d8515ca9f32f1fb543d987f63d95a14934883db45bdb48060b6b69257f8" +checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] name = "pin-project-lite" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" [[package]] name = "pin-utils" @@ -3536,9 +3672,9 @@ checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" [[package]] name = "portable-atomic" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc9c68a3f6da06753e9335d63e27f6b9754dd1920d941135b7ea8224f141adb2" +checksum = "280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6" [[package]] name = "powerfmt" @@ -3573,12 +3709,12 @@ dependencies = [ [[package]] name = "prettyplease" -version = "0.2.22" +version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479cf940fbbb3426c32c5d5176f62ad57549a0bb84773423ba8be9d089f5faba" +checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" dependencies = [ "proc-macro2", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -3606,9 +3742,9 @@ dependencies = [ 
[[package]] name = "proc-macro2" -version = "1.0.87" +version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3e4daa0dcf6feba26f985457cdf104d4b4256fc5a09547140f3631bb076b19a" +checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" dependencies = [ "unicode-ident", ] @@ -3633,7 +3769,7 @@ checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -3689,7 +3825,7 @@ dependencies = [ "prost 0.12.6", "prost-types", "regex", - "syn 2.0.79", + "syn 2.0.89", "tempfile", ] @@ -3703,7 +3839,7 @@ dependencies = [ "itertools 0.12.1", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -3716,7 +3852,7 @@ dependencies = [ "itertools 0.13.0", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -3876,13 +4012,13 @@ dependencies = [ [[package]] name = "regex" -version = "1.11.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38200e5ee88914975b69f657f0801b6f6dccafd44fd9326302a4aaeecfacb1d8" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.8", + "regex-automata 0.4.9", "regex-syntax 0.8.5", ] @@ -3897,9 +4033,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", @@ -3932,7 +4068,7 @@ dependencies = [ "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", - "hyper 0.14.30", + "hyper 0.14.31", "hyper-rustls 0.24.2", "hyper-tls 0.5.0", "ipnet", @@ -3964,9 +4100,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.8" 
+version = "0.12.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f713147fbe92361e52392c73b8c9e48c04c6625bce969ef54dc901e58e042a7b" +checksum = "a77c62af46e79de0a562e1a9849205ffcb7fc1238876e9bd743357570e04046f" dependencies = [ "base64 0.22.1", "bytes", @@ -3974,11 +4110,11 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2 0.4.6", + "h2 0.4.7", "http 1.1.0", "http-body 1.0.1", "http-body-util", - "hyper 1.4.1", + "hyper 1.5.1", "hyper-rustls 0.27.3", "hyper-tls 0.6.0", "hyper-util", @@ -3994,7 +4130,7 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "sync_wrapper 1.0.1", + "sync_wrapper 1.0.2", "system-configuration 0.6.1", "tokio", "tokio-native-tls", @@ -4079,9 +4215,9 @@ dependencies = [ [[package]] name = "rsa" -version = "0.9.6" +version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" +checksum = "47c75d7c5c6b673e58bf54d8544a9f432e3a925b0e80f7cd3602ab5c50c55519" dependencies = [ "const-oid", "digest", @@ -4126,9 +4262,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.37" +version = "0.38.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" +checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" dependencies = [ "bitflags 2.6.0", "errno", @@ -4151,9 +4287,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.14" +version = "0.23.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "415d9944693cb90382053259f89fbb077ea730ad7273047ec63b19bc9b160ba8" +checksum = "934b404430bb06b3fae2cba809eb45a1ab1aecd64491213d7c3301b88393f8d1" dependencies = [ "once_cell", "rustls-pki-types", @@ -4182,9 +4318,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.9.0" +version = "1.10.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e696e35370c65c9c541198af4543ccd580cf17fc25d8e05c5a242b202488c55" +checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" [[package]] name = "rustls-webpki" @@ -4209,9 +4345,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" +checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" [[package]] name = "ryu" @@ -4239,33 +4375,33 @@ dependencies = [ [[package]] name = "scale-info" -version = "2.11.3" +version = "2.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eca070c12893629e2cc820a9761bedf6ce1dcddc9852984d1dc734b8bd9bd024" +checksum = "346a3b32eba2640d17a9cb5927056b08f3de90f65b72fe09402c2ad07d684d0b" dependencies = [ "cfg-if", - "derive_more 0.99.18", + "derive_more", "parity-scale-codec", "scale-info-derive", ] [[package]] name = "scale-info-derive" -version = "2.11.3" +version = "2.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d35494501194174bda522a32605929eefc9ecf7e0a326c26db1fdd85881eb62" +checksum = "c6630024bf739e2179b91fb424b28898baf819414262c5d376677dbff1fe7ebf" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.89", ] [[package]] name = "schannel" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01227be5826fa0690321a2ba6c5cd57a19cf3f6a09e76973b58e61de6ab9d1c1" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" dependencies = [ "windows-sys 0.59.0", ] @@ -4354,9 +4490,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.12.0" +version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ea4a292869320c0272d7bc55a5a6aafaff59b4f63404a003887b679a2e05b4b6" +checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2" dependencies = [ "core-foundation-sys", "libc", @@ -4488,14 +4624,14 @@ dependencies = [ "thiserror", "time", "url", - "uuid 1.10.0", + "uuid 1.11.0", ] [[package]] name = "serde" -version = "1.0.210" +version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" +checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f" dependencies = [ "serde_derive", ] @@ -4512,20 +4648,20 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.210" +version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" +checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] name = "serde_json" -version = "1.0.128" +version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" +checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" dependencies = [ "itoa", "memchr", @@ -4716,9 +4852,9 @@ checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" [[package]] name = "smol_str" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66eaf762c5af19db3108300515c8aa7a50efc90ff745f4c62288052ebf9fdd25" +checksum = "9676b89cd56310a87b93dec47b11af744f34d5fc9f367b829474eec0a891350d" dependencies = [ "borsh", "serde", @@ -4851,7 +4987,7 @@ dependencies = [ "quote", "sqlx-core", "sqlx-macros-core", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -4874,7 +5010,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", 
"sqlx-sqlite", - "syn 2.0.79", + "syn 2.0.89", "tempfile", "tokio", "url", @@ -4985,9 +5121,9 @@ dependencies = [ [[package]] name = "sqruff-lib" -version = "0.19.0" +version = "0.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "676775189e83a98fc603d59fc6d760a66895d511502a538081dac993fde1a09a" +checksum = "dd3d7d11b58d658bf0e33d6729a92a81790ffb757440828a7b01869a40314b5f" dependencies = [ "ahash", "anstyle", @@ -5020,9 +5156,9 @@ dependencies = [ [[package]] name = "sqruff-lib-core" -version = "0.19.0" +version = "0.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48ec5ba65376ae9ba3e3dda153668dcb6452a7212ee7b4c9d48e053eb4f0f3fa" +checksum = "03b19ebfd19c2bb1fdf8ca626f451645d89b74fa696f3cc1286989e58436f791" dependencies = [ "ahash", "enum_dispatch", @@ -5041,9 +5177,9 @@ dependencies = [ [[package]] name = "sqruff-lib-dialects" -version = "0.19.0" +version = "0.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00fa1cd168dad593f8f6996d805acc1fd52c6d0ad0f6f5847a9cc22a6198cfc2" +checksum = "60dc004661c65d9163edaa876e6bb2fbe7a0bcf7f00cb0e13428cd0b4ab4b27f" dependencies = [ "ahash", "itertools 0.13.0", @@ -5119,7 +5255,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -5161,9 +5297,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.79" +version = "2.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590" +checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e" dependencies = [ "proc-macro2", "quote", @@ -5178,13 +5314,24 @@ checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" [[package]] name = "sync_wrapper" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" dependencies = [ "futures-core", ] +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.89", +] + [[package]] name = "system-configuration" version = "0.5.1" @@ -5235,9 +5382,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.13.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b" +checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" dependencies = [ "cfg-if", "fastrand", @@ -5275,27 +5422,27 @@ checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" dependencies = [ "smawk", "unicode-linebreak", - "unicode-width", + "unicode-width 0.1.14", ] [[package]] name = "thiserror" -version = "1.0.64" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.64" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -5377,6 +5524,16 @@ dependencies = [ "crunchy", ] +[[package]] +name = "tinystr" +version = "0.7.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tinyvec" version = "1.8.0" @@ -5394,9 +5551,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.40.0" +version = "1.41.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" +checksum = "22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33" dependencies = [ "backtrace", "bytes", @@ -5418,7 +5575,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -5447,7 +5604,7 @@ version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" dependencies = [ - "rustls 0.23.14", + "rustls 0.23.19", "rustls-pki-types", "tokio", ] @@ -5536,11 +5693,11 @@ dependencies = [ "axum", "base64 0.22.1", "bytes", - "h2 0.4.6", + "h2 0.4.7", "http 1.1.0", "http-body 1.0.1", "http-body-util", - "hyper 1.4.1", + "hyper 1.5.1", "hyper-timeout", "hyper-util", "percent-encoding", @@ -5603,9 +5760,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "log", "pin-project-lite", @@ -5615,20 +5772,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", @@ -5778,12 +5935,9 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unicase" -version = "2.7.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" -dependencies = [ - "version_check", -] +checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" [[package]] name = "unicode-bidi" @@ -5793,9 +5947,9 @@ checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" [[package]] name = "unicode-ident" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" [[package]] name = "unicode-linebreak" @@ -5824,6 +5978,12 @@ version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" +[[package]] +name = "unicode-width" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" + [[package]] name = "unicode-xid" version = "0.2.6" @@ -5869,9 +6029,9 @@ dependencies = [ [[package]] name = "url" -version = "2.5.2" +version = 
"2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", @@ -5885,6 +6045,18 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" version = "0.2.2" @@ -5903,9 +6075,9 @@ dependencies = [ [[package]] name = "uuid" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" +checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" dependencies = [ "getrandom", "serde", @@ -5949,7 +6121,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "671d3b894d5d0849f0a597f56bf071f42d4f2a1cbcf2f78ca21f870ab7c0cc2b" dependencies = [ - "hyper 0.14.30", + "hyper 0.14.31", "once_cell", "tokio", "tracing", @@ -5964,7 +6136,7 @@ checksum = "6a511871dc5de990a3b2a0e715facfbc5da848c0c0395597a1415029fb7c250a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -6000,9 +6172,9 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.94" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ef073ced962d62984fb38a36e5fdc1a2b23c9e0e1fa0689bb97afa4202ef6887" +checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" dependencies = [ "cfg-if", "once_cell", @@ -6011,24 +6183,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.94" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4bfab14ef75323f4eb75fa52ee0a3fb59611977fd3240da19b2cf36ff85030e" +checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.44" +version = "0.4.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65471f79c1022ffa5291d33520cbbb53b7687b01c2f8e83b57d102eed7ed479d" +checksum = "cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b" dependencies = [ "cfg-if", "js-sys", @@ -6038,9 +6210,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.94" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7bec9830f60924d9ceb3ef99d55c155be8afa76954edffbb5936ff4509474e7" +checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -6048,28 +6220,28 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.94" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c74f6e152a76a2ad448e223b0fc0b6b5747649c3d769cc6bf45737bf97d0ed6" +checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.94" +version = "0.2.95" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a42f6c679374623f295a8623adfe63d9284091245c3504bde47c17a3ce2777d9" +checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" [[package]] name = "web-sys" -version = "0.3.71" +version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44188d185b5bdcae1052d08bcbcf9091a5524038d4572cc4f4f2bb9d5554ddd9" +checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112" dependencies = [ "js-sys", "wasm-bindgen", @@ -6338,6 +6510,18 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + [[package]] name = "ws_stream_wasm" version = "0.7.4" @@ -6368,18 +6552,18 @@ dependencies = [ [[package]] name = "xshell" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db0ab86eae739efd1b054a8d3d16041914030ac4e01cd1dca0cf252fd8b6437" +checksum = "9e7290c623014758632efe00737145b6867b66292c42167f2ec381eb566a373d" dependencies = [ "xshell-macros", ] [[package]] name = "xshell-macros" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d422e8e38ec76e2f06ee439ccc765e9c6a9638b9e7c9f2e8255e4d41e8bd852" +checksum = "32ac00cd3f8ec9c1d33fb3e7958a82df6989c42d747bd326c822b1d625283547" [[package]] name = "yansi" @@ -6393,6 +6577,30 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" +[[package]] +name = "yoke" +version = "0.7.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.89", + "synstructure", +] + [[package]] name = "zerocopy" version = "0.7.35" @@ -6411,7 +6619,28 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", +] + +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.89", + "synstructure", ] [[package]] @@ -6431,7 +6660,29 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.89", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.89", ] [[package]] @@ 
-6473,7 +6724,7 @@ dependencies = [ "lazy_static", "prost 0.12.6", "rand", - "reqwest 0.12.8", + "reqwest 0.12.9", "secrecy", "serde", "serde_json", @@ -6520,9 +6771,9 @@ dependencies = [ [[package]] name = "zksync_concurrency" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "035269d811b3770debca372141ab64cad067dce8e58cb39a48cb7617d30c626b" +checksum = "4c24c9a056499823227503dd9e5fb3037d93bcc3ae9b06c1ac0a47334c6283af" dependencies = [ "anyhow", "once_cell", @@ -6553,9 +6804,9 @@ dependencies = [ [[package]] name = "zksync_consensus_crypto" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49e38d1b5ed28c66e785caff53ea4863375555d818aafa03290397192dd3e665" +checksum = "5da303b01f24283e93f80f361bf62c3df4a761d061c8b38b4faebeebf26362fc" dependencies = [ "anyhow", "blst", @@ -6574,9 +6825,9 @@ dependencies = [ [[package]] name = "zksync_consensus_roles" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e49fbd4e69b276058f3dfc06cf6ada0e8caa6ed826e81289e4d596da95a0f17a" +checksum = "50f07db2a8ec2d2cda5cb4c5ac408101e81c8fa5d95c9f3302829dafae78d11c" dependencies = [ "anyhow", "bit-vec", @@ -6596,9 +6847,9 @@ dependencies = [ [[package]] name = "zksync_consensus_utils" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10bac8f471b182d4fa3d40cf158aac3624fe636a1ff0b4cf3fe26a0e20c68a42" +checksum = "3222410c67617a86edb192e0c4bb48afc254a17052200a0a839c90e8b0378842" dependencies = [ "anyhow", "rand", @@ -6646,9 +6897,9 @@ dependencies = [ [[package]] name = "zksync_protobuf" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abd55c64f54cb10967a435422f66ff5880ae14a232b245517c7ce38da32e0cab" +checksum = 
"f05755c38b134b409736008bfdfd0fdb42bfa061947be93be4c78069aa10c9b3" dependencies = [ "anyhow", "bit-vec", @@ -6667,9 +6918,9 @@ dependencies = [ [[package]] name = "zksync_protobuf_build" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4121952bcaf711005dd554612fc6e2de9b30cb58088508df87f1d38046ce8ac8" +checksum = "64c3930a73ca667780be6dcd94e469d40a93fa52f4654c9ab732991b62238cb5" dependencies = [ "anyhow", "heck", @@ -6679,7 +6930,7 @@ dependencies = [ "prost-reflect", "protox", "quote", - "syn 2.0.79", + "syn 2.0.89", ] [[package]] @@ -6717,7 +6968,7 @@ dependencies = [ "bigdecimal", "blake2", "chrono", - "derive_more 1.0.0", + "derive_more", "hex", "itertools 0.10.5", "num", @@ -6747,7 +6998,7 @@ dependencies = [ "anyhow", "futures", "once_cell", - "reqwest 0.12.8", + "reqwest 0.12.9", "serde_json", "tokio", "tracing", diff --git a/zkstack_cli/Cargo.toml b/zkstack_cli/Cargo.toml index 1f493f9c3e41..29a0e5bc43c6 100644 --- a/zkstack_cli/Cargo.toml +++ b/zkstack_cli/Cargo.toml @@ -19,7 +19,6 @@ repository = "https://github.com/matter-labs/zksync-era/tree/main/zkstack_cli/" description = "ZK Stack CLI is a set of tools for working with zk stack." 
keywords = ["zk", "cryptography", "blockchain", "ZKStack", "ZKsync"] - [workspace.dependencies] # Local dependencies common = { path = "crates/common" } @@ -31,11 +30,11 @@ git_version_macro = { path = "crates/git_version_macro" } zksync_config = { path = "../core/lib/config" } zksync_protobuf_config = { path = "../core/lib/protobuf_config" } zksync_basic_types = { path = "../core/lib/basic_types" } -zksync_consensus_roles = "=0.5.0" -zksync_consensus_crypto = "=0.5.0" -zksync_consensus_utils = "=0.5.0" -zksync_protobuf = "=0.5.0" -zksync_protobuf_build = "=0.5.0" +zksync_consensus_roles = "=0.7.0" +zksync_consensus_crypto = "=0.7.0" +zksync_consensus_utils = "=0.7.0" +zksync_protobuf = "=0.7.0" +zksync_protobuf_build = "=0.7.0" # External dependencies anyhow = "1.0.82" diff --git a/zkstack_cli/README.md b/zkstack_cli/README.md index e81165088218..86d42bcbc906 100644 --- a/zkstack_cli/README.md +++ b/zkstack_cli/README.md @@ -1,11 +1,11 @@ # ZK Stack CLI -Toolkit for creating and managing ZK Stack chains. `ZK Stack CLI` facilitates the creation and management of ZK Stacks. -Commands are interactive but can also accept arguments via the command line. +Toolkit for creating and managing ZK Stack chains. `ZK Stack CLI` facilitates the creation and management of ZK Stack +ecosystems. Commands are interactive but can also accept arguments via the command line. ### Dependencies -Follow [these instructions](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/setup-dev.md) to set up +Follow [these instructions](https://github.com/matter-labs/zksync-era/blob/main/docs/src/guides/setup-dev.md) to set up dependencies on your machine. Ignore the Environment section for now. ### Installation @@ -48,7 +48,7 @@ Foundry is used for deploying smart contracts. Pass flags for Foundry integratio ### Ecosystem ZK Stack allows you to create a new ecosystem or connect to an existing one. 
An ecosystem includes components like the -BridgeHub, shared bridges, and state transition managers. +BridgeHub, shared bridges, and state transition managers. Multiple ZK chains can be registered to an ecosystem. [Learn more](https://docs.zksync.io/zk-stack/components/shared-bridges). #### Global Config @@ -140,7 +140,7 @@ zkstack containers --observability #### Create -The first ZK chain is generated upon ecosystem creation. Create additional chains and switch between them: +The first ZK chain is generated upon ecosystem creation. You can also create additional chains and switch between them: ```bash zkstack chain create @@ -148,7 +148,7 @@ zkstack chain create #### Init -Deploy contracts and initialize Zk Chain: +Deploy contracts and initialize ZK chain: ```bash zkstack chain init @@ -184,7 +184,7 @@ Ensure you have installed: - [cmake](https://apt.kitware.com/) - [nvcc (CUDA toolkit)](https://developer.nvidia.com/cuda-downloads) -Refer to the [prover docs](https://github.com/matter-labs/zksync-era/blob/main/prover/docs/02_setup.md) for more +Refer to the [prover docs](https://github.com/matter-labs/zksync-era/blob/main/prover/docs/src/02_setup.md) for more information. #### Running the Prover @@ -313,7 +313,7 @@ needed. ## Dev -The subcommand `zkstack dev` offers tools for developing ZKsync. +The subcommand `zkstack dev` offers tools for developing. 
### Database diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/input.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/input.rs index d5611f805b17..17b2bac38a3f 100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/input.rs @@ -11,7 +11,7 @@ use zksync_basic_types::L2ChainId; use crate::{ consts::INITIAL_DEPLOYMENT_FILE, traits::{FileConfigWithDefaultName, ZkStackConfig}, - ContractsConfig, GenesisConfig, WalletsConfig, + ContractsConfig, GenesisConfig, WalletsConfig, ERC20_DEPLOYMENT_FILE, }; #[derive(Debug, Deserialize, Serialize, Clone)] @@ -69,7 +69,7 @@ pub struct Erc20DeploymentConfig { } impl FileConfigWithDefaultName for Erc20DeploymentConfig { - const FILE_NAME: &'static str = INITIAL_DEPLOYMENT_FILE; + const FILE_NAME: &'static str = ERC20_DEPLOYMENT_FILE; } impl ZkStackConfig for Erc20DeploymentConfig {} diff --git a/zkstack_cli/crates/zkstack/src/accept_ownership.rs b/zkstack_cli/crates/zkstack/src/accept_ownership.rs index 474e76e599a8..73dfd8082708 100644 --- a/zkstack_cli/crates/zkstack/src/accept_ownership.rs +++ b/zkstack_cli/crates/zkstack/src/accept_ownership.rs @@ -10,7 +10,7 @@ use xshell::Shell; use crate::{ messages::MSG_ACCEPTING_GOVERNANCE_SPINNER, - utils::forge::{check_the_balance, fill_forge_private_key}, + utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}, }; lazy_static! 
{ @@ -89,7 +89,7 @@ async fn accept_ownership( governor: &Wallet, mut forge: ForgeScript, ) -> anyhow::Result<()> { - forge = fill_forge_private_key(forge, Some(governor))?; + forge = fill_forge_private_key(forge, Some(governor), WalletOwner::Governor)?; check_the_balance(&forge).await?; let spinner = Spinner::new(MSG_ACCEPTING_GOVERNANCE_SPINNER); forge.run(shell)?; diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs index 31cfc7f83977..4164f9a05a2a 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs @@ -27,7 +27,7 @@ use crate::{ MSG_CHAIN_NOT_INITIALIZED, MSG_DEPLOYING_L2_CONTRACT_SPINNER, MSG_L1_SECRETS_MUST_BE_PRESENTED, }, - utils::forge::{check_the_balance, fill_forge_private_key}, + utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}, }; pub enum Deploy2ContractsOption { @@ -311,7 +311,11 @@ async fn call_forge( forge = forge.with_signature(signature); } - forge = fill_forge_private_key(forge, Some(&ecosystem_config.get_wallets()?.governor))?; + forge = fill_forge_private_key( + forge, + Some(&ecosystem_config.get_wallets()?.governor), + WalletOwner::Governor, + )?; check_the_balance(&forge).await?; forge.run(shell)?; diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_paymaster.rs b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_paymaster.rs index 4a93fcc089f8..4bcfd6c08099 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_paymaster.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_paymaster.rs @@ -12,7 +12,7 @@ use xshell::Shell; use crate::{ messages::{MSG_CHAIN_NOT_INITIALIZED, MSG_L1_SECRETS_MUST_BE_PRESENTED}, - utils::forge::{check_the_balance, fill_forge_private_key}, + utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}, }; pub async fn run(args: 
ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { @@ -56,7 +56,11 @@ pub async fn deploy_paymaster( if let Some(address) = sender { forge = forge.with_sender(address); } else { - forge = fill_forge_private_key(forge, Some(&chain_config.get_wallets_config()?.governor))?; + forge = fill_forge_private_key( + forge, + Some(&chain_config.get_wallets_config()?.governor), + WalletOwner::Governor, + )?; } if broadcast { diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/register_chain.rs b/zkstack_cli/crates/zkstack/src/commands/chain/register_chain.rs index 65ee05a1ea5f..42b3bbd59c71 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/register_chain.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/register_chain.rs @@ -19,7 +19,7 @@ use crate::{ MSG_CHAIN_NOT_INITIALIZED, MSG_CHAIN_REGISTERED, MSG_L1_SECRETS_MUST_BE_PRESENTED, MSG_REGISTERING_CHAIN_SPINNER, }, - utils::forge::{check_the_balance, fill_forge_private_key}, + utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}, }; pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { @@ -81,7 +81,11 @@ pub async fn register_chain( if let Some(address) = sender { forge = forge.with_sender(address); } else { - forge = fill_forge_private_key(forge, Some(&config.get_wallets()?.governor))?; + forge = fill_forge_private_key( + forge, + Some(&config.get_wallets()?.governor), + WalletOwner::Governor, + )?; check_the_balance(&forge).await?; } diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/set_token_multiplier_setter.rs b/zkstack_cli/crates/zkstack/src/commands/chain/set_token_multiplier_setter.rs index 4a6cd31b2c0a..326aa393f8f2 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/set_token_multiplier_setter.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/set_token_multiplier_setter.rs @@ -17,7 +17,7 @@ use crate::{ MSG_TOKEN_MULTIPLIER_SETTER_UPDATED_TO, MSG_UPDATING_TOKEN_MULTIPLIER_SETTER_SPINNER, MSG_WALLETS_CONFIG_MUST_BE_PRESENT, 
MSG_WALLET_TOKEN_MULTIPLIER_SETTER_NOT_FOUND, }, - utils::forge::{check_the_balance, fill_forge_private_key}, + utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}, }; lazy_static! { @@ -109,7 +109,7 @@ async fn update_token_multiplier_setter( governor: &Wallet, mut forge: ForgeScript, ) -> anyhow::Result<()> { - forge = fill_forge_private_key(forge, Some(governor))?; + forge = fill_forge_private_key(forge, Some(governor), WalletOwner::Governor)?; check_the_balance(&forge).await?; forge.run(shell)?; Ok(()) diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/setup_legacy_bridge.rs b/zkstack_cli/crates/zkstack/src/commands/chain/setup_legacy_bridge.rs index f61c640ffb6b..a05ef04eee3e 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/setup_legacy_bridge.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/setup_legacy_bridge.rs @@ -14,7 +14,7 @@ use xshell::Shell; use crate::{ messages::{MSG_DEPLOYING_PAYMASTER, MSG_L1_SECRETS_MUST_BE_PRESENTED}, - utils::forge::{check_the_balance, fill_forge_private_key}, + utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}, }; pub async fn setup_legacy_bridge( @@ -59,7 +59,11 @@ pub async fn setup_legacy_bridge( ) .with_broadcast(); - forge = fill_forge_private_key(forge, Some(&ecosystem_config.get_wallets()?.governor))?; + forge = fill_forge_private_key( + forge, + Some(&ecosystem_config.get_wallets()?.governor), + WalletOwner::Governor, + )?; let spinner = Spinner::new(MSG_DEPLOYING_PAYMASTER); check_the_balance(&forge).await?; diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/common.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/common.rs index 00d937bba294..074913d79fa2 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/common.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/common.rs @@ -14,7 +14,7 @@ use config::{ use types::{L1Network, ProverMode}; use xshell::Shell; -use crate::utils::forge::{check_the_balance, 
fill_forge_private_key}; +use crate::utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}; pub async fn deploy_l1( shell: &Shell, @@ -54,7 +54,11 @@ pub async fn deploy_l1( if let Some(address) = sender { forge = forge.with_sender(address); } else { - forge = fill_forge_private_key(forge, wallets_config.deployer.as_ref())?; + forge = fill_forge_private_key( + forge, + wallets_config.deployer.as_ref(), + WalletOwner::Deployer, + )?; } if broadcast { diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/init.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/init.rs index 06b9b9161112..7b01abf03b9a 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/init.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/init.rs @@ -43,7 +43,7 @@ use crate::{ MSG_ECOSYSTEM_CONTRACTS_PATH_PROMPT, MSG_INITIALIZING_ECOSYSTEM, MSG_INTALLING_DEPS_SPINNER, }, - utils::forge::{check_the_balance, fill_forge_private_key}, + utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}, }; pub async fn run(args: EcosystemInitArgs, shell: &Shell) -> anyhow::Result<()> { @@ -150,7 +150,11 @@ async fn deploy_erc20( .with_rpc_url(l1_rpc_url) .with_broadcast(); - forge = fill_forge_private_key(forge, ecosystem_config.get_wallets()?.deployer.as_ref())?; + forge = fill_forge_private_key( + forge, + ecosystem_config.get_wallets()?.deployer.as_ref(), + WalletOwner::Deployer, + )?; let spinner = Spinner::new(MSG_DEPLOYING_ERC20_SPINNER); check_the_balance(&forge).await?; diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/prepare_configs.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/prepare_configs.rs index d714a0f8e843..03a586a0652a 100644 --- a/zkstack_cli/crates/zkstack/src/commands/external_node/prepare_configs.rs +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/prepare_configs.rs @@ -74,7 +74,6 @@ fn prepare_configs( .http_url, )?, main_node_rate_limit_rps: None, - gateway_url: None, 
bridge_addresses_refresh_interval_sec: None, }; let mut general_en = general.clone(); @@ -112,6 +111,7 @@ fn prepare_configs( }), l1: Some(L1Secrets { l1_rpc_url: SensitiveUrl::from_str(&args.l1_rpc_url).context("l1_rpc_url")?, + gateway_rpc_url: None, }), data_availability: None, }; diff --git a/zkstack_cli/crates/zkstack/src/messages.rs b/zkstack_cli/crates/zkstack/src/messages.rs index a4f42ff76350..09185caf438f 100644 --- a/zkstack_cli/crates/zkstack/src/messages.rs +++ b/zkstack_cli/crates/zkstack/src/messages.rs @@ -7,6 +7,8 @@ use ethers::{ use url::Url; use zksync_consensus_roles::attester; +use crate::utils::forge::WalletOwner; + pub(super) const MSG_SETUP_KEYS_DOWNLOAD_SELECTION_PROMPT: &str = "Do you want to download the setup keys or generate them?"; pub(super) const MSG_SETUP_KEYS_REGION_PROMPT: &str = @@ -335,7 +337,15 @@ pub(super) fn msg_explorer_chain_not_initialized(chain: &str) -> String { } /// Forge utils related messages -pub(super) const MSG_DEPLOYER_PK_NOT_SET_ERR: &str = "Deployer private key is not set"; +pub(super) fn msg_wallet_private_key_not_set(wallet_owner: WalletOwner) -> String { + format!( + "{} private key is not set", + match wallet_owner { + WalletOwner::Governor => "Governor", + WalletOwner::Deployer => "Deployer", + } + ) +} pub(super) fn msg_address_doesnt_have_enough_money_prompt( address: &H160, diff --git a/zkstack_cli/crates/zkstack/src/utils/forge.rs b/zkstack_cli/crates/zkstack/src/utils/forge.rs index 355cf7b5f930..76f045f82b9e 100644 --- a/zkstack_cli/crates/zkstack/src/utils/forge.rs +++ b/zkstack_cli/crates/zkstack/src/utils/forge.rs @@ -4,18 +4,24 @@ use ethers::types::U256; use crate::{ consts::MINIMUM_BALANCE_FOR_WALLET, - messages::{msg_address_doesnt_have_enough_money_prompt, MSG_DEPLOYER_PK_NOT_SET_ERR}, + messages::{msg_address_doesnt_have_enough_money_prompt, msg_wallet_private_key_not_set}, }; +pub enum WalletOwner { + Governor, + Deployer, +} + pub fn fill_forge_private_key( mut forge: ForgeScript, 
wallet: Option<&Wallet>, + wallet_owner: WalletOwner, ) -> anyhow::Result { if !forge.wallet_args_passed() { forge = forge.with_private_key( wallet .and_then(|w| w.private_key_h256()) - .context(MSG_DEPLOYER_PK_NOT_SET_ERR)?, + .context(msg_wallet_private_key_not_set(wallet_owner))?, ); } Ok(forge)