diff --git a/.github/release-please/manifest.json b/.github/release-please/manifest.json
index fa33d4c8d7a1..7e3c7e01ae74 100644
--- a/.github/release-please/manifest.json
+++ b/.github/release-please/manifest.json
@@ -1,5 +1,5 @@
 {
-  "core": "25.1.0",
-  "prover": "17.0.0",
+  "core": "25.2.0",
+  "prover": "17.1.0",
   "zkstack_cli": "0.1.2"
 }
diff --git a/.github/workflows/build-docker-from-tag.yml b/.github/workflows/build-docker-from-tag.yml
index 206e15bd195f..b3f442ff4662 100644
--- a/.github/workflows/build-docker-from-tag.yml
+++ b/.github/workflows/build-docker-from-tag.yml
@@ -49,7 +49,7 @@ jobs:
   build-push-core-images:
     name: Build and push image
     needs: [ setup ]
-    uses: ./.github/workflows/build-core-template.yml
+    uses: ./.github/workflows/new-build-core-template.yml
     if: contains(github.ref_name, 'core')
     secrets:
       DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
@@ -57,6 +57,7 @@ jobs:
     with:
       image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}
       en_alpha_release: true
+      action: "push"

   build-push-tee-prover-images:
     name: Build and push images
@@ -73,23 +74,25 @@ jobs:
   build-push-contract-verifier:
     name: Build and push image
     needs: [ setup ]
-    uses: ./.github/workflows/build-contract-verifier-template.yml
+    uses: ./.github/workflows/new-build-contract-verifier-template.yml
    if: contains(github.ref_name, 'contract_verifier')
     secrets:
       DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
       DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
     with:
       image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}
+      action: "push"

   build-push-prover-images:
     name: Build and push image
     needs: [ setup ]
-    uses: ./.github/workflows/build-prover-template.yml
+    uses: ./.github/workflows/new-build-prover-template.yml
     if: contains(github.ref_name, 'prover')
     with:
       image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}
       ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }}
       CUDA_ARCH: "60;70;75;80;89"
+      action: "push"
     secrets:
       DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
       DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -97,13 +100,14 @@ jobs:
   build-push-witness-generator-image-avx512:
     name: Build and push image
     needs: [ setup ]
-    uses: ./.github/workflows/build-witness-generator-template.yml
+    uses: ./.github/workflows/new-build-witness-generator-template.yml
     if: contains(github.ref_name, 'prover')
     with:
       image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}-avx512
       ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }}
       CUDA_ARCH: "60;70;75;80;89"
       WITNESS_GENERATOR_RUST_FLAGS: "-Ctarget_feature=+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512vl"
+      action: "push"
     secrets:
       DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
       DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
diff --git a/.github/workflows/ci-core-reusable.yml b/.github/workflows/ci-core-reusable.yml
index da3e2d5abb56..d76bb776968d 100644
--- a/.github/workflows/ci-core-reusable.yml
+++ b/.github/workflows/ci-core-reusable.yml
@@ -61,7 +61,7 @@ jobs:
       - name: Init
         run: |
           ci_run run_retried rustup show
-          
+
       - name: Install zkstack
         run: |
           ci_run ./zkstack_cli/zkstackup/install -g --path ./zkstack_cli/zkstackup/zkstackup
@@ -147,10 +147,10 @@ jobs:
             --base-token-price-denominator 1 \
             --set-as-default false \
             --ignore-prerequisites \
-            --legacy-bridge
+            --legacy-bridge \
+            --evm-emulator false

           ci_run zkstack ecosystem init --dev --verbose
-          ci_run zkstack dev contracts --test-contracts

           # `sleep 60` because we need to wait until server added all the tokens
       - name: Run server
@@ -201,7 +201,7 @@ jobs:
         run: |
           ci_run ./zkstack_cli/zkstackup/install -g --path ./zkstack_cli/zkstackup/zkstackup || true
           ci_run zkstackup -g --local
-          
+
       - name: Create log directories
         run: |
           SERVER_LOGS_DIR=logs/server
@@ -262,7 +262,8 @@ jobs:
             --base-token-price-nominator 1 \
             --base-token-price-denominator 1 \
             --set-as-default false \
-            --ignore-prerequisites
+            --ignore-prerequisites \
+            --evm-emulator false

           ci_run zkstack chain init \
             --deploy-paymaster \
@@ -283,7 +284,8 @@ jobs:
             --base-token-price-nominator 314 \
             --base-token-price-denominator 1000 \
             --set-as-default false \
-            --ignore-prerequisites
+            --ignore-prerequisites \
+            --evm-emulator false

           ci_run zkstack chain init \
             --deploy-paymaster \
@@ -304,7 +306,8 @@ jobs:
             --base-token-price-nominator 1 \
             --base-token-price-denominator 1 \
             --set-as-default false \
-            --ignore-prerequisites
+            --ignore-prerequisites \
+            --evm-emulator false

           ci_run zkstack chain build-transactions --chain offline_chain --l1-rpc-url http://127.0.0.1:8545

@@ -339,7 +342,8 @@ jobs:
            --base-token-price-nominator 314 \
            --base-token-price-denominator 1000 \
            --set-as-default false \
-            --ignore-prerequisites
+            --ignore-prerequisites \
+            --evm-emulator false

           ci_run zkstack chain init \
             --deploy-paymaster \
@@ -357,10 +361,17 @@ jobs:
         run: |
           ci_run zkstack dev test build

+      - name: Build tested binaries
+        run: |
+          ci_run zkstack server build
+          ci_run zkstack external-node build
+          ci_run zkstack contract-verifier build
+
       - name: Initialize Contract verifier
         run: |
           ci_run zkstack contract-verifier init --zksolc-version=v1.5.3 --zkvyper-version=v1.5.4 --solc-version=0.8.26 --vyper-version=v0.3.10 --era-vm-solc-version=0.8.26-1.0.1 --only --chain era
           ci_run zkstack contract-verifier run --chain era &> ${{ env.SERVER_LOGS_DIR }}/contract-verifier-rollup.log &
+          ci_run zkstack contract-verifier wait --chain era --verbose

       - name: Run servers
         run: |
@@ -375,10 +386,14 @@ jobs:
             --components=api,tree,eth,state_keeper,housekeeper,commitment_generator,vm_runner_protective_reads,vm_runner_bwip,vm_playground,da_dispatcher,consensus \
             &> ${{ env.SERVER_LOGS_DIR }}/consensus.log &

-          ci_run sleep 5
+          ci_run zkstack server wait --ignore-prerequisites --verbose --chain era
+          ci_run zkstack server wait --ignore-prerequisites --verbose --chain validium
+          ci_run zkstack server wait --ignore-prerequisites --verbose --chain custom_token
+          ci_run zkstack server wait --ignore-prerequisites --verbose --chain consensus

-      - name: Setup attester committee for the consensus chain
+      - name: Set up attester committee for the consensus chain
         run: |
+          ci_run zkstack consensus wait-for-registry --ignore-prerequisites --verbose --chain consensus
           ci_run zkstack consensus set-attester-committee --chain consensus --from-genesis &> ${{ env.INTEGRATION_TESTS_LOGS_DIR }}/consensus.log

       - name: Run integration tests
@@ -411,12 +426,17 @@ jobs:
         run: |
           ci_run ./bin/run_on_all_chains.sh "zkstack dev test recovery --no-deps --no-kill --ignore-prerequisites --verbose" ${{ env.CHAINS }} ${{ env.INTEGRATION_TESTS_LOGS_DIR }}

-      - name: Run external node server
+      - name: Run external nodes
         run: |
           ci_run zkstack external-node run --ignore-prerequisites --chain era &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/rollup.log &
           ci_run zkstack external-node run --ignore-prerequisites --chain validium &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/validium.log &
           ci_run zkstack external-node run --ignore-prerequisites --chain custom_token &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/custom_token.log &
           ci_run zkstack external-node run --ignore-prerequisites --chain consensus --enable-consensus &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/consensus.log &
+
+          ci_run zkstack external-node wait --ignore-prerequisites --verbose --chain era
+          ci_run zkstack external-node wait --ignore-prerequisites --verbose --chain validium
+          ci_run zkstack external-node wait --ignore-prerequisites --verbose --chain custom_token
+          ci_run zkstack external-node wait --ignore-prerequisites --verbose --chain consensus

       - name: Run integration tests en
         run: |
diff --git a/.github/workflows/ci-prover-e2e.yml b/.github/workflows/ci-prover-e2e.yml
index 6076874c3710..a4a9b29e1d64 100644
--- a/.github/workflows/ci-prover-e2e.yml
+++ b/.github/workflows/ci-prover-e2e.yml
@@ -50,7 +50,8 @@ jobs:
             --base-token-price-nominator 1 \
             --base-token-price-denominator 1 \
             --set-as-default true \
-            --ignore-prerequisites
+            --ignore-prerequisites \
+            --evm-emulator false

           ci_run zkstack ecosystem init --dev --verbose
           ci_run zkstack prover init --dev --verbose
@@ -86,7 +87,7 @@ jobs:
           ci_run zkstack prover run --component=witness-generator --round=all-rounds --docker=false &>prover_logs/witness-generator.log &
       - name: Run Circuit Prover
         run: |
-          ci_run zkstack prover run --component=circuit-prover --witness-vector-generator-count=10 --docker=false &>prover_logs/circuit_prover.log &
+          ci_run zkstack prover run --component=circuit-prover -l=23 -h=3 --docker=false &>prover_logs/circuit_prover.log &
       - name: Wait for prover jobs to finish
         env:
           DATABASE_URL: postgres://postgres:notsecurepassword@localhost:5432/zksync_prover_localhost_proving_chain
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 2f29fe98f0e6..849fccc2e22c 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -20,7 +20,6 @@ jobs:
     outputs:
       core: ${{ steps.changed-files.outputs.core_any_changed }}
       prover: ${{ steps.changed-files.outputs.prover_any_changed }}
-      zkstack_cli: ${{ steps.changed-files.outputs.zkstack_cli_any_changed }}
       docs: ${{ steps.changed-files.outputs.docs_any_changed }}
       all: ${{ steps.changed-files.outputs.all_any_changed }}
     steps:
@@ -178,6 +177,7 @@ jobs:
       image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}-avx512
       action: "build"
       WITNESS_GENERATOR_RUST_FLAGS: "-Ctarget_feature=+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512vl"
+      ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }}
     secrets:
       DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
       DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -186,7 +186,7 @@ jobs:
     name: Github Status Check
     runs-on: ubuntu-latest
     if: always() && !cancelled()
-    needs: [ ci-for-core-lint, ci-for-common, ci-for-core, ci-for-prover, ci-for-docs, build-core-images, build-contract-verifier, build-prover-images ]
+    needs: [ ci-for-core-lint, ci-for-common, ci-for-core, ci-for-prover, ci-for-docs, build-core-images, build-contract-verifier, build-prover-images, e2e-for-prover ]
     steps:
       - name: Status
         run: |
diff --git a/.github/workflows/deploy-core-docs.yml b/.github/workflows/deploy-core-docs.yml
new file mode 100644
index 000000000000..f01c56f68c3c
--- /dev/null
+++ b/.github/workflows/deploy-core-docs.yml
@@ -0,0 +1,67 @@
+name: Deploy core docs
+
+on:
+  push:
+    branches:
+      - "main"
+    tags:
+      - "core-v*.*.*"
+    paths:
+      - 'docs/**'
+      - '.github/workflows/deploy-core-docs.yml'
+  pull_request:
+    paths:
+      - 'docs/**'
+      - '.github/workflows/deploy-core-docs.yml'
+  workflow_dispatch:
+    inputs:
+      ref:
+        description: "Branch, tag or commit to deploy the core docs. If empty, use the ref that triggered the workflow."
+        required: false
+        default: ""
+      version:
+        type: string
+        description: "Version of the documentation to deploy"
+        required: false
+        default: "latest"
+
+# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
+# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
+concurrency:
+  group: "pages"
+  cancel-in-progress: false
+
+jobs:
+
+  deploy-core-docs:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: write
+    env:
+      DOCS_DIR: 'docs'
+      PROJECT: 'core'
+      ENABLE_TESTS: false
+    steps:
+      - name: Checkout
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          ref: ${{ inputs.ref || '' }}
+
+      - name: Extract version from tag
+        if: startsWith(github.ref, 'refs/tags/')
+        id: extract-version
+        shell: 'bash -ex {0}'
+        run: |
+          TAG="${{ github.ref_name }}"
+          VERSION="${TAG#*-}"
+          echo "version=${VERSION}" >> "${GITHUB_OUTPUT}"
+
+      - name: Deploy core docs
+        uses: matter-labs/deploy-mdbooks@73f638643d1be948d1002fe5433747f4a3e37a29 # v1
+        with:
+          version: ${{ inputs.version || steps.extract-version.outputs.version || github.ref_name }}
+          docs-dir: ${{ env.DOCS_DIR }}
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          enable-tests: ${{ env.ENABLE_TESTS }}
+          project: ${{ env.PROJECT }}
+          deploy: ${{ github.event_name != 'pull_request' }}
diff --git a/.github/workflows/deploy-prover-docs.yml b/.github/workflows/deploy-prover-docs.yml
new file mode 100644
index 000000000000..7f797c61cf5a
--- /dev/null
+++ b/.github/workflows/deploy-prover-docs.yml
@@ -0,0 +1,67 @@
+name: Deploy prover docs
+
+on:
+  push:
+    branches:
+      - "main"
+    tags:
+      - "prover-v*.*.*"
+    paths:
+      - 'prover/docs/**'
+      - '.github/workflows/deploy-prover-docs.yml'
+  pull_request:
+    paths:
+      - 'prover/docs/**'
+      - '.github/workflows/deploy-prover-docs.yml'
+  workflow_dispatch:
+    inputs:
+      ref:
+        description: "Branch, tag or commit to deploy the prover docs. If empty, use the ref that triggered the workflow."
+        required: false
+        default: ""
+      version:
+        type: string
+        description: "Version of the documentation to deploy"
+        required: false
+        default: "latest"
+
+# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
+# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
+concurrency:
+  group: "pages"
+  cancel-in-progress: false
+
+jobs:
+
+  deploy-prover-docs:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: write
+    env:
+      DOCS_DIR: 'prover/docs'
+      PROJECT: 'prover'
+      ENABLE_TESTS: false
+    steps:
+      - name: Checkout
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          ref: ${{ inputs.ref || '' }}
+
+      - name: Extract version from tag
+        if: startsWith(github.ref, 'refs/tags/')
+        id: extract-version
+        shell: 'bash -ex {0}'
+        run: |
+          TAG="${{ github.ref_name }}"
+          VERSION="${TAG#*-}"
+          echo "version=${VERSION}" >> "${GITHUB_OUTPUT}"
+
+      - name: Deploy prover docs
+        uses: matter-labs/deploy-mdbooks@73f638643d1be948d1002fe5433747f4a3e37a29 # v1
+        with:
+          version: ${{ inputs.version || steps.extract-version.outputs.version || github.ref_name }}
+          docs-dir: ${{ env.DOCS_DIR }}
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          enable-tests: ${{ env.ENABLE_TESTS }}
+          project: ${{ env.PROJECT }}
+          deploy: ${{ github.event_name != 'pull_request' }}
diff --git a/.github/workflows/new-build-contract-verifier-template.yml b/.github/workflows/new-build-contract-verifier-template.yml
index 7e48968a65c1..7d75f81fb73c 100644
--- a/.github/workflows/new-build-contract-verifier-template.yml
+++ b/.github/workflows/new-build-contract-verifier-template.yml
@@ -212,7 +212,8 @@ jobs:
         uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0
         with:
           context: .
-          push: ${{ inputs.action == 'push' }}
+          load: true
+          platforms: ${{ matrix.platforms }}
           file: docker/${{ matrix.components }}/Dockerfile
           build-args: |
             SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage
@@ -220,23 +221,16 @@ jobs:
             SCCACHE_GCS_RW_MODE=READ_WRITE
             RUSTC_WRAPPER=sccache
           tags: |
-            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:latest
-            matterlabs/${{ matrix.components }}:latest
-            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:latest2.0
-            matterlabs/${{ matrix.components }}:latest2.0
-            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA }}
-            matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA }}
-            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA }}
-            matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA }}
-            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }}
-            matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }}
-            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}
-            matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}
-            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }}
-            matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }}
-            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}
+            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }}
             matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }}

+      - name: Push docker image
+        if: ${{ inputs.action == 'push' }}
+        run: |
+          docker push us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }}
+          docker push matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }}
+
+
   create_manifest:
     name: Create release manifest
     runs-on: matterlabs-ci-runner
diff --git a/.github/workflows/new-build-core-template.yml b/.github/workflows/new-build-core-template.yml
index 350d689c4572..557d8455a31d 100644
--- a/.github/workflows/new-build-core-template.yml
+++ b/.github/workflows/new-build-core-template.yml
@@ -197,10 +197,8 @@ jobs:
         shell: bash
         run: |
           echo PLATFORM=$(echo ${{ matrix.platforms }} | tr '/' '-') >> $GITHUB_ENV
-          echo IMAGE_TAG_SHA=$(git rev-parse --short HEAD) >> $GITHUB_ENV
-          # Support for custom tag suffix
           if [ -n "${{ inputs.image_tag_suffix }}" ]; then
-            echo IMAGE_TAG_SHA_TS="${{ inputs.image_tag_suffix }}" >> $GITHUB_ENV
+            echo IMAGE_TAG_SHA_TS="${{ env.IMAGE_TAG_SUFFIX }}" >> $GITHUB_ENV
           else
             echo IMAGE_TAG_SHA_TS=$(git rev-parse --short HEAD)-$(date +%s) >> $GITHUB_ENV
           fi
@@ -219,35 +217,28 @@ jobs:
           docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }}
           gcloud auth configure-docker us-docker.pkg.dev -q

-      - name: Build and push
+      - name: Build docker image
         uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0
         with:
           context: .
-          push: ${{ inputs.action == 'push' }}
+          load: true
+          platforms: ${{ matrix.platforms }}
           file: docker/${{ matrix.components }}/Dockerfile
           build-args: |
             SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage
             SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com
             SCCACHE_GCS_RW_MODE=READ_WRITE
             RUSTC_WRAPPER=sccache
-          tags: |
-            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:latest
-            matterlabs/${{ matrix.components }}:latest
-            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:latest2.0
-            matterlabs/${{ matrix.components }}:latest2.0
-            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA }}
-            matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA }}
-            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA }}
-            matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA }}
-            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }}
-            matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }}
-            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}
-            matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}
-            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }}
-            matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }}
-            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}
+          tags: |
+            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }}
             matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }}

+      - name: Push docker image
+        if: ${{ inputs.action == 'push' }}
+        run: |
+          docker push us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }}
+          docker push matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }}
+
   create_manifest:
     name: Create release manifest
     runs-on: matterlabs-ci-runner
@@ -269,13 +260,11 @@ jobs:
       - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4

       - name: login to Docker registries
-        shell: bash
         run: |
           docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }}
           gcloud auth configure-docker us-docker.pkg.dev -q

       - name: Create Docker manifest
-        shell: bash
         run: |
           docker_repositories=("matterlabs/${{ matrix.component.name }}" "us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component.name }}")
           platforms=${{ matrix.component.platform }}
diff --git a/.github/workflows/new-build-prover-template.yml b/.github/workflows/new-build-prover-template.yml
index 046711d679e8..cb254f602fc5 100644
--- a/.github/workflows/new-build-prover-template.yml
+++ b/.github/workflows/new-build-prover-template.yml
@@ -97,6 +97,7 @@ jobs:
           - prover-job-monitor
           - proof-fri-gpu-compressor
           - prover-autoscaler
+          - circuit-prover-gpu
     steps:
       - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
         with:
@@ -152,17 +153,33 @@ jobs:
         uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0
         with:
           context: .
-          push: ${{ inputs.action == 'push' }}
+          load: true
          build-args: |
             CUDA_ARCH=${{ inputs.CUDA_ARCH }}
             SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage
             SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com
             SCCACHE_GCS_RW_MODE=READ_WRITE
             RUSTC_WRAPPER=sccache
+            PROTOCOL_VERSION=${{ env.PROTOCOL_VERSION }}
+            ERA_BELLMAN_CUDA_RELEASE=${{ inputs.ERA_BELLMAN_CUDA_RELEASE }}
           file: docker/${{ matrix.components }}/Dockerfile
           tags: |
             us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }}
             matterlabs/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }}
+            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }}
+            matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }}
+            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:latest
+            matterlabs/${{ matrix.components }}:latest
+
+      - name: Push docker image
+        if: ${{ inputs.action == 'push' }}
+        run: |
+          docker push us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }}
+          docker push matterlabs/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }}
+          docker push us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }}
+          docker push matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }}
+          docker push us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:latest
+          docker push matterlabs/${{ matrix.components }}:latest

   copy-images:
     name: Copy images between docker registries
@@ -189,6 +206,10 @@ jobs:
           docker buildx imagetools create \
             --tag asia-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ inputs.image_tag_suffix }} \
             us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ inputs.image_tag_suffix }}
+          docker buildx imagetools create \
+            --tag asia-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ inputs.image_tag_suffix }} \
+            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ inputs.image_tag_suffix }}
+
       - name: Login and push to Europe GAR
         run: |
@@ -196,3 +217,6 @@ jobs:
           docker buildx imagetools create \
             --tag europe-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ inputs.image_tag_suffix }} \
             us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ inputs.image_tag_suffix }}
+          docker buildx imagetools create \
+            --tag europe-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ inputs.image_tag_suffix }} \
+            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ inputs.image_tag_suffix }}
diff --git a/.github/workflows/new-build-witness-generator-template.yml b/.github/workflows/new-build-witness-generator-template.yml
index 2f1fc0b2dd86..bbd6aee23ed1 100644
--- a/.github/workflows/new-build-witness-generator-template.yml
+++ b/.github/workflows/new-build-witness-generator-template.yml
@@ -9,6 +9,10 @@ on:
         description: "DOCKERHUB_TOKEN"
         required: true
     inputs:
+      ERA_BELLMAN_CUDA_RELEASE:
+        description: "ERA_BELLMAN_CUDA_RELEASE"
+        type: string
+        required: true
      image_tag_suffix:
         description: "Optional suffix to override tag name generation"
         type: string
@@ -127,7 +131,14 @@ jobs:
             SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com
             SCCACHE_GCS_RW_MODE=READ_WRITE
             RUSTC_WRAPPER=sccache
+            PROTOCOL_VERSION=${{ env.PROTOCOL_VERSION }}
+            ERA_BELLMAN_CUDA_RELEASE=${{ inputs.ERA_BELLMAN_CUDA_RELEASE }}
+            RUST_FLAGS=${{ inputs.WITNESS_GENERATOR_RUST_FLAGS }}
           file: docker/${{ matrix.components }}/Dockerfile
           tags: |
             us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }}
             matterlabs/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }}
+            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }}
+            matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }}
+            us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:latest
+            matterlabs/${{ matrix.components }}:latest
diff --git a/.github/workflows/protobuf.yaml b/.github/workflows/protobuf.yaml
index 9c2c34186701..f0565919ded1 100644
--- a/.github/workflows/protobuf.yaml
+++ b/.github/workflows/protobuf.yaml
@@ -23,7 +23,7 @@ env:
   RUSTC_WRAPPER: "sccache"
   SCCACHE_GHA_ENABLED: "true"
   RUST_BACKTRACE: "1"
-  SQLX_OFFLINE: true,
+  SQLX_OFFLINE: true
   # github.base_ref -> github.head_ref for pull_request
   BASE: ${{ github.event.pull_request.base.sha || github.event.before }}
   # github.event.before -> github.event.after for push
@@ -41,9 +41,10 @@ jobs:
           ref: ${{ env.BASE }}
           path: before
           fetch-depth: 0 # fetches all branches and tags, which is needed to compute the LCA.
+          submodules: "recursive"
       - name: checkout LCA
         run:
-          git checkout $(git merge-base $BASE $HEAD)
+          git checkout $(git merge-base $BASE $HEAD) --recurse-submodules
         working-directory: ./before
       - name: compile before
         run: cargo check --all-targets
@@ -59,6 +60,7 @@ jobs:
         with:
           ref: ${{ env.HEAD }}
           path: after
+          submodules: recursive
       - name: compile after
         run: cargo check --all-targets
         working-directory: ./after
diff --git a/.github/workflows/release-test-stage.yml b/.github/workflows/release-test-stage.yml
index 18708420dab0..eb75ab179b8e 100644
--- a/.github/workflows/release-test-stage.yml
+++ b/.github/workflows/release-test-stage.yml
@@ -61,10 +61,11 @@ jobs:
   build-push-core-images:
     name: Build and push images
     needs: [setup, changed_files]
-    uses: ./.github/workflows/build-core-template.yml
+    uses: ./.github/workflows/new-build-core-template.yml
     if: needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.all == 'true'
     with:
       image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}
+      action: "push"
     secrets:
       DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
       DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -84,10 +85,11 @@ jobs:
   build-push-contract-verifier:
     name: Build and push images
     needs: [setup, changed_files]
-    uses: ./.github/workflows/build-contract-verifier-template.yml
+    uses: ./.github/workflows/new-build-contract-verifier-template.yml
     if: needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.all == 'true'
     with:
       image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}
+      action: "push"
     secrets:
       DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
       DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -95,12 +97,13 @@ jobs:
   build-push-prover-images:
     name: Build and push images
     needs: [setup, changed_files]
-    uses: ./.github/workflows/build-prover-template.yml
+    uses: ./.github/workflows/new-build-prover-template.yml
     if: needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.all == 'true'
     with:
       image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}
       ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }}
       CUDA_ARCH: "60;70;75;80;89"
+      action: "push"
     secrets:
       DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
       DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -108,13 +111,14 @@ jobs:
   build-push-witness-generator-image-avx512:
     name: Build and push prover images with avx512 instructions
     needs: [setup, changed_files]
-    uses: ./.github/workflows/build-witness-generator-template.yml
+    uses: ./.github/workflows/new-build-witness-generator-template.yml
     if: needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.all == 'true'
     with:
       image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}-avx512
       ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }}
       CUDA_ARCH: "60;70;75;80;89"
       WITNESS_GENERATOR_RUST_FLAGS: "-Ctarget_feature=+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512vl "
+      action: "push"
     secrets:
       DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
       DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
diff --git a/Cargo.lock b/Cargo.lock
index ec085a15d32d..0994c0133e70 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -101,16 +101,581 @@ version = "0.2.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f"

+[[package]]
+name = "alloy"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8367891bf380210abb0d6aa30c5f85a9080cb4a066c4d5c5acadad630823751b"
+dependencies = [
+ "alloy-consensus",
+ "alloy-contract",
+ "alloy-core",
+ "alloy-eips",
+ "alloy-genesis",
+ "alloy-network",
+ "alloy-provider",
+ "alloy-pubsub",
+ "alloy-rpc-client",
+ "alloy-rpc-types",
+ "alloy-serde",
+ "alloy-signer",
+ "alloy-signer-local",
+ "alloy-transport",
+ "alloy-transport-http",
+ "alloy-transport-ipc",
+ "alloy-transport-ws",
+]
+
+[[package]]
+name = "alloy-chains"
+version = "0.1.47"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "18c5c520273946ecf715c0010b4e3503d7eba9893cd9ce6b7fff5654c4a3c470"
+dependencies = [
+ "alloy-primitives",
+ "num_enum 0.7.3",
+ "strum",
+]
+
+[[package]]
+name = "alloy-consensus"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "629b62e38d471cc15fea534eb7283d2f8a4e8bdb1811bcc5d66dda6cfce6fae1"
+dependencies = [
+ "alloy-eips",
+ "alloy-primitives",
+ "alloy-rlp",
+ "alloy-serde",
+ "c-kzg",
+ "serde",
+]
+
+[[package]]
+name = "alloy-contract"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0eefe64fd344cffa9cf9e3435ec4e93e6e9c3481bc37269af988bf497faf4a6a"
+dependencies = [
+ "alloy-dyn-abi",
+ "alloy-json-abi",
+ "alloy-network",
+ "alloy-network-primitives",
+ "alloy-primitives",
+ "alloy-provider",
+ "alloy-pubsub",
+ "alloy-rpc-types-eth",
+ "alloy-sol-types",
+ "alloy-transport",
+ "futures 0.3.31",
+ "futures-util",
+ "thiserror",
+]
+
+[[package]]
+name = "alloy-core"
+version = "0.8.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47ef9e96462d0b9fee9008c53c1f3d017b9498fcdef3ad8d728db98afef47955"
+dependencies = [
+ "alloy-dyn-abi",
+ "alloy-json-abi",
+ "alloy-primitives",
+ "alloy-rlp",
+ "alloy-sol-types",
+]
+
+[[package]]
+name = "alloy-dyn-abi"
+version = "0.8.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85132f2698b520fab3f54beed55a44389f7006a7b557a0261e1e69439dcc1572"
+dependencies = [
+ "alloy-json-abi",
+ "alloy-primitives",
+ "alloy-sol-type-parser",
+ "alloy-sol-types",
+ "const-hex",
+ "itoa",
+ "serde",
+ "serde_json",
+ "winnow 0.6.20",
+]
+
+[[package]]
+name = "alloy-eip2930"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0069cf0642457f87a01a014f6dc29d5d893cd4fd8fddf0c3cdfad1bb3ebafc41"
+dependencies = [
+ "alloy-primitives",
+ "alloy-rlp",
+ "serde",
+]
+
+[[package]]
+name = "alloy-eip7702"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea59dc42102bc9a1905dc57901edc6dd48b9f38115df86c7d252acba70d71d04"
+dependencies = [
+ "alloy-primitives",
+ "alloy-rlp",
+ "k256 0.13.4",
+ "serde",
+]
+
+[[package]]
+name = "alloy-eips"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f923dd5fca5f67a43d81ed3ebad0880bd41f6dd0ada930030353ac356c54cd0f"
+dependencies = [
+ "alloy-eip2930",
+ "alloy-eip7702",
+ "alloy-primitives",
+ "alloy-rlp",
+ "alloy-serde",
+ "c-kzg",
+ "derive_more 1.0.0",
+ "once_cell",
+ "serde",
+ "sha2 0.10.8",
+]
+
+[[package]]
+name = "alloy-genesis"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3a7a18afb0b318616b6b2b0e2e7ac5529d32a966c673b48091c9919e284e6aca"
+dependencies = [
+ "alloy-primitives",
+ "alloy-serde",
+ "serde",
+]
+
+[[package]]
+name = "alloy-json-abi"
+version = "0.8.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ded610181f3dad5810f6ff12d1a99994cf9b42d2fcb7709029352398a5da5ae6"
+dependencies = [
+ "alloy-primitives",
+ "alloy-sol-type-parser",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "alloy-json-rpc"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3c717b5298fad078cd3a418335b266eba91b511383ca9bd497f742d5975d5ab"
+dependencies = [
+ "alloy-primitives",
+ "alloy-sol-types",
+ "serde",
+ "serde_json",
+ "thiserror",
+ "tracing",
+]
+
+[[package]]
+name = "alloy-network"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fb3705ce7d8602132bcf5ac7a1dd293a42adc2f183abf5907c30ac535ceca049"
+dependencies = [
+ "alloy-consensus",
+ "alloy-eips",
+ "alloy-json-rpc",
+ "alloy-network-primitives",
+ "alloy-primitives",
+ "alloy-rpc-types-eth",
+ "alloy-serde",
+ "alloy-signer",
+ "alloy-sol-types",
+ "async-trait",
+ "auto_impl",
+ "futures-utils-wasm",
+ "thiserror",
+]
+
+[[package]]
+name = "alloy-network-primitives"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94ad40869867ed2d9cd3842b1e800889e5b49e6b92da346e93862b4a741bedf3"
+dependencies = [
+ "alloy-eips",
+ "alloy-primitives",
+ "alloy-serde",
+ "serde",
+]
+
+[[package]]
+name = "alloy-primitives"
+version = "0.8.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd58d377699e6cfeab52c4a9d28bdc4ef37e2bd235ff2db525071fe37a2e9af5"
+dependencies = [
+ "alloy-rlp",
+ "bytes",
+ "cfg-if",
+ "const-hex",
+ "derive_more 1.0.0",
+ "foldhash",
+ "getrandom",
+ "hashbrown 0.15.0",
+ "hex-literal",
+ "indexmap 2.6.0",
+ "itoa",
+ "k256 0.13.4",
+ "keccak-asm",
+ "paste",
+ "proptest",
+ "rand 0.8.5",
+ "ruint",
+ "rustc-hash 2.0.0",
+ "serde",
+ "sha3 0.10.8",
+ "tiny-keccak 2.0.2",
+]
+
+[[package]]
+name = "alloy-provider"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "927f708dd457ed63420400ee5f06945df9632d5d101851952056840426a10dc5"
+dependencies = [
+ "alloy-chains",
+ "alloy-consensus",
+ "alloy-eips",
+ "alloy-json-rpc",
+ "alloy-network",
+ "alloy-network-primitives",
+ "alloy-primitives",
+ "alloy-pubsub",
+ "alloy-rpc-client",
+ "alloy-rpc-types-eth",
+ "alloy-transport",
+ "alloy-transport-http",
+ "alloy-transport-ipc",
+ "alloy-transport-ws",
+ "async-stream",
+ "async-trait",
+ "auto_impl",
+ "dashmap 6.1.0",
+ "futures 0.3.31",
+ "futures-utils-wasm",
+ "lru",
+ "pin-project",
+ "reqwest 0.12.9",
+ "serde",
+ "serde_json",
+ "thiserror",
+ "tokio",
+ "tracing",
+ "url",
+]
+
+[[package]]
+name = "alloy-pubsub"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2d05f63677e210d758cd5d6d1ce10f20c980c3560ccfbe79ba1997791862a04f"
+dependencies = [
+ "alloy-json-rpc",
+ "alloy-primitives",
+ "alloy-transport",
+ "bimap",
+ "futures 0.3.31",
+ "serde",
+ "serde_json",
+ "tokio",
+ "tokio-stream",
+ "tower 0.5.1",
+ "tracing",
+]
+
 [[package]]
 name = "alloy-rlp"
 version = "0.3.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "da0822426598f95e45dd1ea32a738dac057529a709ee645fcc516ffa4cbde08f"
 dependencies = [
+ "alloy-rlp-derive",
  "arrayvec 0.7.6",
  "bytes",
 ]

+[[package]]
+name = "alloy-rlp-derive"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b09cae092c27b6f1bde952653a22708691802e57bfef4a2973b80bea21efd3f"
+dependencies = [
+ "proc-macro2 1.0.89",
+ "quote 1.0.37",
+ "syn 2.0.85",
+]
+
+[[package]]
+name = "alloy-rpc-client"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7d82952dca71173813d4e5733e2c986d8b04aea9e0f3b0a576664c232ad050a5"
+dependencies = [
+ "alloy-json-rpc",
+ "alloy-primitives",
+ "alloy-pubsub",
+ "alloy-transport",
+ "alloy-transport-http",
+ "alloy-transport-ipc",
+ "alloy-transport-ws",
+ "futures 0.3.31",
+ "pin-project",
+ "reqwest 0.12.9",
+ "serde",
+ "serde_json",
+ "tokio",
+ "tokio-stream",
+ "tower 0.5.1",
+ "tracing",
+ "url",
+]
+
+[[package]]
+name = "alloy-rpc-types"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "64333d639f2a0cf73491813c629a405744e16343a4bc5640931be707c345ecc5"
+dependencies = [
+ "alloy-rpc-types-engine",
+ "alloy-rpc-types-eth",
+ "alloy-serde",
+ "serde",
+]
+
+[[package]]
+name = "alloy-rpc-types-engine"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1464c4dd646e1bdfde86ae65ce5ba168dbb29180b478011fe87117ae46b1629b"
+dependencies = [
+ "alloy-consensus",
+ "alloy-eips",
+ "alloy-primitives",
+ "alloy-rlp",
+ "derive_more 1.0.0",
+]
+
+[[package]]
+name = "alloy-rpc-types-eth"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "83aa984386deda02482660aa31cb8ca1e63d533f1c31a52d7d181ac5ec68e9b8"
+dependencies = [
+ "alloy-consensus",
+ "alloy-eips",
+ "alloy-network-primitives",
+ "alloy-primitives",
+ "alloy-rlp",
+ "alloy-serde",
+ "alloy-sol-types",
+ "cfg-if",
+ "derive_more 1.0.0",
+ "hashbrown 0.14.5",
+ "itertools 0.13.0",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "alloy-serde"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "731f75ec5d383107fd745d781619bd9cedf145836c51ecb991623d41278e71fa"
+dependencies = [
+ "alloy-primitives",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "alloy-signer"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "307324cca94354cd654d6713629f0383ec037e1ff9e3e3d547212471209860c0"
+dependencies = [
+ "alloy-primitives",
+ "async-trait",
+ "auto_impl",
+ "elliptic-curve 0.13.8",
+ "k256 0.13.4",
+ "thiserror",
+]
+
+[[package]]
+name = "alloy-signer-local"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9fabe917ab1778e760b4701628d1cae8e028ee9d52ac6307de4e1e9286ab6b5f"
+dependencies = [
+ "alloy-consensus",
+ "alloy-network",
+ "alloy-primitives",
+ "alloy-signer",
+ "async-trait",
+ "k256 0.13.4",
+ "rand 0.8.5",
+ "thiserror",
+]
+
+[[package]]
+name = "alloy-sol-macro"
+version = "0.8.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a1b42ac8f45e2f49f4bcdd72cbfde0bb148f5481d403774ffa546e48b83efc1"
+dependencies = [
+ "alloy-sol-macro-expander",
+ "alloy-sol-macro-input",
+ "proc-macro-error2",
+ "proc-macro2 1.0.89",
+ "quote 1.0.37",
+ "syn 2.0.85",
+]
+
+[[package]]
+name = "alloy-sol-macro-expander"
+version = "0.8.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06318f1778e57f36333e850aa71bd1bb5e560c10279e236622faae0470c50412"
+dependencies = [
+ "alloy-json-abi",
+ "alloy-sol-macro-input",
+ "const-hex",
+ "heck 0.5.0",
+ "indexmap 2.6.0",
+ "proc-macro-error2",
+ "proc-macro2 1.0.89",
+ "quote 1.0.37",
+ "syn 2.0.85",
+ "syn-solidity",
+ "tiny-keccak 2.0.2",
+]
+
+[[package]]
+name = "alloy-sol-macro-input"
+version = "0.8.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eaebb9b0ad61a41345a22c9279975c0cdd231b97947b10d7aad1cf0a7181e4a5"
+dependencies = [
+ "alloy-json-abi",
+ "const-hex",
+ "dunce",
+ "heck 0.5.0",
+ "proc-macro2 1.0.89",
+ "quote 1.0.37",
+ "serde_json",
+ "syn 2.0.85",
+ "syn-solidity",
+]
+
+[[package]]
+name = "alloy-sol-type-parser"
+version = "0.8.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "12c71028bfbfec210e24106a542aad3def7caf1a70e2c05710e92a98481980d3"
+dependencies = [
+ "serde",
+ "winnow 0.6.20",
+]
+
+[[package]]
+name = "alloy-sol-types"
+version = "0.8.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "374d7fb042d68ddfe79ccb23359de3007f6d4d53c13f703b64fb0db422132111"
+dependencies = [
+ "alloy-json-abi",
+ "alloy-primitives",
+ "alloy-sol-macro",
+ "const-hex",
+ "serde",
+]
+
+[[package]]
+name = "alloy-transport"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "33616b2edf7454302a1d48084db185e52c309f73f6c10be99b0fe39354b3f1e9"
+dependencies = [
+ "alloy-json-rpc",
+ "base64 0.22.1",
+ "futures-util",
+ "futures-utils-wasm",
+ "serde",
+ "serde_json",
+ "thiserror",
+ "tokio",
+ "tower 0.5.1",
+ "tracing",
+ "url",
+]
+
+[[package]]
+name = "alloy-transport-http"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a944f5310c690b62bbb3e7e5ce34527cbd36b2d18532a797af123271ce595a49"
+dependencies = [
+ "alloy-json-rpc",
+ "alloy-transport",
+ "reqwest 0.12.9",
+ "serde_json",
+ "tower 0.5.1",
+ "tracing",
+ "url",
+]
+
+[[package]]
+name = "alloy-transport-ipc"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09fd8491249f74d16ec979b1f5672377b12ebb818e6056478ffa386954dbd350"
+dependencies = [
+ "alloy-json-rpc",
+ "alloy-pubsub",
+ "alloy-transport",
+ "bytes",
+ "futures 0.3.31",
+ "interprocess",
+ "pin-project",
+ "serde_json",
+ "tokio",
+ "tokio-util",
+ "tracing",
+]
+
+[[package]]
+name = "alloy-transport-ws"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9704761f6297fe482276bee7f77a93cb42bd541c2bd6c1c560b6f3a9ece672e"
+dependencies = [
+ "alloy-pubsub",
+ "alloy-transport",
+ "futures 0.3.31",
+ "http 1.1.0",
+ "rustls 0.23.16",
+ "serde_json",
+ "tokio",
+ "tokio-tungstenite",
+ "tracing",
+ "ws_stream_wasm",
+]
+
 [[package]]
 name = "android-tzdata"
 version = "0.1.1"
@@ -196,6 +761,21 @@ version = "1.0.91"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8"

+[[package]]
+name = "arbitrary"
+version = "1.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223"
+dependencies = [
+ "derive_arbitrary",
+]
+
+[[package]]
+name = "arc-swap"
+version = "1.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
+
 [[package]]
 name = "ark-bn254"
 version = "0.5.0"
@@ -773,6 +1353,17 @@ dependencies = [
  "syn 2.0.85",
 ]

+[[package]]
+name = "async_io_stream"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c"
+dependencies = [
+ "futures 0.3.31",
+ "pharos",
+ "rustc_version 0.4.1",
+]
+
 [[package]]
 name = "atoi"
 version = "2.0.0"
@@ -1064,6 +1655,12 @@ dependencies = [
  "num-traits",
 ]

+[[package]]
+name = "bimap"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "230c5f1ca6a325a32553f8640d31ac9b49f2411e901e427570154868b46da4f7"
+
 [[package]]
 name = "bincode"
 version = "1.3.3"
@@ -1089,7 +1686,7 @@ dependencies = [
  "proc-macro2 1.0.89",
  "quote 1.0.37",
  "regex",
- "rustc-hash",
+ "rustc-hash 1.1.0",
  "shlex",
  "syn 2.0.85",
 ]
@@ -1111,7 +1708,7 @@ dependencies = [
  "proc-macro2 1.0.89",
  "quote 1.0.37",
  "regex",
- "rustc-hash",
+ "rustc-hash 1.1.0",
  "shlex",
  "syn 2.0.85",
  "which",
@@ -1415,6 +2012,12 @@ dependencies = [
  "tinyvec",
 ]

+[[package]]
+name = "build_const"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b4ae4235e6dac0694637c763029ecea1a2ec9e4e06ec2729bd21ba4d9c863eb7"
+
 [[package]]
 name = "build_html"
 version = "2.5.0"
@@ -1493,6 +2096,21 @@ dependencies = [
  "pkg-config",
 ]

+[[package]]
+name = "c-kzg"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f0307f72feab3300336fb803a57134159f6e20139af1357f36c54cb90d8e8928"
+dependencies = [
+ "blst",
+ "cc",
+ "glob",
+ "hex",
+ "libc",
+ "once_cell",
+ "serde",
+]
+
 [[package]]
 name = "camino"
 version = "1.1.9"
@@ -2021,6 +2639,19 @@ dependencies = [
  "compile-fmt",
 ]

+[[package]]
+name = "const-hex"
+version = "1.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0121754e84117e65f9d90648ee6aa4882a6e63110307ab73967a4c5e7e69e586"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "hex",
+ "proptest",
+ "serde",
+]
+
 [[package]]
 name = "const-oid"
 version = "0.9.6"
@@ -2554,6 +3185,17 @@ dependencies = [
  "syn 1.0.109",
 ]

+[[package]]
+name = "derive_arbitrary"
+version = "1.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800"
+dependencies = [
+ "proc-macro2 1.0.89",
+ "quote 1.0.37",
+ "syn 2.0.85",
+]
+
 [[package]]
 name = "derive_more"
 version = "0.99.18"
@@ -2624,6 +3266,15 @@ dependencies = [
  "dirs-sys",
 ]

+[[package]]
+name = "dirs"
+version = "5.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
+dependencies = [
+ "dirs-sys",
+]
+
 [[package]]
 name = "dirs-next"
 version = "2.0.0"
@@ -2657,6 +3308,23 @@ dependencies = [
  "winapi",
 ]

+[[package]]
+name = "displaydoc"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
+dependencies = [
+ "proc-macro2 1.0.89",
+ "quote 1.0.37",
+ "syn 2.0.85",
+]
+
+[[package]]
+name = "doctest-file"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aac81fa3e28d21450aa4d2ac065992ba96a1d7303efbce51a95f4fd175b67562"
+
 [[package]]
 name = "dotenvy"
 version = "0.15.7"
@@ -2681,6 +3349,12 @@ version = "1.0.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813"

+[[package]]
+name = "dyn-clone"
+version = "1.0.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125"
+
 [[package]]
 name = "ecdsa"
 version = "0.14.8"
@@ -3186,7 +3860,137 @@ version = "1.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
 dependencies = [
- "percent-encoding",
+ "percent-encoding",
+]
+
+[[package]]
+name = "foundry-compilers"
+version = "0.11.6"
+source = "git+https://github.com/Moonsong-Labs/compilers.git?rev=7c69695e5c75451f158dd2456bf8c94a7492ea0b#7c69695e5c75451f158dd2456bf8c94a7492ea0b"
+dependencies = [
+ "alloy-json-abi",
+ "alloy-primitives",
+ "auto_impl",
+ "derivative",
+ "dirs",
+ "dyn-clone",
+ "foundry-compilers-artifacts",
+ "foundry-compilers-core",
+ "fs4 0.8.4",
+ "fs_extra",
+ "futures-util",
+ "home",
+ "itertools 0.13.0",
+ "md-5",
+ "once_cell",
+ "path-slash",
+ "rand 0.8.5",
+ "rayon",
+ "reqwest 0.12.9",
+ "semver 1.0.23",
+ "serde",
+ "serde_json",
+ "sha2 0.10.8",
+ "solang-parser",
+ "svm-rs",
+ "svm-rs-builds",
+ "tempfile",
+ "thiserror",
+ "tokio",
+ "tracing",
+ "walkdir",
+ "winnow 0.6.20",
+ "yansi",
+]
+
+[[package]]
+name = "foundry-compilers-artifacts"
+version = "0.11.6"
+source = "git+https://github.com/Moonsong-Labs/compilers.git?rev=7c69695e5c75451f158dd2456bf8c94a7492ea0b#7c69695e5c75451f158dd2456bf8c94a7492ea0b"
+dependencies = [
+ "foundry-compilers-artifacts-solc",
+ "foundry-compilers-artifacts-vyper",
+ "foundry-compilers-artifacts-zksolc",
+]
+
+[[package]]
+name = "foundry-compilers-artifacts-solc"
+version = "0.11.6"
+source = "git+https://github.com/Moonsong-Labs/compilers.git?rev=7c69695e5c75451f158dd2456bf8c94a7492ea0b#7c69695e5c75451f158dd2456bf8c94a7492ea0b"
+dependencies = [
+ "alloy-json-abi",
+ "alloy-primitives",
+ "foundry-compilers-core",
+ "futures-util",
+ "md-5",
+ "path-slash",
+ "rayon",
+ "semver 1.0.23",
+ "serde",
+ "serde_json",
+ "serde_repr",
+ "thiserror",
+ "tokio",
+ "tracing",
+ "walkdir",
+ "yansi",
+]
+
+[[package]]
+name = "foundry-compilers-artifacts-vyper"
+version = "0.11.6"
+source = "git+https://github.com/Moonsong-Labs/compilers.git?rev=7c69695e5c75451f158dd2456bf8c94a7492ea0b#7c69695e5c75451f158dd2456bf8c94a7492ea0b"
+dependencies = [
+ "alloy-json-abi",
+ "alloy-primitives",
+ "foundry-compilers-artifacts-solc",
+ "foundry-compilers-core",
+ "path-slash",
+ "semver 1.0.23",
+ "serde",
+]
+
+[[package]]
+name = "foundry-compilers-artifacts-zksolc"
+version = "0.11.6"
+source = "git+https://github.com/Moonsong-Labs/compilers.git?rev=7c69695e5c75451f158dd2456bf8c94a7492ea0b#7c69695e5c75451f158dd2456bf8c94a7492ea0b"
+dependencies = [
+ "alloy-json-abi",
+ "alloy-primitives",
+ "foundry-compilers-artifacts-solc",
+ "foundry-compilers-core",
+ "md-5",
+ "path-slash",
+ "rayon",
+ "semver 1.0.23",
+ "serde",
+ "serde_json",
+ "thiserror",
+ "tracing",
+ "walkdir",
+ "yansi",
+]
+
+[[package]]
+name = "foundry-compilers-core"
+version = "0.11.6"
+source = "git+https://github.com/Moonsong-Labs/compilers.git?rev=7c69695e5c75451f158dd2456bf8c94a7492ea0b#7c69695e5c75451f158dd2456bf8c94a7492ea0b"
+dependencies = [
+ "alloy-primitives",
+ "cfg-if",
+ "dunce",
+ "fs_extra",
+ "once_cell",
+ "path-slash",
+ "regex",
+ "semver 1.0.23",
+ "serde",
+ "serde_json",
+ "svm-rs",
+ "tempfile",
+ "thiserror",
+ "tokio",
+ "walkdir",
 ]

 [[package]]
@@ -3255,6 +4059,26 @@ dependencies = [
  "zksync_bellman",
 ]

+[[package]]
+name = "fs4"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f7e180ac76c23b45e767bd7ae9579bc0bb458618c4bc71835926e098e61d15f8"
+dependencies = [
+ "rustix",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
+name = "fs4"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e8c6b3bd49c37d2aa3f3f2220233b29a7cd23f79d1fe70e5337d25fb390793de"
+dependencies = [
+ "rustix",
+ "windows-sys 0.52.0",
+]
+
 [[package]]
 name = "fs_extra"
 version = "1.3.0"
@@ -3382,7 +4206,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24"
 dependencies = [
  "gloo-timers 0.2.6",
- "send_wrapper",
+ "send_wrapper 0.4.0",
 ]

@@ -3404,6 +4228,12 @@ dependencies = [
  "slab",
 ]

+[[package]]
+name = "futures-utils-wasm"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9"
+
 [[package]]
 name = "generic-array"
 version = "0.14.7"
@@ -3436,7 +4266,26 @@ dependencies = [
  "zksync_protobuf",
  "zksync_protobuf_config",
  "zksync_types",
- "zksync_utils",
+]
+
+[[package]]
+name = "get_all_blobs"
+version = "0.1.0"
+dependencies = [
+ "alloy",
+ "anyhow",
+ "axum 0.7.7",
+ "futures 0.3.31",
+ "hex",
+ "kzgpad-rs",
+ "prost 0.13.3",
+ "reqwest 0.12.9",
+ "rlp",
+ "rustls 0.23.16",
+ "serde",
+ "serde_json",
+ "tokio",
+ "tonic 0.12.3",
 ]

 [[package]]
@@ -3713,7 +4562,7 @@ dependencies = [
  "log",
  "pest",
  "pest_derive",
- "quick-error",
+ "quick-error 2.0.1",
  "serde",
  "serde_json",
 ]
@@ -3747,6 +4596,7 @@ dependencies = [
  "allocator-api2",
  "equivalent",
  "foldhash",
+ "serde",
 ]

 [[package]]
@@ -3805,6 +4655,9 @@ name = "hex"
 version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+dependencies = [
+ "serde",
+]

 [[package]]
 name = "hex-conservative"
@@ -4052,10 +4905,12 @@ dependencies = [
  "hyper-util",
  "log",
  "rustls 0.23.16",
+ "rustls-native-certs 0.8.0",
  "rustls-pki-types",
  "tokio",
  "tokio-rustls 0.26.0",
  "tower-service",
+ "webpki-roots",
 ]

 [[package]]
@@ -4246,6 +5101,7 @@ checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da"
 dependencies = [
  "equivalent",
  "hashbrown 0.15.0",
+ "serde",
 ]

 [[package]]
@@ -4285,6 +5141,21 @@ dependencies = [
  "cfg-if",
 ]

+[[package]]
+name = "interprocess"
+version = "2.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d2f4e4a06d42fab3e85ab1b419ad32b09eab58b901d40c57935ff92db3287a13"
+dependencies = [
+ "doctest-file",
+ "futures-core",
+ "libc",
+ "recvmsg",
+ "tokio",
+ "widestring",
+ "windows-sys 0.52.0",
+]
+
 [[package]]
 name = "ipnet"
 version = "2.10.1"
@@ -4300,6 +5171,16 @@ dependencies = [
  "serde",
 ]

+[[package]]
+name = "iri-string"
+version = "0.7.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc0f0a572e8ffe56e2ff4f769f32ffe919282c3916799f8b68688b6030063bea"
+dependencies = [
+ "memchr",
+ "serde",
+]
+
 [[package]]
 name = "is_terminal_polyfill"
 version = "1.70.1"
@@ -4477,7 +5358,7 @@ dependencies = [
  "hyper 0.14.31",
  "jsonrpsee-types 0.21.0",
  "pin-project",
- "rustc-hash",
+ "rustc-hash 1.1.0",
  "serde",
  "serde_json",
  "thiserror",
@@ -4505,7 +5386,7 @@ dependencies = [
  "parking_lot",
  "pin-project",
  "rand 0.8.5",
- "rustc-hash",
+ "rustc-hash 1.1.0",
  "serde",
  "serde_json",
  "thiserror",
@@ -4701,6 +5582,16 @@ dependencies = [
  "cpufeatures",
 ]

+[[package]]
+name = "keccak-asm"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "505d1856a39b200489082f90d897c3f07c455563880bc5952e38eabf731c83b6"
+dependencies = [
+ "digest 0.10.7",
+ "sha3-asm",
+]
+
 [[package]]
 name = "kv-log-macro"
 version = "1.0.7"
@@ -4710,6 +5601,14 @@ dependencies = [
  "log",
 ]

+[[package]]
+name = "kzgpad-rs"
+version = "0.1.0"
+source = "git+https://github.com/Layr-Labs/kzgpad-rs.git?tag=v0.1.0#b5f8c8d3d6482407dc118cb1f51597a017a1cc89"
+dependencies = [
+ "rand 0.8.5",
+]
+
 [[package]]
 name = "lalrpop"
 version = "0.20.2"
@@ -4946,10 +5845,10 @@ dependencies = [
  "tracing",
  "vise",
  "zksync_config",
- "zksync_contracts",
  "zksync_eth_client",
  "zksync_eth_signer",
  "zksync_system_constants",
+ "zksync_test_contracts",
  "zksync_types",
  "zksync_utils",
  "zksync_vlog",
@@ -4966,6 +5865,12 @@ dependencies = [
  "scopeguard",
 ]

+[[package]]
+name = "lockfree-object-pool"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9374ef4228402d4b7e403e5838cb880d9ee663314b0a900d5a6aabf0c213552e"
+
 [[package]]
 name = "log"
 version = "0.4.22"
@@ -5600,6 +6505,46 @@ dependencies = [
  "memchr",
 ]

+[[package]]
+name = "octocrab"
+version = "0.41.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2dfd11f6efbd39491d71a3864496f0b6f45e2d01b73b26c55d631c4e0dafaef"
+dependencies = [
+ "arc-swap",
+ "async-trait",
+ "base64 0.22.1",
+ "bytes",
+ "cfg-if",
+ "chrono",
+ "either",
+ "futures 0.3.31",
+ "futures-core",
+ "futures-util",
+ "http 1.1.0",
+ "http-body 1.0.1",
+ "http-body-util",
+ "hyper 1.5.0",
+ "hyper-rustls 0.27.3",
+ "hyper-timeout 0.5.1",
+ "hyper-util",
+ "jsonwebtoken",
+ "once_cell",
+ "percent-encoding",
+ "pin-project",
+ "secrecy 0.10.3",
+ "serde",
+ "serde_json",
+ "serde_path_to_error",
+ "serde_urlencoded",
+ "snafu",
+ "tokio",
+ "tower 0.5.1",
+ "tower-http 0.6.2",
+ "tracing",
+ "url",
+]
+
 [[package]]
 name = "once_cell"
 version = "1.20.2"
@@ -5871,6 +6816,12 @@ version = "1.0.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"

+[[package]]
+name = "path-slash"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e91099d4268b0e11973f036e885d652fb0b21fedcf69738c627f94db6a44f42"
+
 [[package]]
 name = "pbjson"
 version = "0.6.0"
@@ -6003,6 +6954,49 @@ dependencies = [
  "indexmap 2.6.0",
 ]

+[[package]]
+name = "pharos"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414"
+dependencies = [
+ "futures 0.3.31",
+ "rustc_version 0.4.1",
+]
+
+[[package]]
+name = "phf"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc"
+dependencies = [
+ "phf_macros",
+ "phf_shared 0.11.2",
+]
+
+[[package]]
+name = "phf_generator"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0"
+dependencies = [
+ "phf_shared 0.11.2",
+ "rand 0.8.5",
+]
+
+[[package]]
+name = "phf_macros"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b"
+dependencies = [
+ "phf_generator",
+ "phf_shared 0.11.2",
+ "proc-macro2 1.0.89",
+ "quote 1.0.37",
+ "syn 2.0.85",
+]
+
 [[package]]
 name = "phf_shared"
 version = "0.10.0"
@@ -6012,6 +7006,15 @@ dependencies = [
  "siphasher 0.3.11",
 ]

+[[package]]
+name = "phf_shared"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b"
+dependencies = [
+ "siphasher 0.3.11",
+]
+
 [[package]]
 name = "pico-args"
 version = "0.5.0"
@@ -6271,6 +7274,28 @@ dependencies = [
  "version_check",
 ]

+[[package]]
+name = "proc-macro-error-attr2"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5"
+dependencies = [
+ "proc-macro2 1.0.89",
+ "quote 1.0.37",
+]
+
+[[package]]
+name = "proc-macro-error2"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802"
+dependencies = [
+ "proc-macro-error-attr2",
+ "proc-macro2 1.0.89",
+ "quote 1.0.37",
+ "syn 2.0.85",
+]
+
 [[package]]
 name = "proc-macro-hack"
 version = "0.5.20+deprecated"
@@ -6324,6 +7349,8 @@ version = "1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d"
 dependencies = [
+ "bit-set",
+ "bit-vec",
  "bitflags 2.6.0",
  "lazy_static",
  "num-traits",
@@ -6331,6 +7358,8 @@ dependencies = [
  "rand_chacha",
  "rand_xorshift",
  "regex-syntax 0.8.5",
+ "rusty-fork",
+ "tempfile",
  "unarray",
 ]

@@ -6540,6 +7569,12 @@ dependencies = [
  "winapi",
 ]

+[[package]]
+name = "quick-error"
+version = "1.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
+
 [[package]]
 name = "quick-error"
 version = "2.0.1"
@@ -6555,6 +7590,55 @@ dependencies = [
  "byteorder",
 ]

+[[package]]
+name = "quinn"
+version = "0.11.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8c7c5fdde3cdae7203427dc4f0a68fe0ed09833edc525a03456b153b79828684"
+dependencies = [
+ "bytes",
+ "pin-project-lite",
+ "quinn-proto",
+ "quinn-udp",
+ "rustc-hash 2.0.0",
+ "rustls 0.23.16",
+ "socket2",
+ "thiserror",
+ "tokio",
+ "tracing",
+]
+
+[[package]]
+name = "quinn-proto"
+version = "0.11.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fadfaed2cd7f389d0161bb73eeb07b7b78f8691047a6f3e73caaeae55310a4a6"
+dependencies = [
+ "bytes",
+ "rand 0.8.5",
+ "ring",
+ "rustc-hash 2.0.0",
+ "rustls 0.23.16",
+ "slab",
+ "thiserror",
+ "tinyvec",
+ "tracing",
+]
+
+[[package]]
+name = "quinn-udp"
+version = "0.5.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7d5a626c6807713b15cac82a6acaccd6043c9a5408c24baae07611fec3f243da"
+dependencies = [
+ "cfg_aliases",
+ "libc",
+ "once_cell",
+ "socket2",
+ "tracing",
+ "windows-sys 0.59.0",
+]
+
 [[package]]
 name = "quote"
 version = "0.6.13"
@@ -6601,6 +7685,7 @@ dependencies = [
  "libc",
  "rand_chacha",
  "rand_core 0.6.4",
+ "serde",
 ]

 [[package]]
@@ -6693,6 +7778,12 @@ dependencies = [
  "rand_core 0.3.1",
 ]

+[[package]]
+name = "recvmsg"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3edd4d5d42c92f0a659926464d4cce56b562761267ecf0f469d85b7de384175"
+
 [[package]]
 name = "redox_syscall"
 version = "0.5.7"
@@ -6836,7 +7927,11 @@ dependencies = [
  "once_cell",
  "percent-encoding",
  "pin-project-lite",
+ "quinn",
+ "rustls 0.23.16",
+ "rustls-native-certs 0.8.0",
  "rustls-pemfile 2.2.0",
+ "rustls-pki-types",
  "serde",
  "serde_json",
  "serde_urlencoded",
@@ -6844,6 +7939,8 @@ dependencies = [
  "system-configuration 0.6.1",
  "tokio",
  "tokio-native-tls",
+ "tokio-rustls 0.26.0",
+ "tokio-socks",
  "tokio-util",
  "tower-service",
  "url",
@@ -6851,6 +7948,7 @@ dependencies = [
  "wasm-bindgen-futures",
  "wasm-streams",
  "web-sys",
+ "webpki-roots",
  "windows-registry",
 ]

@@ -7098,6 +8196,15 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"

+[[package]]
+name = "rustc-hash"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152"
+dependencies = [
+ "rand 0.8.5",
+]
+
 [[package]]
 name = "rustc-hex"
 version = "2.1.0"
@@ -7193,7 +8300,20 @@
 name = "rustls-native-certs"
 version = "0.7.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5"
+checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5"
+dependencies = [
+ "openssl-probe",
+ "rustls-pemfile 2.2.0",
+ "rustls-pki-types",
+ "schannel",
+ "security-framework",
+]
+
+[[package]]
+name = "rustls-native-certs"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fcaf18a4f2be7326cd874a5fa579fae794320a0f388d365dca7e480e55f83f8a"
 dependencies = [
  "openssl-probe",
  "rustls-pemfile 2.2.0",
@@ -7281,6 +8401,18 @@ version = "1.0.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248"

+[[package]]
+name = "rusty-fork"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f"
+dependencies = [
+ "fnv",
+ "quick-error 1.2.3",
+ "tempfile",
+ "wait-timeout",
+]
+
 [[package]]
 name = "ruzstd"
 version = "0.5.0"
@@ -7554,6 +8686,15 @@ dependencies = [
  "zeroize",
 ]

+[[package]]
+name = "secrecy"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e891af845473308773346dc847b2c23ee78fe442e0472ac50e22a18a93d3ae5a"
+dependencies = [
+ "zeroize",
+]
+
 [[package]]
 name = "security-framework"
 version = "2.11.1"
@@ -7625,6 +8766,12 @@ version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0"

+[[package]]
+name = "send_wrapper"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73"
+
 [[package]]
 name = "sentry"
 version = "0.31.8"
@@ -7985,6 +9132,16 @@ dependencies = [
  "keccak",
 ]

+[[package]]
+name = "sha3-asm"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c28efc5e327c837aa837c59eae585fc250715ef939ac32881bcc11677cd02d46"
+dependencies = [
+ "cc",
+ "cfg-if",
+]
+
 [[package]]
 name = "sha3_ce"
 version = "0.10.6"
@@ -8039,6 +9196,12 @@ dependencies = [
  "rand_core 0.6.4",
 ]

+[[package]]
+name = "simd-adler32"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe"
+
 [[package]]
 name = "simdutf8"
 version = "0.1.5"
@@ -8226,6 +9389,27 @@ dependencies = [
  "zeroize",
 ]

+[[package]]
+name = "snafu"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "223891c85e2a29c3fe8fb900c1fae5e69c2e42415e3177752e8718475efa5019"
+dependencies = [
+ "snafu-derive",
+]
+ +[[package]] +name = "snafu-derive" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c3c6b7927ffe7ecaa769ee0e3994da3b8cafc8f444578982c83ecb161af917" +dependencies = [ + "heck 0.5.0", + "proc-macro2 1.0.89", + "quote 1.0.37", + "syn 2.0.85", +] + [[package]] name = "snapshots_creator" version = "0.1.0" @@ -8304,6 +9488,20 @@ dependencies = [ "sha1", ] +[[package]] +name = "solang-parser" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c425ce1c59f4b154717592f0bdf4715c3a1d55058883622d3157e1f0908a5b26" +dependencies = [ + "itertools 0.11.0", + "lalrpop", + "lalrpop-util", + "phf", + "thiserror", + "unicode-xid 0.2.6", +] + [[package]] name = "sp-core-hashing" version = "15.0.0" @@ -8592,7 +9790,7 @@ dependencies = [ "new_debug_unreachable", "once_cell", "parking_lot", - "phf_shared", + "phf_shared 0.10.0", "precomputed-hash", ] @@ -8807,7 +10005,7 @@ dependencies = [ "pbkdf2", "regex", "schnorrkel", - "secrecy", + "secrecy 0.8.0", "sha2 0.10.8", "sp-core-hashing", "subxt", @@ -8815,6 +10013,39 @@ dependencies = [ "zeroize", ] +[[package]] +name = "svm-rs" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4aebac1b1ef2b46e2e2bdf3c09db304800f2a77c1fa902bd5231490203042be8" +dependencies = [ + "const-hex", + "dirs", + "fs4 0.9.1", + "reqwest 0.12.9", + "semver 1.0.23", + "serde", + "serde_json", + "sha2 0.10.8", + "tempfile", + "thiserror", + "url", + "zip", +] + +[[package]] +name = "svm-rs-builds" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2fa0f145894cb4d1c14446f08098ee5f21fc37ccbd1a7dd9dd355bbc806de3b" +dependencies = [ + "build_const", + "const-hex", + "semver 1.0.23", + "serde_json", + "svm-rs", +] + [[package]] name = "syn" version = "0.15.44" @@ -8848,6 +10079,18 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "syn-solidity" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edf42e81491fb8871b74df3d222c64ae8cbc1269ea509fa768a3ed3e1b0ac8cb" +dependencies = [ + "paste", + "proc-macro2 1.0.89", + "quote 1.0.37", + "syn 2.0.85", +] + [[package]] name = "syn_derive" version = "0.1.8" @@ -9286,6 +10529,18 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-socks" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d4770b8024672c1101b3f6733eab95b18007dbe0847a8afe341fcf79e06043f" +dependencies = [ + "either", + "futures-util", + "thiserror", + "tokio", +] + [[package]] name = "tokio-stream" version = "0.1.16" @@ -9298,6 +10553,22 @@ dependencies = [ "tokio-util", ] +[[package]] +name = "tokio-tungstenite" +version = "0.23.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6989540ced10490aaf14e6bad2e3d33728a2813310a0c71d1574304c49631cd" +dependencies = [ + "futures-util", + "log", + "rustls 0.23.16", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.0", + "tungstenite", + "webpki-roots", +] + [[package]] name = "tokio-util" version = "0.7.12" @@ -9409,8 +10680,11 @@ dependencies = [ "percent-encoding", "pin-project", "prost 0.13.3", + "rustls-native-certs 0.8.0", + "rustls-pemfile 2.2.0", "socket2", "tokio", + "tokio-rustls 0.26.0", "tokio-stream", "tower 0.4.13", "tower-layer", @@ -9449,6 +10723,7 @@ dependencies = [ "pin-project-lite", "sync_wrapper 0.1.2", "tokio", + "tokio-util", "tower-layer", "tower-service", "tracing", @@ -9474,6 +10749,25 @@ 
dependencies = [ "tower-service", ] +[[package]] +name = "tower-http" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "403fa3b783d4b626a8ad51d766ab03cb6d2dbfc46b1c5d4448395e6628dc9697" +dependencies = [ + "bitflags 2.6.0", + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "iri-string", + "pin-project-lite", + "tower 0.5.1", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "tower-layer" version = "0.3.3" @@ -9607,6 +10901,26 @@ dependencies = [ "toml", ] +[[package]] +name = "tungstenite" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e2e2ce1e47ed2994fd43b04c8f618008d4cabdd5ee34027cf14f9d918edd9c8" +dependencies = [ + "byteorder", + "bytes", + "data-encoding", + "http 1.1.0", + "httparse", + "log", + "rand 0.8.5", + "rustls 0.23.16", + "rustls-pki-types", + "sha1", + "thiserror", + "utf-8", +] + [[package]] name = "twox-hash" version = "1.6.3" @@ -9799,6 +11113,12 @@ version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + [[package]] name = "utf8parse" version = "0.2.2" @@ -9908,12 +11228,21 @@ dependencies = [ "yab", "zksync_contracts", "zksync_multivm", + "zksync_test_contracts", "zksync_types", - "zksync_utils", "zksync_vlog", "zksync_vm2", ] +[[package]] +name = "wait-timeout" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +dependencies = [ + "libc", +] + [[package]] name = "walkdir" version = "2.5.0" @@ -10122,6 +11451,12 @@ dependencies = [ "wasite", ] +[[package]] +name = "widestring" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7219d36b6eac893fa81e84ebe06485e7dcbb616177469b142df14f1f4deb1311" + [[package]] name = "winapi" version = "0.3.9" @@ -10368,6 +11703,25 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "ws_stream_wasm" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7999f5f4217fe3818726b66257a4475f71e74ffd190776ad053fa159e50737f5" +dependencies = [ + "async_io_stream", + "futures 0.3.31", + "js-sys", + "log", + "pharos", + "rustc_version 0.4.1", + "send_wrapper 0.6.0", + "thiserror", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "wyz" version = "0.5.1" @@ -10454,6 +11808,23 @@ dependencies = [ "syn 2.0.85", ] +[[package]] +name = "zip" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc5e4288ea4057ae23afc69a4472434a87a2495cafce6632fd1c4ec9f5cf3494" +dependencies = [ + "arbitrary", + "crc32fast", + "crossbeam-utils", + "displaydoc", + "flate2", + "indexmap 2.6.0", + "memchr", + "thiserror", + "zopfli", +] + [[package]] name = "zk_evm" version = "0.131.0-rc.2" @@ -10717,7 +12088,6 @@ dependencies = [ "zksync_external_price_api", "zksync_node_fee_model", "zksync_types", - "zksync_utils", ] [[package]] @@ -10727,13 +12097,15 @@ dependencies = [ "anyhow", "bincode", "chrono", + "const-decoder", "ethabi", "hex", "num_enum 0.7.3", - "secrecy", + "secrecy 0.8.0", "serde", "serde_json", "serde_with", + "sha2 0.10.8", "strum", 
"thiserror", "tiny-keccak 2.0.2", @@ -10831,7 +12203,6 @@ dependencies = [ "zksync_node_test_utils", "zksync_system_constants", "zksync_types", - "zksync_utils", "zksync_web3_decl", ] @@ -10860,7 +12231,7 @@ version = "0.1.0" dependencies = [ "anyhow", "rand 0.8.5", - "secrecy", + "secrecy 0.8.0", "serde", "serde_json", "tracing", @@ -11063,13 +12434,12 @@ dependencies = [ "test-casing", "tokio", "tower 0.4.13", - "tower-http", + "tower-http 0.5.2", "tracing", "vise", "zksync_dal", "zksync_node_test_utils", "zksync_types", - "zksync_utils", ] [[package]] @@ -11094,10 +12464,15 @@ name = "zksync_contract_verifier_lib" version = "0.1.0" dependencies = [ "anyhow", + "assert_matches", "chrono", "ethabi", + "futures-util", "hex", + "octocrab", "regex", + "reqwest 0.12.9", + "rustls 0.23.16", "semver 1.0.23", "serde", "serde_json", @@ -11120,12 +12495,13 @@ dependencies = [ name = "zksync_contracts" version = "0.1.0" dependencies = [ + "bincode", "envy", - "ethabi", "hex", "once_cell", "serde", "serde_json", + "zksync_basic_types", "zksync_utils", ] @@ -11157,7 +12533,6 @@ dependencies = [ "sha2 0.10.8", "thiserror", "zksync_basic_types", - "zksync_utils", ] [[package]] @@ -11250,7 +12625,6 @@ dependencies = [ "zksync_da_client", "zksync_dal", "zksync_types", - "zksync_utils", ] [[package]] @@ -11284,9 +12658,8 @@ dependencies = [ "zksync_protobuf", "zksync_protobuf_build", "zksync_system_constants", - "zksync_test_account", + "zksync_test_contracts", "zksync_types", - "zksync_utils", "zksync_vm_interface", ] @@ -11368,7 +12741,6 @@ dependencies = [ "zksync_prover_interface", "zksync_shared_metrics", "zksync_types", - "zksync_utils", ] [[package]] @@ -11405,7 +12777,7 @@ dependencies = [ [[package]] name = "zksync_external_node" -version = "25.1.0" +version = "25.2.0" dependencies = [ "anyhow", "assert_matches", @@ -11454,7 +12826,6 @@ dependencies = [ "zksync_state_keeper", "zksync_storage", "zksync_types", - "zksync_utils", "zksync_vlog", "zksync_web3_decl", ] @@ -11639,7 +13010,6 @@ dependencies = [ "zksync_storage", "zksync_system_constants", "zksync_types", - "zksync_utils", ] [[package]] @@ -11674,7 +13044,6 @@ dependencies = [ "zksync_shared_metrics", "zksync_storage", "zksync_types", - "zksync_utils", ] [[package]] @@ -11717,9 +13086,8 @@ dependencies = [ "zksync_eth_signer", "zksync_mini_merkle_tree", "zksync_system_constants", - "zksync_test_account", + "zksync_test_contracts", "zksync_types", - "zksync_utils", "zksync_vm2", "zksync_vm_interface", ] @@ -11750,7 +13118,7 @@ dependencies = [ "thread_local", "tokio", "tower 0.4.13", - "tower-http", + "tower-http 0.5.2", "tracing", "vise", "zk_evm 0.150.7", @@ -11771,8 +13139,8 @@ dependencies = [ "zksync_state", "zksync_state_keeper", "zksync_system_constants", + "zksync_test_contracts", "zksync_types", - "zksync_utils", "zksync_vm_executor", "zksync_web3_decl", ] @@ -11784,13 +13152,14 @@ dependencies = [ "anyhow", "async-trait", "rand 0.8.5", - "secrecy", + "secrecy 0.8.0", "semver 1.0.23", "tempfile", "test-casing", "thiserror", "tokio", "tracing", + "vise", "zksync_concurrency", "zksync_config", "zksync_consensus_bft", @@ -11813,9 +13182,8 @@ dependencies = [ "zksync_state", "zksync_state_keeper", "zksync_system_constants", - "zksync_test_account", + "zksync_test_contracts", "zksync_types", - "zksync_utils", "zksync_vm_executor", "zksync_vm_interface", "zksync_web3_decl", @@ -11947,7 +13315,6 @@ dependencies = [ "zksync_multivm", "zksync_system_constants", "zksync_types", - "zksync_utils", ] [[package]] @@ -11992,6 +13359,7 @@ 
dependencies = [ "vise", "zksync_concurrency", "zksync_config", + "zksync_consensus_roles", "zksync_contracts", "zksync_dal", "zksync_eth_client", @@ -12002,7 +13370,6 @@ dependencies = [ "zksync_state_keeper", "zksync_system_constants", "zksync_types", - "zksync_utils", "zksync_vm_executor", "zksync_web3_decl", ] @@ -12016,7 +13383,6 @@ dependencies = [ "zksync_merkle_tree", "zksync_system_constants", "zksync_types", - "zksync_utils", "zksync_vm_interface", ] @@ -12069,10 +13435,9 @@ dependencies = [ "serde_json", "tokio", "tower 0.4.13", - "tower-http", + "tower-http 0.5.2", "tracing", "vise", - "zksync_basic_types", "zksync_config", "zksync_contracts", "zksync_dal", @@ -12080,7 +13445,6 @@ dependencies = [ "zksync_object_store", "zksync_prover_interface", "zksync_types", - "zksync_utils", "zksync_vm_executor", ] @@ -12130,7 +13494,7 @@ dependencies = [ "hex", "prost 0.12.6", "rand 0.8.5", - "secrecy", + "secrecy 0.8.0", "serde_json", "serde_yaml", "tracing", @@ -12218,7 +13582,6 @@ dependencies = [ "zksync_protobuf_config", "zksync_storage", "zksync_types", - "zksync_utils", "zksync_vlog", ] @@ -12252,7 +13615,6 @@ dependencies = [ "zksync_health_check", "zksync_object_store", "zksync_types", - "zksync_utils", "zksync_web3_decl", ] @@ -12295,7 +13657,6 @@ dependencies = [ "zksync_shared_metrics", "zksync_storage", "zksync_types", - "zksync_utils", "zksync_vm_interface", ] @@ -12332,9 +13693,8 @@ dependencies = [ "zksync_state", "zksync_storage", "zksync_system_constants", - "zksync_test_account", + "zksync_test_contracts", "zksync_types", - "zksync_utils", "zksync_vm_executor", ] @@ -12357,7 +13717,6 @@ version = "0.1.0" dependencies = [ "once_cell", "zksync_basic_types", - "zksync_utils", ] [[package]] @@ -12401,21 +13760,22 @@ dependencies = [ "zksync_multivm", "zksync_prover_interface", "zksync_types", - "zksync_utils", ] [[package]] -name = "zksync_test_account" +name = "zksync_test_contracts" version = "0.1.0" dependencies = [ "ethabi", + "foundry-compilers", "hex", + "once_cell", "rand 0.8.5", - "zksync_contracts", + "serde", + "serde_json", "zksync_eth_signer", "zksync_system_constants", "zksync_types", - "zksync_utils", ] [[package]] @@ -12450,7 +13810,6 @@ dependencies = [ "zksync_protobuf", "zksync_protobuf_build", "zksync_system_constants", - "zksync_utils", ] [[package]] @@ -12459,22 +13818,12 @@ version = "0.1.0" dependencies = [ "anyhow", "assert_matches", - "bigdecimal", - "bincode", - "const-decoder", "futures 0.3.31", - "hex", - "num", "once_cell", - "rand 0.8.5", "reqwest 0.12.9", - "serde", "serde_json", - "thiserror", "tokio", "tracing", - "zk_evm 0.133.0", - "zksync_basic_types", "zksync_vlog", ] @@ -12539,7 +13888,6 @@ dependencies = [ "zksync_dal", "zksync_multivm", "zksync_types", - "zksync_utils", ] [[package]] @@ -12588,9 +13936,8 @@ dependencies = [ "zksync_prover_interface", "zksync_state", "zksync_storage", - "zksync_test_account", + "zksync_test_contracts", "zksync_types", - "zksync_utils", "zksync_vm_executor", "zksync_vm_interface", ] @@ -12619,6 +13966,20 @@ dependencies = [ "zksync_types", ] +[[package]] +name = "zopfli" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5019f391bac5cf252e93bbcc53d039ffd62c7bfb7c150414d61369afe57e946" +dependencies = [ + "bumpalo", + "crc32fast", + "lockfree-object-pool", + "log", + "once_cell", + "simd-adler32", +] + [[package]] name = "zstd" version = "0.13.2" diff --git a/Cargo.toml b/Cargo.toml index 6c3a2384b4d4..5d3ea3b8dbe0 100644 --- a/Cargo.toml +++ b/Cargo.toml 
@@ -75,10 +75,11 @@ members = [ "core/lib/snapshots_applier", "core/lib/crypto_primitives", "core/lib/external_price_api", + "core/lib/test_contracts", # Test infrastructure - "core/tests/test_account", "core/tests/loadnext", "core/tests/vm-benchmark", + "get_all_blobs", ] resolver = "2" @@ -124,6 +125,7 @@ ethabi = "18.0.0" flate2 = "1.0.28" fraction = "0.15.3" futures = "0.3" +futures-util = "0.3" glob = "0.3" google-cloud-auth = "0.16.0" google-cloud-storage = "0.20.0" @@ -142,6 +144,7 @@ mini-moka = "0.10.0" num = "0.4.0" num_cpus = "1.13" num_enum = "0.7.2" +octocrab = "0.41" once_cell = "1" opentelemetry = "0.24.0" opentelemetry_sdk = "0.24.0" @@ -200,6 +203,7 @@ trybuild = "1.0" # "Internal" dependencies vise = "0.2.0" vise-exporter = "0.2.0" +foundry-compilers = { version = "0.11.6", git = "https://github.com/Moonsong-Labs/compilers.git", rev = "7c69695e5c75451f158dd2456bf8c94a7492ea0b" } # DA clients' dependencies # Avail @@ -288,7 +292,7 @@ zksync_state = { version = "0.1.0", path = "core/lib/state" } zksync_storage = { version = "0.1.0", path = "core/lib/storage" } zksync_system_constants = { version = "0.1.0", path = "core/lib/constants" } zksync_tee_verifier = { version = "0.1.0", path = "core/lib/tee_verifier" } -zksync_test_account = { version = "0.1.0", path = "core/tests/test_account" } +zksync_test_contracts = { version = "0.1.0", path = "core/lib/test_contracts" } zksync_types = { version = "0.1.0", path = "core/lib/types" } zksync_utils = { version = "0.1.0", path = "core/lib/utils" } zksync_web3_decl = { version = "0.1.0", path = "core/lib/web3_decl" } diff --git a/README.md b/README.md index ce73242f11e7..f12ec08f3773 100644 --- a/README.md +++ b/README.md @@ -7,19 +7,12 @@ decentralization. Since it's EVM compatible (Solidity/Vyper), 99% of Ethereum pr or re-auditing a single line of code. ZKsync Era also uses an LLVM-based compiler that will eventually let developers write smart contracts in C++, Rust and other popular languages. -## Knowledge Index - -The following questions will be answered by the following resources: - -| Question | Resource | -| ------------------------------------------------------- | ---------------------------------------------- | -| What do I need to develop the project locally? | [development.md](docs/guides/development.md) | -| How can I set up my dev environment? | [setup-dev.md](docs/guides/setup-dev.md) | -| How can I run the project? | [launch.md](docs/guides/launch.md) | -| How can I build Docker images? | [build-docker.md](docs/guides/build-docker.md) | -| What is the logical project structure and architecture? | [architecture.md](docs/guides/architecture.md) | -| Where can I find protocol specs? | [specs.md](docs/specs/README.md) | -| Where can I find developer docs? 
| [docs](https://docs.zksync.io) | +## Documentation + +The most recent documentation can be found here: + +- [Core documentation](https://matter-labs.github.io/zksync-era/core/latest/) +- [Prover documentation](https://matter-labs.github.io/zksync-era/prover/latest/) ## Policies diff --git a/bin/run_loadtest_from_github_actions b/bin/run_loadtest_from_github_actions index f784ddd3180d..149988d63d8f 100755 --- a/bin/run_loadtest_from_github_actions +++ b/bin/run_loadtest_from_github_actions @@ -11,11 +11,12 @@ export TRANSACTION_WEIGHTS_WITHDRAWAL=${weights[3]} read -ra execution_params <<<"$CONTRACT_EXECUTION_PARAMS" # reading $CONTRACT_EXECUTION_PARAMS as an array of tokens separated by IFS export CONTRACT_EXECUTION_PARAMS_READS=${execution_params[0]} -export CONTRACT_EXECUTION_PARAMS_WRITES=${execution_params[1]} -export CONTRACT_EXECUTION_PARAMS_EVENTS=${execution_params[2]} -export CONTRACT_EXECUTION_PARAMS_HASHES=${execution_params[3]} -export CONTRACT_EXECUTION_PARAMS_RECURSIVE_CALLS=${execution_params[4]} -export CONTRACT_EXECUTION_PARAMS_DEPLOYS=${execution_params[5]} +export CONTRACT_EXECUTION_PARAMS_INITIAL_WRITES=${execution_params[1]} +export CONTRACT_EXECUTION_PARAMS_REPEATED_WRITES=${execution_params[2]} +export CONTRACT_EXECUTION_PARAMS_EVENTS=${execution_params[3]} +export CONTRACT_EXECUTION_PARAMS_HASHES=${execution_params[4]} +export CONTRACT_EXECUTION_PARAMS_RECURSIVE_CALLS=${execution_params[5]} +export CONTRACT_EXECUTION_PARAMS_DEPLOYS=${execution_params[6]} # Run the test cargo run --bin loadnext diff --git a/contracts b/contracts index 46d75088e7dd..64ed0ab97ff4 160000 --- a/contracts +++ b/contracts @@ -1 +1 @@ -Subproject commit 46d75088e7ddb534101874c3ec15b877da1cb417 +Subproject commit 64ed0ab97ff4e9d2a265522025bdb8e1a4a4d2eb diff --git a/core/CHANGELOG.md b/core/CHANGELOG.md index 3ccd261273b1..0ca0a3be025a 100644 --- a/core/CHANGELOG.md +++ b/core/CHANGELOG.md @@ -1,5 +1,30 @@ # Changelog +## [25.2.0](https://github.com/matter-labs/zksync-era/compare/core-v25.1.0...core-v25.2.0) (2024-11-19) + + +### Features + +* add more metrics for the tee_prover ([#3276](https://github.com/matter-labs/zksync-era/issues/3276)) ([8b62434](https://github.com/matter-labs/zksync-era/commit/8b62434a3b48aea2e66b5dd833f52c58e26969cb)) +* **api-server:** add `yParity` for non-legacy txs ([#3246](https://github.com/matter-labs/zksync-era/issues/3246)) ([6ea36d1](https://github.com/matter-labs/zksync-era/commit/6ea36d14940a19f638512556ccc4c5127150b5c9)) +* **consensus:** fallback json rpc syncing for consensus ([#3211](https://github.com/matter-labs/zksync-era/issues/3211)) ([726203b](https://github.com/matter-labs/zksync-era/commit/726203bab540e3d6ada10b6bc12bd3c09220d895)) +* **contract-verifier:** Adapt contract verifier API for EVM bytecodes ([#3234](https://github.com/matter-labs/zksync-era/issues/3234)) ([4509179](https://github.com/matter-labs/zksync-era/commit/4509179f62ead4b837dfb67760f52de76fac2e37)) +* **contract-verifier:** Support Solidity contracts with EVM bytecode in contract verifier ([#3225](https://github.com/matter-labs/zksync-era/issues/3225)) ([8a3a82c](https://github.com/matter-labs/zksync-era/commit/8a3a82ca16479183e96505bc91011fc07bfc6889)) +* **contract-verifier:** Support Vyper toolchain for EVM bytecodes ([#3251](https://github.com/matter-labs/zksync-era/issues/3251)) ([75f7db9](https://github.com/matter-labs/zksync-era/commit/75f7db9b535b4dee4c6662be609aec996555383c)) +* **en:** Support Merkle tree recovery with pruning enabled
([#3172](https://github.com/matter-labs/zksync-era/issues/3172)) ([7b8640a](https://github.com/matter-labs/zksync-era/commit/7b8640a89fa8666e14934481317c94f07280e591)) +* ProverJobProcessor & circuit prover ([#3287](https://github.com/matter-labs/zksync-era/issues/3287)) ([98823f9](https://github.com/matter-labs/zksync-era/commit/98823f95c0b95feeb37eb9086cc88d4ac5220904)) +* **prover:** Move prover_autoscaler config into crate ([#3222](https://github.com/matter-labs/zksync-era/issues/3222)) ([1b33b5e](https://github.com/matter-labs/zksync-era/commit/1b33b5e9ec04bea0010350798332a90413c482d3)) +* **vm_executor:** Add new histogram metric for gas per tx in vm_executor ([#3215](https://github.com/matter-labs/zksync-era/issues/3215)) ([3606fc1](https://github.com/matter-labs/zksync-era/commit/3606fc1d8f103b4f7174301f9a985ace2b89038d)) +* **vm:** add gateway changes to fast vm ([#3236](https://github.com/matter-labs/zksync-era/issues/3236)) ([f3a2517](https://github.com/matter-labs/zksync-era/commit/f3a2517a132b036ca70bc18aa8ac9f6da1cbc049)) + + +### Bug Fixes + +* **merkle-tree:** Repair stale keys for tree in background ([#3200](https://github.com/matter-labs/zksync-era/issues/3200)) ([363b4f0](https://github.com/matter-labs/zksync-era/commit/363b4f09937496fadeb38857f5c0c73146995ce5)) +* **tracer:** Add error to flat tracer ([#3306](https://github.com/matter-labs/zksync-era/issues/3306)) ([7c93c47](https://github.com/matter-labs/zksync-era/commit/7c93c47916845a90fc5a092e1465567aae611307)) +* use_dummy_inclusion_data condition ([#3244](https://github.com/matter-labs/zksync-era/issues/3244)) ([6e3c36e](https://github.com/matter-labs/zksync-era/commit/6e3c36e6426621bee82399db7814ca6756b613cb)) +* **vm:** Do not require experimental VM config ([#3270](https://github.com/matter-labs/zksync-era/issues/3270)) ([54e4b00](https://github.com/matter-labs/zksync-era/commit/54e4b007b2d32d86b2701b01cd3bef3b3bc97087)) + ## [25.1.0](https://github.com/matter-labs/zksync-era/compare/core-v25.0.0...core-v25.1.0) (2024-11-04) diff --git a/core/bin/contract-verifier/src/main.rs b/core/bin/contract-verifier/src/main.rs index 88f25256c40d..ab86c147977d 100644 --- a/core/bin/contract-verifier/src/main.rs +++ b/core/bin/contract-verifier/src/main.rs @@ -55,7 +55,9 @@ async fn main() -> anyhow::Result<()> { let contract_verifier = ContractVerifier::new(verifier_config.compilation_timeout(), pool) .await .context("failed initializing contract verifier")?; + let update_task = contract_verifier.sync_compiler_versions_task(); let tasks = vec![ + tokio::spawn(update_task), tokio::spawn(contract_verifier.run(stop_receiver.clone(), opt.jobs_number)), tokio::spawn( PrometheusExporterConfig::pull(prometheus_config.listener_port).run(stop_receiver), diff --git a/core/bin/external_node/Cargo.toml b/core/bin/external_node/Cargo.toml index 4c8f73eda94d..a69fdf263794 100644 --- a/core/bin/external_node/Cargo.toml +++ b/core/bin/external_node/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "zksync_external_node" description = "Non-validator ZKsync node" -version = "25.1.0" # x-release-please-version +version = "25.2.0" # x-release-please-version edition.workspace = true authors.workspace = true homepage.workspace = true @@ -20,7 +20,6 @@ zksync_config.workspace = true zksync_protobuf_config.workspace = true zksync_eth_client.workspace = true zksync_storage.workspace = true -zksync_utils.workspace = true zksync_state.workspace = true zksync_contracts.workspace = true zksync_l1_contract_interface.workspace = true diff --git 
a/core/bin/external_node/src/config/mod.rs b/core/bin/external_node/src/config/mod.rs index 0a94f993656a..81604f83008a 100644 --- a/core/bin/external_node/src/config/mod.rs +++ b/core/bin/external_node/src/config/mod.rs @@ -408,6 +408,9 @@ pub(crate) struct OptionalENConfig { /// Timeout to wait for the Merkle tree database to run compaction on stalled writes. #[serde(default = "OptionalENConfig::default_merkle_tree_stalled_writes_timeout_sec")] merkle_tree_stalled_writes_timeout_sec: u64, + /// Enables the stale keys repair task for the Merkle tree. + #[serde(default)] + pub merkle_tree_repair_stale_keys: bool, // Postgres config (new parameters) /// Threshold in milliseconds for the DB connection lifetime to denote it as long-living and log its details. @@ -639,6 +642,12 @@ impl OptionalENConfig { merkle_tree.stalled_writes_timeout_sec, default_merkle_tree_stalled_writes_timeout_sec ), + merkle_tree_repair_stale_keys: general_config + .db_config + .as_ref() + .map_or(false, |config| { + config.experimental.merkle_tree_repair_stale_keys + }), database_long_connection_threshold_ms: load_config!( general_config.postgres_config, long_connection_threshold_ms diff --git a/core/bin/external_node/src/node_builder.rs b/core/bin/external_node/src/node_builder.rs index b7f6f8039025..5c70fd436781 100644 --- a/core/bin/external_node/src/node_builder.rs +++ b/core/bin/external_node/src/node_builder.rs @@ -378,6 +378,11 @@ impl ExternalNodeBuilder { layer = layer.with_tree_api_config(merkle_tree_api_config); } + // Add stale keys repair task if requested. + if self.config.optional.merkle_tree_repair_stale_keys { + layer = layer.with_stale_keys_repair(); + } + // Add tree pruning if needed. if self.config.optional.pruning_enabled { layer = layer.with_pruning_config(self.config.optional.pruning_removal_delay()); diff --git a/core/bin/genesis_generator/Cargo.toml b/core/bin/genesis_generator/Cargo.toml index 1ece9ea09d2e..d0bbcb668713 100644 --- a/core/bin/genesis_generator/Cargo.toml +++ b/core/bin/genesis_generator/Cargo.toml @@ -15,7 +15,6 @@ publish = false zksync_config.workspace = true zksync_env_config.workspace = true zksync_protobuf_config.workspace = true -zksync_utils.workspace = true zksync_types.workspace = true zksync_core_leftovers.workspace = true zksync_dal.workspace = true diff --git a/core/bin/system-constants-generator/src/utils.rs b/core/bin/system-constants-generator/src/utils.rs index 16167975cf0e..e3558de3e6a1 100644 --- a/core/bin/system-constants-generator/src/utils.rs +++ b/core/bin/system-constants-generator/src/utils.rs @@ -21,13 +21,13 @@ use zksync_multivm::{ zk_evm_latest::aux_structures::Timestamp, }; use zksync_types::{ - block::L2BlockHasher, ethabi::Token, fee::Fee, fee_model::BatchFeeInput, l1::L1Tx, l2::L2Tx, - utils::storage_key_for_eth_balance, AccountTreeId, Address, Execute, K256PrivateKey, - L1BatchNumber, L1TxCommonData, L2BlockNumber, L2ChainId, Nonce, ProtocolVersionId, StorageKey, - Transaction, BOOTLOADER_ADDRESS, SYSTEM_CONTEXT_ADDRESS, SYSTEM_CONTEXT_GAS_PRICE_POSITION, - SYSTEM_CONTEXT_TX_ORIGIN_POSITION, U256, ZKPORTER_IS_AVAILABLE, + block::L2BlockHasher, bytecode::BytecodeHash, ethabi::Token, fee::Fee, + fee_model::BatchFeeInput, l1::L1Tx, l2::L2Tx, u256_to_h256, utils::storage_key_for_eth_balance, + AccountTreeId, Address, Execute, K256PrivateKey, L1BatchNumber, L1TxCommonData, L2BlockNumber, + L2ChainId, Nonce, ProtocolVersionId, StorageKey, Transaction, BOOTLOADER_ADDRESS, + SYSTEM_CONTEXT_ADDRESS, SYSTEM_CONTEXT_GAS_PRICE_POSITION, 
SYSTEM_CONTEXT_TX_ORIGIN_POSITION, + U256, ZKPORTER_IS_AVAILABLE, }; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words, u256_to_h256}; use crate::intrinsic_costs::VmSpentResourcesResult; @@ -62,19 +62,19 @@ impl VmTracer for SpecialBootloaderTracer pub static GAS_TEST_SYSTEM_CONTRACTS: Lazy<BaseSystemContracts> = Lazy::new(|| { let bytecode = read_bootloader_code("gas_test"); - let hash = hash_bytecode(&bytecode); + let hash = BytecodeHash::for_bytecode(&bytecode).value(); let bootloader = SystemContractCode { - code: bytes_to_be_words(bytecode), + code: bytecode, hash, }; let bytecode = read_sys_contract_bytecode("", "DefaultAccount", ContractLanguage::Sol); - let hash = hash_bytecode(&bytecode); + let hash = BytecodeHash::for_bytecode(&bytecode).value(); BaseSystemContracts { default_aa: SystemContractCode { - code: bytes_to_be_words(bytecode), + code: bytecode, hash, }, bootloader, @@ -207,23 +207,23 @@ fn default_l1_batch() -> L1BatchEnv { /// returns the amount of gas needed to perform an internal transfer, assuming no gas price /// per pubdata, i.e., under the assumption that the refund will not touch any new slots. pub(super) fn execute_internal_transfer_test() -> u32 { - let raw_storage = InMemoryStorage::with_system_contracts(hash_bytecode); + let raw_storage = InMemoryStorage::with_system_contracts(); let mut storage_view = StorageView::new(raw_storage); let bootloader_balance_key = storage_key_for_eth_balance(&BOOTLOADER_ADDRESS); storage_view.set_value(bootloader_balance_key, u256_to_h256(U256([0, 0, 1, 0]))); let bytecode = read_bootloader_test_code("transfer_test"); - let hash = hash_bytecode(&bytecode); + let hash = BytecodeHash::for_bytecode(&bytecode).value(); let bootloader = SystemContractCode { - code: bytes_to_be_words(bytecode), + code: bytecode, hash, }; let l1_batch = default_l1_batch(); let bytecode = read_sys_contract_bytecode("", "DefaultAccount", ContractLanguage::Sol); - let hash = hash_bytecode(&bytecode); + let hash = BytecodeHash::for_bytecode(&bytecode).value(); let default_aa = SystemContractCode { - code: bytes_to_be_words(bytecode), + code: bytecode, hash, }; @@ -263,7 +263,11 @@ pub(super) fn execute_internal_transfer_test() -> u32 { } input }; - let input: Vec<_> = bytes_to_be_words(input).into_iter().enumerate().collect(); + let input: Vec<_> = input + .chunks(32) + .map(U256::from_big_endian) + .enumerate() + .collect(); let tracer_result = Rc::new(RefCell::new(0)); let tracer = SpecialBootloaderTracer { @@ -288,7 +292,7 @@ pub(super) fn execute_user_txs_in_test_gas_vm( .iter() .fold(U256::zero(), |sum, elem| sum + elem.gas_limit()); - let raw_storage = InMemoryStorage::with_system_contracts(hash_bytecode); + let raw_storage = InMemoryStorage::with_system_contracts(); let mut storage_view = StorageView::new(raw_storage); for tx in txs.iter() { diff --git a/core/bin/zksync_server/Cargo.toml b/core/bin/zksync_server/Cargo.toml index 031183924064..4cf028be8210 100644 --- a/core/bin/zksync_server/Cargo.toml +++ b/core/bin/zksync_server/Cargo.toml @@ -17,7 +17,6 @@ zksync_env_config.workspace = true zksync_eth_client.workspace = true zksync_protobuf_config.workspace = true zksync_storage.workspace = true -zksync_utils.workspace = true zksync_types.workspace = true zksync_core_leftovers.workspace = true zksync_node_genesis.workspace = true diff --git a/core/bin/zksync_server/src/node_builder.rs b/core/bin/zksync_server/src/node_builder.rs index 32478ede5bf8..794c847a24d5 100644 --- a/core/bin/zksync_server/src/node_builder.rs +++
b/core/bin/zksync_server/src/node_builder.rs @@ -325,7 +325,11 @@ impl MainNodeBuilder { latest_values_cache_size: rpc_config.latest_values_cache_size() as u64, latest_values_max_block_lag: rpc_config.latest_values_max_block_lag(), }; - let vm_config = try_load_config!(self.configs.experimental_vm_config); + let vm_config = self + .configs + .experimental_vm_config + .clone() + .unwrap_or_default(); // On main node we always use master pool sink. self.node.add_layer(MasterPoolSinkLayer); @@ -597,7 +601,11 @@ impl MainNodeBuilder { } fn add_vm_playground_layer(mut self) -> anyhow::Result<Self> { - let vm_config = try_load_config!(self.configs.experimental_vm_config); + let vm_config = self + .configs + .experimental_vm_config + .clone() + .unwrap_or_default(); self.node.add_layer(VmPlaygroundLayer::new( vm_config.playground, self.genesis_config.l2_chain_id, diff --git a/core/bin/zksync_tee_prover/src/tee_prover.rs b/core/bin/zksync_tee_prover/src/tee_prover.rs index 5d22d1e7c630..58f3d45969ca 100644 --- a/core/bin/zksync_tee_prover/src/tee_prover.rs +++ b/core/bin/zksync_tee_prover/src/tee_prover.rs @@ -80,7 +80,13 @@ impl TeeProver { let msg_to_sign = Message::from_slice(root_hash_bytes) .map_err(|e| TeeProverError::Verification(e.into()))?; let signature = self.config.signing_key.sign_ecdsa(msg_to_sign); - observer.observe(); + let duration = observer.observe(); + tracing::info!( + proof_generation_time = duration.as_secs_f64(), + l1_batch_number = %batch_number, + l1_root_hash = ?verification_result.value_hash, + "L1 batch verified", + ); Ok((signature, batch_number, verification_result.value_hash)) } _ => Err(TeeProverError::Verification(anyhow::anyhow!( diff --git a/core/lib/basic_types/Cargo.toml b/core/lib/basic_types/Cargo.toml index 616b959b0783..6cac4f60f615 100644 --- a/core/lib/basic_types/Cargo.toml +++ b/core/lib/basic_types/Cargo.toml @@ -11,8 +11,10 @@ keywords.workspace = true categories.workspace = true [dependencies] +const-decoder.workspace = true ethabi.workspace = true hex.workspace = true +sha2.workspace = true tiny-keccak.workspace = true thiserror.workspace = true serde = { workspace = true, features = ["derive"] } diff --git a/core/lib/utils/src/bytecode.rs b/core/lib/basic_types/src/bytecode.rs similarity index 50% rename from core/lib/utils/src/bytecode.rs rename to core/lib/basic_types/src/bytecode.rs index 4fda5e9d48a0..585ba0ef8c88 100644 --- a/core/lib/utils/src/bytecode.rs +++ b/core/lib/basic_types/src/bytecode.rs @@ -1,24 +1,42 @@ -// FIXME: move to basic_types? +//! Bytecode-related types and utils. +//! +//! # Bytecode kinds +//! +//! ZKsync supports 2 kinds of bytecodes: EraVM and EVM ones. +//! +//! - **EraVM** bytecodes consist of 64-bit (8-byte) instructions for the corresponding VM. +//! - **EVM** bytecodes consist of ordinary EVM opcodes, preceded with a 32-byte big-endian code length (in bytes). +//! +//! Both bytecode kinds are right-padded so that they consist of an odd number of 32-byte words. All methods +//! in this module operate on padded bytecodes unless explicitly specified otherwise. use anyhow::Context as _; -use zk_evm::k256::sha2::{Digest, Sha256}; -use zksync_basic_types::{H256, U256}; +use sha2::{Digest, Sha256}; -use crate::bytes_to_chunks; +use crate::{H256, U256}; const MAX_BYTECODE_LENGTH_IN_WORDS: usize = (1 << 16) - 1; const MAX_BYTECODE_LENGTH_BYTES: usize = MAX_BYTECODE_LENGTH_IN_WORDS * 32; -#[derive(Debug, thiserror::Error, PartialEq)] +/// Errors returned from [`validate_bytecode()`].
+#[derive(Debug, thiserror::Error)] +#[non_exhaustive] pub enum InvalidBytecodeError { + /// Bytecode is too long. #[error("Bytecode too long: {0} bytes, while max {1} allowed")] BytecodeTooLong(usize, usize), - #[error("Bytecode has even number of 32-byte words")] - BytecodeLengthInWordsIsEven, + /// Bytecode length isn't divisible by 32 (i.e., bytecode cannot be represented as a sequence of 32-byte EraVM words). #[error("Bytecode length is not divisible by 32")] BytecodeLengthIsNotDivisibleBy32, + /// Bytecode has an even number of 32-byte words. + #[error("Bytecode has even number of 32-byte words")] + BytecodeLengthInWordsIsEven, } +/// Validates that the given bytecode passes basic checks (e.g., not too long). +/// +/// The performed checks are universal both for EraVM and (padded) EVM bytecodes. If you need to additionally check EVM bytecode integrity, +/// use [`trim_padded_evm_bytecode()`]. pub fn validate_bytecode(code: &[u8]) -> Result<(), InvalidBytecodeError> { let bytecode_len = code.len(); @@ -42,21 +60,79 @@ pub fn validate_bytecode(code: &[u8]) -> Result<(), InvalidBytecodeError> { Ok(()) } -/// Hashes the provided EraVM bytecode. -pub fn hash_bytecode(code: &[u8]) -> H256 { - let chunked_code = bytes_to_chunks(code); - let hash = zk_evm::zkevm_opcode_defs::utils::bytecode_to_code_hash(&chunked_code) - .expect("Invalid bytecode"); +/// 32-byte bytecode hash. Besides a cryptographically secure hash of the bytecode contents, contains a [`BytecodeMarker`] +/// and the bytecode length. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct BytecodeHash(H256); - H256(hash) -} +impl BytecodeHash { + /// Hashes the provided EraVM bytecode. + pub fn for_bytecode(bytecode: &[u8]) -> Self { + Self::for_generic_bytecode(BytecodeMarker::EraVm, bytecode) + } + + /// Hashes the provided padded EVM bytecode. + pub fn for_evm_bytecode(bytecode: &[u8]) -> Self { + Self::for_generic_bytecode(BytecodeMarker::Evm, bytecode) + } + + fn for_generic_bytecode(kind: BytecodeMarker, bytecode: &[u8]) -> Self { + validate_bytecode(bytecode).expect("invalid bytecode"); + + let mut hasher = Sha256::new(); + let len = match kind { + BytecodeMarker::EraVm => (bytecode.len() / 32) as u16, + BytecodeMarker::Evm => bytecode.len() as u16, + }; + hasher.update(bytecode); + let result = hasher.finalize(); + + let mut output = [0u8; 32]; + output[..].copy_from_slice(result.as_slice()); + output[0] = kind as u8; + output[1] = 0; + output[2..4].copy_from_slice(&len.to_be_bytes()); + + Self(H256(output)) + } -pub fn bytecode_len_in_words(bytecodehash: &H256) -> u16 { - u16::from_be_bytes([bytecodehash[2], bytecodehash[3]]) + /// Returns a marker / kind of this bytecode. + pub fn marker(&self) -> BytecodeMarker { + match self.0.as_bytes()[0] { + val if val == BytecodeMarker::EraVm as u8 => BytecodeMarker::EraVm, + val if val == BytecodeMarker::Evm as u8 => BytecodeMarker::Evm, + _ => unreachable!(), + } + } + + /// Returns the length of the hashed bytecode in bytes. + pub fn len_in_bytes(&self) -> usize { + let bytes = self.0.as_bytes(); + let raw_len = u16::from_be_bytes([bytes[2], bytes[3]]); + match self.marker() { + BytecodeMarker::EraVm => raw_len as usize * 32, + BytecodeMarker::Evm => raw_len as usize, + } + } + + /// Returns the underlying hash value. + pub fn value(self) -> H256 { + self.0 + } + + /// Returns the underlying hash value interpreted as a big-endian unsigned integer. 
+ pub fn value_u256(self) -> U256 { + crate::h256_to_u256(self.0) + } } -pub fn bytecode_len_in_bytes(bytecodehash: H256) -> usize { - bytecode_len_in_words(&bytecodehash) as usize * 32 +impl TryFrom<H256> for BytecodeHash { + type Error = anyhow::Error; + + fn try_from(raw_hash: H256) -> Result<Self, Self::Error> { + BytecodeMarker::new(raw_hash).context("unknown bytecode hash marker")?; + Ok(Self(raw_hash)) + } } /// Bytecode marker encoded in the first byte of the bytecode hash. @@ -80,26 +156,10 @@ impl BytecodeMarker { } } -/// Hashes the provided EVM bytecode. The bytecode must be padded to an odd number of 32-byte words; -/// bytecodes stored in the known codes storage satisfy this requirement automatically. -pub fn hash_evm_bytecode(bytecode: &[u8]) -> H256 { - validate_bytecode(bytecode).expect("invalid EVM bytecode"); - - let mut hasher = Sha256::new(); - let len = bytecode.len() as u16; - hasher.update(bytecode); - let result = hasher.finalize(); - - let mut output = [0u8; 32]; - output[..].copy_from_slice(result.as_slice()); - output[0] = BytecodeMarker::Evm as u8; - output[1] = 0; - output[2..4].copy_from_slice(&len.to_be_bytes()); +/// Removes padding from an EVM bytecode, returning the original EVM bytecode. +pub fn trim_padded_evm_bytecode(raw: &[u8]) -> anyhow::Result<&[u8]> { + validate_bytecode(raw).context("bytecode fails basic validity checks")?; - H256(output) -} - -pub fn prepare_evm_bytecode(raw: &[u8]) -> anyhow::Result<&[u8]> { // EVM bytecodes are prefixed with a big-endian `U256` bytecode length. let bytecode_len_bytes = raw.get(..32).context("length < 32")?; let bytecode_len = U256::from_big_endian(bytecode_len_bytes); @@ -121,6 +181,7 @@ pub fn prepare_evm_bytecode(raw: &[u8]) -> anyhow::Result<&[u8]> { Ok(bytecode) } +#[doc(hidden)] // only useful for tests pub mod testonly { use const_decoder::Decoder; @@ -158,21 +219,18 @@ mod tests { #[test] fn bytecode_markers_are_valid() { - let bytecode_hash = hash_bytecode(&[0; 32]); - assert_eq!( - BytecodeMarker::new(bytecode_hash), - Some(BytecodeMarker::EraVm) - ); - let bytecode_hash = hash_evm_bytecode(&[0; 32]); - assert_eq!( - BytecodeMarker::new(bytecode_hash), - Some(BytecodeMarker::Evm) - ); + let bytecode_hash = BytecodeHash::for_bytecode(&[0; 32]); + assert_eq!(bytecode_hash.marker(), BytecodeMarker::EraVm); + assert_eq!(bytecode_hash.len_in_bytes(), 32); + + let bytecode_hash = BytecodeHash::for_evm_bytecode(&[0; 32]); + assert_eq!(bytecode_hash.marker(), BytecodeMarker::Evm); + assert_eq!(bytecode_hash.len_in_bytes(), 32); } #[test] fn preparing_evm_bytecode() { - let prepared = prepare_evm_bytecode(RAW_EVM_BYTECODE).unwrap(); + let prepared = trim_padded_evm_bytecode(RAW_EVM_BYTECODE).unwrap(); assert_eq!(prepared, PROCESSED_EVM_BYTECODE); } } diff --git a/core/lib/basic_types/src/conversions.rs b/core/lib/basic_types/src/conversions.rs new file mode 100644 index 000000000000..544d4adc08f8 --- /dev/null +++ b/core/lib/basic_types/src/conversions.rs @@ -0,0 +1,36 @@ +//! Conversions between basic types.
+ +use crate::{Address, H256, U256}; + +pub fn h256_to_u256(num: H256) -> U256 { + U256::from_big_endian(num.as_bytes()) +} + +pub fn address_to_h256(address: &Address) -> H256 { + let mut buffer = [0u8; 32]; + buffer[12..].copy_from_slice(address.as_bytes()); + H256(buffer) +} + +pub fn address_to_u256(address: &Address) -> U256 { + h256_to_u256(address_to_h256(address)) +} + +pub fn u256_to_h256(num: U256) -> H256 { + let mut bytes = [0u8; 32]; + num.to_big_endian(&mut bytes); + H256::from_slice(&bytes) +} + +/// Converts a `U256` value into an [`Address`]. +pub fn u256_to_address(value: &U256) -> Address { + let mut bytes = [0u8; 32]; + value.to_big_endian(&mut bytes); + + Address::from_slice(&bytes[12..]) +} + +/// Converts an `H256` value into an [`Address`]. +pub fn h256_to_address(value: &H256) -> Address { + Address::from_slice(&value.as_bytes()[12..]) +} diff --git a/core/lib/basic_types/src/lib.rs b/core/lib/basic_types/src/lib.rs index 1b462fdf77d1..d79bc57cc5e1 100644 --- a/core/lib/basic_types/src/lib.rs +++ b/core/lib/basic_types/src/lib.rs @@ -22,21 +22,33 @@ pub use ethabi::{ }; use serde::{de, Deserialize, Deserializer, Serialize}; +pub use self::conversions::{ + address_to_h256, address_to_u256, h256_to_address, h256_to_u256, u256_to_address, u256_to_h256, +}; + #[macro_use] mod macros; pub mod basic_fri_types; +pub mod bytecode; pub mod commitment; +mod conversions; pub mod network; pub mod protocol_version; pub mod prover_dal; pub mod pubdata_da; pub mod secrets; +pub mod serde_wrappers; pub mod settlement; pub mod tee_types; pub mod url; pub mod vm; pub mod web3; +/// Computes `ceil(a / b)`. +pub fn ceil_div_u256(a: U256, b: U256) -> U256 { + (a + b - U256::from(1)) / b +} + /// Parses H256 from a slice of bytes. pub fn parse_h256(bytes: &[u8]) -> anyhow::Result<H256> { Ok(<[u8; 32]>::try_from(bytes).context("invalid size")?.into()) diff --git a/core/lib/basic_types/src/prover_dal.rs b/core/lib/basic_types/src/prover_dal.rs index d86f79ba77aa..d2af75fe2ff5 100644 --- a/core/lib/basic_types/src/prover_dal.rs +++ b/core/lib/basic_types/src/prover_dal.rs @@ -1,5 +1,5 @@ //! Types exposed by the prover DAL for general-purpose use. -use std::{net::IpAddr, ops::Add, str::FromStr}; +use std::{net::IpAddr, ops::Add, str::FromStr, time::Instant}; use chrono::{DateTime, Duration, NaiveDateTime, NaiveTime, Utc}; use serde::{Deserialize, Serialize}; @@ -18,6 +18,23 @@ pub struct FriProverJobMetadata { pub sequence_number: usize, pub depth: u16, pub is_node_final_proof: bool, + pub pick_time: Instant, +} + +impl FriProverJobMetadata { + /// Checks whether the metadata corresponds to a scheduler proof or not. + pub fn is_scheduler_proof(&self) -> anyhow::Result<bool> { + if self.aggregation_round == AggregationRound::Scheduler { + if self.circuit_id != 1 { + return Err(anyhow::anyhow!( + "Invalid circuit id {} for Scheduler proof", + self.circuit_id + )); + } + return Ok(true); + } + Ok(false) + } } #[derive(Debug, Clone, Copy, Default)] diff --git a/core/lib/utils/src/serde_wrappers.rs b/core/lib/basic_types/src/serde_wrappers.rs similarity index 97% rename from core/lib/utils/src/serde_wrappers.rs rename to core/lib/basic_types/src/serde_wrappers.rs index cb9687a8a504..4cc470493dce 100644 --- a/core/lib/utils/src/serde_wrappers.rs +++ b/core/lib/basic_types/src/serde_wrappers.rs @@ -1,3 +1,5 @@ +//! Generic `serde` helpers.
+ use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; /// Trait for specifying a prefix for bytes-to-hex serialization @@ -61,9 +63,7 @@ pub type ZeroPrefixHexSerde = BytesToHexSerde<ZeroPrefix>; #[cfg(test)] mod tests { - use serde::{Deserialize, Serialize}; - - use crate::ZeroPrefixHexSerde; + use super::*; #[derive(Serialize, Deserialize, PartialEq, Debug)] struct Execute { diff --git a/core/lib/basic_types/src/tee_types.rs b/core/lib/basic_types/src/tee_types.rs index d49f2f183885..44a3ba02c1d2 100644 --- a/core/lib/basic_types/src/tee_types.rs +++ b/core/lib/basic_types/src/tee_types.rs @@ -2,7 +2,7 @@ use std::fmt; use serde::{Deserialize, Serialize}; -#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] #[serde(rename_all = "lowercase")] #[non_exhaustive] pub enum TeeType { diff --git a/core/lib/basic_types/src/web3/mod.rs b/core/lib/basic_types/src/web3/mod.rs index aa7c49670333..e6d3cab37273 100644 --- a/core/lib/basic_types/src/web3/mod.rs +++ b/core/lib/basic_types/src/web3/mod.rs @@ -73,6 +73,14 @@ pub fn keccak256(bytes: &[u8]) -> [u8; 32] { output } +/// Hashes the concatenation of the two provided hashes using `keccak256`. +pub fn keccak256_concat(hash1: H256, hash2: H256) -> H256 { + let mut bytes = [0_u8; 64]; + bytes[..32].copy_from_slice(hash1.as_bytes()); + bytes[32..].copy_from_slice(hash2.as_bytes()); + H256(keccak256(&bytes)) +} + // `Bytes`: from `web3::types::bytes` /// Raw bytes wrapper diff --git a/core/lib/config/src/configs/da_dispatcher.rs b/core/lib/config/src/configs/da_dispatcher.rs index e9ad6bd3c074..c8bf1b3b8995 100644 --- a/core/lib/config/src/configs/da_dispatcher.rs +++ b/core/lib/config/src/configs/da_dispatcher.rs @@ -6,6 +6,7 @@ pub const DEFAULT_POLLING_INTERVAL_MS: u32 = 5000; pub const DEFAULT_MAX_ROWS_TO_DISPATCH: u32 = 100; pub const DEFAULT_MAX_RETRIES: u16 = 5; pub const DEFAULT_USE_DUMMY_INCLUSION_DATA: bool = false; +pub const DEFAULT_MAX_CONCURRENT_REQUESTS: u32 = 100; #[derive(Debug, Clone, PartialEq, Deserialize)] pub struct DADispatcherConfig { @@ -19,6 +20,8 @@ pub struct DADispatcherConfig { // TODO: run a verification task to check if the L1 contract expects the inclusion proofs to // avoid the scenario where contracts expect real proofs, and server is using dummy proofs. pub use_dummy_inclusion_data: Option<bool>, + /// The maximum number of concurrent requests to send to the DA server. + pub max_concurrent_requests: Option<u32>, } impl DADispatcherConfig { @@ -28,6 +31,7 @@ impl DADispatcherConfig { max_rows_to_dispatch: Some(DEFAULT_MAX_ROWS_TO_DISPATCH), max_retries: Some(DEFAULT_MAX_RETRIES), use_dummy_inclusion_data: Some(DEFAULT_USE_DUMMY_INCLUSION_DATA), + max_concurrent_requests: Some(DEFAULT_MAX_CONCURRENT_REQUESTS), } } diff --git a/core/lib/config/src/configs/experimental.rs b/core/lib/config/src/configs/experimental.rs index a87a221ef222..2553864e251d 100644 --- a/core/lib/config/src/configs/experimental.rs +++ b/core/lib/config/src/configs/experimental.rs @@ -29,6 +29,9 @@ pub struct ExperimentalDBConfig { /// correspondingly; otherwise, RocksDB performance can significantly degrade. #[serde(default)] pub include_indices_and_filters_in_block_cache: bool, + /// Enables the stale keys repair task for the Merkle tree.
+ #[serde(default)] + pub merkle_tree_repair_stale_keys: bool, } impl Default for ExperimentalDBConfig { @@ -40,6 +43,7 @@ impl Default for ExperimentalDBConfig { protective_reads_persistence_enabled: false, processing_delay_ms: Self::default_merkle_tree_processing_delay_ms(), include_indices_and_filters_in_block_cache: false, + merkle_tree_repair_stale_keys: false, } } } diff --git a/core/lib/config/src/configs/proof_data_handler.rs b/core/lib/config/src/configs/proof_data_handler.rs index 1d8703df51aa..443d602b8126 100644 --- a/core/lib/config/src/configs/proof_data_handler.rs +++ b/core/lib/config/src/configs/proof_data_handler.rs @@ -9,9 +9,12 @@ pub struct TeeConfig { pub tee_support: bool, /// All batches before this one are considered to be processed. pub first_tee_processed_batch: L1BatchNumber, - /// Timeout in seconds for retrying TEE proof generation if it fails. Retries continue - /// indefinitely until successful. + /// Timeout in seconds for retrying the preparation of input for TEE proof generation if it + /// previously failed (e.g., due to a transient network issue) or if it was picked by a TEE + /// prover but the TEE proof was not submitted within that time. pub tee_proof_generation_timeout_in_secs: u16, + /// Timeout in hours after which a batch will be permanently ignored if repeated retries failed. + pub tee_batch_permanently_ignored_timeout_in_hours: u16, } impl Default for TeeConfig { @@ -21,6 +24,8 @@ impl Default for TeeConfig { first_tee_processed_batch: Self::default_first_tee_processed_batch(), tee_proof_generation_timeout_in_secs: Self::default_tee_proof_generation_timeout_in_secs(), + tee_batch_permanently_ignored_timeout_in_hours: + Self::default_tee_batch_permanently_ignored_timeout_in_hours(), } } } @@ -35,12 +40,20 @@ impl TeeConfig { } pub fn default_tee_proof_generation_timeout_in_secs() -> u16 { - 600 + 60 + } + + pub fn default_tee_batch_permanently_ignored_timeout_in_hours() -> u16 { + 10 * 24 } pub fn tee_proof_generation_timeout(&self) -> Duration { Duration::from_secs(self.tee_proof_generation_timeout_in_secs.into()) } + + pub fn tee_batch_permanently_ignored_timeout(&self) -> Duration { + Duration::from_secs(3600 * u64::from(self.tee_batch_permanently_ignored_timeout_in_hours)) + } } #[derive(Debug, Deserialize, Clone, PartialEq)] diff --git a/core/lib/config/src/testonly.rs b/core/lib/config/src/testonly.rs index 93d502cc4e8a..3574fc8cc58d 100644 --- a/core/lib/config/src/testonly.rs +++ b/core/lib/config/src/testonly.rs @@ -305,6 +305,7 @@ impl Distribution for EncodeDist { protective_reads_persistence_enabled: self.sample(rng), processing_delay_ms: self.sample(rng), include_indices_and_filters_in_block_cache: self.sample(rng), + merkle_tree_repair_stale_keys: self.sample(rng), } } } @@ -680,6 +681,7 @@ impl Distribution for EncodeDist { tee_support: self.sample(rng), first_tee_processed_batch: L1BatchNumber(rng.gen()), tee_proof_generation_timeout_in_secs: self.sample(rng), + tee_batch_permanently_ignored_timeout_in_hours: self.sample(rng), }, } } @@ -972,6 +974,7 @@ impl Distribution for EncodeDist { max_rows_to_dispatch: self.sample(rng), max_retries: self.sample(rng), use_dummy_inclusion_data: self.sample(rng), + max_concurrent_requests: self.sample(rng), } } } diff --git a/core/lib/constants/Cargo.toml b/core/lib/constants/Cargo.toml index b741b5734902..bc4d1f7bb57f 100644 --- a/core/lib/constants/Cargo.toml +++ b/core/lib/constants/Cargo.toml @@ -12,6 +12,5 @@ categories.workspace = true [dependencies] zksync_basic_types.workspace = true 
-zksync_utils.workspace = true
 once_cell.workspace = true
diff --git a/core/lib/constants/src/trusted_slots.rs b/core/lib/constants/src/trusted_slots.rs
index e5a626d49036..d66b2bfd4729 100644
--- a/core/lib/constants/src/trusted_slots.rs
+++ b/core/lib/constants/src/trusted_slots.rs
@@ -1,6 +1,5 @@
 use once_cell::sync::Lazy;
-use zksync_basic_types::{H256, U256};
-use zksync_utils::h256_to_u256;
+use zksync_basic_types::{h256_to_u256, H256, U256};
 
 ///
 /// Well known-slots (e.g. proxy addresses in popular EIPs).
diff --git a/core/lib/contract_verifier/Cargo.toml b/core/lib/contract_verifier/Cargo.toml
index bdbfa90bf76a..c2cf97826561 100644
--- a/core/lib/contract_verifier/Cargo.toml
+++ b/core/lib/contract_verifier/Cargo.toml
@@ -28,10 +28,16 @@ hex.workspace = true
 serde = { workspace = true, features = ["derive"] }
 tempfile.workspace = true
 regex.workspace = true
+reqwest.workspace = true
 tracing.workspace = true
 semver.workspace = true
+octocrab = { workspace = true, features = ["stream"] }
+futures-util.workspace = true
+rustls.workspace = true
 
 [dev-dependencies]
 zksync_node_test_utils.workspace = true
 zksync_vm_interface.workspace = true
+
+assert_matches.workspace = true
 test-casing.workspace = true
diff --git a/core/lib/contract_verifier/src/compilers/mod.rs b/core/lib/contract_verifier/src/compilers/mod.rs
index a56b4e32d1a1..c82a6575ee4c 100644
--- a/core/lib/contract_verifier/src/compilers/mod.rs
+++ b/core/lib/contract_verifier/src/compilers/mod.rs
@@ -1,18 +1,50 @@
+use std::collections::HashMap;
+
 use anyhow::Context as _;
 use serde::{Deserialize, Serialize};
 use zksync_types::contract_verification_api::CompilationArtifacts;
 
 pub(crate) use self::{
     solc::{Solc, SolcInput},
+    vyper::{Vyper, VyperInput},
     zksolc::{ZkSolc, ZkSolcInput},
-    zkvyper::{ZkVyper, ZkVyperInput},
+    zkvyper::ZkVyper,
 };
 use crate::error::ContractVerifierError;
 
 mod solc;
+mod vyper;
 mod zksolc;
 mod zkvyper;
 
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct StandardJson {
+    pub language: String,
+    pub sources: HashMap<String, Source>,
+    #[serde(default)]
+    settings: Settings,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct Settings {
+    /// The output selection filters.
+    output_selection: Option<serde_json::Value>,
+    /// Other settings (only filled when parsing `StandardJson` input from the request).
+    #[serde(flatten)]
+    other: serde_json::Value,
+}
+
+impl Default for Settings {
+    fn default() -> Self {
+        Self {
+            output_selection: None,
+            other: serde_json::json!({}),
+        }
+    }
+}
+
 #[derive(Debug, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub(crate) struct Source {
@@ -20,6 +52,18 @@ pub(crate) struct Source {
     pub content: String,
 }
 
+/// Users may provide either just the contract name, or the source file name and the contract name joined with ":".
+fn process_contract_name(original_name: &str, extension: &str) -> (String, String) {
+    if let Some((file_name, contract_name)) = original_name.rsplit_once(':') {
+        (file_name.to_owned(), contract_name.to_owned())
+    } else {
+        (
+            format!("{original_name}.{extension}"),
+            original_name.to_owned(),
+        )
+    }
+}
+
 /// Parsing logic shared between `solc` and `zksolc`.
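/// Both toolchains emit standard JSON shaped roughly like
/// `{"errors": [..], "contracts": {"<file>": {"<contract>": {"abi": [..], "evm": {"bytecode": {"object": "<hex>"}}}}}}`,
/// which is the layout the pointer lookups below rely on.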
fn parse_standard_json_output( output: &serde_json::Value, @@ -31,11 +75,16 @@ fn parse_standard_json_output( let errors = errors.as_array().unwrap().clone(); if errors .iter() - .any(|err| err["severity"].as_str().unwrap() == "error") + .any(|err| err["severity"].as_str() == Some("error")) { let error_messages = errors .into_iter() - .map(|err| err["formattedMessage"].clone()) + .filter_map(|err| { + // `formattedMessage` is an optional field + err.get("formattedMessage") + .or_else(|| err.get("message")) + .cloned() + }) .collect(); return Err(ContractVerifierError::CompilationError( serde_json::Value::Array(error_messages), @@ -50,28 +99,35 @@ fn parse_standard_json_output( return Err(ContractVerifierError::MissingContract(contract_name)); }; - let Some(bytecode_str) = contract - .pointer("/evm/bytecode/object") - .context("missing bytecode in solc / zksolc output")? - .as_str() - else { + let Some(bytecode_str) = contract.pointer("/evm/bytecode/object") else { return Err(ContractVerifierError::AbstractContract(contract_name)); }; + let bytecode_str = bytecode_str + .as_str() + .context("unexpected `/evm/bytecode/object` value")?; + // Strip an optional `0x` prefix (output by `vyper`, but not by `solc` / `zksolc`) + let bytecode_str = bytecode_str.strip_prefix("0x").unwrap_or(bytecode_str); let bytecode = hex::decode(bytecode_str).context("invalid bytecode")?; let deployed_bytecode = if get_deployed_bytecode { - let bytecode_str = contract - .pointer("/evm/deployedBytecode/object") - .context("missing deployed bytecode in solc output")? + let Some(bytecode_str) = contract.pointer("/evm/deployedBytecode/object") else { + return Err(ContractVerifierError::AbstractContract(contract_name)); + }; + let bytecode_str = bytecode_str .as_str() - .ok_or(ContractVerifierError::AbstractContract(contract_name))?; + .context("unexpected `/evm/deployedBytecode/object` value")?; + let bytecode_str = bytecode_str.strip_prefix("0x").unwrap_or(bytecode_str); Some(hex::decode(bytecode_str).context("invalid deployed bytecode")?) } else { None }; - let abi = contract["abi"].clone(); - if !abi.is_array() { + let mut abi = contract["abi"].clone(); + if abi.is_null() { + // ABI is undefined for Yul contracts when compiled with standalone `solc`. For uniformity with `zksolc`, + // replace it with an empty array. + abi = serde_json::json!([]); + } else if !abi.is_array() { let err = anyhow::anyhow!( "unexpected value for ABI: {}", serde_json::to_string_pretty(&abi).unwrap() diff --git a/core/lib/contract_verifier/src/compilers/solc.rs b/core/lib/contract_verifier/src/compilers/solc.rs index bb453cb729c2..10adcad3542e 100644 --- a/core/lib/contract_verifier/src/compilers/solc.rs +++ b/core/lib/contract_verifier/src/compilers/solc.rs @@ -1,14 +1,13 @@ use std::{collections::HashMap, path::PathBuf, process::Stdio}; use anyhow::Context; -use serde::{Deserialize, Serialize}; use tokio::io::AsyncWriteExt; use zksync_queued_job_processor::async_trait; use zksync_types::contract_verification_api::{ CompilationArtifacts, SourceCodeData, VerificationIncomingRequest, }; -use super::{parse_standard_json_output, Source}; +use super::{parse_standard_json_output, process_contract_name, Settings, Source, StandardJson}; use crate::{error::ContractVerifierError, resolver::Compiler}; // Here and below, fields are public for testing purposes. 
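As a quick illustration of the shared helper introduced above (a sketch, not part of the diff; `process_contract_name` is the function defined in `compilers/mod.rs`):

#[test]
fn processing_contract_names() {
    // A qualified name splits on the last ':' into (file name, contract name)...
    assert_eq!(
        process_contract_name("contracts/test.sol:Counter", "sol"),
        ("contracts/test.sol".to_owned(), "Counter".to_owned())
    );
    // ...while a bare name gets the toolchain-specific extension appended.
    assert_eq!(
        process_contract_name("Counter", "vy"),
        ("Counter.vy".to_owned(), "Counter".to_owned())
    );
}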
@@ -19,24 +18,6 @@ pub(crate) struct SolcInput {
     pub file_name: String,
 }
 
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub(crate) struct StandardJson {
-    pub language: String,
-    pub sources: HashMap<String, Source>,
-    settings: Settings,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-struct Settings {
-    /// The output selection filters.
-    output_selection: Option<serde_json::Value>,
-    /// Other settings (only filled when parsing `StandardJson` input from the request).
-    #[serde(flatten)]
-    other: serde_json::Value,
-}
-
 #[derive(Debug)]
 pub(crate) struct Solc {
     path: PathBuf,
@@ -50,17 +31,7 @@ impl Solc {
     pub fn build_input(
         req: VerificationIncomingRequest,
     ) -> Result<SolcInput, ContractVerifierError> {
-        // Users may provide either just contract name or
-        // source file name and contract name joined with ":".
-        let (file_name, contract_name) =
-            if let Some((file_name, contract_name)) = req.contract_name.rsplit_once(':') {
-                (file_name.to_string(), contract_name.to_string())
-            } else {
-                (
-                    format!("{}.sol", req.contract_name),
-                    req.contract_name.clone(),
-                )
-            };
+        let (file_name, contract_name) = process_contract_name(&req.contract_name, "sol");
         let default_output_selection = serde_json::json!({
             "*": {
                 "*": [ "abi", "evm.bytecode", "evm.deployedBytecode" ],
diff --git a/core/lib/contract_verifier/src/compilers/vyper.rs b/core/lib/contract_verifier/src/compilers/vyper.rs
new file mode 100644
index 000000000000..59b950f9f17f
--- /dev/null
+++ b/core/lib/contract_verifier/src/compilers/vyper.rs
@@ -0,0 +1,114 @@
+use std::{collections::HashMap, mem, path::PathBuf, process::Stdio};
+
+use anyhow::Context;
+use tokio::io::AsyncWriteExt;
+use zksync_queued_job_processor::async_trait;
+use zksync_types::contract_verification_api::{
+    CompilationArtifacts, SourceCodeData, VerificationIncomingRequest,
+};
+
+use super::{parse_standard_json_output, process_contract_name, Settings, Source, StandardJson};
+use crate::{error::ContractVerifierError, resolver::Compiler};
+
+#[derive(Debug)]
+pub(crate) struct VyperInput {
+    pub contract_name: String,
+    pub file_name: String,
+    pub sources: HashMap<String, String>,
+    pub optimizer_mode: Option<String>,
+}
+
+impl VyperInput {
+    pub fn new(req: VerificationIncomingRequest) -> Result<Self, ContractVerifierError> {
+        let (file_name, contract_name) = process_contract_name(&req.contract_name, "vy");
+
+        let sources = match req.source_code_data {
+            SourceCodeData::VyperMultiFile(s) => s,
+            other => unreachable!("unexpected `SourceCodeData` variant: {other:?}"),
+        };
+        Ok(Self {
+            contract_name,
+            file_name,
+            sources,
+            optimizer_mode: if req.optimization_used {
+                req.optimizer_mode
+            } else {
+                // `none` mode is not the default mode (which is `gas`), so we must specify it explicitly here
+                Some("none".to_owned())
+            },
+        })
+    }
+
+    fn take_standard_json(&mut self) -> StandardJson {
+        let sources = mem::take(&mut self.sources);
+        let sources = sources
+            .into_iter()
+            .map(|(name, content)| (name, Source { content }));
+
+        StandardJson {
+            language: "Vyper".to_owned(),
+            sources: sources.collect(),
+            settings: Settings {
+                output_selection: Some(serde_json::json!({
+                    "*": [ "abi", "evm.bytecode", "evm.deployedBytecode" ],
+                })),
+                other: serde_json::json!({
+                    "optimize": self.optimizer_mode.as_deref(),
+                }),
+            },
+        }
+    }
+}
+
+#[derive(Debug)]
+pub(crate) struct Vyper {
+    path: PathBuf,
+}
+
+impl Vyper {
+    pub fn new(path: PathBuf) -> Self {
+        Self { path }
+    }
+}
+
+#[async_trait]
+impl Compiler<VyperInput> for Vyper {
+    async fn compile(
+        self: Box<Self>,
+        mut input: VyperInput,
+    ) -> Result<CompilationArtifacts, ContractVerifierError> {
+        let mut command =
tokio::process::Command::new(&self.path); + let mut child = command + .arg("--standard-json") + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + .context("cannot spawn vyper")?; + let mut stdin = child.stdin.take().unwrap(); + let standard_json = input.take_standard_json(); + let content = serde_json::to_vec(&standard_json) + .context("cannot encode standard JSON input for vyper")?; + stdin + .write_all(&content) + .await + .context("failed writing standard JSON to vyper stdin")?; + stdin + .flush() + .await + .context("failed flushing standard JSON to vyper")?; + drop(stdin); + + let output = child.wait_with_output().await.context("vyper failed")?; + if output.status.success() { + let output = + serde_json::from_slice(&output.stdout).context("vyper output is not valid JSON")?; + parse_standard_json_output(&output, input.contract_name, input.file_name, true) + } else { + Err(ContractVerifierError::CompilerError( + "vyper", + String::from_utf8_lossy(&output.stderr).to_string(), + )) + } + } +} diff --git a/core/lib/contract_verifier/src/compilers/zksolc.rs b/core/lib/contract_verifier/src/compilers/zksolc.rs index 0d6b5828e31c..ff435e96aeb6 100644 --- a/core/lib/contract_verifier/src/compilers/zksolc.rs +++ b/core/lib/contract_verifier/src/compilers/zksolc.rs @@ -10,7 +10,7 @@ use zksync_types::contract_verification_api::{ CompilationArtifacts, SourceCodeData, VerificationIncomingRequest, }; -use super::{parse_standard_json_output, Source}; +use super::{parse_standard_json_output, process_contract_name, Source}; use crate::{ error::ContractVerifierError, resolver::{Compiler, CompilerPaths}, @@ -85,17 +85,7 @@ impl ZkSolc { pub fn build_input( req: VerificationIncomingRequest, ) -> Result { - // Users may provide either just contract name or - // source file name and contract name joined with ":". 
- let (file_name, contract_name) = - if let Some((file_name, contract_name)) = req.contract_name.rsplit_once(':') { - (file_name.to_string(), contract_name.to_string()) - } else { - ( - format!("{}.sol", req.contract_name), - req.contract_name.clone(), - ) - }; + let (file_name, contract_name) = process_contract_name(&req.contract_name, "sol"); let default_output_selection = serde_json::json!({ "*": { "*": [ "abi" ], diff --git a/core/lib/contract_verifier/src/compilers/zkvyper.rs b/core/lib/contract_verifier/src/compilers/zkvyper.rs index b3dacce64e77..4f7c10214f8a 100644 --- a/core/lib/contract_verifier/src/compilers/zkvyper.rs +++ b/core/lib/contract_verifier/src/compilers/zkvyper.rs @@ -1,21 +1,54 @@ -use std::{collections::HashMap, fs::File, io::Write, path::Path, process::Stdio}; +use std::{ffi::OsString, path, path::Path, process::Stdio}; use anyhow::Context as _; +use tokio::{fs, io::AsyncWriteExt}; use zksync_queued_job_processor::async_trait; -use zksync_types::contract_verification_api::{ - CompilationArtifacts, SourceCodeData, VerificationIncomingRequest, -}; +use zksync_types::contract_verification_api::CompilationArtifacts; +use super::VyperInput; use crate::{ error::ContractVerifierError, resolver::{Compiler, CompilerPaths}, }; -#[derive(Debug)] -pub(crate) struct ZkVyperInput { - pub contract_name: String, - pub sources: HashMap, - pub optimizer_mode: Option, +impl VyperInput { + async fn write_files(&self, root_dir: &Path) -> anyhow::Result> { + let mut paths = Vec::with_capacity(self.sources.len()); + for (name, content) in &self.sources { + let mut name = name.clone(); + if !name.ends_with(".vy") { + name += ".vy"; + } + + let name_path = Path::new(&name); + anyhow::ensure!( + !name_path.is_absolute(), + "absolute contract filename: {name}" + ); + let normal_components = name_path + .components() + .all(|component| matches!(component, path::Component::Normal(_))); + anyhow::ensure!( + normal_components, + "contract filename contains disallowed components: {name}" + ); + + let path = root_dir.join(name_path); + if let Some(prefix) = path.parent() { + fs::create_dir_all(prefix) + .await + .with_context(|| format!("failed creating parent dir for `{name}`"))?; + } + let mut file = fs::File::create(&path) + .await + .with_context(|| format!("failed creating file for `{name}`"))?; + file.write_all(content.as_bytes()) + .await + .with_context(|| format!("failed writing to `{name}`"))?; + paths.push(path.into_os_string()); + } + Ok(paths) + } } #[derive(Debug)] @@ -28,28 +61,6 @@ impl ZkVyper { Self { paths } } - pub fn build_input( - req: VerificationIncomingRequest, - ) -> Result { - // Users may provide either just contract name or - // source file name and contract name joined with ":". 
- let contract_name = if let Some((_, contract_name)) = req.contract_name.rsplit_once(':') { - contract_name.to_owned() - } else { - req.contract_name.clone() - }; - - let sources = match req.source_code_data { - SourceCodeData::VyperMultiFile(s) => s, - other => unreachable!("unexpected `SourceCodeData` variant: {other:?}"), - }; - Ok(ZkVyperInput { - contract_name, - sources, - optimizer_mode: req.optimizer_mode, - }) - } - fn parse_output( output: &serde_json::Value, contract_name: String, @@ -80,10 +91,10 @@ impl ZkVyper { } #[async_trait] -impl Compiler for ZkVyper { +impl Compiler for ZkVyper { async fn compile( self: Box, - input: ZkVyperInput, + input: VyperInput, ) -> Result { let mut command = tokio::process::Command::new(&self.paths.zk); if let Some(o) = input.optimizer_mode.as_ref() { @@ -97,22 +108,15 @@ impl Compiler for ZkVyper { .stdout(Stdio::piped()) .stderr(Stdio::piped()); - let temp_dir = tempfile::tempdir().context("failed creating temporary dir")?; - for (mut name, content) in input.sources { - if !name.ends_with(".vy") { - name += ".vy"; - } - let path = temp_dir.path().join(&name); - if let Some(prefix) = path.parent() { - std::fs::create_dir_all(prefix) - .with_context(|| format!("failed creating parent dir for `{name}`"))?; - } - let mut file = File::create(&path) - .with_context(|| format!("failed creating file for `{name}`"))?; - file.write_all(content.as_bytes()) - .with_context(|| format!("failed writing to `{name}`"))?; - command.arg(path.into_os_string()); - } + let temp_dir = tokio::task::spawn_blocking(tempfile::tempdir) + .await + .context("panicked creating temporary dir")? + .context("failed creating temporary dir")?; + let file_paths = input + .write_files(temp_dir.path()) + .await + .context("failed writing Vyper files to temp dir")?; + command.args(file_paths); let child = command.spawn().context("cannot spawn zkvyper")?; let output = child.wait_with_output().await.context("zkvyper failed")?; @@ -128,3 +132,36 @@ impl Compiler for ZkVyper { } } } + +#[cfg(test)] +mod tests { + use std::collections::HashMap; + + use super::*; + + #[tokio::test] + async fn sanitizing_contract_paths() { + let mut input = VyperInput { + contract_name: "Test".to_owned(), + file_name: "test.vy".to_owned(), + sources: HashMap::from([("/etc/shadow".to_owned(), String::new())]), + optimizer_mode: None, + }; + + let temp_dir = tempfile::TempDir::new().unwrap(); + let err = input + .write_files(temp_dir.path()) + .await + .unwrap_err() + .to_string(); + assert!(err.contains("absolute"), "{err}"); + + input.sources = HashMap::from([("../../../etc/shadow".to_owned(), String::new())]); + let err = input + .write_files(temp_dir.path()) + .await + .unwrap_err() + .to_string(); + assert!(err.contains("disallowed components"), "{err}"); + } +} diff --git a/core/lib/contract_verifier/src/lib.rs b/core/lib/contract_verifier/src/lib.rs index 686bb0d7bdc3..284d9921a674 100644 --- a/core/lib/contract_verifier/src/lib.rs +++ b/core/lib/contract_verifier/src/lib.rs @@ -9,20 +9,21 @@ use std::{ use anyhow::Context as _; use chrono::Utc; use ethabi::{Contract, Token}; +use resolver::{GitHubCompilerResolver, ResolverMultiplexer}; use tokio::time; use zksync_dal::{contract_verification_dal::DeployedContractData, ConnectionPool, Core, CoreDal}; use zksync_queued_job_processor::{async_trait, JobProcessor}; use zksync_types::{ + bytecode::{trim_padded_evm_bytecode, BytecodeMarker}, contract_verification_api::{ self as api, CompilationArtifacts, VerificationIncomingRequest, VerificationInfo, 
         VerificationRequest,
     },
     Address, CONTRACT_DEPLOYER_ADDRESS,
 };
-use zksync_utils::bytecode::{prepare_evm_bytecode, BytecodeMarker};
 
 use crate::{
-    compilers::{Solc, ZkSolc, ZkVyper},
+    compilers::{Solc, VyperInput, ZkSolc},
     error::ContractVerifierError,
     metrics::API_CONTRACT_VERIFIER_METRICS,
     resolver::{CompilerResolver, EnvCompilerResolver},
@@ -47,7 +48,6 @@ struct ZkCompilerVersions {
 #[derive(Debug)]
 enum VersionedCompiler {
     Solc(String),
-    #[allow(dead_code)] // TODO (EVM-864): add vyper support
     Vyper(String),
     ZkSolc(ZkCompilerVersions),
     ZkVyper(ZkCompilerVersions),
@@ -122,12 +122,20 @@ impl ContractVerifier {
         compilation_timeout: Duration,
         connection_pool: ConnectionPool<Core>,
     ) -> anyhow::Result<Self> {
-        Self::with_resolver(
-            compilation_timeout,
-            connection_pool,
-            Arc::<EnvCompilerResolver>::default(),
-        )
-        .await
+        let env_resolver = Arc::<EnvCompilerResolver>::default();
+        let gh_resolver = Arc::new(GitHubCompilerResolver::new().await?);
+        let mut resolver = ResolverMultiplexer::new(env_resolver);
+
+        // Kill switch: if anything goes wrong with the GitHub resolver, we can disable it without having to roll back.
+        // TODO: Remove once the GitHub resolver is proven to be stable.
+        let disable_gh_resolver = std::env::var("DISABLE_GITHUB_RESOLVER").is_ok();
+        if !disable_gh_resolver {
+            resolver = resolver.with_resolver(gh_resolver);
+        } else {
+            tracing::warn!("GitHub resolver was disabled via the DISABLE_GITHUB_RESOLVER env variable")
+        }
+
+        Self::with_resolver(compilation_timeout, connection_pool, Arc::new(resolver)).await
     }
 
     async fn with_resolver(
@@ -135,21 +143,42 @@ impl ContractVerifier {
         connection_pool: ConnectionPool<Core>,
         compiler_resolver: Arc<dyn CompilerResolver>,
     ) -> anyhow::Result<Self> {
-        let this = Self {
+        Self::sync_compiler_versions(compiler_resolver.as_ref(), &connection_pool).await?;
+        Ok(Self {
             compilation_timeout,
             contract_deployer: zksync_contracts::deployer_contract(),
             connection_pool,
             compiler_resolver,
-        };
-        this.sync_compiler_versions().await?;
-        Ok(this)
+        })
+    }
+
+    /// Returns a future that periodically updates the supported compiler versions
+    /// in the database.
+    pub fn sync_compiler_versions_task(
+        &self,
+    ) -> impl std::future::Future<Output = anyhow::Result<()>> {
+        const UPDATE_INTERVAL: Duration = Duration::from_secs(60 * 60); // 1 hour.
+
+        let resolver = self.compiler_resolver.clone();
+        let pool = self.connection_pool.clone();
+        async move {
+            loop {
+                tracing::info!("Updating compiler versions");
+                if let Err(err) = Self::sync_compiler_versions(resolver.as_ref(), &pool).await {
+                    tracing::error!("Failed to sync compiler versions: {:?}", err);
+                }
+                tokio::time::sleep(UPDATE_INTERVAL).await;
+            }
+        }
    }
 
     /// Synchronizes compiler versions.
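    /// Fetches the set of versions reported by the resolver (for the multiplexer, the union over
    /// all inner resolvers) and persists each list in a single DB transaction, so readers never
    /// observe a partially updated set.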
 #[tracing::instrument(level = "debug", skip_all)]
-    async fn sync_compiler_versions(&self) -> anyhow::Result<()> {
-        let supported_versions = self
-            .compiler_resolver
+    async fn sync_compiler_versions(
+        resolver: &dyn CompilerResolver,
+        pool: &ConnectionPool<Core>,
+    ) -> anyhow::Result<()> {
+        let supported_versions = resolver
             .supported_versions()
             .await
             .context("cannot get supported compilers")?;
@@ -164,26 +193,23 @@ impl ContractVerifier {
             "persisting supported compiler versions"
         );
-        let mut storage = self
-            .connection_pool
-            .connection_tagged("contract_verifier")
-            .await?;
+        let mut storage = pool.connection_tagged("contract_verifier").await?;
         let mut transaction = storage.start_transaction().await?;
         transaction
             .contract_verification_dal()
-            .set_zksolc_versions(&supported_versions.zksolc)
+            .set_zksolc_versions(&supported_versions.zksolc.into_iter().collect::<Vec<_>>())
             .await?;
         transaction
             .contract_verification_dal()
-            .set_solc_versions(&supported_versions.solc)
+            .set_solc_versions(&supported_versions.solc.into_iter().collect::<Vec<_>>())
             .await?;
         transaction
             .contract_verification_dal()
-            .set_zkvyper_versions(&supported_versions.zkvyper)
+            .set_zkvyper_versions(&supported_versions.zkvyper.into_iter().collect::<Vec<_>>())
             .await?;
         transaction
             .contract_verification_dal()
-            .set_vyper_versions(&supported_versions.vyper)
+            .set_vyper_versions(&supported_versions.vyper.into_iter().collect::<Vec<_>>())
             .await?;
         transaction.commit().await?;
         Ok(())
     }
@@ -231,7 +257,7 @@ impl ContractVerifier {
 
         let deployed_bytecode = match bytecode_marker {
             BytecodeMarker::EraVm => deployed_contract.bytecode.as_slice(),
-            BytecodeMarker::Evm => prepare_evm_bytecode(&deployed_contract.bytecode)
+            BytecodeMarker::Evm => trim_padded_evm_bytecode(&deployed_contract.bytecode)
                 .context("invalid stored EVM bytecode")?,
         };
@@ -292,7 +318,7 @@ impl ContractVerifier {
     ) -> Result<CompilationArtifacts, ContractVerifierError> {
         let zkvyper = self.compiler_resolver.resolve_zkvyper(version).await?;
         tracing::debug!(?zkvyper, ?version, "resolved compiler");
-        let input = ZkVyper::build_input(req)?;
+        let input = VyperInput::new(req)?;
         time::timeout(self.compilation_timeout, zkvyper.compile(input))
             .await
             .map_err(|_| ContractVerifierError::CompilationTimeout)?
@@ -312,6 +338,20 @@ impl ContractVerifier {
             .map_err(|_| ContractVerifierError::CompilationTimeout)?
     }
 
+    async fn compile_vyper(
+        &self,
+        version: &str,
+        req: VerificationIncomingRequest,
+    ) -> Result<CompilationArtifacts, ContractVerifierError> {
+        let vyper = self.compiler_resolver.resolve_vyper(version).await?;
+        tracing::debug!(?vyper, ?req.compiler_versions, "resolved compiler");
+        let input = VyperInput::new(req)?;
+
+        time::timeout(self.compilation_timeout, vyper.compile(input))
+            .await
+            .map_err(|_| ContractVerifierError::CompilationTimeout)?
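+        // Note: `time::timeout` yields an error only when the deadline elapses, which is
+        // surfaced here as `CompilationTimeout`; compiler failures propagate via the inner `Result`.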
+ } + #[tracing::instrument(level = "debug", skip_all)] async fn compile( &self, @@ -340,11 +380,7 @@ impl ContractVerifier { match &compiler { VersionedCompiler::Solc(version) => self.compile_solc(version, req).await, - VersionedCompiler::Vyper(_) => { - // TODO (EVM-864): add vyper support - let err = anyhow::anyhow!("vyper toolchain is not yet supported for EVM contracts"); - return Err(err.into()); - } + VersionedCompiler::Vyper(version) => self.compile_vyper(version, req).await, VersionedCompiler::ZkSolc(version) => self.compile_zksolc(version, req).await, VersionedCompiler::ZkVyper(version) => self.compile_zkvyper(version, req).await, } diff --git a/core/lib/contract_verifier/src/resolver.rs b/core/lib/contract_verifier/src/resolver.rs deleted file mode 100644 index 34a70b759797..000000000000 --- a/core/lib/contract_verifier/src/resolver.rs +++ /dev/null @@ -1,237 +0,0 @@ -use std::{ - fmt, - path::{Path, PathBuf}, -}; - -use anyhow::Context as _; -use tokio::fs; -use zksync_queued_job_processor::async_trait; -use zksync_types::contract_verification_api::CompilationArtifacts; -use zksync_utils::env::Workspace; - -use crate::{ - compilers::{Solc, SolcInput, ZkSolc, ZkSolcInput, ZkVyper, ZkVyperInput}, - error::ContractVerifierError, - ZkCompilerVersions, -}; - -#[derive(Debug, Clone, Copy)] -enum CompilerType { - Solc, - ZkSolc, - Vyper, - ZkVyper, -} - -impl CompilerType { - fn as_str(self) -> &'static str { - match self { - Self::Solc => "solc", - Self::ZkSolc => "zksolc", - Self::Vyper => "vyper", - Self::ZkVyper => "zkvyper", - } - } - - /// Returns the absolute path to the compiler binary. - fn bin_path_unchecked(self, home_dir: &Path, version: &str) -> PathBuf { - let compiler_dir = match self { - Self::Solc => "solc-bin", - Self::ZkSolc => "zksolc-bin", - Self::Vyper => "vyper-bin", - Self::ZkVyper => "zkvyper-bin", - }; - home_dir - .join("etc") - .join(compiler_dir) - .join(version) - .join(self.as_str()) - } - - async fn bin_path( - self, - home_dir: &Path, - version: &str, - ) -> Result { - let path = self.bin_path_unchecked(home_dir, version); - if !fs::try_exists(&path) - .await - .with_context(|| format!("failed accessing `{}`", self.as_str()))? - { - return Err(ContractVerifierError::UnknownCompilerVersion( - self.as_str(), - version.to_owned(), - )); - } - Ok(path) - } -} - -/// Compiler versions supported by a [`CompilerResolver`]. -#[derive(Debug)] -pub(crate) struct SupportedCompilerVersions { - pub solc: Vec, - pub zksolc: Vec, - pub vyper: Vec, - pub zkvyper: Vec, -} - -impl SupportedCompilerVersions { - pub fn lacks_any_compiler(&self) -> bool { - self.solc.is_empty() - || self.zksolc.is_empty() - || self.vyper.is_empty() - || self.zkvyper.is_empty() - } -} - -#[derive(Debug, Clone)] -pub(crate) struct CompilerPaths { - /// Path to the base (non-zk) compiler. - pub base: PathBuf, - /// Path to the zk compiler. - pub zk: PathBuf, -} - -/// Encapsulates compiler paths resolution. -#[async_trait] -pub(crate) trait CompilerResolver: fmt::Debug + Send + Sync { - /// Returns compiler versions supported by this resolver. - /// - /// # Errors - /// - /// Returned errors are assumed to be fatal. - async fn supported_versions(&self) -> anyhow::Result; - - /// Resolves a `solc` compiler. - async fn resolve_solc( - &self, - version: &str, - ) -> Result>, ContractVerifierError>; - - /// Resolves a `zksolc` compiler. - async fn resolve_zksolc( - &self, - version: &ZkCompilerVersions, - ) -> Result>, ContractVerifierError>; - - /// Resolves a `zkvyper` compiler. 
- async fn resolve_zkvyper( - &self, - version: &ZkCompilerVersions, - ) -> Result>, ContractVerifierError>; -} - -/// Encapsulates a one-off compilation process. -#[async_trait] -pub(crate) trait Compiler: Send + fmt::Debug { - /// Performs compilation. - async fn compile( - self: Box, - input: In, - ) -> Result; -} - -/// Default [`CompilerResolver`] using pre-downloaded compilers in the `/etc` subdirectories (relative to the workspace). -#[derive(Debug)] -pub(crate) struct EnvCompilerResolver { - home_dir: PathBuf, -} - -impl Default for EnvCompilerResolver { - fn default() -> Self { - Self { - home_dir: Workspace::locate().core(), - } - } -} - -impl EnvCompilerResolver { - async fn read_dir(&self, dir: &str) -> anyhow::Result> { - let mut dir_entries = fs::read_dir(self.home_dir.join(dir)) - .await - .context("failed reading dir")?; - let mut versions = vec![]; - while let Some(entry) = dir_entries.next_entry().await? { - let Ok(file_type) = entry.file_type().await else { - continue; - }; - if file_type.is_dir() { - if let Ok(name) = entry.file_name().into_string() { - versions.push(name); - } - } - } - Ok(versions) - } -} - -#[async_trait] -impl CompilerResolver for EnvCompilerResolver { - async fn supported_versions(&self) -> anyhow::Result { - Ok(SupportedCompilerVersions { - solc: self - .read_dir("etc/solc-bin") - .await - .context("failed reading solc dir")?, - zksolc: self - .read_dir("etc/zksolc-bin") - .await - .context("failed reading zksolc dir")?, - vyper: self - .read_dir("etc/vyper-bin") - .await - .context("failed reading vyper dir")?, - zkvyper: self - .read_dir("etc/zkvyper-bin") - .await - .context("failed reading zkvyper dir")?, - }) - } - - async fn resolve_solc( - &self, - version: &str, - ) -> Result>, ContractVerifierError> { - let solc_path = CompilerType::Solc.bin_path(&self.home_dir, version).await?; - Ok(Box::new(Solc::new(solc_path))) - } - - async fn resolve_zksolc( - &self, - version: &ZkCompilerVersions, - ) -> Result>, ContractVerifierError> { - let zksolc_version = &version.zk; - let zksolc_path = CompilerType::ZkSolc - .bin_path(&self.home_dir, zksolc_version) - .await?; - let solc_path = CompilerType::Solc - .bin_path(&self.home_dir, &version.base) - .await?; - let compiler_paths = CompilerPaths { - base: solc_path, - zk: zksolc_path, - }; - Ok(Box::new(ZkSolc::new( - compiler_paths, - zksolc_version.to_owned(), - ))) - } - - async fn resolve_zkvyper( - &self, - version: &ZkCompilerVersions, - ) -> Result>, ContractVerifierError> { - let zkvyper_path = CompilerType::ZkVyper - .bin_path(&self.home_dir, &version.zk) - .await?; - let vyper_path = CompilerType::Vyper - .bin_path(&self.home_dir, &version.base) - .await?; - let compiler_paths = CompilerPaths { - base: vyper_path, - zk: zkvyper_path, - }; - Ok(Box::new(ZkVyper::new(compiler_paths))) - } -} diff --git a/core/lib/contract_verifier/src/resolver/env.rs b/core/lib/contract_verifier/src/resolver/env.rs new file mode 100644 index 000000000000..798efde64348 --- /dev/null +++ b/core/lib/contract_verifier/src/resolver/env.rs @@ -0,0 +1,132 @@ +use std::{collections::HashSet, path::PathBuf}; + +use anyhow::Context as _; +use tokio::fs; +use zksync_queued_job_processor::async_trait; +use zksync_utils::env::Workspace; + +use crate::{ + compilers::{Solc, SolcInput, Vyper, VyperInput, ZkSolc, ZkSolcInput, ZkVyper}, + error::ContractVerifierError, + resolver::{ + Compiler, CompilerPaths, CompilerResolver, CompilerType, SupportedCompilerVersions, + }, + ZkCompilerVersions, +}; + +/// Default 
[`CompilerResolver`] using pre-downloaded compilers in the `/etc` subdirectories (relative to the workspace). +#[derive(Debug)] +pub(crate) struct EnvCompilerResolver { + home_dir: PathBuf, +} + +impl Default for EnvCompilerResolver { + fn default() -> Self { + Self { + home_dir: Workspace::locate().core(), + } + } +} + +impl EnvCompilerResolver { + async fn read_dir(&self, dir: &str) -> anyhow::Result> { + let mut dir_entries = fs::read_dir(self.home_dir.join(dir)) + .await + .context("failed reading dir")?; + let mut versions = HashSet::new(); + while let Some(entry) = dir_entries.next_entry().await? { + let Ok(file_type) = entry.file_type().await else { + continue; + }; + if file_type.is_dir() { + if let Ok(name) = entry.file_name().into_string() { + versions.insert(name); + } + } + } + Ok(versions) + } +} + +#[async_trait] +impl CompilerResolver for EnvCompilerResolver { + async fn supported_versions(&self) -> anyhow::Result { + let versions = SupportedCompilerVersions { + solc: self + .read_dir("etc/solc-bin") + .await + .context("failed reading solc dir")?, + zksolc: self + .read_dir("etc/zksolc-bin") + .await + .context("failed reading zksolc dir")?, + vyper: self + .read_dir("etc/vyper-bin") + .await + .context("failed reading vyper dir")?, + zkvyper: self + .read_dir("etc/zkvyper-bin") + .await + .context("failed reading zkvyper dir")?, + }; + tracing::info!("EnvResolver supported versions: {:?}", versions); + + Ok(versions) + } + + async fn resolve_solc( + &self, + version: &str, + ) -> Result>, ContractVerifierError> { + let solc_path = CompilerType::Solc.bin_path(&self.home_dir, version).await?; + Ok(Box::new(Solc::new(solc_path))) + } + + async fn resolve_zksolc( + &self, + version: &ZkCompilerVersions, + ) -> Result>, ContractVerifierError> { + let zksolc_version = &version.zk; + let zksolc_path = CompilerType::ZkSolc + .bin_path(&self.home_dir, zksolc_version) + .await?; + let solc_path = CompilerType::Solc + .bin_path(&self.home_dir, &version.base) + .await?; + let compiler_paths = CompilerPaths { + base: solc_path, + zk: zksolc_path, + }; + Ok(Box::new(ZkSolc::new( + compiler_paths, + zksolc_version.to_owned(), + ))) + } + + async fn resolve_vyper( + &self, + version: &str, + ) -> Result>, ContractVerifierError> { + let vyper_path = CompilerType::Vyper + .bin_path(&self.home_dir, version) + .await?; + Ok(Box::new(Vyper::new(vyper_path))) + } + + async fn resolve_zkvyper( + &self, + version: &ZkCompilerVersions, + ) -> Result>, ContractVerifierError> { + let zkvyper_path = CompilerType::ZkVyper + .bin_path(&self.home_dir, &version.zk) + .await?; + let vyper_path = CompilerType::Vyper + .bin_path(&self.home_dir, &version.base) + .await?; + let compiler_paths = CompilerPaths { + base: vyper_path, + zk: zkvyper_path, + }; + Ok(Box::new(ZkVyper::new(compiler_paths))) + } +} diff --git a/core/lib/contract_verifier/src/resolver/github/gh_api.rs b/core/lib/contract_verifier/src/resolver/github/gh_api.rs new file mode 100644 index 000000000000..8c9ac6723249 --- /dev/null +++ b/core/lib/contract_verifier/src/resolver/github/gh_api.rs @@ -0,0 +1,240 @@ +//! A thin wrapper over the GitHub API for the purposes of the contract verifier. + +use std::{collections::HashMap, sync::Arc, time::Duration}; + +use anyhow::Context as _; +use futures_util::TryStreamExt as _; +use octocrab::service::middleware::retry::RetryConfig; + +/// Representation of releases of the compiler. 
+/// The main difference from the `CompilerType` used in the `resolver` module is that
+/// we treat `ZkVmSolc` differently, as it's stored in a different repository.
+#[derive(Debug, Clone, Copy)]
+pub(super) enum CompilerGitHubRelease {
+    /// "Upstream" Solidity
+    Solc,
+    /// "Upstream" Vyper
+    Vyper,
+    /// ZKsync's fork of the Solidity compiler
+    /// Used as a dependency for ZkSolc
+    ZkVmSolc,
+    /// Solidity compiler for EraVM
+    ZkSolc,
+    /// Vyper compiler for EraVM
+    ZkVyper,
+}
+
+impl CompilerGitHubRelease {
+    fn organization(self) -> &'static str {
+        match self {
+            Self::Solc => "ethereum",
+            Self::Vyper => "vyperlang",
+            Self::ZkVmSolc => "matter-labs",
+            Self::ZkSolc => "matter-labs",
+            Self::ZkVyper => "matter-labs",
+        }
+    }
+
+    fn repo(self) -> &'static str {
+        match self {
+            Self::Solc => "solidity",
+            Self::Vyper => "vyper",
+            Self::ZkVmSolc => "era-solidity",
+            Self::ZkSolc => "era-compiler-solidity",
+            Self::ZkVyper => "era-compiler-vyper",
+        }
+    }
+
+    /// Checks whether a version is blacklisted, i.e., whether it shouldn't be available in the contract verifier.
+    fn is_version_blacklisted(self, version: &str) -> bool {
+        match self {
+            Self::Solc => {
+                let Ok(version) = semver::Version::parse(version) else {
+                    tracing::error!(
+                        "Incorrect version passed to blacklist check: {self:?}:{version}"
+                    );
+                    return true;
+                };
+                // The earliest supported version is 0.4.10.
+                version < semver::Version::new(0, 4, 10)
+            }
+            Self::Vyper => {
+                let Ok(version) = semver::Version::parse(version) else {
+                    tracing::error!(
+                        "Incorrect version passed to blacklist check: {self:?}:{version}"
+                    );
+                    return true;
+                };
+
+                // Versions below `0.3` are not supported.
+                if version < semver::Version::new(0, 3, 0) {
+                    return true;
+                }
+
+                // In `0.3.x` we only allow `0.3.3`, `0.3.9`, and `0.3.10`.
+                if version.minor == 3 {
+                    return !matches!(version.patch, 3 | 9 | 10);
+                }
+
+                false
+            }
+            _ => false,
+        }
+    }
+
+    fn extract_version(self, tag_name: &str) -> Option<String> {
+        match self {
+            Self::Solc | Self::Vyper => {
+                // Solidity and Vyper releases are tagged with version numbers in the form `vX.Y.Z`.
+                tag_name
+                    .strip_prefix('v')
+                    .filter(|v| semver::Version::parse(v).is_ok())
+                    .map(|v| v.to_string())
+            }
+            Self::ZkVmSolc => {
+                // ZkVmSolc releases are tagged with version numbers in the form `X.Y.Z-A.B.C`, where
+                // `X.Y.Z` is the version of the Solidity compiler, and `A.B.C` is the version of the ZKsync fork.
+                if let Some((main, fork)) = tag_name.split_once('-') {
+                    if semver::Version::parse(main).is_ok() && semver::Version::parse(fork).is_ok()
+                    {
+                        // In the contract verifier, our fork is prefixed with `zkVM-`.
+                        return Some(format!("zkVM-{tag_name}"));
+                    }
+                }
+                None
+            }
+            Self::ZkSolc | Self::ZkVyper => {
+                // zksolc and zkvyper releases are tagged with version numbers in the form `X.Y.Z` (without 'v').
+                if semver::Version::parse(tag_name).is_ok() {
+                    Some(tag_name.to_string())
+                } else {
+                    None
+                }
+            }
+        }
+    }
+
+    fn match_asset(&self, asset_url: &str) -> bool {
+        match self {
+            Self::Solc => asset_url.contains("solc-static-linux"),
+            Self::Vyper => asset_url.contains(".linux"),
+            Self::ZkVmSolc => asset_url.contains("solc-linux-amd64"),
+            Self::ZkSolc => asset_url.contains("zksolc-linux-amd64-musl"),
+            Self::ZkVyper => asset_url.contains("zkvyper-linux-amd64-musl"),
+        }
+    }
+}
+
+/// A thin wrapper over the GitHub API for the purposes of the contract verifier.
+#[derive(Debug)]
+pub(super) struct GitHubApi {
+    client: Arc<octocrab::Octocrab>,
+}
+
+impl GitHubApi {
+    /// Creates a new instance of the GitHub API wrapper.
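+    /// Installs a default rustls crypto provider (required by `octocrab`) and configures
+    /// retries plus connect/read timeouts for resilience against flaky GitHub availability.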
+ pub(super) fn new() -> Self { + // Octocrab requires rustls to be configured. + rustls::crypto::aws_lc_rs::default_provider() + .install_default() + .ok(); + + let client = Arc::new( + octocrab::Octocrab::builder() + .add_retry_config(Self::retry_config()) + .set_connect_timeout(Some(Self::connect_timeout())) + .set_read_timeout(Some(Self::read_timeout())) + .build() + .unwrap(), + ); + Self { client } + } + + fn retry_config() -> RetryConfig { + RetryConfig::Simple(4) + } + + fn connect_timeout() -> Duration { + Duration::from_secs(10) + } + + fn read_timeout() -> Duration { + Duration::from_secs(60) + } + + /// Returns versions for both upstream and our fork of solc. + pub async fn solc_versions(&self) -> anyhow::Result> { + let mut versions = self + .extract_versions(CompilerGitHubRelease::Solc) + .await + .context("Can't fetch upstream solc versions")?; + versions.extend( + self.extract_versions(CompilerGitHubRelease::ZkVmSolc) + .await + .context("Can't fetch zkVM solc versions")?, + ); + Ok(versions) + } + + pub async fn zksolc_versions(&self) -> anyhow::Result> { + self.extract_versions(CompilerGitHubRelease::ZkSolc).await + } + + pub async fn vyper_versions(&self) -> anyhow::Result> { + self.extract_versions(CompilerGitHubRelease::Vyper).await + } + + pub async fn zkvyper_versions(&self) -> anyhow::Result> { + self.extract_versions(CompilerGitHubRelease::ZkVyper).await + } + + /// Will scan all the releases for a specific compiler. + async fn extract_versions( + &self, + compiler: CompilerGitHubRelease, + ) -> anyhow::Result> { + // Create a stream over all the versions to not worry about pagination. + let releases = self + .client + .repos(compiler.organization(), compiler.repo()) + .releases() + .list() + .per_page(100) + .send() + .await? + .into_stream(&self.client); + tokio::pin!(releases); + + // Go through all the releases, filter ones that match the version. + // For matching versions, find a suitable asset and store its URL. + let mut versions = HashMap::new(); + while let Some(release) = releases.try_next().await? { + // Skip pre-releases. 
+ if release.prerelease { + continue; + } + + if let Some(version) = compiler.extract_version(&release.tag_name) { + if compiler.is_version_blacklisted(&version) { + tracing::debug!("Skipping {compiler:?}:{version} due to blacklist"); + continue; + } + + let mut found = false; + for asset in release.assets { + if compiler.match_asset(asset.browser_download_url.as_str()) { + tracing::info!("Discovered release {compiler:?}:{version}"); + versions.insert(version.clone(), asset.browser_download_url.clone()); + found = true; + break; + } + } + if !found { + tracing::warn!("Didn't find a matching artifact for {compiler:?}:{version}"); + } + } + } + + Ok(versions) + } +} diff --git a/core/lib/contract_verifier/src/resolver/github/mod.rs b/core/lib/contract_verifier/src/resolver/github/mod.rs new file mode 100644 index 000000000000..a50d0151b7ff --- /dev/null +++ b/core/lib/contract_verifier/src/resolver/github/mod.rs @@ -0,0 +1,311 @@ +use std::{ + collections::HashMap, + time::{Duration, Instant}, +}; + +use anyhow::Context as _; +use tokio::{io::AsyncWriteExt as _, sync::RwLock}; +use zksync_queued_job_processor::async_trait; + +use self::gh_api::GitHubApi; +use crate::{ + compilers::{Solc, SolcInput, Vyper, VyperInput, ZkSolc, ZkSolcInput, ZkVyper}, + error::ContractVerifierError, + resolver::{ + Compiler, CompilerPaths, CompilerResolver, CompilerType, SupportedCompilerVersions, + }, + ZkCompilerVersions, +}; + +mod gh_api; + +/// [`CompilerResolver`] that can dynamically download missing compilers from GitHub releases. +/// +/// Note: this resolver does not interact with [`EnvCompilerResolver`](super::EnvCompilerResolver). +/// This is important for the context of zksolc/zkvyper, as there we have two separate compilers +/// required for compilation. This resolver will download both of them, even if one of the versions +/// is available in the `EnvCompilerResolver`. +#[derive(Debug)] +pub(crate) struct GitHubCompilerResolver { + /// We expect that contract-verifier will be running in docker without any persistent storage, + /// so we explicitly don't expect any artifacts to survive restart. + artifacts_dir: tempfile::TempDir, + gh_client: GitHubApi, + client: reqwest::Client, + supported_versions: RwLock, + /// List of downloads performed right now. + /// `broadcast` receiver can be used to wait until the download is finished. + active_downloads: RwLock>>, +} + +#[derive(Debug)] +struct SupportedVersions { + /// Holds versions for both upstream and zkVM solc. + solc_versions: HashMap, + zksolc_versions: HashMap, + vyper_versions: HashMap, + zkvyper_versions: HashMap, + last_updated: Instant, +} + +impl Default for SupportedVersions { + fn default() -> Self { + Self::new() + } +} + +impl SupportedVersions { + // Note: We assume that contract verifier will run the task to update supported versions + // rarely, but we still want to protect ourselves from accidentally spamming GitHub API. + // So, this interval is smaller than the expected time between updates (this way we don't + // run into an issue where intervals are slightly out of sync, causing a delay in "real" + // update time). 
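+    // (For example, with the verifier's hourly version-sync task, a 10-minute cache means each
+    // task run finds the cache expired and performs at most one full GitHub sweep.)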
+ const CACHE_INTERVAL: Duration = Duration::from_secs(10 * 60); // 10 minutes + + fn new() -> Self { + Self { + solc_versions: HashMap::new(), + zksolc_versions: HashMap::new(), + vyper_versions: HashMap::new(), + zkvyper_versions: HashMap::new(), + last_updated: Instant::now(), + } + } + + fn is_outdated(&self) -> bool { + self.last_updated.elapsed() > Self::CACHE_INTERVAL + } + + async fn update(&mut self, gh_client: &GitHubApi) -> anyhow::Result<()> { + // Non-atomic update is fine here: the fields are independent, so if + // at least one update succeeds, it's worth persisting. We won't be changing + // the last update timestamp in case of failure though, so it will be retried + // next time. + self.solc_versions = gh_client + .solc_versions() + .await + .context("failed fetching solc versions")?; + self.zksolc_versions = gh_client + .zksolc_versions() + .await + .context("failed fetching zksolc versions")?; + self.vyper_versions = gh_client + .vyper_versions() + .await + .context("failed fetching vyper versions")?; + self.zkvyper_versions = gh_client + .zkvyper_versions() + .await + .context("failed fetching zkvyper versions")?; + self.last_updated = Instant::now(); + Ok(()) + } + + async fn update_if_needed(&mut self, gh_client: &GitHubApi) -> anyhow::Result<()> { + if self.is_outdated() { + tracing::info!("GH compiler versions cache outdated, updating"); + self.update(gh_client).await?; + } + Ok(()) + } +} + +impl GitHubCompilerResolver { + pub async fn new() -> anyhow::Result { + let artifacts_dir = tempfile::tempdir().context("failed creating temp dir")?; + let gh_client = GitHubApi::new(); + let mut supported_versions = SupportedVersions::default(); + if let Err(err) = supported_versions.update(&gh_client).await { + // We don't want the resolver to fail at creation if versions can't be fetched. + // It shouldn't bring down the whole application, so the expectation here is that + // the versions will be fetched later. + tracing::error!("failed syncing compiler versions at start: {:?}", err); + } + + Ok(Self { + artifacts_dir, + gh_client, + client: reqwest::Client::new(), + supported_versions: RwLock::new(supported_versions), + active_downloads: RwLock::default(), + }) + } +} + +impl GitHubCompilerResolver { + async fn download_version_if_needed( + &self, + compiler: CompilerType, + version: &str, + ) -> anyhow::Result<()> { + // We need to check the lock first, because the compiler may still be downloading. + // We must hold the lock until we know if we need to download the compiler. + let mut lock = self.active_downloads.write().await; + if let Some(rx) = lock.get(&(compiler, version.to_string())) { + let mut rx = rx.resubscribe(); + drop(lock); + tracing::debug!( + "Waiting for {}:{} download to finish", + compiler.as_str(), + version + ); + rx.recv().await?; + return Ok(()); + } + + if compiler.exists(self.artifacts_dir.path(), version).await? { + tracing::debug!("Compiler {}:{} exists", compiler.as_str(), version); + return Ok(()); + } + + // Mark the compiler as downloading. 
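+        // A `broadcast` channel is used so that any number of concurrent resolve calls can
+        // subscribe and wait for this single download; the entry is removed once it finishes.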
+ let (tx, rx) = tokio::sync::broadcast::channel(1); + lock.insert((compiler, version.to_string()), rx); + drop(lock); + + tracing::info!("Downloading {}:{}", compiler.as_str(), version); + let lock = self.supported_versions.read().await; + let versions = match compiler { + CompilerType::Solc => &lock.solc_versions, + CompilerType::ZkSolc => &lock.zksolc_versions, + CompilerType::Vyper => &lock.vyper_versions, + CompilerType::ZkVyper => &lock.zkvyper_versions, + }; + + let version_url = versions + .get(version) + .ok_or_else(|| { + ContractVerifierError::UnknownCompilerVersion("solc", version.to_owned()) + })? + .clone(); + drop(lock); + let path = compiler.bin_path_unchecked(self.artifacts_dir.path(), version); + + let response = self.client.get(version_url).send().await?; + let body = response.bytes().await?; + + tracing::info!("Saving {}:{} to {:?}", compiler.as_str(), version, path); + + tokio::fs::create_dir_all(path.parent().unwrap()) + .await + .context("failed to create dir")?; + + let mut file = tokio::fs::File::create_new(path) + .await + .context("failed to create file")?; + file.write_all(&body) + .await + .context("failed to write to file")?; + file.flush().await.context("failed to flush file")?; + + // On UNIX-like systems, make file executable. + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let mut perms = file.metadata().await?.permissions(); + perms.set_mode(0o700); // Only owner can execute and access. + file.set_permissions(perms).await?; + } + + tracing::info!("Finished downloading {}:{}", compiler.as_str(), version); + + // Notify other waiters that the compiler is downloaded. + tx.send(()).ok(); + let mut lock = self.active_downloads.write().await; + lock.remove(&(compiler, version.to_string())); + drop(lock); + + Ok(()) + } +} + +#[async_trait] +impl CompilerResolver for GitHubCompilerResolver { + async fn supported_versions(&self) -> anyhow::Result { + let mut lock = self.supported_versions.write().await; + lock.update_if_needed(&self.gh_client).await?; + + let versions = SupportedCompilerVersions { + solc: lock.solc_versions.keys().cloned().collect(), + zksolc: lock.zksolc_versions.keys().cloned().collect(), + vyper: lock.vyper_versions.keys().cloned().collect(), + zkvyper: lock.zkvyper_versions.keys().cloned().collect(), + }; + tracing::info!("GitHubResolver supported versions: {:?}", versions); + Ok(versions) + } + + async fn resolve_solc( + &self, + version: &str, + ) -> Result>, ContractVerifierError> { + self.download_version_if_needed(CompilerType::Solc, version) + .await?; + + let solc_path = CompilerType::Solc + .bin_path(self.artifacts_dir.path(), version) + .await?; + Ok(Box::new(Solc::new(solc_path))) + } + + async fn resolve_zksolc( + &self, + version: &ZkCompilerVersions, + ) -> Result>, ContractVerifierError> { + self.download_version_if_needed(CompilerType::Solc, &version.base) + .await?; + self.download_version_if_needed(CompilerType::ZkSolc, &version.zk) + .await?; + + let zksolc_version = &version.zk; + let zksolc_path = CompilerType::ZkSolc + .bin_path(self.artifacts_dir.path(), zksolc_version) + .await?; + let solc_path = CompilerType::Solc + .bin_path(self.artifacts_dir.path(), &version.base) + .await?; + let compiler_paths = CompilerPaths { + base: solc_path, + zk: zksolc_path, + }; + Ok(Box::new(ZkSolc::new( + compiler_paths, + zksolc_version.to_owned(), + ))) + } + + async fn resolve_vyper( + &self, + version: &str, + ) -> Result>, ContractVerifierError> { + self.download_version_if_needed(CompilerType::Vyper, version) + 
.await?; + + let vyper_path = CompilerType::Vyper + .bin_path(self.artifacts_dir.path(), version) + .await?; + Ok(Box::new(Vyper::new(vyper_path))) + } + + async fn resolve_zkvyper( + &self, + version: &ZkCompilerVersions, + ) -> Result>, ContractVerifierError> { + self.download_version_if_needed(CompilerType::Vyper, &version.base) + .await?; + self.download_version_if_needed(CompilerType::ZkVyper, &version.zk) + .await?; + + let zkvyper_path = CompilerType::ZkVyper + .bin_path(self.artifacts_dir.path(), &version.zk) + .await?; + let vyper_path = CompilerType::Vyper + .bin_path(self.artifacts_dir.path(), &version.base) + .await?; + let compiler_paths = CompilerPaths { + base: vyper_path, + zk: zkvyper_path, + }; + Ok(Box::new(ZkVyper::new(compiler_paths))) + } +} diff --git a/core/lib/contract_verifier/src/resolver/mod.rs b/core/lib/contract_verifier/src/resolver/mod.rs new file mode 100644 index 000000000000..a9d2bcf9049d --- /dev/null +++ b/core/lib/contract_verifier/src/resolver/mod.rs @@ -0,0 +1,278 @@ +use std::{ + collections::HashSet, + fmt, + path::{Path, PathBuf}, + sync::Arc, +}; + +use anyhow::Context as _; +use tokio::fs; +use zksync_queued_job_processor::async_trait; +use zksync_types::contract_verification_api::CompilationArtifacts; + +pub(crate) use self::{env::EnvCompilerResolver, github::GitHubCompilerResolver}; +use crate::{ + compilers::{SolcInput, VyperInput, ZkSolcInput}, + error::ContractVerifierError, + ZkCompilerVersions, +}; + +mod env; +mod github; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +enum CompilerType { + Solc, + ZkSolc, + Vyper, + ZkVyper, +} + +impl CompilerType { + fn as_str(self) -> &'static str { + match self { + Self::Solc => "solc", + Self::ZkSolc => "zksolc", + Self::Vyper => "vyper", + Self::ZkVyper => "zkvyper", + } + } + + /// Returns the absolute path to the compiler binary. + fn bin_path_unchecked(self, home_dir: &Path, version: &str) -> PathBuf { + let compiler_dir = match self { + Self::Solc => "solc-bin", + Self::ZkSolc => "zksolc-bin", + Self::Vyper => "vyper-bin", + Self::ZkVyper => "zkvyper-bin", + }; + home_dir + .join("etc") + .join(compiler_dir) + .join(version) + .join(self.as_str()) + } + + async fn exists(self, home_dir: &Path, version: &str) -> Result { + let path = self.bin_path_unchecked(home_dir, version); + let exists = fs::try_exists(&path) + .await + .with_context(|| format!("failed accessing `{}`", self.as_str()))?; + Ok(exists) + } + + async fn bin_path( + self, + home_dir: &Path, + version: &str, + ) -> Result { + let path = self.bin_path_unchecked(home_dir, version); + if !fs::try_exists(&path) + .await + .with_context(|| format!("failed accessing `{}`", self.as_str()))? + { + return Err(ContractVerifierError::UnknownCompilerVersion( + self.as_str(), + version.to_owned(), + )); + } + Ok(path) + } +} + +/// Compiler versions supported by a [`CompilerResolver`]. +#[derive(Debug, Default)] +pub(crate) struct SupportedCompilerVersions { + /// Note: solc can have two "flavors": "upstream" solc (e.g. "real" solc used for L1 development), + /// and "zksync" solc (e.g. ZKsync fork of the solc used by `zksolc`). + /// They both are considered as "solc", but they have different versioning scheme, e.g. + /// "upstream" solc can have version `0.8.0`, while "zksync" solc can have version `zkVM-0.8.0-1.0.1`. 
+    pub solc: HashSet<String>,
+    pub zksolc: HashSet<String>,
+    pub vyper: HashSet<String>,
+    pub zkvyper: HashSet<String>,
+}
+
+impl SupportedCompilerVersions {
+    fn merge(&mut self, other: SupportedCompilerVersions) {
+        self.solc.extend(other.solc);
+        self.zksolc.extend(other.zksolc);
+        self.vyper.extend(other.vyper);
+        self.zkvyper.extend(other.zkvyper);
+    }
+}
+
+impl SupportedCompilerVersions {
+    pub fn lacks_any_compiler(&self) -> bool {
+        self.solc.is_empty()
+            || self.zksolc.is_empty()
+            || self.vyper.is_empty()
+            || self.zkvyper.is_empty()
+    }
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct CompilerPaths {
+    /// Path to the base (non-zk) compiler.
+    pub base: PathBuf,
+    /// Path to the zk compiler.
+    pub zk: PathBuf,
+}
+
+/// Encapsulates compiler paths resolution.
+#[async_trait]
+pub(crate) trait CompilerResolver: fmt::Debug + Send + Sync {
+    /// Returns compiler versions supported by this resolver.
+    ///
+    /// # Errors
+    ///
+    /// Returned errors are assumed to be fatal.
+    async fn supported_versions(&self) -> anyhow::Result<SupportedCompilerVersions>;
+
+    /// Resolves a `solc` compiler.
+    async fn resolve_solc(
+        &self,
+        version: &str,
+    ) -> Result<Box<dyn Compiler<SolcInput>>, ContractVerifierError>;
+
+    /// Resolves a `zksolc` compiler.
+    async fn resolve_zksolc(
+        &self,
+        version: &ZkCompilerVersions,
+    ) -> Result<Box<dyn Compiler<ZkSolcInput>>, ContractVerifierError>;
+
+    /// Resolves a `vyper` compiler.
+    async fn resolve_vyper(
+        &self,
+        version: &str,
+    ) -> Result<Box<dyn Compiler<VyperInput>>, ContractVerifierError>;
+
+    /// Resolves a `zkvyper` compiler.
+    async fn resolve_zkvyper(
+        &self,
+        version: &ZkCompilerVersions,
+    ) -> Result<Box<dyn Compiler<VyperInput>>, ContractVerifierError>;
+}
+
+/// Encapsulates a one-off compilation process.
+#[async_trait]
+pub(crate) trait Compiler<In>: Send + fmt::Debug {
+    /// Performs compilation.
+    async fn compile(
+        self: Box<Self>,
+        input: In,
+    ) -> Result<CompilationArtifacts, ContractVerifierError>;
+}
+
+#[derive(Debug)]
+pub struct ResolverMultiplexer {
+    resolvers: Vec<Arc<dyn CompilerResolver>>,
+}
+
+impl ResolverMultiplexer {
+    pub fn new(resolver: Arc<dyn CompilerResolver>) -> Self {
+        Self {
+            resolvers: vec![resolver],
+        }
+    }
+
+    pub fn with_resolver(mut self, resolver: Arc<dyn CompilerResolver>) -> Self {
+        self.resolvers.push(resolver);
+        self
+    }
+}
+
+#[async_trait]
+impl CompilerResolver for ResolverMultiplexer {
+    async fn supported_versions(&self) -> anyhow::Result<SupportedCompilerVersions> {
+        let mut versions = SupportedCompilerVersions::default();
+        for resolver in &self.resolvers {
+            versions.merge(resolver.supported_versions().await?);
+        }
+        Ok(versions)
+    }
+
+    /// Resolves a `solc` compiler.
+    async fn resolve_solc(
+        &self,
+        version: &str,
+    ) -> Result<Box<dyn Compiler<SolcInput>>, ContractVerifierError> {
+        for resolver in &self.resolvers {
+            match resolver.resolve_solc(version).await {
+                Ok(compiler) => return Ok(compiler),
+                Err(ContractVerifierError::UnknownCompilerVersion(..)) => {
+                    continue;
+                }
+                Err(err) => return Err(err),
+            }
+        }
+        Err(ContractVerifierError::UnknownCompilerVersion(
+            "solc",
+            version.to_owned(),
+        ))
+    }
+
+    /// Resolves a `zksolc` compiler.
+    async fn resolve_zksolc(
+        &self,
+        version: &ZkCompilerVersions,
+    ) -> Result<Box<dyn Compiler<ZkSolcInput>>, ContractVerifierError> {
+        let mut last_error = Err(ContractVerifierError::UnknownCompilerVersion(
+            "zksolc",
+            version.zk.to_owned(),
+        ));
+        for resolver in &self.resolvers {
+            match resolver.resolve_zksolc(version).await {
+                Ok(compiler) => return Ok(compiler),
+                err @ Err(ContractVerifierError::UnknownCompilerVersion(..)) => {
+                    last_error = err;
+                    continue;
+                }
+                Err(err) => return Err(err),
+            }
+        }
+        last_error
+    }
+
+    /// Resolves a `vyper` compiler.
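+    /// (Like the other `resolve_*` methods above, this tries the inner resolvers in order,
+    /// falling through only on `UnknownCompilerVersion`; any other error is returned immediately.)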
+ async fn resolve_vyper( + &self, + version: &str, + ) -> Result>, ContractVerifierError> { + for resolver in &self.resolvers { + match resolver.resolve_vyper(version).await { + Ok(compiler) => return Ok(compiler), + Err(ContractVerifierError::UnknownCompilerVersion(..)) => { + continue; + } + Err(err) => return Err(err), + } + } + Err(ContractVerifierError::UnknownCompilerVersion( + "vyper", + version.to_owned(), + )) + } + + /// Resolves a `zkvyper` compiler. + async fn resolve_zkvyper( + &self, + version: &ZkCompilerVersions, + ) -> Result>, ContractVerifierError> { + let mut last_error = Err(ContractVerifierError::UnknownCompilerVersion( + "zkvyper", + version.zk.to_owned(), + )); + for resolver in &self.resolvers { + match resolver.resolve_zkvyper(version).await { + Ok(compiler) => return Ok(compiler), + err @ Err(ContractVerifierError::UnknownCompilerVersion(..)) => { + last_error = err; + continue; + } + Err(err) => return Err(err), + } + } + last_error + } +} diff --git a/core/lib/contract_verifier/src/tests/mod.rs b/core/lib/contract_verifier/src/tests/mod.rs index 7caa5f32c991..f66732675ce6 100644 --- a/core/lib/contract_verifier/src/tests/mod.rs +++ b/core/lib/contract_verifier/src/tests/mod.rs @@ -1,12 +1,17 @@ //! Tests for the contract verifier. -use std::{collections::HashMap, iter}; +use std::{ + collections::{HashMap, HashSet}, + iter, +}; use test_casing::{test_casing, Product}; use tokio::sync::watch; use zksync_dal::Connection; use zksync_node_test_utils::{create_l1_batch, create_l2_block}; use zksync_types::{ + address_to_h256, + bytecode::BytecodeHash, contract_verification_api::{CompilerVersions, SourceCodeData, VerificationIncomingRequest}, get_code_key, get_known_code_key, l2::L2Tx, @@ -14,15 +19,11 @@ use zksync_types::{ Execute, L1BatchNumber, L2BlockNumber, ProtocolVersion, StorageLog, CONTRACT_DEPLOYER_ADDRESS, H256, U256, }; -use zksync_utils::{ - address_to_h256, - bytecode::{hash_bytecode, hash_evm_bytecode}, -}; use zksync_vm_interface::{tracer::ValidationTraces, TransactionExecutionMetrics, VmEvent}; use super::*; use crate::{ - compilers::{SolcInput, ZkSolcInput, ZkVyperInput}, + compilers::{SolcInput, VyperInput, ZkSolcInput}, resolver::{Compiler, SupportedCompilerVersions}, }; @@ -55,6 +56,39 @@ const COUNTER_CONTRACT_WITH_CONSTRUCTOR: &str = r#" } } "#; +const COUNTER_CONTRACT_WITH_INTERFACE: &str = r#" + interface ICounter { + function increment(uint256 x) external; + } + + contract Counter is ICounter { + uint256 value; + + function increment(uint256 x) external override { + value += x; + } + } +"#; +const COUNTER_VYPER_CONTRACT: &str = r#" +#pragma version ^0.3.10 + +value: uint256 + +@external +def increment(x: uint256): + self.value += x +"#; +const EMPTY_YUL_CONTRACT: &str = r#" +object "Empty" { + code { + mstore(0, 0) + return(0, 32) + } + object "Empty_deployed" { + code { } + } +} +"#; #[derive(Debug, Clone, Copy)] enum TestContract { @@ -108,7 +142,7 @@ async fn mock_deployment( bytecode: Vec, constructor_args: &[Token], ) { - let bytecode_hash = hash_bytecode(&bytecode); + let bytecode_hash = BytecodeHash::for_bytecode(&bytecode).value(); let deployment = Execute::for_deploy(H256::zero(), bytecode.clone(), constructor_args); mock_deployment_inner(storage, address, bytecode_hash, bytecode, deployment).await; } @@ -124,12 +158,12 @@ async fn mock_evm_deployment( calldata.extend_from_slice(ðabi::encode(constructor_args)); let deployment = Execute { contract_address: None, - calldata, // FIXME: check + calldata, value: 0.into(), factory_deps: 
         factory_deps: vec![],
     };
     let bytecode = pad_evm_bytecode(deployed_bytecode);
-    let bytecode_hash = hash_evm_bytecode(&bytecode);
+    let bytecode_hash = BytecodeHash::for_evm_bytecode(&bytecode).value();
     mock_deployment_inner(storage, address, bytecode_hash, bytecode, deployment).await;
 }
 
@@ -258,10 +292,10 @@ impl Compiler<ZkSolcInput> for MockCompilerResolver {
 impl CompilerResolver for MockCompilerResolver {
     async fn supported_versions(&self) -> anyhow::Result<SupportedCompilerVersions> {
         Ok(SupportedCompilerVersions {
-            solc: vec![SOLC_VERSION.to_owned()],
-            zksolc: vec![ZKSOLC_VERSION.to_owned()],
-            vyper: vec![],
-            zkvyper: vec![],
+            solc: [SOLC_VERSION.to_owned()].into_iter().collect(),
+            zksolc: [ZKSOLC_VERSION.to_owned()].into_iter().collect(),
+            vyper: HashSet::default(),
+            zkvyper: HashSet::default(),
         })
     }
 
@@ -297,10 +331,17 @@ impl CompilerResolver for MockCompilerResolver {
         Ok(Box::new(self.clone()))
     }
 
+    async fn resolve_vyper(
+        &self,
+        _version: &str,
+    ) -> Result<Box<dyn Compiler<VyperInput>>, ContractVerifierError> {
+        unreachable!("not tested")
+    }
+
     async fn resolve_zkvyper(
         &self,
         _version: &ZkCompilerVersions,
-    ) -> Result<Box<dyn Compiler<ZkVyperInput>>, ContractVerifierError> {
+    ) -> Result<Box<dyn Compiler<VyperInput>>, ContractVerifierError> {
         unreachable!("not tested")
     }
 }
@@ -445,10 +486,32 @@ async fn assert_request_success(
         .unwrap()
         .expect("no verification info");
     assert_eq!(verification_info.artifacts.bytecode, *expected_bytecode);
-    assert_eq!(verification_info.artifacts.abi, counter_contract_abi());
+    assert_eq!(
+        without_internal_types(verification_info.artifacts.abi.clone()),
+        without_internal_types(counter_contract_abi())
+    );
     verification_info
 }
 
+fn without_internal_types(mut abi: serde_json::Value) -> serde_json::Value {
+    let items = abi.as_array_mut().unwrap();
+    for item in items {
+        if let Some(inputs) = item.get_mut("inputs") {
+            let inputs = inputs.as_array_mut().unwrap();
+            for input in inputs {
+                input.as_object_mut().unwrap().remove("internalType");
+            }
+        }
+        if let Some(outputs) = item.get_mut("outputs") {
+            let outputs = outputs.as_array_mut().unwrap();
+            for output in outputs {
+                output.as_object_mut().unwrap().remove("internalType");
+            }
+        }
+    }
+    abi
+}
+
 #[test_casing(2, TestContract::ALL)]
 #[tokio::test]
 async fn verifying_evm_bytecode(contract: TestContract) {
diff --git a/core/lib/contract_verifier/src/tests/real.rs b/core/lib/contract_verifier/src/tests/real.rs
index a7113044b405..ba7615528e15 100644
--- a/core/lib/contract_verifier/src/tests/real.rs
+++ b/core/lib/contract_verifier/src/tests/real.rs
@@ -4,25 +4,40 @@
 
 use std::{env, sync::Arc, time::Duration};
 
-use zksync_utils::bytecode::validate_bytecode;
+use assert_matches::assert_matches;
+use zksync_types::bytecode::validate_bytecode;
 
 use super::*;
 
+#[derive(Debug, Clone, Copy)]
+enum Toolchain {
+    Solidity,
+    Vyper,
+}
+
+impl Toolchain {
+    const ALL: [Self; 2] = [Self::Solidity, Self::Vyper];
+}
+
 #[derive(Debug, Clone)]
 struct TestCompilerVersions {
     solc: String,
     zksolc: String,
+    vyper: String,
+    zkvyper: String,
 }
 
 impl TestCompilerVersions {
-    fn new(mut versions: SupportedCompilerVersions) -> Option<Self> {
+    fn new(versions: SupportedCompilerVersions) -> Option<Self> {
         let solc = versions
             .solc
             .into_iter()
             .find(|ver| !ver.starts_with("zkVM"))?;
         Some(Self {
             solc,
-            zksolc: versions.zksolc.pop()?,
+            zksolc: versions.zksolc.into_iter().next()?,
+            vyper: versions.vyper.into_iter().next()?,
+            zkvyper: versions.zkvyper.into_iter().next()?,
         })
     }
 
@@ -42,6 +57,23 @@ impl TestCompilerVersions {
             },
         }
     }
+
+    fn zkvyper(self) -> ZkCompilerVersions {
+        ZkCompilerVersions {
+            base: self.vyper,
+            zk: self.zkvyper,
+        }
+    }
+
+    fn vyper_for_api(self, bytecode_kind: BytecodeMarker) -> CompilerVersions {
+        CompilerVersions::Vyper {
+            compiler_vyper_version: self.vyper,
+            compiler_zkvyper_version: match bytecode_kind {
+                BytecodeMarker::Evm => None,
+                BytecodeMarker::EraVm => Some(self.zkvyper),
+            },
+        }
+    }
 }
 
 async fn checked_env_resolver() -> Option<(EnvCompilerResolver, TestCompilerVersions)> {
@@ -76,18 +108,23 @@ macro_rules! real_resolver {
     };
 }
 
+#[test_casing(2, [false, true])]
 #[tokio::test]
-async fn using_real_compiler() {
+async fn using_real_zksolc(specify_contract_file: bool) {
     let (compiler_resolver, supported_compilers) = real_resolver!();
 
     let compiler = compiler_resolver
         .resolve_zksolc(&supported_compilers.clone().zksolc())
         .await
         .unwrap();
-    let req = VerificationIncomingRequest {
+    let mut req = VerificationIncomingRequest {
         compiler_versions: supported_compilers.solc_for_api(BytecodeMarker::EraVm),
         ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT)
     };
+    if specify_contract_file {
+        set_multi_file_solc_input(&mut req);
+    }
+
     let input = ZkSolc::build_input(req).unwrap();
     let output = compiler.compile(input).await.unwrap();
 
@@ -95,19 +132,43 @@
     assert_eq!(output.abi, counter_contract_abi());
 }
 
+fn set_multi_file_solc_input(req: &mut VerificationIncomingRequest) {
+    let input = serde_json::json!({
+        "language": "Solidity",
+        "sources": {
+            "contracts/test.sol": {
+                "content": COUNTER_CONTRACT,
+            },
+        },
+        "settings": {
+            "optimizer": { "enabled": true },
+        },
+    });
+    let serde_json::Value::Object(input) = input else {
+        unreachable!();
+    };
+    req.source_code_data = SourceCodeData::StandardJsonInput(input);
+    req.contract_name = "contracts/test.sol:Counter".to_owned();
+}
+
+#[test_casing(2, [false, true])]
 #[tokio::test]
-async fn using_standalone_solc() {
+async fn using_standalone_solc(specify_contract_file: bool) {
     let (compiler_resolver, supported_compilers) = real_resolver!();
 
     let version = &supported_compilers.solc;
     let compiler = compiler_resolver.resolve_solc(version).await.unwrap();
-    let req = VerificationIncomingRequest {
+    let mut req = VerificationIncomingRequest {
         compiler_versions: CompilerVersions::Solc {
             compiler_solc_version: version.clone(),
             compiler_zksolc_version: None,
         },
         ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT)
     };
+    if specify_contract_file {
+        set_multi_file_solc_input(&mut req);
+    }
+
     let input = Solc::build_input(req).unwrap();
     let output = compiler.compile(input).await.unwrap();
 
@@ -115,18 +176,271 @@
     assert_eq!(output.abi, counter_contract_abi());
 }
 
-#[test_casing(2, BYTECODE_KINDS)]
+#[test_casing(2, [false, true])]
 #[tokio::test]
-async fn using_real_compiler_in_verifier(bytecode_kind: BytecodeMarker) {
+async fn using_zksolc_with_abstract_contract(specify_contract_file: bool) {
     let (compiler_resolver, supported_compilers) = real_resolver!();
 
+    let compiler = compiler_resolver
+        .resolve_zksolc(&supported_compilers.clone().zksolc())
+        .await
+        .unwrap();
+    let (source_code_data, contract_name) = if specify_contract_file {
+        let input = serde_json::json!({
+            "language": "Solidity",
+            "sources": {
+                "contracts/test.sol": {
+                    "content": COUNTER_CONTRACT_WITH_INTERFACE,
+                },
+            },
+            "settings": {
+                "optimizer": { "enabled": true },
+            },
+        });
+        let serde_json::Value::Object(input) = input else {
+            unreachable!();
+        };
+        (
+            SourceCodeData::StandardJsonInput(input),
+            "contracts/test.sol:ICounter",
+        )
+    } else {
+        (
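+            // Single-file flow: the abstract contract is referenced by its bare name.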
+            SourceCodeData::SolSingleFile(COUNTER_CONTRACT_WITH_INTERFACE.to_owned()),
+            "ICounter",
+        )
+    };
+
     let req = VerificationIncomingRequest {
-        compiler_versions: supported_compilers.clone().solc_for_api(bytecode_kind),
-        ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT)
+        contract_address: Address::repeat_byte(1),
+        compiler_versions: supported_compilers.solc_for_api(BytecodeMarker::EraVm),
+        optimization_used: true,
+        optimizer_mode: None,
+        constructor_arguments: Default::default(),
+        is_system: false,
+        source_code_data,
+        contract_name: contract_name.to_owned(),
+        force_evmla: false,
+    };
+
+    let input = ZkSolc::build_input(req).unwrap();
+    let err = compiler.compile(input).await.unwrap_err();
+    assert_matches!(
+        err,
+        ContractVerifierError::AbstractContract(name) if name == "ICounter"
+    );
+}
+
+fn test_yul_request(compiler_versions: CompilerVersions) -> VerificationIncomingRequest {
+    VerificationIncomingRequest {
+        contract_address: Default::default(),
+        source_code_data: SourceCodeData::YulSingleFile(EMPTY_YUL_CONTRACT.to_owned()),
+        contract_name: "Empty".to_owned(),
+        compiler_versions,
+        optimization_used: true,
+        optimizer_mode: None,
+        constructor_arguments: Default::default(),
+        is_system: false,
+        force_evmla: false,
+    }
+}
+
+#[tokio::test]
+async fn compiling_yul_with_zksolc() {
+    let (compiler_resolver, supported_compilers) = real_resolver!();
+
+    let version = supported_compilers.clone().zksolc();
+    let compiler = compiler_resolver.resolve_zksolc(&version).await.unwrap();
+    let req = test_yul_request(supported_compilers.solc_for_api(BytecodeMarker::EraVm));
+    let input = ZkSolc::build_input(req).unwrap();
+    let output = compiler.compile(input).await.unwrap();
+
+    assert!(!output.bytecode.is_empty());
+    assert!(output.deployed_bytecode.is_none());
+    assert_eq!(output.abi, serde_json::json!([]));
+}
+
+#[tokio::test]
+async fn compiling_standalone_yul() {
+    let (compiler_resolver, supported_compilers) = real_resolver!();
+
+    let version = &supported_compilers.solc;
+    let compiler = compiler_resolver.resolve_solc(version).await.unwrap();
+    let req = test_yul_request(CompilerVersions::Solc {
+        compiler_solc_version: version.clone(),
+        compiler_zksolc_version: None,
+    });
+    let input = Solc::build_input(req).unwrap();
+    let output = compiler.compile(input).await.unwrap();
+
+    assert!(!output.bytecode.is_empty());
+    assert_ne!(output.deployed_bytecode.unwrap(), output.bytecode);
+    assert_eq!(output.abi, serde_json::json!([]));
+}
+
+fn test_vyper_request(
+    filename: &str,
+    contract_name: &str,
+    supported_compilers: TestCompilerVersions,
+    bytecode_kind: BytecodeMarker,
+) -> VerificationIncomingRequest {
+    VerificationIncomingRequest {
+        contract_address: Address::repeat_byte(1),
+        source_code_data: SourceCodeData::VyperMultiFile(HashMap::from([(
+            filename.to_owned(),
+            COUNTER_VYPER_CONTRACT.to_owned(),
+        )])),
+        contract_name: contract_name.to_owned(),
+        compiler_versions: supported_compilers.vyper_for_api(bytecode_kind),
+        optimization_used: true,
+        optimizer_mode: None,
+        constructor_arguments: Default::default(),
+        is_system: false,
+        force_evmla: false,
+    }
+}
+
+#[test_casing(2, [false, true])]
+#[tokio::test]
+async fn using_real_zkvyper(specify_contract_file: bool) {
+    let (compiler_resolver, supported_compilers) = real_resolver!();
+
+    let compiler = compiler_resolver
+        .resolve_zkvyper(&supported_compilers.clone().zkvyper())
+        .await
+        .unwrap();
+    let (filename, contract_name) = if specify_contract_file {
+        (
+            "contracts/Counter.vy",
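+            // With a file path specified, the fully qualified name is `<path>:<contract>`.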
"contracts/Counter.vy:Counter") + } else { + ("Counter", "Counter") + }; + let req = test_vyper_request( + filename, + contract_name, + supported_compilers, + BytecodeMarker::EraVm, + ); + let input = VyperInput::new(req).unwrap(); + let output = compiler.compile(input).await.unwrap(); + + validate_bytecode(&output.bytecode).unwrap(); + assert_eq!(output.abi, without_internal_types(counter_contract_abi())); +} + +#[test_casing(2, [false, true])] +#[tokio::test] +async fn using_standalone_vyper(specify_contract_file: bool) { + let (compiler_resolver, supported_compilers) = real_resolver!(); + + let version = &supported_compilers.vyper; + let compiler = compiler_resolver.resolve_vyper(version).await.unwrap(); + let (filename, contract_name) = if specify_contract_file { + ("contracts/Counter.vy", "contracts/Counter.vy:Counter") + } else { + ("Counter.vy", "Counter") + }; + let req = test_vyper_request( + filename, + contract_name, + supported_compilers, + BytecodeMarker::Evm, + ); + let input = VyperInput::new(req).unwrap(); + let output = compiler.compile(input).await.unwrap(); + + assert!(output.deployed_bytecode.is_some()); + assert_eq!(output.abi, without_internal_types(counter_contract_abi())); +} + +#[tokio::test] +async fn using_standalone_vyper_without_optimization() { + let (compiler_resolver, supported_compilers) = real_resolver!(); + + let version = &supported_compilers.vyper; + let compiler = compiler_resolver.resolve_vyper(version).await.unwrap(); + let mut req = test_vyper_request( + "counter.vy", + "counter", + supported_compilers, + BytecodeMarker::Evm, + ); + req.optimization_used = false; + let input = VyperInput::new(req).unwrap(); + let output = compiler.compile(input).await.unwrap(); + + assert!(output.deployed_bytecode.is_some()); + assert_eq!(output.abi, without_internal_types(counter_contract_abi())); +} + +#[tokio::test] +async fn using_standalone_vyper_with_code_size_optimization() { + let (compiler_resolver, supported_compilers) = real_resolver!(); + + let version = &supported_compilers.vyper; + let compiler = compiler_resolver.resolve_vyper(version).await.unwrap(); + let mut req = test_vyper_request( + "counter.vy", + "counter", + supported_compilers, + BytecodeMarker::Evm, + ); + req.optimization_used = true; + req.optimizer_mode = Some("codesize".to_owned()); + let input = VyperInput::new(req).unwrap(); + let output = compiler.compile(input).await.unwrap(); + + assert!(output.deployed_bytecode.is_some()); + assert_eq!(output.abi, without_internal_types(counter_contract_abi())); +} + +#[tokio::test] +async fn using_standalone_vyper_with_bogus_optimization() { + let (compiler_resolver, supported_compilers) = real_resolver!(); + + let version = &supported_compilers.vyper; + let compiler = compiler_resolver.resolve_vyper(version).await.unwrap(); + let mut req = test_vyper_request( + "counter.vy", + "counter", + supported_compilers, + BytecodeMarker::Evm, + ); + req.optimization_used = true; + req.optimizer_mode = Some("???".to_owned()); + let input = VyperInput::new(req).unwrap(); + let err = compiler.compile(input).await.unwrap_err(); + + let ContractVerifierError::CompilationError(serde_json::Value::Array(errors)) = err else { + panic!("unexpected error: {err:?}"); + }; + let has_opt_level_error = errors + .iter() + .any(|err| err.as_str().unwrap().contains("optimization level")); + assert!(has_opt_level_error, "{errors:?}"); +} + +#[test_casing(4, Product((BYTECODE_KINDS, Toolchain::ALL)))] +#[tokio::test] +async fn using_real_compiler_in_verifier(bytecode_kind: 
+    let (compiler_resolver, supported_compilers) = real_resolver!();
+
+    let req = match toolchain {
+        Toolchain::Solidity => VerificationIncomingRequest {
+            compiler_versions: supported_compilers.clone().solc_for_api(bytecode_kind),
+            ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT)
+        },
+        Toolchain::Vyper => VerificationIncomingRequest {
+            compiler_versions: supported_compilers.clone().vyper_for_api(bytecode_kind),
+            source_code_data: SourceCodeData::VyperMultiFile(HashMap::from([(
+                "Counter.vy".to_owned(),
+                COUNTER_VYPER_CONTRACT.to_owned(),
+            )])),
+            ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT)
+        },
     };
     let address = Address::repeat_byte(1);
-    let output = match bytecode_kind {
-        BytecodeMarker::EraVm => {
+    let output = match (bytecode_kind, toolchain) {
+        (BytecodeMarker::EraVm, Toolchain::Solidity) => {
             let compiler = compiler_resolver
                 .resolve_zksolc(&supported_compilers.zksolc())
                 .await
@@ -134,12 +448,26 @@ async fn using_real_compiler_in_verifier(bytecode_kind: BytecodeMarker) {
             let input = ZkSolc::build_input(req.clone()).unwrap();
             compiler.compile(input).await.unwrap()
         }
-        BytecodeMarker::Evm => {
+        (BytecodeMarker::Evm, Toolchain::Solidity) => {
             let solc_version = &supported_compilers.solc;
             let compiler = compiler_resolver.resolve_solc(solc_version).await.unwrap();
             let input = Solc::build_input(req.clone()).unwrap();
             compiler.compile(input).await.unwrap()
         }
+        (_, Toolchain::Vyper) => {
+            let compiler = match bytecode_kind {
+                BytecodeMarker::EraVm => compiler_resolver
+                    .resolve_zkvyper(&supported_compilers.zkvyper())
+                    .await
+                    .unwrap(),
+                BytecodeMarker::Evm => compiler_resolver
+                    .resolve_vyper(&supported_compilers.vyper)
+                    .await
+                    .unwrap(),
+            };
+            let input = VyperInput::new(req.clone()).unwrap();
+            compiler.compile(input).await.unwrap()
+        }
     };
 
     let pool = ConnectionPool::test_pool().await;
diff --git a/core/lib/contracts/Cargo.toml b/core/lib/contracts/Cargo.toml
index 2b80295cf440..0a24012f1ba6 100644
--- a/core/lib/contracts/Cargo.toml
+++ b/core/lib/contracts/Cargo.toml
@@ -11,11 +11,14 @@ keywords.workspace = true
 categories.workspace = true
 
 [dependencies]
+zksync_basic_types.workspace = true
 zksync_utils.workspace = true
-ethabi.workspace = true
 serde_json.workspace = true
 serde.workspace = true
 once_cell.workspace = true
 hex.workspace = true
 envy.workspace = true
+
+[dev-dependencies]
+bincode.workspace = true
diff --git a/core/lib/contracts/src/lib.rs b/core/lib/contracts/src/lib.rs
index cb5be504c8a0..74efa72793aa 100644
--- a/core/lib/contracts/src/lib.rs
+++ b/core/lib/contracts/src/lib.rs
@@ -10,15 +10,16 @@ use std::{
     path::{Path, PathBuf},
 };
 
-use ethabi::{
-    ethereum_types::{H256, U256},
-    Contract, Event, Function,
-};
 use once_cell::sync::Lazy;
 use serde::{Deserialize, Serialize};
-use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words, env::Workspace};
+use zksync_basic_types::{
+    bytecode::BytecodeHash,
+    ethabi::{Contract, Event, Function},
+    H256,
+};
+use zksync_utils::env::Workspace;
 
-pub mod test_contracts;
+mod serde_bytecode;
 
 #[derive(Debug, Clone)]
 pub enum ContractLanguage {
@@ -60,10 +61,6 @@ const _IERC20_CONTRACT_FILE: &str =
     "contracts/l1-contracts/artifacts/contracts/common/interfaces/IERC20.sol/IERC20.json";
 const _FAIL_ON_RECEIVE_CONTRACT_FILE: &str =
     "contracts/l1-contracts/artifacts/contracts/zksync/dev-contracts/FailOnReceive.sol/FailOnReceive.json";
-const LOADNEXT_CONTRACT_FILE: &str =
"etc/contracts-test-data/artifacts-zk/contracts/loadnext/loadnext_contract.sol/LoadnextContract.json"; -const LOADNEXT_SIMPLE_CONTRACT_FILE: &str = - "etc/contracts-test-data/artifacts-zk/contracts/loadnext/loadnext_contract.sol/Foo.json"; fn home_path() -> PathBuf { Workspace::locate().core() @@ -173,33 +170,6 @@ pub fn verifier_contract() -> Contract { load_contract_for_both_compilers(VERIFIER_CONTRACT_FILE) } -#[derive(Debug, Clone)] -pub struct TestContract { - /// Contract bytecode to be used for sending deploy transaction. - pub bytecode: Vec, - /// Contract ABI. - pub contract: Contract, - - pub factory_deps: Vec>, -} - -/// Reads test contract bytecode and its ABI. -pub fn get_loadnext_contract() -> TestContract { - let bytecode = read_bytecode(LOADNEXT_CONTRACT_FILE); - let dep = read_bytecode(LOADNEXT_SIMPLE_CONTRACT_FILE); - - TestContract { - bytecode, - contract: loadnext_contract(), - factory_deps: vec![dep], - } -} - -// Returns loadnext contract and its factory dependencies -fn loadnext_contract() -> Contract { - load_contract("etc/contracts-test-data/artifacts-zk/contracts/loadnext/loadnext_contract.sol/LoadnextContract.json") -} - pub fn deployer_contract() -> Contract { load_sys_contract("ContractDeployer") } @@ -379,7 +349,8 @@ fn read_zbin_bytecode_from_hex_file(bytecode_path: PathBuf) -> Vec { /// Hash of code and code which consists of 32 bytes words #[derive(Debug, Clone, Serialize, Deserialize)] pub struct SystemContractCode { - pub code: Vec, + #[serde(with = "serde_bytecode")] + pub code: Vec, pub hash: H256, } @@ -409,18 +380,16 @@ impl PartialEq for BaseSystemContracts { impl BaseSystemContracts { fn load_with_bootloader(bootloader_bytecode: Vec) -> Self { - let hash = hash_bytecode(&bootloader_bytecode); - + let hash = BytecodeHash::for_bytecode(&bootloader_bytecode).value(); let bootloader = SystemContractCode { - code: bytes_to_be_words(bootloader_bytecode), + code: bootloader_bytecode, hash, }; let bytecode = read_sys_contract_bytecode("", "DefaultAccount", ContractLanguage::Sol); - let hash = hash_bytecode(&bytecode); - + let hash = BytecodeHash::for_bytecode(&bytecode).value(); let default_aa = SystemContractCode { - code: bytes_to_be_words(bytecode), + code: bytecode, hash, }; @@ -440,9 +409,9 @@ impl BaseSystemContracts { /// Loads the latest EVM emulator for these base system contracts. Logically, it only makes sense to do for the latest protocol version. 
     pub fn with_latest_evm_emulator(mut self) -> Self {
         let bytecode = read_sys_contract_bytecode("", "EvmEmulator", ContractLanguage::Yul);
-        let hash = hash_bytecode(&bytecode);
+        let hash = BytecodeHash::for_bytecode(&bytecode).value();
         self.evm_emulator = Some(SystemContractCode {
-            code: bytes_to_be_words(bytecode),
+            code: bytecode,
             hash,
         });
         self
     }
diff --git a/core/lib/contracts/src/serde_bytecode.rs b/core/lib/contracts/src/serde_bytecode.rs
new file mode 100644
index 000000000000..8f250fe4672a
--- /dev/null
+++ b/core/lib/contracts/src/serde_bytecode.rs
@@ -0,0 +1,112 @@
+use std::fmt;
+
+use serde::{de, de::SeqAccess, ser, ser::SerializeSeq, Deserializer, Serializer};
+use zksync_basic_types::U256;
+
+pub(super) fn serialize<S: Serializer>(bytes: &[u8], serializer: S) -> Result<S::Ok, S::Error> {
+    if bytes.len() % 32 != 0 {
+        return Err(ser::Error::custom("bytecode length is not divisible by 32"));
+    }
+    let mut seq = serializer.serialize_seq(Some(bytes.len() / 32))?;
+    for chunk in bytes.chunks(32) {
+        let word = U256::from_big_endian(chunk);
+        seq.serialize_element(&word)?;
+    }
+    seq.end()
+}
+
+#[derive(Debug)]
+struct SeqVisitor;
+
+impl<'de> de::Visitor<'de> for SeqVisitor {
+    type Value = Vec<u8>;
+
+    fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(formatter, "sequence of `U256` words")
+    }
+
+    fn visit_seq<A: SeqAccess<'de>>(self, mut seq: A) -> Result<Self::Value, A::Error> {
+        let len = seq.size_hint().unwrap_or(0) * 32;
+        let mut bytes = Vec::with_capacity(len);
+        while let Some(value) = seq.next_element::<U256>()? {
+            let prev_len = bytes.len();
+            bytes.resize(prev_len + 32, 0);
+            value.to_big_endian(&mut bytes[prev_len..]);
+        }
+        Ok(bytes)
+    }
+}
+
+pub(super) fn deserialize<'de, D: Deserializer<'de>>(deserializer: D) -> Result<Vec<u8>, D::Error> {
+    deserializer.deserialize_seq(SeqVisitor)
+}
+
+#[cfg(test)]
+mod tests {
+    use serde::{Deserialize, Serialize};
+    use zksync_basic_types::{H256, U256};
+
+    use crate::SystemContractCode;
+
+    /// Code with legacy serialization logic.
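+    /// Historically, `code` was serialized as a sequence of 32-byte big-endian `U256` words;
+    /// `serde_bytecode` keeps that wire format while the struct now stores raw bytes.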
+    #[derive(Debug, Serialize, Deserialize)]
+    struct LegacySystemContractCode {
+        code: Vec<U256>,
+        hash: H256,
+    }
+
+    impl From<&SystemContractCode> for LegacySystemContractCode {
+        fn from(value: &SystemContractCode) -> Self {
+            Self {
+                code: value.code.chunks(32).map(U256::from_big_endian).collect(),
+                hash: value.hash,
+            }
+        }
+    }
+
+    fn test_code() -> SystemContractCode {
+        let mut code = vec![0; 32];
+        code.extend_from_slice(&[0; 30]);
+        code.extend_from_slice(&[0xab, 0xcd]);
+        code.extend_from_slice(&[0x23; 32]);
+
+        SystemContractCode {
+            hash: H256::repeat_byte(0x42),
+            code,
+        }
+    }
+
+    #[test]
+    fn serializing_system_contract_code() {
+        let system_contract_code = test_code();
+        let json = serde_json::to_value(&system_contract_code).unwrap();
+        assert_eq!(
+            json,
+            serde_json::json!({
+                "code": ["0x0", "0xabcd", "0x2323232323232323232323232323232323232323232323232323232323232323"],
+                "hash": "0x4242424242424242424242424242424242424242424242424242424242424242",
+            })
+        );
+
+        let legacy_code = LegacySystemContractCode::from(&system_contract_code);
+        let legacy_json = serde_json::to_value(&legacy_code).unwrap();
+        assert_eq!(legacy_json, json);
+
+        let restored: SystemContractCode = serde_json::from_value(json).unwrap();
+        assert_eq!(restored.code, system_contract_code.code);
+        assert_eq!(restored.hash, system_contract_code.hash);
+    }
+
+    #[test]
+    fn serializing_system_contract_code_using_bincode() {
+        let system_contract_code = test_code();
+        let bytes = bincode::serialize(&system_contract_code).unwrap();
+        let restored: SystemContractCode = bincode::deserialize(&bytes).unwrap();
+        assert_eq!(restored.code, system_contract_code.code);
+        assert_eq!(restored.hash, system_contract_code.hash);
+
+        let legacy_code = LegacySystemContractCode::from(&system_contract_code);
+        let legacy_bytes = bincode::serialize(&legacy_code).unwrap();
+        assert_eq!(legacy_bytes, bytes);
+    }
+}
diff --git a/core/lib/contracts/src/test_contracts.rs b/core/lib/contracts/src/test_contracts.rs
deleted file mode 100644
index eab1587f8335..000000000000
--- a/core/lib/contracts/src/test_contracts.rs
+++ /dev/null
@@ -1,64 +0,0 @@
-use ethabi::{ethereum_types::U256, Bytes, Token};
-use serde::Deserialize;
-
-use crate::get_loadnext_contract;
-
-#[derive(Debug, Clone, Deserialize)]
-pub struct LoadnextContractExecutionParams {
-    pub reads: usize,
-    pub writes: usize,
-    pub events: usize,
-    pub hashes: usize,
-    pub recursive_calls: usize,
-    pub deploys: usize,
-}
-
-impl LoadnextContractExecutionParams {
-    pub fn from_env() -> Option<Self> {
-        envy::prefixed("CONTRACT_EXECUTION_PARAMS_").from_env().ok()
-    }
-
-    pub fn empty() -> Self {
-        Self {
-            reads: 0,
-            writes: 0,
-            events: 0,
-            hashes: 0,
-            recursive_calls: 0,
-            deploys: 0,
-        }
-    }
-}
-
-impl Default for LoadnextContractExecutionParams {
-    fn default() -> Self {
-        Self {
-            reads: 10,
-            writes: 10,
-            events: 10,
-            hashes: 10,
-            recursive_calls: 1,
-            deploys: 1,
-        }
-    }
-}
-
-impl LoadnextContractExecutionParams {
-    pub fn to_bytes(&self) -> Bytes {
-        let loadnext_contract = get_loadnext_contract();
-        let contract_function = loadnext_contract.contract.function("execute").unwrap();
-
-        let params = vec![
-            Token::Uint(U256::from(self.reads)),
-            Token::Uint(U256::from(self.writes)),
-            Token::Uint(U256::from(self.hashes)),
-            Token::Uint(U256::from(self.events)),
-            Token::Uint(U256::from(self.recursive_calls)),
-            Token::Uint(U256::from(self.deploys)),
-        ];
-
-        contract_function
-            .encode_input(&params)
-            .expect("failed to encode parameters")
-    }
-}
diff --git
a/core/lib/crypto_primitives/Cargo.toml b/core/lib/crypto_primitives/Cargo.toml index 7efe5279b598..651609ec7949 100644 --- a/core/lib/crypto_primitives/Cargo.toml +++ b/core/lib/crypto_primitives/Cargo.toml @@ -15,7 +15,6 @@ categories.workspace = true secp256k1 = { workspace = true, features = ["global-context"] } sha2.workspace = true blake2.workspace = true -zksync_utils.workspace = true zksync_basic_types.workspace = true thiserror.workspace = true serde_json.workspace = true diff --git a/core/lib/crypto_primitives/src/packed_eth_signature.rs b/core/lib/crypto_primitives/src/packed_eth_signature.rs index 3d76de73560e..c4a26bf351b4 100644 --- a/core/lib/crypto_primitives/src/packed_eth_signature.rs +++ b/core/lib/crypto_primitives/src/packed_eth_signature.rs @@ -1,7 +1,6 @@ use serde::{Deserialize, Deserializer, Serialize, Serializer}; use thiserror::Error; -use zksync_basic_types::{web3::keccak256, Address, H256}; -use zksync_utils::ZeroPrefixHexSerde; +use zksync_basic_types::{serde_wrappers::ZeroPrefixHexSerde, web3::keccak256, Address, H256}; use crate::{ ecdsa_signature::{ diff --git a/core/lib/dal/.sqlx/query-0fdfa0f31142899f3d5f808688d76ec553688e69dfd330ca408505b8b2cdee5e.json b/core/lib/dal/.sqlx/query-0fdfa0f31142899f3d5f808688d76ec553688e69dfd330ca408505b8b2cdee5e.json new file mode 100644 index 000000000000..355f9993264f --- /dev/null +++ b/core/lib/dal/.sqlx/query-0fdfa0f31142899f3d5f808688d76ec553688e69dfd330ca408505b8b2cdee5e.json @@ -0,0 +1,38 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n l1_batch_number,\n blob_id,\n inclusion_data,\n sent_at\n FROM\n data_availability\n WHERE\n inclusion_data IS NULL\n ORDER BY\n l1_batch_number\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "blob_id", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "inclusion_data", + "type_info": "Bytea" + }, + { + "ordinal": 3, + "name": "sent_at", + "type_info": "Timestamp" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + true, + false + ] + }, + "hash": "0fdfa0f31142899f3d5f808688d76ec553688e69dfd330ca408505b8b2cdee5e" +} diff --git a/core/lib/dal/.sqlx/query-77864e5eb5eada8edf8f4457aa153369701d7cd5f75ca031bf77ca27d0437cb9.json b/core/lib/dal/.sqlx/query-1df6a0c44dafb0d8932e9c9162b634d167462ce5de6a9c240f990856be8c4c88.json similarity index 88% rename from core/lib/dal/.sqlx/query-77864e5eb5eada8edf8f4457aa153369701d7cd5f75ca031bf77ca27d0437cb9.json rename to core/lib/dal/.sqlx/query-1df6a0c44dafb0d8932e9c9162b634d167462ce5de6a9c240f990856be8c4c88.json index f4e08abe31c5..294799d4906c 100644 --- a/core/lib/dal/.sqlx/query-77864e5eb5eada8edf8f4457aa153369701d7cd5f75ca031bf77ca27d0437cb9.json +++ b/core/lib/dal/.sqlx/query-1df6a0c44dafb0d8932e9c9162b634d167462ce5de6a9c240f990856be8c4c88.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n system_logs,\n compressed_state_diffs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n 
local_root,\n state_diff_hash,\n data_availability.inclusion_data\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n is_sealed\n AND number = $1\n ", + "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n system_logs,\n compressed_state_diffs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n data_availability.blob_id AS \"blob_id?\"\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n is_sealed\n AND number = $1\n ", "describe": { "columns": [ { @@ -162,6 +162,11 @@ "ordinal": 31, "name": "inclusion_data", "type_info": "Bytea" + }, + { + "ordinal": 32, + "name": "blob_id?", + "type_info": "Text" } ], "parameters": { @@ -201,8 +206,9 @@ true, true, true, - true + true, + false ] }, - "hash": "77864e5eb5eada8edf8f4457aa153369701d7cd5f75ca031bf77ca27d0437cb9" + "hash": "1df6a0c44dafb0d8932e9c9162b634d167462ce5de6a9c240f990856be8c4c88" } diff --git a/core/lib/dal/.sqlx/query-a42121cd85daeb95ee268ba5cff1806fcc54d73216a7dc54be6ba210ef02d789.json b/core/lib/dal/.sqlx/query-47c31073d726572d282232bf550f900a8e5e705543f529e48d9fe96c35ddde75.json similarity index 80% rename from core/lib/dal/.sqlx/query-a42121cd85daeb95ee268ba5cff1806fcc54d73216a7dc54be6ba210ef02d789.json rename to core/lib/dal/.sqlx/query-47c31073d726572d282232bf550f900a8e5e705543f529e48d9fe96c35ddde75.json index 9a93ba45978e..64dbd1dcd019 100644 --- a/core/lib/dal/.sqlx/query-a42121cd85daeb95ee268ba5cff1806fcc54d73216a7dc54be6ba210ef02d789.json +++ b/core/lib/dal/.sqlx/query-47c31073d726572d282232bf550f900a8e5e705543f529e48d9fe96c35ddde75.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n system_logs,\n compressed_state_diffs,\n protocol_version,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data\n FROM\n (\n SELECT\n l1_batches.*,\n ROW_NUMBER() OVER (\n ORDER BY\n number ASC\n ) AS row_number\n FROM\n l1_batches\n WHERE\n eth_commit_tx_id IS NOT NULL\n AND l1_batches.skip_proof = TRUE\n AND l1_batches.number > $1\n ORDER BY\n number\n LIMIT\n $2\n ) inn\n LEFT JOIN commitments ON commitments.l1_batch_number = inn.number\n LEFT JOIN data_availability ON data_availability.l1_batch_number = inn.number\n WHERE\n number - 
row_number = $1\n ", + "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n system_logs,\n compressed_state_diffs,\n protocol_version,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n data_availability.blob_id AS \"blob_id?\"\n FROM\n (\n SELECT\n l1_batches.*,\n ROW_NUMBER() OVER (\n ORDER BY\n number ASC\n ) AS row_number\n FROM\n l1_batches\n WHERE\n eth_commit_tx_id IS NOT NULL\n AND l1_batches.skip_proof = TRUE\n AND l1_batches.number > $1\n ORDER BY\n number\n LIMIT\n $2\n ) inn\n LEFT JOIN commitments ON commitments.l1_batch_number = inn.number\n LEFT JOIN data_availability ON data_availability.l1_batch_number = inn.number\n WHERE\n number - row_number = $1\n ", "describe": { "columns": [ { @@ -162,6 +162,11 @@ "ordinal": 31, "name": "inclusion_data", "type_info": "Bytea" + }, + { + "ordinal": 32, + "name": "blob_id?", + "type_info": "Text" } ], "parameters": { @@ -202,8 +207,9 @@ true, true, true, - true + true, + false ] }, - "hash": "a42121cd85daeb95ee268ba5cff1806fcc54d73216a7dc54be6ba210ef02d789" + "hash": "47c31073d726572d282232bf550f900a8e5e705543f529e48d9fe96c35ddde75" } diff --git a/core/lib/dal/.sqlx/query-c5aedd2b1871d8f6276a31482caa673e4b5bba059ebe07bbbb64578881db030b.json b/core/lib/dal/.sqlx/query-57686ab3e929331f7efafff78fa48d3973cf8ce53871a2fab4febac60bb56583.json similarity index 85% rename from core/lib/dal/.sqlx/query-c5aedd2b1871d8f6276a31482caa673e4b5bba059ebe07bbbb64578881db030b.json rename to core/lib/dal/.sqlx/query-57686ab3e929331f7efafff78fa48d3973cf8ce53871a2fab4febac60bb56583.json index f97ea8a6ccd5..f310b82954da 100644 --- a/core/lib/dal/.sqlx/query-c5aedd2b1871d8f6276a31482caa673e4b5bba059ebe07bbbb64578881db030b.json +++ b/core/lib/dal/.sqlx/query-57686ab3e929331f7efafff78fa48d3973cf8ce53871a2fab4febac60bb56583.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_prove_tx_id IS NOT NULL\n AND eth_execute_tx_id IS NULL\n ORDER BY\n number\n LIMIT\n $1\n ", + "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n 
compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n data_availability.blob_id AS \"blob_id?\"\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_prove_tx_id IS NOT NULL\n AND eth_execute_tx_id IS NULL\n ORDER BY\n number\n LIMIT\n $1\n ", "describe": { "columns": [ { @@ -162,6 +162,11 @@ "ordinal": 31, "name": "inclusion_data", "type_info": "Bytea" + }, + { + "ordinal": 32, + "name": "blob_id?", + "type_info": "Text" } ], "parameters": { @@ -201,8 +206,9 @@ true, true, true, - true + true, + false ] }, - "hash": "c5aedd2b1871d8f6276a31482caa673e4b5bba059ebe07bbbb64578881db030b" + "hash": "57686ab3e929331f7efafff78fa48d3973cf8ce53871a2fab4febac60bb56583" } diff --git a/core/lib/dal/.sqlx/query-746d8b62d576b4b9596458aa865e0294e53eb37c1a2dbcc3044b8311200d549a.json b/core/lib/dal/.sqlx/query-6069d168d5c4b5131b50500302cdde79388b62926ff83d954b4d93dedfe2503a.json similarity index 85% rename from core/lib/dal/.sqlx/query-746d8b62d576b4b9596458aa865e0294e53eb37c1a2dbcc3044b8311200d549a.json rename to core/lib/dal/.sqlx/query-6069d168d5c4b5131b50500302cdde79388b62926ff83d954b4d93dedfe2503a.json index 306f193861f1..98d228726d48 100644 --- a/core/lib/dal/.sqlx/query-746d8b62d576b4b9596458aa865e0294e53eb37c1a2dbcc3044b8311200d549a.json +++ b/core/lib/dal/.sqlx/query-6069d168d5c4b5131b50500302cdde79388b62926ff83d954b4d93dedfe2503a.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE l1_batches\n SET\n l1_tx_count = $2,\n l2_tx_count = $3,\n l2_to_l1_messages = $4,\n bloom = $5,\n priority_ops_onchain_data = $6,\n predicted_commit_gas_cost = $7,\n predicted_prove_gas_cost = $8,\n predicted_execute_gas_cost = $9,\n initial_bootloader_heap_content = $10,\n used_contract_hashes = $11,\n bootloader_code_hash = $12,\n default_aa_code_hash = $13,\n evm_emulator_code_hash = $14,\n protocol_version = $15,\n system_logs = $16,\n storage_refunds = $17,\n pubdata_costs = $18,\n pubdata_input = $19,\n predicted_circuits_by_type = $20,\n updated_at = NOW(),\n is_sealed = TRUE\n WHERE\n number = $1\n ", + "query": "\n UPDATE l1_batches\n SET\n l1_tx_count = $2,\n l2_tx_count = $3,\n l2_to_l1_messages = $4,\n bloom = $5,\n priority_ops_onchain_data = $6,\n predicted_commit_gas_cost = $7,\n predicted_prove_gas_cost = $8,\n predicted_execute_gas_cost = $9,\n initial_bootloader_heap_content = $10,\n used_contract_hashes = $11,\n bootloader_code_hash = $12,\n default_aa_code_hash = $13,\n evm_emulator_code_hash = $14,\n protocol_version = $15,\n system_logs = $16,\n storage_refunds = $17,\n pubdata_costs = $18,\n pubdata_input = $19,\n predicted_circuits_by_type = $20,\n updated_at = NOW(),\n sealed_at = NOW(),\n is_sealed = TRUE\n WHERE\n number = $1\n ", "describe": { "columns": [], "parameters": { @@ -29,5 +29,5 @@ }, "nullable": [] }, - "hash": "746d8b62d576b4b9596458aa865e0294e53eb37c1a2dbcc3044b8311200d549a" + "hash": "6069d168d5c4b5131b50500302cdde79388b62926ff83d954b4d93dedfe2503a" } diff 
--git a/core/lib/dal/.sqlx/query-1cb61327bed4d65a3fc81aa2229e01396dacefc0cea8cbcf5807185eb00fc0f7.json b/core/lib/dal/.sqlx/query-6d4746aab463789bdd3ccb251f6b6cc4a3da487ee4a928de1513b13b7b918575.json similarity index 86% rename from core/lib/dal/.sqlx/query-1cb61327bed4d65a3fc81aa2229e01396dacefc0cea8cbcf5807185eb00fc0f7.json rename to core/lib/dal/.sqlx/query-6d4746aab463789bdd3ccb251f6b6cc4a3da487ee4a928de1513b13b7b918575.json index 48adcd412676..2dd50bd6b4d9 100644 --- a/core/lib/dal/.sqlx/query-1cb61327bed4d65a3fc81aa2229e01396dacefc0cea8cbcf5807185eb00fc0f7.json +++ b/core/lib/dal/.sqlx/query-6d4746aab463789bdd3ccb251f6b6cc4a3da487ee4a928de1513b13b7b918575.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n number = 0\n OR eth_commit_tx_id IS NOT NULL\n AND commitment IS NOT NULL\n ORDER BY\n number DESC\n LIMIT\n 1\n ", + "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n data_availability.blob_id AS \"blob_id?\"\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n number = 0\n OR eth_commit_tx_id IS NOT NULL\n AND commitment IS NOT NULL\n ORDER BY\n number DESC\n LIMIT\n 1\n ", "describe": { "columns": [ { @@ -162,6 +162,11 @@ "ordinal": 31, "name": "inclusion_data", "type_info": "Bytea" + }, + { + "ordinal": 32, + "name": "blob_id?", + "type_info": "Text" } ], "parameters": { @@ -199,8 +204,9 @@ true, true, true, - true + true, + false ] }, - "hash": "1cb61327bed4d65a3fc81aa2229e01396dacefc0cea8cbcf5807185eb00fc0f7" + "hash": "6d4746aab463789bdd3ccb251f6b6cc4a3da487ee4a928de1513b13b7b918575" } diff --git a/core/lib/dal/.sqlx/query-6ec93ebdd58bdc0259d98ef5ae0d087ed816920e8e75a163b87a19e39db86227.json b/core/lib/dal/.sqlx/query-6ec93ebdd58bdc0259d98ef5ae0d087ed816920e8e75a163b87a19e39db86227.json new file mode 100644 index 000000000000..bdc6b007e9df --- /dev/null +++ 
b/core/lib/dal/.sqlx/query-6ec93ebdd58bdc0259d98ef5ae0d087ed816920e8e75a163b87a19e39db86227.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n sealed_at\n FROM\n l1_batches\n WHERE\n number = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "sealed_at", + "type_info": "Timestamp" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + true + ] + }, + "hash": "6ec93ebdd58bdc0259d98ef5ae0d087ed816920e8e75a163b87a19e39db86227" +} diff --git a/core/lib/dal/.sqlx/query-b7d448837439a3e3dfe73070d3c20e9c138d0a6d35e9ce7fc396c5e76fbc25dd.json b/core/lib/dal/.sqlx/query-8a3f130f3b1309b30b3f23bc3cff186551207484769344d211d6c9d2fc452ef3.json similarity index 81% rename from core/lib/dal/.sqlx/query-b7d448837439a3e3dfe73070d3c20e9c138d0a6d35e9ce7fc396c5e76fbc25dd.json rename to core/lib/dal/.sqlx/query-8a3f130f3b1309b30b3f23bc3cff186551207484769344d211d6c9d2fc452ef3.json index 8a68b1a9b9bd..b95fb8c82321 100644 --- a/core/lib/dal/.sqlx/query-b7d448837439a3e3dfe73070d3c20e9c138d0a6d35e9ce7fc396c5e76fbc25dd.json +++ b/core/lib/dal/.sqlx/query-8a3f130f3b1309b30b3f23bc3cff186551207484769344d211d6c9d2fc452ef3.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n l1_batches.timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n l1_batches.bootloader_code_hash,\n l1_batches.default_aa_code_hash,\n l1_batches.evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n JOIN protocol_versions ON protocol_versions.id = l1_batches.protocol_version\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_commit_tx_id IS NULL\n AND number != 0\n AND protocol_versions.bootloader_code_hash = $1\n AND protocol_versions.default_account_code_hash = $2\n AND commitment IS NOT NULL\n AND (\n protocol_versions.id = $3\n OR protocol_versions.upgrade_tx_hash IS NULL\n )\n ORDER BY\n number\n LIMIT\n $4\n ", + "query": "\n SELECT\n number,\n l1_batches.timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n l1_batches.bootloader_code_hash,\n l1_batches.default_aa_code_hash,\n l1_batches.evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n data_availability.blob_id AS \"blob_id?\"\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n JOIN protocol_versions ON protocol_versions.id = l1_batches.protocol_version\n LEFT JOIN\n data_availability\n ON 
data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_commit_tx_id IS NULL\n AND number != 0\n AND protocol_versions.bootloader_code_hash = $1\n AND protocol_versions.default_account_code_hash = $2\n AND commitment IS NOT NULL\n AND (\n protocol_versions.id = $3\n OR protocol_versions.upgrade_tx_hash IS NULL\n )\n ORDER BY\n number\n LIMIT\n $4\n ", "describe": { "columns": [ { @@ -162,6 +162,11 @@ "ordinal": 31, "name": "inclusion_data", "type_info": "Bytea" + }, + { + "ordinal": 32, + "name": "blob_id?", + "type_info": "Text" } ], "parameters": { @@ -204,8 +209,9 @@ true, true, true, - true + true, + false ] }, - "hash": "b7d448837439a3e3dfe73070d3c20e9c138d0a6d35e9ce7fc396c5e76fbc25dd" + "hash": "8a3f130f3b1309b30b3f23bc3cff186551207484769344d211d6c9d2fc452ef3" } diff --git a/core/lib/dal/.sqlx/query-4bd1a4e612d10f2ca26068c140442f38816f163a3e3fba4fdbb81076b969e970.json b/core/lib/dal/.sqlx/query-96de8839bee7d58e2807f98101271fca0e375f1309b34ce09a5beb8ed688c3ef.json similarity index 94% rename from core/lib/dal/.sqlx/query-4bd1a4e612d10f2ca26068c140442f38816f163a3e3fba4fdbb81076b969e970.json rename to core/lib/dal/.sqlx/query-96de8839bee7d58e2807f98101271fca0e375f1309b34ce09a5beb8ed688c3ef.json index 66d3e18075bf..e45f0ceb6ef9 100644 --- a/core/lib/dal/.sqlx/query-4bd1a4e612d10f2ca26068c140442f38816f163a3e3fba4fdbb81076b969e970.json +++ b/core/lib/dal/.sqlx/query-96de8839bee7d58e2807f98101271fca0e375f1309b34ce09a5beb8ed688c3ef.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n number BETWEEN $1 AND $2\n ORDER BY\n number\n LIMIT\n $3\n ", + "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n data_availability.blob_id AS \"blob_id?\"\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n number BETWEEN $1 AND $2\n ORDER BY\n number\n LIMIT\n $3\n ", "describe": { "columns": [ { @@ -162,6 +162,11 @@ "ordinal": 31, "name": 
"inclusion_data", "type_info": "Bytea" + }, + { + "ordinal": 32, + "name": "blob_id?", + "type_info": "Text" } ], "parameters": { @@ -203,8 +208,9 @@ true, true, true, - true + true, + false ] }, - "hash": "4bd1a4e612d10f2ca26068c140442f38816f163a3e3fba4fdbb81076b969e970" + "hash": "96de8839bee7d58e2807f98101271fca0e375f1309b34ce09a5beb8ed688c3ef" } diff --git a/core/lib/dal/.sqlx/query-45154c2efc8d07c4f83ae3e229f9892118f5732374e62f35e27800422afb5746.json b/core/lib/dal/.sqlx/query-af2bab04895e886343f80077af31afd7240ef53d95408a0d38bff65f786b038a.json similarity index 78% rename from core/lib/dal/.sqlx/query-45154c2efc8d07c4f83ae3e229f9892118f5732374e62f35e27800422afb5746.json rename to core/lib/dal/.sqlx/query-af2bab04895e886343f80077af31afd7240ef53d95408a0d38bff65f786b038a.json index 11bff1102932..63b5a6501105 100644 --- a/core/lib/dal/.sqlx/query-45154c2efc8d07c4f83ae3e229f9892118f5732374e62f35e27800422afb5746.json +++ b/core/lib/dal/.sqlx/query-af2bab04895e886343f80077af31afd7240ef53d95408a0d38bff65f786b038a.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n l1_batches.timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n l1_batches.bootloader_code_hash,\n l1_batches.default_aa_code_hash,\n l1_batches.evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n JOIN protocol_versions ON protocol_versions.id = l1_batches.protocol_version\n WHERE\n eth_commit_tx_id IS NULL\n AND number != 0\n AND protocol_versions.bootloader_code_hash = $1\n AND protocol_versions.default_account_code_hash = $2\n AND commitment IS NOT NULL\n AND (\n protocol_versions.id = $3\n OR protocol_versions.upgrade_tx_hash IS NULL\n )\n AND events_queue_commitment IS NOT NULL\n AND bootloader_initial_content_commitment IS NOT NULL\n AND (\n data_availability.inclusion_data IS NOT NULL\n OR $4 IS FALSE\n )\n ORDER BY\n number\n LIMIT\n $5\n ", + "query": "\n SELECT\n number,\n l1_batches.timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n l1_batches.bootloader_code_hash,\n l1_batches.default_aa_code_hash,\n l1_batches.evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n data_availability.blob_id AS \"blob_id?\"\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n JOIN 
protocol_versions ON protocol_versions.id = l1_batches.protocol_version\n WHERE\n eth_commit_tx_id IS NULL\n AND number != 0\n AND protocol_versions.bootloader_code_hash = $1\n AND protocol_versions.default_account_code_hash = $2\n AND commitment IS NOT NULL\n AND (\n protocol_versions.id = $3\n OR protocol_versions.upgrade_tx_hash IS NULL\n )\n AND events_queue_commitment IS NOT NULL\n AND bootloader_initial_content_commitment IS NOT NULL\n AND (\n data_availability.inclusion_data IS NOT NULL\n OR $4 IS FALSE\n )\n ORDER BY\n number\n LIMIT\n $5\n ", "describe": { "columns": [ { @@ -162,6 +162,11 @@ "ordinal": 31, "name": "inclusion_data", "type_info": "Bytea" + }, + { + "ordinal": 32, + "name": "blob_id?", + "type_info": "Text" } ], "parameters": { @@ -205,8 +210,9 @@ true, true, true, - true + true, + false ] }, - "hash": "45154c2efc8d07c4f83ae3e229f9892118f5732374e62f35e27800422afb5746" + "hash": "af2bab04895e886343f80077af31afd7240ef53d95408a0d38bff65f786b038a" } diff --git a/core/lib/dal/.sqlx/query-cee7a608bd77815e9582531383481b01395cfd2a3e95fb4593229bd878163320.json b/core/lib/dal/.sqlx/query-e46c99b23db91800b27c717100f8203a62629904bc4956249e690a8ad7a48983.json similarity index 50% rename from core/lib/dal/.sqlx/query-cee7a608bd77815e9582531383481b01395cfd2a3e95fb4593229bd878163320.json rename to core/lib/dal/.sqlx/query-e46c99b23db91800b27c717100f8203a62629904bc4956249e690a8ad7a48983.json index 4b219bfee0a5..7ca2c9e7e9fa 100644 --- a/core/lib/dal/.sqlx/query-cee7a608bd77815e9582531383481b01395cfd2a3e95fb4593229bd878163320.json +++ b/core/lib/dal/.sqlx/query-e46c99b23db91800b27c717100f8203a62629904bc4956249e690a8ad7a48983.json @@ -1,12 +1,17 @@ { "db_name": "PostgreSQL", - "query": "\n WITH upsert AS (\n SELECT\n p.l1_batch_number\n FROM\n proof_generation_details p\n LEFT JOIN\n tee_proof_generation_details tee\n ON\n p.l1_batch_number = tee.l1_batch_number\n AND tee.tee_type = $1\n WHERE\n (\n p.l1_batch_number >= $5\n AND p.vm_run_data_blob_url IS NOT NULL\n AND p.proof_gen_data_blob_url IS NOT NULL\n )\n AND (\n tee.l1_batch_number IS NULL\n OR (\n tee.status = $3\n OR (\n tee.status = $2\n AND tee.prover_taken_at < NOW() - $4::INTERVAL\n )\n )\n )\n FETCH FIRST ROW ONLY\n )\n \n INSERT INTO\n tee_proof_generation_details (\n l1_batch_number, tee_type, status, created_at, updated_at, prover_taken_at\n )\n SELECT\n l1_batch_number,\n $1,\n $2,\n NOW(),\n NOW(),\n NOW()\n FROM\n upsert\n ON CONFLICT (l1_batch_number, tee_type) DO\n UPDATE\n SET\n status = $2,\n updated_at = NOW(),\n prover_taken_at = NOW()\n RETURNING\n l1_batch_number\n ", + "query": "\n WITH upsert AS (\n SELECT\n p.l1_batch_number\n FROM\n proof_generation_details p\n LEFT JOIN\n tee_proof_generation_details tee\n ON\n p.l1_batch_number = tee.l1_batch_number\n AND tee.tee_type = $1\n WHERE\n (\n p.l1_batch_number >= $5\n AND p.vm_run_data_blob_url IS NOT NULL\n AND p.proof_gen_data_blob_url IS NOT NULL\n )\n AND (\n tee.l1_batch_number IS NULL\n OR (\n (tee.status = $2 OR tee.status = $3)\n AND tee.prover_taken_at < NOW() - $4::INTERVAL\n )\n )\n FETCH FIRST ROW ONLY\n )\n \n INSERT INTO\n tee_proof_generation_details (\n l1_batch_number, tee_type, status, created_at, updated_at, prover_taken_at\n )\n SELECT\n l1_batch_number,\n $1,\n $2,\n NOW(),\n NOW(),\n NOW()\n FROM\n upsert\n ON CONFLICT (l1_batch_number, tee_type) DO\n UPDATE\n SET\n status = $2,\n updated_at = NOW(),\n prover_taken_at = NOW()\n RETURNING\n l1_batch_number,\n created_at\n ", "describe": { "columns": [ { "ordinal": 0, "name": 
"l1_batch_number", "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "created_at", + "type_info": "Timestamp" } ], "parameters": { @@ -19,8 +24,9 @@ ] }, "nullable": [ + false, false ] }, - "hash": "cee7a608bd77815e9582531383481b01395cfd2a3e95fb4593229bd878163320" + "hash": "e46c99b23db91800b27c717100f8203a62629904bc4956249e690a8ad7a48983" } diff --git a/core/lib/dal/.sqlx/query-62e8330881b73917394384adbf73911add046315e5f8877bc57a34e3dadf9e37.json b/core/lib/dal/.sqlx/query-fa52ecb8ee44f02f8d5a2061266c277d67f184d29082a03bc70b9d95700e8c05.json similarity index 86% rename from core/lib/dal/.sqlx/query-62e8330881b73917394384adbf73911add046315e5f8877bc57a34e3dadf9e37.json rename to core/lib/dal/.sqlx/query-fa52ecb8ee44f02f8d5a2061266c277d67f184d29082a03bc70b9d95700e8c05.json index dfdb4b6c82e7..e2c6df469102 100644 --- a/core/lib/dal/.sqlx/query-62e8330881b73917394384adbf73911add046315e5f8877bc57a34e3dadf9e37.json +++ b/core/lib/dal/.sqlx/query-fa52ecb8ee44f02f8d5a2061266c277d67f184d29082a03bc70b9d95700e8c05.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_commit_tx_id IS NOT NULL\n AND eth_prove_tx_id IS NULL\n ORDER BY\n number\n LIMIT\n $1\n ", + "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n data_availability.blob_id AS \"blob_id?\"\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_commit_tx_id IS NOT NULL\n AND eth_prove_tx_id IS NULL\n ORDER BY\n number\n LIMIT\n $1\n ", "describe": { "columns": [ { @@ -162,6 +162,11 @@ "ordinal": 31, "name": "inclusion_data", "type_info": "Bytea" + }, + { + "ordinal": 32, + "name": "blob_id?", + "type_info": "Text" } ], "parameters": { @@ -201,8 +206,9 @@ true, true, true, - true + true, + false ] }, - "hash": "62e8330881b73917394384adbf73911add046315e5f8877bc57a34e3dadf9e37" + "hash": "fa52ecb8ee44f02f8d5a2061266c277d67f184d29082a03bc70b9d95700e8c05" } diff --git 
a/core/lib/dal/Cargo.toml b/core/lib/dal/Cargo.toml index db03b8de9825..4b093dd181bb 100644 --- a/core/lib/dal/Cargo.toml +++ b/core/lib/dal/Cargo.toml @@ -15,7 +15,6 @@ links = "zksync_dal_proto" [dependencies] vise.workspace = true zksync_vm_interface.workspace = true -zksync_utils.workspace = true zksync_system_constants.workspace = true zksync_contracts.workspace = true zksync_types.workspace = true @@ -56,7 +55,7 @@ tracing.workspace = true chrono = { workspace = true, features = ["serde"] } [dev-dependencies] -zksync_test_account.workspace = true +zksync_test_contracts.workspace = true zksync_concurrency.workspace = true [build-dependencies] diff --git a/core/lib/dal/doc/TeeProofGenerationDal.md b/core/lib/dal/doc/TeeProofGenerationDal.md index fcfa379816c7..d9ae70aeb2fd 100644 --- a/core/lib/dal/doc/TeeProofGenerationDal.md +++ b/core/lib/dal/doc/TeeProofGenerationDal.md @@ -11,9 +11,11 @@ title: Status Diagram --- stateDiagram-v2 -[*] --> unpicked : insert_tee_proof_generation_job -unpicked --> picked_by_prover : lock_batch_for_proving +[*] --> picked_by_prover : lock picked_by_prover --> generated : save_proof_artifacts_metadata -picked_by_prover --> unpicked : unlock_batch +picked_by_prover --> permanently_ignored : unlock_batch +picked_by_prover --> failed : unlock_batch +failed --> picked_by_prover : lock +permanently_ignored --> [*] generated --> [*] ``` diff --git a/core/lib/dal/migrations/20240930110000_tee_add_permanently_ignored_state.down.sql b/core/lib/dal/migrations/20240930110000_tee_add_permanently_ignored_state.down.sql new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/core/lib/dal/migrations/20240930110000_tee_add_permanently_ignored_state.up.sql b/core/lib/dal/migrations/20240930110000_tee_add_permanently_ignored_state.up.sql new file mode 100644 index 000000000000..12a21d1728c8 --- /dev/null +++ b/core/lib/dal/migrations/20240930110000_tee_add_permanently_ignored_state.up.sql @@ -0,0 +1,8 @@ +-- There were manually added tee_proof_generation_details entries with status 'permanently_ignore'. + +UPDATE tee_proof_generation_details SET status = 'permanently_ignored' WHERE status = 'permanently_ignore'; + +-- Entries with the status 'unpicked' were not used at all after the migration to the logic +-- introduced in https://github.com/matter-labs/zksync-era/pull/3017. This was overlooked. 
+ +DELETE FROM tee_proof_generation_details WHERE status = 'unpicked'; diff --git a/core/lib/dal/migrations/20241108051505_sealed_at.down.sql b/core/lib/dal/migrations/20241108051505_sealed_at.down.sql new file mode 100644 index 000000000000..6e20a782e7a4 --- /dev/null +++ b/core/lib/dal/migrations/20241108051505_sealed_at.down.sql @@ -0,0 +1,2 @@ +-- Add down migration script here +ALTER TABLE l1_batches DROP COLUMN IF EXISTS sealed_at; diff --git a/core/lib/dal/migrations/20241108051505_sealed_at.up.sql b/core/lib/dal/migrations/20241108051505_sealed_at.up.sql new file mode 100644 index 000000000000..e442fd6bf309 --- /dev/null +++ b/core/lib/dal/migrations/20241108051505_sealed_at.up.sql @@ -0,0 +1,2 @@ +-- add a sealed_at column for metrics +ALTER TABLE l1_batches ADD COLUMN sealed_at TIMESTAMP; diff --git a/core/lib/dal/src/blocks_dal.rs b/core/lib/dal/src/blocks_dal.rs index 943aa12caf75..93d8574187ab 100644 --- a/core/lib/dal/src/blocks_dal.rs +++ b/core/lib/dal/src/blocks_dal.rs @@ -7,6 +7,7 @@ use std::{ use anyhow::Context as _; use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive}; +use sqlx::types::chrono::{DateTime, Utc}; use zksync_db_connection::{ connection::Connection, error::{DalResult, SqlxContext}, @@ -348,7 +349,8 @@ impl BlocksDal<'_, '_> { aggregation_root, local_root, state_diff_hash, - data_availability.inclusion_data + data_availability.inclusion_data, + data_availability.blob_id AS "blob_id?" FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -742,6 +744,7 @@ impl BlocksDal<'_, '_> { pubdata_input = $19, predicted_circuits_by_type = $20, updated_at = NOW(), + sealed_at = NOW(), is_sealed = TRUE WHERE number = $1 @@ -1219,7 +1222,8 @@ impl BlocksDal<'_, '_> { aggregation_root, local_root, state_diff_hash, - data_availability.inclusion_data + data_availability.inclusion_data, + data_availability.blob_id AS "blob_id?" FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -1414,7 +1418,8 @@ impl BlocksDal<'_, '_> { aggregation_root, local_root, state_diff_hash, - data_availability.inclusion_data + data_availability.inclusion_data, + data_availability.blob_id AS "blob_id?" FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -1503,7 +1508,8 @@ impl BlocksDal<'_, '_> { aggregation_root, local_root, state_diff_hash, - data_availability.inclusion_data + data_availability.inclusion_data, + data_availability.blob_id AS "blob_id?" FROM ( SELECT @@ -1583,7 +1589,8 @@ impl BlocksDal<'_, '_> { aggregation_root, local_root, state_diff_hash, - data_availability.inclusion_data + data_availability.inclusion_data, + data_availability.blob_id AS "blob_id?" FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -1719,7 +1726,8 @@ impl BlocksDal<'_, '_> { aggregation_root, local_root, state_diff_hash, - data_availability.inclusion_data + data_availability.inclusion_data, + data_availability.blob_id AS "blob_id?" FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -1792,7 +1800,8 @@ impl BlocksDal<'_, '_> { aggregation_root, local_root, state_diff_hash, - data_availability.inclusion_data + data_availability.inclusion_data, + data_availability.blob_id AS "blob_id?" 
FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -1879,7 +1888,8 @@ impl BlocksDal<'_, '_> { aggregation_root, local_root, state_diff_hash, - data_availability.inclusion_data + data_availability.inclusion_data, + data_availability.blob_id AS "blob_id?" FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -2394,6 +2404,28 @@ impl BlocksDal<'_, '_> { .flatten()) } + pub async fn get_batch_sealed_at( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> DalResult>> { + Ok(sqlx::query!( + r#" + SELECT + sealed_at + FROM + l1_batches + WHERE + number = $1 + "#, + i64::from(l1_batch_number.0) + ) + .instrument("get_batch_sealed_at") + .with_arg("l1_batch_number", &l1_batch_number) + .fetch_optional(self.storage) + .await? + .and_then(|row| row.sealed_at.map(|d| d.and_utc()))) + } + pub async fn set_protocol_version_for_pending_l2_blocks( &mut self, id: ProtocolVersionId, diff --git a/core/lib/dal/src/blocks_web3_dal.rs b/core/lib/dal/src/blocks_web3_dal.rs index ba843bbf92f3..4699eac4e5eb 100644 --- a/core/lib/dal/src/blocks_web3_dal.rs +++ b/core/lib/dal/src/blocks_web3_dal.rs @@ -11,12 +11,11 @@ use zksync_types::{ web3::{BlockHeader, Bytes}, Bloom, L1BatchNumber, L2BlockNumber, ProtocolVersionId, H160, H256, U256, U64, }; -use zksync_utils::bigdecimal_to_u256; use zksync_vm_interface::Call; use crate::{ models::{ - parse_protocol_version, + bigdecimal_to_u256, parse_protocol_version, storage_block::{ ResolvedL1BatchForL2Block, StorageBlockDetails, StorageL1BatchDetails, LEGACY_BLOCK_GAS_LIMIT, diff --git a/core/lib/dal/src/consensus/conv.rs b/core/lib/dal/src/consensus/conv.rs index f0948adfd1da..3153343d6014 100644 --- a/core/lib/dal/src/consensus/conv.rs +++ b/core/lib/dal/src/consensus/conv.rs @@ -8,15 +8,15 @@ use zksync_types::{ commitment::{L1BatchCommitmentMode, PubdataParams}, ethabi, fee::Fee, + h256_to_u256, l1::{OpProcessingType, PriorityQueueType}, l2::TransactionType, parse_h160, parse_h256, protocol_upgrade::ProtocolUpgradeTxCommonData, transaction_request::PaymasterParams, - Execute, ExecuteTransactionCommon, InputData, L1BatchNumber, L1TxCommonData, L2TxCommonData, - Nonce, PriorityOpId, ProtocolVersionId, Transaction, H256, + u256_to_h256, Execute, ExecuteTransactionCommon, InputData, L1BatchNumber, L1TxCommonData, + L2TxCommonData, Nonce, PriorityOpId, ProtocolVersionId, Transaction, H256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use super::*; diff --git a/core/lib/dal/src/consensus/tests.rs b/core/lib/dal/src/consensus/tests.rs index df6ee24bfa94..465148dc7b5c 100644 --- a/core/lib/dal/src/consensus/tests.rs +++ b/core/lib/dal/src/consensus/tests.rs @@ -7,7 +7,7 @@ use zksync_protobuf::{ testonly::{test_encode, test_encode_all_formats, FmtConv}, ProtoRepr, }; -use zksync_test_account::Account; +use zksync_test_contracts::Account; use zksync_types::{ commitment::{L1BatchCommitmentMode, PubdataParams}, web3::Bytes, diff --git a/core/lib/dal/src/contract_verification_dal.rs b/core/lib/dal/src/contract_verification_dal.rs index 93a4ce2fd35a..57bea5392cf8 100644 --- a/core/lib/dal/src/contract_verification_dal.rs +++ b/core/lib/dal/src/contract_verification_dal.rs @@ -8,13 +8,13 @@ use std::{ use sqlx::postgres::types::PgInterval; use zksync_db_connection::{error::SqlxContext, instrument::InstrumentExt}; use zksync_types::{ + address_to_h256, contract_verification_api::{ VerificationIncomingRequest, VerificationInfo, VerificationRequest, VerificationRequestStatus, }, web3, Address, 
CONTRACT_DEPLOYER_ADDRESS, H256, }; -use zksync_utils::address_to_h256; use zksync_vm_interface::VmEvent; use crate::{ @@ -567,11 +567,11 @@ mod tests { use std::collections::HashMap; use zksync_types::{ + bytecode::BytecodeHash, contract_verification_api::{CompilerVersions, SourceCodeData}, tx::IncludedTxLocation, Execute, L1BatchNumber, L2BlockNumber, ProtocolVersion, }; - use zksync_utils::bytecode::hash_bytecode; use zksync_vm_interface::{tracer::ValidationTraces, TransactionExecutionMetrics}; use super::*; @@ -598,7 +598,7 @@ mod tests { let deployed_address = Address::repeat_byte(12); let mut tx = mock_l2_transaction(); let bytecode = vec![1; 32]; - let bytecode_hash = hash_bytecode(&bytecode); + let bytecode_hash = BytecodeHash::for_bytecode(&bytecode).value(); tx.execute = Execute::for_deploy(H256::zero(), bytecode.clone(), &[]); conn.transactions_dal() .insert_transaction_l2( diff --git a/core/lib/dal/src/data_availability_dal.rs b/core/lib/dal/src/data_availability_dal.rs index 41dd7efe2732..c427216425b3 100644 --- a/core/lib/dal/src/data_availability_dal.rs +++ b/core/lib/dal/src/data_availability_dal.rs @@ -175,6 +175,45 @@ impl DataAvailabilityDal<'_, '_> { .map(DataAvailabilityBlob::from)) } + pub async fn get_da_blob_ids_awaiting_inclusion( + &mut self, + ) -> DalResult<Vec<Option<DataAvailabilityBlob>>> { + let rows = sqlx::query!( + r#" + SELECT + l1_batch_number, + blob_id, + inclusion_data, + sent_at + FROM + data_availability + WHERE + inclusion_data IS NULL + ORDER BY + l1_batch_number + "#, + ) + .instrument("get_da_blobs_awaiting_inclusion") + .fetch_all(self.storage) + .await?; + + Ok(rows + .into_iter() + .map(|row| { + let l1_batch_number_u32 = row.l1_batch_number.try_into(); + if let Ok(l1_batch_number) = l1_batch_number_u32 { + Some(DataAvailabilityBlob { + l1_batch_number: L1BatchNumber(l1_batch_number), + blob_id: row.blob_id, + inclusion_data: row.inclusion_data, + sent_at: row.sent_at.and_utc(), + }) + } else { + None + } + }) + .collect()) + } /// Fetches the pubdata and `l1_batch_number` for the L1 batches that are ready for DA dispatch. pub async fn get_ready_for_da_dispatch_l1_batches( &mut self,
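The new `get_da_blob_ids_awaiting_inclusion` query above returns one entry per batch whose DA blob still lacks inclusion data (a batch number that does not fit into a `u32` comes back as `None`). A hedged sketch of how a poller might consume it; the actual wiring lives outside this DAL, and `da_client` stands in for whichever `DataAvailabilityClient` implementation is configured:

```rust
// Sketch under stated assumptions: `da_client` implements the workspace's
// `DataAvailabilityClient` trait, and `save_l1_batch_inclusion_data` is the
// pre-existing DAL method for persisting fetched inclusion proofs.
let pending = storage
    .data_availability_dal()
    .get_da_blob_ids_awaiting_inclusion()
    .await?;
for blob in pending.into_iter().flatten() {
    if let Some(inclusion) = da_client.get_inclusion_data(&blob.blob_id).await? {
        storage
            .data_availability_dal()
            .save_l1_batch_inclusion_data(blob.l1_batch_number, &inclusion.data)
            .await?;
    }
}
```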
diff --git a/core/lib/dal/src/factory_deps_dal.rs b/core/lib/dal/src/factory_deps_dal.rs index 857e2973ae33..424d708da241 100644 --- a/core/lib/dal/src/factory_deps_dal.rs +++ b/core/lib/dal/src/factory_deps_dal.rs @@ -4,7 +4,6 @@ use anyhow::Context as _; use zksync_contracts::{BaseSystemContracts, SystemContractCode}; use zksync_db_connection::{connection::Connection, error::DalResult, instrument::InstrumentExt}; use zksync_types::{L2BlockNumber, H256, U256}; -use zksync_utils::{bytes_to_be_words, bytes_to_chunks}; use crate::Core; @@ -102,7 +101,7 @@ impl FactoryDepsDal<'_, '_> { .context("failed loading bootloader code")? .with_context(|| format!("bootloader code with hash {bootloader_hash:?} should be present in the database"))?; let bootloader_code = SystemContractCode { - code: bytes_to_be_words(bootloader_bytecode), + code: bootloader_bytecode, hash: bootloader_hash, }; @@ -113,7 +112,7 @@ .with_context(|| format!("default account code with hash {default_aa_hash:?} should be present in the database"))?; let default_aa_code = SystemContractCode { - code: bytes_to_be_words(default_aa_bytecode), + code: default_aa_bytecode, hash: default_aa_hash, }; @@ -125,7 +124,7 @@ .with_context(|| format!("EVM emulator code with hash {evm_emulator_hash:?} should be present in the database"))?; Some(SystemContractCode { - code: bytes_to_be_words(evm_emulator_bytecode), + code: evm_emulator_bytecode, hash: evm_emulator_hash, }) } else { @@ -140,10 +139,7 @@ } /// Returns bytecodes for factory deps with the specified `hashes`. - pub async fn get_factory_deps( - &mut self, - hashes: &HashSet<H256>, - ) -> HashMap<U256, Vec<[u8; 32]>> { + pub async fn get_factory_deps(&mut self, hashes: &HashSet<H256>) -> HashMap<U256, Vec<u8>> { let hashes_as_bytes: Vec<_> = hashes.iter().map(H256::as_bytes).collect(); sqlx::query!( @@ -162,12 +158,7 @@ .await .unwrap() .into_iter() - .map(|row| { - ( - U256::from_big_endian(&row.bytecode_hash), - bytes_to_chunks(&row.bytecode), - ) - }) + .map(|row| (U256::from_big_endian(&row.bytecode_hash), row.bytecode)) .collect() } diff --git a/core/lib/dal/src/models/mod.rs b/core/lib/dal/src/models/mod.rs index 12e41ac780ad..885dcd46f41f 100644 --- a/core/lib/dal/src/models/mod.rs +++ b/core/lib/dal/src/models/mod.rs @@ -1,6 +1,8 @@ pub mod storage_block; + +use bigdecimal::{num_bigint::BigUint, BigDecimal}; use zksync_db_connection::error::SqlxContext; -use zksync_types::ProtocolVersionId; +use zksync_types::{ProtocolVersionId, U256}; mod call; pub mod storage_base_token_ratio; @@ -24,3 +26,26 @@ pub(crate) fn parse_protocol_version(raw: i32) -> sqlx::Result<ProtocolVersionId> { +pub(crate) fn u256_to_big_decimal(value: U256) -> BigDecimal { + let mut u32_digits = vec![0_u32; 8]; + // `u64_digit`s from `U256` are little-endian + for (i, &u64_digit) in value.0.iter().enumerate() { + u32_digits[2 * i] = u64_digit as u32; + u32_digits[2 * i + 1] = (u64_digit >> 32) as u32; + } + let value = BigUint::new(u32_digits); + BigDecimal::new(value.into(), 0) +} + +/// Converts `BigUint` value into the corresponding `U256` value. +fn biguint_to_u256(value: BigUint) -> U256 { + let bytes = value.to_bytes_le(); + U256::from_little_endian(&bytes) +}
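A quick worked example of the limb packing in `u256_to_big_decimal` above (a sketch, not part of the diff): a `U256` stores four little-endian `u64` limbs, and each limb is split into a low and a high `u32` digit.

```rust
// 0x1_0000_0001 occupies the lowest u64 limb; its low u32 digit is 1 and its
// high u32 digit is 1, so `u32_digits == [1, 1, 0, 0, 0, 0, 0, 0]` and
// `BigUint::new` reassembles 1 + 1 * 2^32.
let value = U256::from(0x1_0000_0001_u64);
assert_eq!(u256_to_big_decimal(value), BigDecimal::from(0x1_0000_0001_u64));
// The BigDecimal -> U256 direction (defined next) round-trips the value.
assert_eq!(bigdecimal_to_u256(u256_to_big_decimal(value)), value);
```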
+ +/// Converts `BigDecimal` value into the corresponding `U256` value. +pub(crate) fn bigdecimal_to_u256(value: BigDecimal) -> U256 { + let bigint = value.with_scale(0).into_bigint_and_exponent().0; + biguint_to_u256(bigint.to_biguint().unwrap()) +} diff --git a/core/lib/dal/src/models/storage_block.rs b/core/lib/dal/src/models/storage_block.rs index 159ed71cc3e9..95625c8b2955 100644 --- a/core/lib/dal/src/models/storage_block.rs +++ b/core/lib/dal/src/models/storage_block.rs @@ -159,6 +159,7 @@ pub(crate) struct StorageL1Batch { pub local_root: Option<Vec<u8>>, pub state_diff_hash: Option<Vec<u8>>, pub inclusion_data: Option<Vec<u8>>, + pub blob_id: Option<String>, } impl StorageL1Batch { @@ -271,6 +272,7 @@ impl TryFrom<StorageL1Batch> for L1BatchMetadata { local_root: batch.local_root.map(|v| H256::from_slice(&v)), aggregation_root: batch.aggregation_root.map(|v| H256::from_slice(&v)), da_inclusion_data: batch.inclusion_data, + da_blob_id: batch.blob_id.map(|s| s.into_bytes()), }) } } diff --git a/core/lib/dal/src/models/storage_tee_proof.rs b/core/lib/dal/src/models/storage_tee_proof.rs index 5c93361e7df1..6f80c59511f9 100644 --- a/core/lib/dal/src/models/storage_tee_proof.rs +++ b/core/lib/dal/src/models/storage_tee_proof.rs @@ -1,4 +1,7 @@ -use chrono::NaiveDateTime; +use chrono::{DateTime, NaiveDateTime, Utc}; +use zksync_types::L1BatchNumber; + +use crate::tee_proof_generation_dal::LockedBatch; #[derive(Debug, Clone, sqlx::FromRow)] pub struct StorageTeeProof { @@ -8,3 +11,18 @@ pub struct StorageTeeProof { pub updated_at: NaiveDateTime, pub attestation: Option<Vec<u8>>, } + +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct StorageLockedBatch { + pub l1_batch_number: i64, + pub created_at: NaiveDateTime, +} + +impl From<StorageLockedBatch> for LockedBatch { + fn from(tx: StorageLockedBatch) -> LockedBatch { + LockedBatch { + l1_batch_number: L1BatchNumber::from(tx.l1_batch_number as u32), + created_at: DateTime::<Utc>::from_naive_utc_and_offset(tx.created_at, Utc), + } + } +} diff --git a/core/lib/dal/src/models/storage_transaction.rs b/core/lib/dal/src/models/storage_transaction.rs index 459a3ec0c0fb..cceebc85cf2b 100644 --- a/core/lib/dal/src/models/storage_transaction.rs +++ b/core/lib/dal/src/models/storage_transaction.rs @@ -6,6 +6,7 @@ use sqlx::types::chrono::{DateTime, NaiveDateTime, Utc}; use zksync_types::{ api::{self, TransactionDetails, TransactionReceipt, TransactionStatus}, fee::Fee, + h256_to_address, l1::{OpProcessingType, PriorityQueueType}, l2::TransactionType, protocol_upgrade::ProtocolUpgradeTxCommonData, @@ -16,11 +17,10 @@ use zksync_types::{ TransactionTimeRangeConstraint, EIP_1559_TX_TYPE, EIP_2930_TX_TYPE, EIP_712_TX_TYPE, H160, H256, PRIORITY_OPERATION_L2_TX_TYPE, PROTOCOL_UPGRADE_TX_TYPE, U256, U64, }; -use zksync_utils::{bigdecimal_to_u256, h256_to_account_address}; use zksync_vm_interface::Call; use super::call::{LegacyCall, LegacyMixedCall}; -use crate::BigDecimal; +use crate::{models::bigdecimal_to_u256, BigDecimal}; #[derive(Debug, Clone, sqlx::FromRow)] #[cfg_attr(test, derive(Default))] @@ -403,7 +403,7 @@ impl From<StorageTransactionReceipt> for TransactionReceipt { ), contract_address: storage_receipt .contract_address - .map(|addr| h256_to_account_address(&H256::from_slice(&addr))), + .map(|addr| h256_to_address(&H256::from_slice(&addr))), logs: vec![], l2_to_l1_logs: vec![], status, @@ -541,6 +541,13 @@ impl StorageApiTransaction { .or_else(|| self.max_fee_per_gas.clone()) .unwrap_or_else(BigDecimal::zero), }; + // Legacy transactions are not supposed to have `yParity` and are reliant on `v` instead.
+ // Other transactions are required to have `yParity` which replaces the deprecated `v` value + // (still included for backwards compatibility). + let y_parity = match self.tx_format { + None | Some(0) => None, + _ => signature.as_ref().map(|s| U64::from(s.v())), + }; let mut tx = api::Transaction { hash: H256::from_slice(&self.tx_hash), nonce: U256::from(self.nonce.unwrap_or(0) as u64), @@ -553,6 +560,7 @@ impl StorageApiTransaction { gas_price: Some(bigdecimal_to_u256(gas_price)), gas: bigdecimal_to_u256(self.gas_limit.unwrap_or_else(BigDecimal::zero)), input: serde_json::from_value(self.calldata).expect("incorrect calldata in Postgres"), + y_parity, v: signature.as_ref().map(|s| U64::from(s.v())), r: signature.as_ref().map(|s| U256::from(s.r())), s: signature.as_ref().map(|s| U256::from(s.s())), diff --git a/core/lib/dal/src/models/tests.rs b/core/lib/dal/src/models/tests.rs index b4949dc101d6..c30c84702b13 100644 --- a/core/lib/dal/src/models/tests.rs +++ b/core/lib/dal/src/models/tests.rs @@ -1,4 +1,6 @@ +use bigdecimal::num_bigint::BigInt; use chrono::Utc; +use rand::{prelude::StdRng, Rng, SeedableRng}; use zksync_types::{ fee::Fee, l1::{OpProcessingType, PriorityQueueType}, @@ -7,9 +9,9 @@ use zksync_types::{ Address, Execute, ExecuteTransactionCommon, Transaction, EIP_1559_TX_TYPE, EIP_2930_TX_TYPE, EIP_712_TX_TYPE, H160, H256, PRIORITY_OPERATION_L2_TX_TYPE, PROTOCOL_UPGRADE_TX_TYPE, U256, }; -use zksync_utils::bigdecimal_to_u256; -use crate::{models::storage_transaction::StorageTransaction, BigDecimal}; +use super::*; +use crate::models::storage_transaction::StorageTransaction; fn default_execute() -> Execute { Execute { @@ -96,6 +98,49 @@ fn l2_storage_tx(tx_format: i32) -> StorageTransaction { } } +#[test] +fn test_u256_to_bigdecimal() { + const RNG_SEED: u64 = 123; + + let mut rng = StdRng::seed_from_u64(RNG_SEED); + // Small values. 
+ for _ in 0..10_000 { + let value: u64 = rng.gen(); + let expected = BigDecimal::from(value); + assert_eq!(u256_to_big_decimal(value.into()), expected); + } + + // Arbitrary values + for _ in 0..10_000 { + let u64_digits: [u64; 4] = rng.gen(); + let value = u64_digits + .iter() + .enumerate() + .map(|(i, &digit)| U256::from(digit) << (i * 64)) + .fold(U256::zero(), |acc, x| acc + x); + let expected_value = u64_digits + .iter() + .enumerate() + .map(|(i, &digit)| BigInt::from(digit) << (i * 64)) + .fold(BigInt::from(0), |acc, x| acc + x); + assert_eq!( + u256_to_big_decimal(value), + BigDecimal::new(expected_value, 0) + ); + } +} + +#[test] +fn test_bigdecimal_to_u256() { + let value = BigDecimal::from(100u32); + let expected = U256::from(100u32); + assert_eq!(bigdecimal_to_u256(value), expected); + + let value = BigDecimal::new(BigInt::from(100), -2); + let expected = U256::from(10000u32); + assert_eq!(bigdecimal_to_u256(value), expected); +} + #[test] fn storage_tx_to_l1_tx() { let stx = l1_storage_tx(); diff --git a/core/lib/dal/src/storage_web3_dal.rs b/core/lib/dal/src/storage_web3_dal.rs index 10d2cfe61525..794f49c59ac4 100644 --- a/core/lib/dal/src/storage_web3_dal.rs +++ b/core/lib/dal/src/storage_web3_dal.rs @@ -6,12 +6,11 @@ use zksync_db_connection::{ instrument::{InstrumentExt, Instrumented}, }; use zksync_types::{ - get_code_key, get_nonce_key, + get_code_key, get_nonce_key, h256_to_u256, utils::{decompose_full_nonce, storage_key_for_standard_token_balance}, AccountTreeId, Address, L1BatchNumber, L2BlockNumber, Nonce, StorageKey, FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH, H256, U256, }; -use zksync_utils::h256_to_u256; use crate::{models::storage_block::ResolvedL1BatchForL2Block, Core, CoreDal}; diff --git a/core/lib/dal/src/tee_proof_generation_dal.rs b/core/lib/dal/src/tee_proof_generation_dal.rs index 755d02769101..4d19c3ff0c8b 100644 --- a/core/lib/dal/src/tee_proof_generation_dal.rs +++ b/core/lib/dal/src/tee_proof_generation_dal.rs @@ -1,6 +1,7 @@ #![doc = include_str!("../doc/TeeProofGenerationDal.md")] use std::time::Duration; +use chrono::{DateTime, Utc}; use strum::{Display, EnumString}; use zksync_db_connection::{ connection::Connection, @@ -10,21 +11,47 @@ use zksync_db_connection::{ }; use zksync_types::{tee_types::TeeType, L1BatchNumber}; -use crate::{models::storage_tee_proof::StorageTeeProof, Core}; +use crate::{ + models::storage_tee_proof::{StorageLockedBatch, StorageTeeProof}, + Core, +}; #[derive(Debug)] pub struct TeeProofGenerationDal<'a, 'c> { pub(crate) storage: &'a mut Connection<'c, Core>, } -#[derive(Debug, EnumString, Display)] -enum TeeProofGenerationJobStatus { - #[strum(serialize = "unpicked")] - Unpicked, +#[derive(Debug, Clone, Copy, EnumString, Display)] +pub enum TeeProofGenerationJobStatus { + /// The batch has been picked by a TEE prover and is currently being processed. #[strum(serialize = "picked_by_prover")] PickedByProver, + /// The proof has been successfully generated and submitted for the batch. #[strum(serialize = "generated")] Generated, + /// The proof generation for the batch has failed, which can happen if its inputs (GCS blob + /// files) are incomplete or the API is unavailable. Failed batches are retried for a specified + /// period, as defined in the configuration. + #[strum(serialize = "failed")] + Failed, + /// The batch will not be processed again because the proof generation has been failing for an + /// extended period, as specified in the configuration. 
+ #[strum(serialize = "permanently_ignored")] + PermanentlyIgnored, +} + +/// Represents a locked batch picked by a TEE prover. A batch is locked when taken by a TEE prover +/// ([TeeProofGenerationJobStatus::PickedByProver]). It can transition to one of three states: +/// 1. [TeeProofGenerationJobStatus::Generated]. +/// 2. [TeeProofGenerationJobStatus::Failed]. +/// 3. [TeeProofGenerationJobStatus::PermanentlyIgnored]. +#[derive(Clone, Debug)] +pub struct LockedBatch { + /// Locked batch number. + pub l1_batch_number: L1BatchNumber, + /// The creation time of the job for this batch. It is used to determine if the batch should + /// transition to [TeeProofGenerationJobStatus::PermanentlyIgnored] or [TeeProofGenerationJobStatus::Failed]. + pub created_at: DateTime<Utc>, } impl TeeProofGenerationDal<'_, '_> { @@ -33,10 +60,11 @@ impl TeeProofGenerationDal<'_, '_> { tee_type: TeeType, processing_timeout: Duration, min_batch_number: L1BatchNumber, - ) -> DalResult<Option<L1BatchNumber>> { + ) -> DalResult<Option<LockedBatch>> { let processing_timeout = pg_interval_from_duration(processing_timeout); let min_batch_number = i64::from(min_batch_number.0); - sqlx::query!( + let locked_batch = sqlx::query_as!( + StorageLockedBatch, r#" WITH upsert AS ( SELECT @@ -57,11 +85,8 @@ impl TeeProofGenerationDal<'_, '_> { AND ( tee.l1_batch_number IS NULL OR ( - tee.status = $3 - OR ( - tee.status = $2 - AND tee.prover_taken_at < NOW() - $4::INTERVAL - ) + (tee.status = $2 OR tee.status = $3) + AND tee.prover_taken_at < NOW() - $4::INTERVAL ) ) FETCH FIRST ROW ONLY @@ -87,11 +112,12 @@ impl TeeProofGenerationDal<'_, '_> { updated_at = NOW(), prover_taken_at = NOW() RETURNING - l1_batch_number + l1_batch_number, + created_at "#, tee_type.to_string(), TeeProofGenerationJobStatus::PickedByProver.to_string(), - TeeProofGenerationJobStatus::Unpicked.to_string(), + TeeProofGenerationJobStatus::Failed.to_string(), processing_timeout, min_batch_number ) @@ -100,14 +126,17 @@ impl TeeProofGenerationDal<'_, '_> { .with_arg("processing_timeout", &processing_timeout) .with_arg("l1_batch_number", &min_batch_number) .fetch_optional(self.storage) - .await - .map(|record| record.map(|record| L1BatchNumber(record.l1_batch_number as u32))) + .await? + .map(Into::into); + + Ok(locked_batch) } pub async fn unlock_batch( &mut self, l1_batch_number: L1BatchNumber, tee_type: TeeType, + status: TeeProofGenerationJobStatus, ) -> DalResult<()> { let batch_number = i64::from(l1_batch_number.0); sqlx::query!( @@ -120,7 +149,7 @@ impl TeeProofGenerationDal<'_, '_> { l1_batch_number = $2 AND tee_type = $3 "#, - TeeProofGenerationJobStatus::Unpicked.to_string(), + status.to_string(), batch_number, tee_type.to_string() ) @@ -266,7 +295,7 @@ impl TeeProofGenerationDal<'_, '_> { "#, batch_number, tee_type.to_string(), - TeeProofGenerationJobStatus::Unpicked.to_string(), + TeeProofGenerationJobStatus::PickedByProver.to_string(), ); let instrumentation = Instrumented::new("insert_tee_proof_generation_job") .with_arg("l1_batch_number", &batch_number) @@ -281,7 +310,7 @@ impl TeeProofGenerationDal<'_, '_> { } /// For testing purposes only.
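The DAL above only records whichever status it is given; the choice between `Failed` and `PermanentlyIgnored` is made by the caller (the proof data handler), presumably from the locked batch's `created_at` and the `tee_batch_permanently_ignored_timeout_in_hours` setting that appears later in this diff. A hedged sketch of that decision; the function and parameter names are illustrative:

```rust
use chrono::{Duration, Utc};

// Sketch only: picks the unlock status for a batch whose proving attempt
// failed, based on how long the job has existed.
fn unlock_status(
    batch: &LockedBatch,
    permanently_ignored_timeout: Duration,
) -> TeeProofGenerationJobStatus {
    if Utc::now() - batch.created_at > permanently_ignored_timeout {
        // Failing for too long: stop retrying this batch.
        TeeProofGenerationJobStatus::PermanentlyIgnored
    } else {
        // Transient failure: `lock_batch_for_proving` may pick it up again.
        TeeProofGenerationJobStatus::Failed
    }
}
```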
- pub async fn get_oldest_unpicked_batch(&mut self) -> DalResult> { + pub async fn get_oldest_picked_by_prover_batch(&mut self) -> DalResult> { let query = sqlx::query!( r#" SELECT @@ -295,7 +324,7 @@ impl TeeProofGenerationDal<'_, '_> { LIMIT 1 "#, - TeeProofGenerationJobStatus::Unpicked.to_string(), + TeeProofGenerationJobStatus::PickedByProver.to_string(), ); let batch_number = Instrumented::new("get_oldest_unpicked_batch") .with(query) diff --git a/core/lib/dal/src/transactions_dal.rs b/core/lib/dal/src/transactions_dal.rs index 9c0889ebfc75..a5dfb8932ddb 100644 --- a/core/lib/dal/src/transactions_dal.rs +++ b/core/lib/dal/src/transactions_dal.rs @@ -15,15 +15,15 @@ use zksync_types::{ L1BlockNumber, L2BlockNumber, PriorityOpId, ProtocolVersionId, Transaction, TransactionTimeRangeConstraint, H256, PROTOCOL_UPGRADE_TX_TYPE, U256, }; -use zksync_utils::u256_to_big_decimal; use zksync_vm_interface::{ tracer::ValidationTraces, Call, TransactionExecutionMetrics, TransactionExecutionResult, TxExecutionStatus, }; use crate::{ - models::storage_transaction::{ - parse_call_trace, serialize_call_into_bytes, StorageTransaction, + models::{ + storage_transaction::{parse_call_trace, serialize_call_into_bytes, StorageTransaction}, + u256_to_big_decimal, }, Core, CoreDal, }; diff --git a/core/lib/env_config/src/da_dispatcher.rs b/core/lib/env_config/src/da_dispatcher.rs index 246752db91ac..805e6b2234b5 100644 --- a/core/lib/env_config/src/da_dispatcher.rs +++ b/core/lib/env_config/src/da_dispatcher.rs @@ -21,12 +21,14 @@ mod tests { interval: u32, rows_limit: u32, max_retries: u16, + max_concurrent_requests: u32, ) -> DADispatcherConfig { DADispatcherConfig { polling_interval_ms: Some(interval), max_rows_to_dispatch: Some(rows_limit), max_retries: Some(max_retries), use_dummy_inclusion_data: Some(true), + max_concurrent_requests: Some(max_concurrent_requests), } } @@ -38,9 +40,10 @@ mod tests { DA_DISPATCHER_MAX_ROWS_TO_DISPATCH=60 DA_DISPATCHER_MAX_RETRIES=7 DA_DISPATCHER_USE_DUMMY_INCLUSION_DATA="true" + DA_DISPATCHER_MAX_CONCURRENT_REQUESTS=10 "#; lock.set_env(config); let actual = DADispatcherConfig::from_env().unwrap(); - assert_eq!(actual, expected_da_layer_config(5000, 60, 7)); + assert_eq!(actual, expected_da_layer_config(5000, 60, 7, 10)); } } diff --git a/core/lib/env_config/src/database.rs b/core/lib/env_config/src/database.rs index 119d64b7738c..ae4c3059ce32 100644 --- a/core/lib/env_config/src/database.rs +++ b/core/lib/env_config/src/database.rs @@ -88,6 +88,7 @@ mod tests { DATABASE_MERKLE_TREE_MAX_L1_BATCHES_PER_ITER=50 DATABASE_EXPERIMENTAL_STATE_KEEPER_DB_BLOCK_CACHE_CAPACITY_MB=64 DATABASE_EXPERIMENTAL_STATE_KEEPER_DB_MAX_OPEN_FILES=100 + DATABASE_EXPERIMENTAL_MERKLE_TREE_REPAIR_STALE_KEYS=true "#; lock.set_env(config); @@ -109,6 +110,7 @@ mod tests { db_config.experimental.state_keeper_db_max_open_files, NonZeroU32::new(100) ); + assert!(db_config.experimental.merkle_tree_repair_stale_keys); } #[test] @@ -118,6 +120,7 @@ mod tests { "DATABASE_STATE_KEEPER_DB_PATH", "DATABASE_EXPERIMENTAL_STATE_KEEPER_DB_MAX_OPEN_FILES", "DATABASE_EXPERIMENTAL_STATE_KEEPER_DB_BLOCK_CACHE_CAPACITY_MB", + "DATABASE_EXPERIMENTAL_MERKLE_TREE_REPAIR_STALE_KEYS", "DATABASE_MERKLE_TREE_BACKUP_PATH", "DATABASE_MERKLE_TREE_PATH", "DATABASE_MERKLE_TREE_MODE", @@ -144,6 +147,7 @@ mod tests { 128 ); assert_eq!(db_config.experimental.state_keeper_db_max_open_files, None); + assert!(!db_config.experimental.merkle_tree_repair_stale_keys); // Check that new env variable for Merkle tree path is supported 
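The new `DA_DISPATCHER_MAX_CONCURRENT_REQUESTS` knob above bounds how many DA-layer calls may be in flight at once. How the dispatcher enforces it is outside this diff; a minimal sketch of the usual semaphore pattern, with `send_to_da_layer` as a stand-in for the real client call:

```rust
use std::sync::Arc;

use tokio::sync::Semaphore;

async fn send_to_da_layer(_blob: Vec<u8>) { /* stand-in for the DA client call */ }

async fn dispatch_all(blobs: Vec<Vec<u8>>, max_concurrent_requests: u32) {
    let semaphore = Arc::new(Semaphore::new(max_concurrent_requests as usize));
    let mut tasks = Vec::new();
    for blob in blobs {
        // Each request holds a permit for its whole lifetime, so at most
        // `max_concurrent_requests` calls run concurrently.
        let permit = Arc::clone(&semaphore).acquire_owned().await.unwrap();
        tasks.push(tokio::spawn(async move {
            let _permit = permit;
            send_to_da_layer(blob).await;
        }));
    }
    for task in tasks {
        task.await.unwrap();
    }
}
```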
lock.set_env("DATABASE_MERKLE_TREE_PATH=/db/tree/main"); diff --git a/core/lib/env_config/src/proof_data_handler.rs b/core/lib/env_config/src/proof_data_handler.rs index 47848585e769..65fd1d516de3 100644 --- a/core/lib/env_config/src/proof_data_handler.rs +++ b/core/lib/env_config/src/proof_data_handler.rs @@ -29,6 +29,7 @@ mod tests { tee_support: true, first_tee_processed_batch: L1BatchNumber(1337), tee_proof_generation_timeout_in_secs: 600, + tee_batch_permanently_ignored_timeout_in_hours: 240, }, } } @@ -41,6 +42,7 @@ mod tests { PROOF_DATA_HANDLER_TEE_SUPPORT="true" PROOF_DATA_HANDLER_FIRST_TEE_PROCESSED_BATCH="1337" PROOF_DATA_HANDLER_TEE_PROOF_GENERATION_TIMEOUT_IN_SECS="600" + PROOF_DATA_HANDLER_TEE_BATCH_PERMANENTLY_IGNORED_TIMEOUT_IN_HOURS="240" "#; let mut lock = MUTEX.lock(); lock.set_env(config); diff --git a/core/lib/l1_contract_interface/src/i_executor/structures/commit_batch_info.rs b/core/lib/l1_contract_interface/src/i_executor/structures/commit_batch_info.rs index 6438aeb7f55c..3c8a3b26b935 100644 --- a/core/lib/l1_contract_interface/src/i_executor/structures/commit_batch_info.rs +++ b/core/lib/l1_contract_interface/src/i_executor/structures/commit_batch_info.rs @@ -216,7 +216,16 @@ impl Tokenizable for CommitBatchInfo<'_> { panic!("Custom pubdata DA is incompatible with Rollup mode") } (L1BatchCommitmentMode::Validium, PubdataSendingMode::Custom) => { - vec![PUBDATA_SOURCE_CUSTOM] + let mut operator_da_input = vec![PUBDATA_SOURCE_CUSTOM]; + operator_da_input.extend( + &self + .l1_batch_with_metadata + .metadata + .da_blob_id + .clone() + .unwrap_or_default(), + ); + operator_da_input } ( diff --git a/core/lib/merkle_tree/Cargo.toml b/core/lib/merkle_tree/Cargo.toml index 579350bccf4e..e615258ba646 100644 --- a/core/lib/merkle_tree/Cargo.toml +++ b/core/lib/merkle_tree/Cargo.toml @@ -16,7 +16,6 @@ zksync_types.workspace = true zksync_crypto_primitives.workspace = true zksync_storage.workspace = true zksync_prover_interface.workspace = true -zksync_utils.workspace = true anyhow.workspace = true leb128.workspace = true diff --git a/core/lib/merkle_tree/src/domain.rs b/core/lib/merkle_tree/src/domain.rs index 5064c791ed5b..5265f93264f2 100644 --- a/core/lib/merkle_tree/src/domain.rs +++ b/core/lib/merkle_tree/src/domain.rs @@ -410,6 +410,11 @@ impl ZkSyncTreeReader { &self.0.db } + /// Converts this reader to the underlying DB. + pub fn into_db(self) -> RocksDBWrapper { + self.0.db + } + /// Returns the root hash and leaf count at the specified L1 batch. pub fn root_info(&self, l1_batch_number: L1BatchNumber) -> Option<(ValueHash, u64)> { let root = self.0.root(l1_batch_number.0.into())?; diff --git a/core/lib/merkle_tree/src/lib.rs b/core/lib/merkle_tree/src/lib.rs index 5e97d6d77c69..1782f373954c 100644 --- a/core/lib/merkle_tree/src/lib.rs +++ b/core/lib/merkle_tree/src/lib.rs @@ -71,6 +71,7 @@ mod hasher; mod metrics; mod pruning; pub mod recovery; +pub mod repair; mod storage; mod types; mod utils; @@ -200,6 +201,21 @@ impl MerkleTree { root.unwrap_or(Root::Empty) } + /// Incorrect version of [`Self::truncate_recent_versions()`] that doesn't remove stale keys for the truncated tree versions. 
+ #[cfg(test)] + fn truncate_recent_versions_incorrectly( + &mut self, + retained_version_count: u64, + ) -> anyhow::Result<()> { + let mut manifest = self.db.manifest().unwrap_or_default(); + if manifest.version_count > retained_version_count { + manifest.version_count = retained_version_count; + let patch = PatchSet::from_manifest(manifest); + self.db.apply_patch(patch)?; + } + Ok(()) + } + /// Extends this tree by creating its new version. /// /// # Return value diff --git a/core/lib/merkle_tree/src/pruning.rs b/core/lib/merkle_tree/src/pruning.rs index 2e328d0a2bb5..ae8300b893ab 100644 --- a/core/lib/merkle_tree/src/pruning.rs +++ b/core/lib/merkle_tree/src/pruning.rs @@ -250,6 +250,7 @@ mod tests { use super::*; use crate::{ types::{Node, NodeKey}, + utils::testonly::setup_tree_with_stale_keys, Database, Key, MerkleTree, PatchSet, RocksDBWrapper, TreeEntry, ValueHash, }; @@ -507,47 +508,17 @@ mod tests { test_keys_are_removed_by_pruning_when_overwritten_in_multiple_batches(true); } - fn test_pruning_with_truncation(db: impl PruneDatabase) { - let mut tree = MerkleTree::new(db).unwrap(); - let kvs: Vec<_> = (0_u64..100) - .map(|i| TreeEntry::new(Key::from(i), i + 1, ValueHash::zero())) - .collect(); - tree.extend(kvs).unwrap(); - - let overridden_kvs = vec![TreeEntry::new( - Key::from(0), - 1, - ValueHash::repeat_byte(0xaa), - )]; - tree.extend(overridden_kvs).unwrap(); - - let stale_keys = tree.db.stale_keys(1); - assert!( - stale_keys.iter().any(|key| !key.is_empty()), - "{stale_keys:?}" - ); - - // Revert `overridden_kvs`. - tree.truncate_recent_versions(1).unwrap(); - assert_eq!(tree.latest_version(), Some(0)); - let future_stale_keys = tree.db.stale_keys(1); - assert!(future_stale_keys.is_empty()); - - // Add a new version without the key. To make the matter more egregious, the inserted key - // differs from all existing keys, starting from the first nibble. - let new_key = Key::from_big_endian(&[0xaa; 32]); - let new_kvs = vec![TreeEntry::new(new_key, 101, ValueHash::repeat_byte(0xaa))]; - tree.extend(new_kvs).unwrap(); - assert_eq!(tree.latest_version(), Some(1)); + fn test_pruning_with_truncation(mut db: impl PruneDatabase) { + setup_tree_with_stale_keys(&mut db, false); - let stale_keys = tree.db.stale_keys(1); + let stale_keys = db.stale_keys(1); assert_eq!(stale_keys.len(), 1); assert!( stale_keys[0].is_empty() && stale_keys[0].version == 0, "{stale_keys:?}" ); - let (mut pruner, _) = MerkleTreePruner::new(tree.db); + let (mut pruner, _) = MerkleTreePruner::new(db); let prunable_version = pruner.last_prunable_version().unwrap(); assert_eq!(prunable_version, 1); let stats = pruner diff --git a/core/lib/merkle_tree/src/repair.rs b/core/lib/merkle_tree/src/repair.rs new file mode 100644 index 000000000000..c83569e96b13 --- /dev/null +++ b/core/lib/merkle_tree/src/repair.rs @@ -0,0 +1,376 @@ +//! Service tasks for the Merkle tree. + +use std::{ + ops, + sync::{mpsc, Arc, Mutex}, + time::{Duration, Instant}, +}; + +use anyhow::Context as _; +use rayon::prelude::*; + +use crate::{ + types::{NodeKey, StaleNodeKey}, + Database, PruneDatabase, RocksDBWrapper, +}; + +/// Persisted information about stale keys repair progress. +#[derive(Debug)] +pub(crate) struct StaleKeysRepairData { + pub next_version: u64, +} + +/// [`StaleKeysRepairTask`] progress stats. +#[derive(Debug, Clone, Default)] +pub struct StaleKeysRepairStats { + /// Versions checked by the task, or `None` if no versions have been checked. + pub checked_versions: Option>, + /// Number of repaired stale keys. 
+ pub repaired_key_count: usize, +} + +#[derive(Debug)] +struct StepStats { + checked_versions: ops::RangeInclusive<u64>, + repaired_key_count: usize, +} + +/// Handle for a [`StaleKeysRepairTask`] allowing to abort its operation. +/// +/// The task is aborted once the handle is dropped. +#[must_use = "Paired `StaleKeysRepairTask` is aborted once handle is dropped"] +#[derive(Debug)] +pub struct StaleKeysRepairHandle { + stats: Arc<Mutex<StaleKeysRepairStats>>, + _aborted_sender: mpsc::Sender<()>, +} + +impl StaleKeysRepairHandle { + /// Returns stats for the paired task. + #[allow(clippy::missing_panics_doc)] // mutex poisoning shouldn't happen + pub fn stats(&self) -> StaleKeysRepairStats { + self.stats.lock().expect("stats mutex poisoned").clone() + } +} + +/// Task that repairs stale keys for the tree. +/// +/// Early tree versions contained a bug: If a tree version was truncated, stale keys for it remained intact. +/// If an overwritten tree version did not contain the same keys, this could lead to keys incorrectly marked as stale, +/// meaning that after pruning, a tree may end up broken. +#[derive(Debug)] +pub struct StaleKeysRepairTask { + db: RocksDBWrapper, + parallelism: u64, + poll_interval: Duration, + stats: Arc<Mutex<StaleKeysRepairStats>>, + aborted_receiver: mpsc::Receiver<()>, +} + +impl StaleKeysRepairTask { + /// Creates a new task. + pub fn new(db: RocksDBWrapper) -> (Self, StaleKeysRepairHandle) { + let (aborted_sender, aborted_receiver) = mpsc::channel(); + let stats = Arc::<Mutex<StaleKeysRepairStats>>::default(); + let this = Self { + db, + parallelism: (rayon::current_num_threads() as u64).max(1), + poll_interval: Duration::from_secs(60), + stats: stats.clone(), + aborted_receiver, + }; + let handle = StaleKeysRepairHandle { + stats, + _aborted_sender: aborted_sender, + }; + (this, handle) + } + + /// Sets the poll interval for this task. + pub fn set_poll_interval(&mut self, poll_interval: Duration) { + self.poll_interval = poll_interval; + } + + /// Runs stale key detection for a single tree version. + #[tracing::instrument(skip(db))] + pub fn bogus_stale_keys(db: &RocksDBWrapper, version: u64) -> Vec<NodeKey> { + const SAMPLE_COUNT: usize = 5; + + let version_keys = db.all_keys_for_version(version).unwrap_or_else(|err| { + panic!("failed loading keys changed in tree version {version}: {err}") + }); + let stale_keys = db.stale_keys(version); + + if !version_keys.unreachable_keys.is_empty() { + let keys_sample: Vec<_> = version_keys + .unreachable_keys + .iter() + .take(SAMPLE_COUNT) + .collect::<Vec<_>>(); + tracing::warn!( + version, + unreachable_keys.len = version_keys.unreachable_keys.len(), + unreachable_keys.sample = ?keys_sample, + "Found unreachable keys in tree" + ); + } + + let mut bogus_stale_keys = vec![]; + for stale_key in stale_keys { + if version_keys.valid_keys.contains(&stale_key.nibbles) { + // Normal case: a new node obsoletes a previous version. + } else if version_keys.unreachable_keys.contains(&stale_key.nibbles) { + // Explainable bogus stale key: a node that was updated in `version` before the truncation is no longer updated after truncation. + bogus_stale_keys.push(stale_key); + } else { + tracing::warn!( + version, + ?stale_key, + "Unexplained bogus stale key: not present in any nodes changed in the tree version" + ); + bogus_stale_keys.push(stale_key); + } + } + + if bogus_stale_keys.is_empty() { + return vec![]; + } + + let keys_sample: Vec<_> = bogus_stale_keys.iter().take(SAMPLE_COUNT).collect(); + tracing::info!( + stale_keys.len = bogus_stale_keys.len(), + stale_keys.sample = ?keys_sample, + "Found bogus stale keys" + ); + bogus_stale_keys + } + + /// Performs a single repair step. Returns stats for the step, or `None` if there was nothing to check or repair. + fn step(&mut self) -> anyhow::Result<Option<StepStats>> { + let repair_data = self + .db + .stale_keys_repair_data() + .context("failed getting repair data")?; + let min_stale_key_version = self.db.min_stale_key_version(); + let start_version = match (repair_data, min_stale_key_version) { + (_, None) => { + tracing::debug!("No stale keys in tree, nothing to do"); + return Ok(None); + } + (None, Some(version)) => version, + (Some(data), Some(version)) => data.next_version.max(version), + }; + + let latest_version = self + .db + .manifest() + .and_then(|manifest| manifest.version_count.checked_sub(1)); + let Some(latest_version) = latest_version else { + tracing::warn!( + min_stale_key_version, + "Tree has stale keys, but no latest versions" + ); + return Ok(None); + }; + + let end_version = (start_version + self.parallelism - 1).min(latest_version); + let versions = start_version..=end_version; + if versions.is_empty() { + tracing::debug!(?versions, latest_version, "No tree versions to check"); + return Ok(None); + } + + tracing::debug!( + ?versions, + latest_version, + ?min_stale_key_version, + "Checking stale keys" + ); + + let stale_keys = versions + .clone() + .into_par_iter() + .map(|version| { + Self::bogus_stale_keys(&self.db, version) + .into_iter() + .map(|key| StaleNodeKey::new(key, version)) + .collect::<Vec<_>>() + }) + .reduce(Vec::new, |mut acc, keys| { + acc.extend(keys); + acc + }); + self.update_task_data(versions.clone(), &stale_keys)?; + + Ok(Some(StepStats { + checked_versions: versions, + repaired_key_count: stale_keys.len(), + })) + } + + #[tracing::instrument( + level = "debug", + err, + skip(self, removed_keys), + fields(removed_keys.len = removed_keys.len()), + )] + fn update_task_data( + &mut self, + versions: ops::RangeInclusive<u64>, + removed_keys: &[StaleNodeKey], + ) -> anyhow::Result<()> { + tracing::debug!("Updating task data"); + let started_at = Instant::now(); + let new_data = StaleKeysRepairData { + next_version: *versions.end() + 1, + }; + self.db + .repair_stale_keys(&new_data, removed_keys) + .context("failed removing bogus stale keys")?; + let latency = started_at.elapsed(); + tracing::debug!(?latency, "Updated task data"); + Ok(()) + } + + fn wait_for_abort(&mut self, timeout: Duration) -> bool { + match self.aborted_receiver.recv_timeout(timeout) { + Ok(()) | Err(mpsc::RecvTimeoutError::Disconnected) => true, + Err(mpsc::RecvTimeoutError::Timeout) => false, + } + } + + fn update_stats(&self, step_stats: StepStats) { + let mut stats = self.stats.lock().expect("stats mutex poisoned"); + if let Some(versions) = &mut stats.checked_versions { + *versions = *versions.start()..=*step_stats.checked_versions.end(); + } else { + stats.checked_versions = Some(step_stats.checked_versions); + } + stats.repaired_key_count += step_stats.repaired_key_count; + } + + /// Runs this task indefinitely. + /// + /// # Errors + /// + /// Propagates RocksDB I/O errors.
+ pub fn run(mut self) -> anyhow::Result<()> { + let repair_data = self + .db + .stale_keys_repair_data() + .context("failed getting repair data")?; + tracing::info!( + paralellism = self.parallelism, + poll_interval = ?self.poll_interval, + ?repair_data, + "Starting repair task" + ); + + let mut wait_interval = Duration::ZERO; + while !self.wait_for_abort(wait_interval) { + wait_interval = if let Some(step_stats) = self.step()? { + self.update_stats(step_stats); + Duration::ZERO + } else { + self.poll_interval + }; + } + tracing::info!("Stop signal received, stale keys repair is shut down"); + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use std::thread; + + use super::*; + use crate::{ + utils::testonly::setup_tree_with_stale_keys, Key, MerkleTree, MerkleTreePruner, TreeEntry, + ValueHash, + }; + + #[test] + fn stale_keys_repair_with_normal_tree() { + let temp_dir = tempfile::TempDir::new().unwrap(); + let mut db = RocksDBWrapper::new(temp_dir.path()).unwrap(); + + // The task should work fine with future tree versions. + for version in [0, 1, 100] { + let bogus_stale_keys = StaleKeysRepairTask::bogus_stale_keys(&db, version); + assert!(bogus_stale_keys.is_empty()); + } + + let kvs: Vec<_> = (0_u64..100) + .map(|i| TreeEntry::new(Key::from(i), i + 1, ValueHash::zero())) + .collect(); + MerkleTree::new(&mut db).unwrap().extend(kvs).unwrap(); + + let bogus_stale_keys = StaleKeysRepairTask::bogus_stale_keys(&db, 0); + assert!(bogus_stale_keys.is_empty()); + } + + #[test] + fn detecting_bogus_stale_keys() { + let temp_dir = tempfile::TempDir::new().unwrap(); + let mut db = RocksDBWrapper::new(temp_dir.path()).unwrap(); + setup_tree_with_stale_keys(&mut db, true); + + let bogus_stale_keys = StaleKeysRepairTask::bogus_stale_keys(&db, 1); + assert!(!bogus_stale_keys.is_empty()); + + let (mut task, _handle) = StaleKeysRepairTask::new(db); + task.parallelism = 10; // Ensure that all tree versions are checked at once. + // Repair the tree. + let step_stats = task.step().unwrap().expect("tree was not repaired"); + assert_eq!(step_stats.checked_versions, 1..=1); + assert!(step_stats.repaired_key_count > 0); + // Check that the tree works fine once it's pruned. + let (mut pruner, _) = MerkleTreePruner::new(&mut task.db); + pruner.prune_up_to(1).unwrap().expect("tree was not pruned"); + + MerkleTree::new(&mut task.db) + .unwrap() + .verify_consistency(1, false) + .unwrap(); + + let bogus_stale_keys = StaleKeysRepairTask::bogus_stale_keys(&task.db, 1); + assert!(bogus_stale_keys.is_empty()); + MerkleTree::new(&mut task.db) + .unwrap() + .verify_consistency(1, false) + .unwrap(); + + assert!(task.step().unwrap().is_none()); + } + + #[test] + fn full_stale_keys_task_workflow() { + let temp_dir = tempfile::TempDir::new().unwrap(); + let mut db = RocksDBWrapper::new(temp_dir.path()).unwrap(); + setup_tree_with_stale_keys(&mut db, true); + + let (task, handle) = StaleKeysRepairTask::new(db.clone()); + let task_thread = thread::spawn(|| task.run()); + + loop { + if let Some(task_data) = db.stale_keys_repair_data().unwrap() { + if task_data.next_version == 2 { + // All tree versions are processed. 
+ break; + } + } + thread::sleep(Duration::from_millis(50)); + } + let stats = handle.stats(); + assert_eq!(stats.checked_versions, Some(1..=1)); + assert!(stats.repaired_key_count > 0, "{stats:?}"); + + assert!(!task_thread.is_finished()); + drop(handle); + task_thread.join().unwrap().unwrap(); + + let bogus_stale_keys = StaleKeysRepairTask::bogus_stale_keys(&db, 1); + assert!(bogus_stale_keys.is_empty()); + } +} diff --git a/core/lib/merkle_tree/src/storage/rocksdb.rs b/core/lib/merkle_tree/src/storage/rocksdb.rs index 6995bbfbfc7f..5a40c82b680c 100644 --- a/core/lib/merkle_tree/src/storage/rocksdb.rs +++ b/core/lib/merkle_tree/src/storage/rocksdb.rs @@ -1,6 +1,13 @@ //! RocksDB implementation of [`Database`]. -use std::{any::Any, cell::RefCell, ops, path::Path, sync::Arc}; +use std::{ + any::Any, + cell::RefCell, + collections::{HashMap, HashSet}, + ops, + path::Path, + sync::Arc, +}; use anyhow::Context as _; use rayon::prelude::*; @@ -15,6 +22,7 @@ use zksync_storage::{ use crate::{ errors::{DeserializeError, ErrorContext}, metrics::ApplyPatchStats, + repair::StaleKeysRepairData, storage::{ database::{PruneDatabase, PrunePatchSet}, Database, NodeKeys, PatchSet, @@ -70,6 +78,15 @@ impl ToDbKey for (NodeKey, bool) { } } +/// All node keys modified in a certain version of the tree, loaded via a prefix iterator. +#[derive(Debug, Default)] +pub(crate) struct VersionKeys { + /// Valid / reachable keys modified in the version. + pub valid_keys: HashSet<Nibbles>, + /// Unreachable keys modified in the version, e.g. as a result of truncating the tree and overwriting the version. + pub unreachable_keys: HashSet<Nibbles>, +} + /// Main [`Database`] implementation wrapping a [`RocksDB`] reference. /// /// # Cloning @@ -97,6 +114,8 @@ impl RocksDBWrapper { // since the minimum node key is [0, 0, 0, 0, 0, 0, 0, 0]. const MANIFEST_KEY: &'static [u8] = &[0]; + const STALE_KEYS_REPAIR_KEY: &'static [u8] = &[0, 0]; + /// Creates a new wrapper, initializing RocksDB at the specified directory. /// /// # Errors @@ -174,6 +193,83 @@ }) } + pub(crate) fn all_keys_for_version( + &self, + version: u64, + ) -> Result<VersionKeys, DeserializeError> { + let Some(Root::Filled { + node: root_node, .. + }) = self.root(version) + else { + return Ok(VersionKeys::default()); + }; + + let cf = MerkleTreeColumnFamily::Tree; + let version_prefix = version.to_be_bytes(); + let mut nodes = HashMap::from([(Nibbles::EMPTY, root_node)]); + let mut unreachable_keys = HashSet::new(); + + for (raw_key, raw_value) in self.db.prefix_iterator_cf(cf, &version_prefix) { + let key = NodeKey::from_db_key(&raw_key); + let Some((parent_nibbles, nibble)) = key.nibbles.split_last() else { + // Root node, already processed + continue; + }; + let Some(Node::Internal(parent)) = nodes.get(&parent_nibbles) else { + unreachable_keys.insert(key.nibbles); + continue; + }; + let Some(this_ref) = parent.child_ref(nibble) else { + unreachable_keys.insert(key.nibbles); + continue; + }; + if this_ref.version != version { + unreachable_keys.insert(key.nibbles); + continue; + } + + // Now we are sure that `this_ref` actually points to the node we're processing.
+ let node = Self::deserialize_node(&raw_value, &key, this_ref.is_leaf)?; + nodes.insert(key.nibbles, node); + } + + Ok(VersionKeys { + valid_keys: nodes.into_keys().collect(), + unreachable_keys, + }) + } + + pub(crate) fn repair_stale_keys( + &mut self, + data: &StaleKeysRepairData, + removed_keys: &[StaleNodeKey], + ) -> anyhow::Result<()> { + let mut raw_value = vec![]; + data.serialize(&mut raw_value); + + let mut write_batch = self.db.new_write_batch(); + write_batch.put_cf( + MerkleTreeColumnFamily::Tree, + Self::STALE_KEYS_REPAIR_KEY, + &raw_value, + ); + for key in removed_keys { + write_batch.delete_cf(MerkleTreeColumnFamily::StaleKeys, &key.to_db_key()); + } + self.db + .write(write_batch) + .context("Failed writing a batch to RocksDB") + } + + pub(crate) fn stale_keys_repair_data( + &self, + ) -> Result, DeserializeError> { + let Some(raw_value) = self.raw_node(Self::STALE_KEYS_REPAIR_KEY) else { + return Ok(None); + }; + StaleKeysRepairData::deserialize(&raw_value).map(Some) + } + /// Returns the wrapped RocksDB instance. pub fn into_inner(self) -> RocksDB { self.db diff --git a/core/lib/merkle_tree/src/storage/serialization.rs b/core/lib/merkle_tree/src/storage/serialization.rs index d0c573fd8170..700a4cd5020b 100644 --- a/core/lib/merkle_tree/src/storage/serialization.rs +++ b/core/lib/merkle_tree/src/storage/serialization.rs @@ -4,6 +4,7 @@ use std::{collections::HashMap, str}; use crate::{ errors::{DeserializeError, DeserializeErrorKind, ErrorContext}, + repair::StaleKeysRepairData, types::{ ChildRef, InternalNode, Key, LeafNode, Manifest, Node, RawNode, Root, TreeTags, ValueHash, HASH_SIZE, KEY_SIZE, @@ -355,6 +356,18 @@ impl Manifest { } } +impl StaleKeysRepairData { + pub(super) fn deserialize(mut bytes: &[u8]) -> Result { + let next_version = + leb128::read::unsigned(&mut bytes).map_err(DeserializeErrorKind::Leb128)?; + Ok(Self { next_version }) + } + + pub(super) fn serialize(&self, buffer: &mut Vec) { + leb128::write::unsigned(buffer, self.next_version).unwrap(); + } +} + #[cfg(test)] mod tests { use zksync_types::H256; diff --git a/core/lib/merkle_tree/src/utils.rs b/core/lib/merkle_tree/src/utils.rs index 4771a940f2c8..a3c025a8b7bd 100644 --- a/core/lib/merkle_tree/src/utils.rs +++ b/core/lib/merkle_tree/src/utils.rs @@ -165,6 +165,49 @@ impl Iterator for MergingIter { impl ExactSizeIterator for MergingIter {} +#[cfg(test)] +pub(crate) mod testonly { + use crate::{Key, MerkleTree, PruneDatabase, TreeEntry, ValueHash}; + + pub(crate) fn setup_tree_with_stale_keys(db: impl PruneDatabase, incorrect_truncation: bool) { + let mut tree = MerkleTree::new(db).unwrap(); + let kvs: Vec<_> = (0_u64..100) + .map(|i| TreeEntry::new(Key::from(i), i + 1, ValueHash::zero())) + .collect(); + tree.extend(kvs).unwrap(); + + let overridden_kvs = vec![TreeEntry::new( + Key::from(0), + 1, + ValueHash::repeat_byte(0xaa), + )]; + tree.extend(overridden_kvs).unwrap(); + + let stale_keys = tree.db.stale_keys(1); + assert!( + stale_keys.iter().any(|key| !key.is_empty()), + "{stale_keys:?}" + ); + + // Revert `overridden_kvs`. + if incorrect_truncation { + tree.truncate_recent_versions_incorrectly(1).unwrap(); + } else { + tree.truncate_recent_versions(1).unwrap(); + } + assert_eq!(tree.latest_version(), Some(0)); + let future_stale_keys = tree.db.stale_keys(1); + assert_eq!(future_stale_keys.is_empty(), !incorrect_truncation); + + // Add a new version without the key. To make the matter more egregious, the inserted key + // differs from all existing keys, starting from the first nibble. 
+ let new_key = Key::from_big_endian(&[0xaa; 32]); + let new_kvs = vec![TreeEntry::new(new_key, 101, ValueHash::repeat_byte(0xaa))]; + tree.extend(new_kvs).unwrap(); + assert_eq!(tree.latest_version(), Some(1)); + } +} + #[cfg(test)] mod tests { use zksync_types::U256; diff --git a/core/lib/multivm/Cargo.toml b/core/lib/multivm/Cargo.toml index 27130bc2720d..128e6fc0c4af 100644 --- a/core/lib/multivm/Cargo.toml +++ b/core/lib/multivm/Cargo.toml @@ -26,7 +26,6 @@ circuit_sequencer_api_1_5_0.workspace = true zksync_types.workspace = true zksync_contracts.workspace = true -zksync_utils.workspace = true zksync_system_constants.workspace = true zksync_vm_interface.workspace = true zksync_mini_merkle_tree.workspace = true @@ -45,5 +44,5 @@ assert_matches.workspace = true pretty_assertions.workspace = true rand.workspace = true test-casing.workspace = true -zksync_test_account.workspace = true +zksync_test_contracts.workspace = true zksync_eth_signer.workspace = true diff --git a/core/lib/multivm/src/glue/tracers/mod.rs b/core/lib/multivm/src/glue/tracers/mod.rs index bf2f67cae501..f5a854ecbaaf 100644 --- a/core/lib/multivm/src/glue/tracers/mod.rs +++ b/core/lib/multivm/src/glue/tracers/mod.rs @@ -7,7 +7,7 @@ //! Different VM versions may have distinct requirements and types for Tracers. To accommodate these differences, //! this module defines one primary trait: //! -//! - `MultiVMTracer`: This trait represents a tracer that can be converted into a tracer for +//! - `MultiVmTracer`: This trait represents a tracer that can be converted into a tracer for //! a specific VM version. //! //! Specific traits for each VM version, which support Custom Tracers: @@ -19,22 +19,22 @@ //! into a form compatible with the vm_virtual_blocks version. //! It defines a method `vm_virtual_blocks` for obtaining a boxed tracer. //! -//! For `MultiVMTracer` to be implemented, the Tracer must implement all N currently +//! For `MultiVmTracer` to be implemented, the Tracer must implement all N currently //! existing sub-traits. //! //! ## Adding a new VM version //! -//! To add support for one more VM version to MultiVMTracer, one needs to: +//! To add support for one more VM version to MultiVmTracer, one needs to: //! - Create a new trait performing conversion to the specified VM tracer, e.g., `IntoTracer`. -//! - Add this trait as a trait bound to the `MultiVMTracer`. -//! - Add this trait as a trait bound for `T` in `MultiVMTracer` implementation. +//! - Add this trait as a trait bound to the `MultiVmTracer`. +//! - Add this trait as a trait bound for `T` in `MultiVmTracer` implementation. //! - Implement the trait for `T` with a bound to `VmTracer` for a specific version. 
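The `MultiVMTracer` → `MultiVmTracer` rename above is mechanical, but the doc comment is the clearest summary of the conversion pattern. A minimal usage sketch, assuming a tracer such as the workspace's `CallTracer` that already implements the per-version conversion traits (`result` plumbing simplified for illustration):

```rust
use std::sync::Arc;

use once_cell::sync::OnceCell;
use zksync_multivm::{
    interface::storage::WriteStorage, tracers::CallTracer, HistoryMode, MultiVmTracer,
    MultiVmTracerPointer,
};

// Boxing through `into_tracer_pointer` erases the concrete tracer type; each
// supported VM version then converts the boxed tracer into its own
// representation via the `Into*Tracer` traits described above.
fn call_tracer_pointer<S: WriteStorage, H: HistoryMode>() -> MultiVmTracerPointer<S, H> {
    let call_results = Arc::new(OnceCell::default());
    CallTracer::new(call_results).into_tracer_pointer()
}
```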
use crate::{interface::storage::WriteStorage, tracers::old::OldTracers, HistoryMode}; -pub type MultiVmTracerPointer<S, H> = Box<dyn MultiVMTracer<S, H>>; +pub type MultiVmTracerPointer<S, H> = Box<dyn MultiVmTracer<S, H>>; -pub trait MultiVMTracer<S: WriteStorage, H: HistoryMode>: +pub trait MultiVmTracer<S: WriteStorage, H: HistoryMode>: IntoLatestTracer<S, H> + IntoVmVirtualBlocksTracer<S, H> + IntoVmRefundsEnhancementTracer<S, H> @@ -168,7 +168,7 @@ where } } -impl<S, H, T> MultiVMTracer<S, H> for T +impl<S, H, T> MultiVmTracer<S, H> for T where S: WriteStorage, H: HistoryMode, diff --git a/core/lib/multivm/src/glue/types/vm/block_context_mode.rs b/core/lib/multivm/src/glue/types/vm/block_context_mode.rs index 094339705e14..66634e504386 100644 --- a/core/lib/multivm/src/glue/types/vm/block_context_mode.rs +++ b/core/lib/multivm/src/glue/types/vm/block_context_mode.rs @@ -1,4 +1,4 @@ -use zksync_utils::h256_to_u256; +use zksync_types::h256_to_u256; use crate::glue::GlueFrom; diff --git a/core/lib/multivm/src/glue/types/vm/storage_log.rs b/core/lib/multivm/src/glue/types/vm/storage_log.rs index 322bc491e9ab..5f79ca9e9e15 100644 --- a/core/lib/multivm/src/glue/types/vm/storage_log.rs +++ b/core/lib/multivm/src/glue/types/vm/storage_log.rs @@ -1,7 +1,6 @@ use zksync_types::{ - zk_evm_types::LogQuery, StorageLog, StorageLogQuery, StorageLogWithPreviousValue, + u256_to_h256, zk_evm_types::LogQuery, StorageLog, StorageLogQuery, StorageLogWithPreviousValue, }; -use zksync_utils::u256_to_h256; use crate::glue::{GlueFrom, GlueInto}; diff --git a/core/lib/multivm/src/glue/types/zk_evm_1_3_1.rs b/core/lib/multivm/src/glue/types/zk_evm_1_3_1.rs index dfe1121c04ec..08556b7b901a 100644 --- a/core/lib/multivm/src/glue/types/zk_evm_1_3_1.rs +++ b/core/lib/multivm/src/glue/types/zk_evm_1_3_1.rs @@ -1,4 +1,4 @@ -use zksync_utils::u256_to_h256; +use zksync_types::u256_to_h256; use crate::glue::{GlueFrom, GlueInto}; diff --git a/core/lib/multivm/src/glue/types/zk_evm_1_3_3.rs b/core/lib/multivm/src/glue/types/zk_evm_1_3_3.rs index 4c554c1bd53d..ab13de140cfb 100644 --- a/core/lib/multivm/src/glue/types/zk_evm_1_3_3.rs +++ b/core/lib/multivm/src/glue/types/zk_evm_1_3_3.rs @@ -2,8 +2,10 @@ use zk_evm_1_3_3::{ aux_structures::{LogQuery as LogQuery_1_3_3, Timestamp as Timestamp_1_3_3}, zkevm_opcode_defs::FarCallOpcode as FarCallOpcode_1_3_3, }; -use zksync_types::zk_evm_types::{FarCallOpcode, LogQuery, Timestamp}; -use zksync_utils::u256_to_h256; +use zksync_types::{ + u256_to_h256, + zk_evm_types::{FarCallOpcode, LogQuery, Timestamp}, +}; use crate::glue::{GlueFrom, GlueInto}; diff --git a/core/lib/multivm/src/glue/types/zk_evm_1_4_0.rs b/core/lib/multivm/src/glue/types/zk_evm_1_4_0.rs index 5af0e57c4bf9..c25a19b1aa3d 100644 --- a/core/lib/multivm/src/glue/types/zk_evm_1_4_0.rs +++ b/core/lib/multivm/src/glue/types/zk_evm_1_4_0.rs @@ -1,4 +1,4 @@ -use zksync_utils::u256_to_h256; +use zksync_types::u256_to_h256; use crate::glue::GlueFrom; diff --git a/core/lib/multivm/src/glue/types/zk_evm_1_4_1.rs b/core/lib/multivm/src/glue/types/zk_evm_1_4_1.rs index 933eafbb0354..6a8138bc2f24 100644 --- a/core/lib/multivm/src/glue/types/zk_evm_1_4_1.rs +++ b/core/lib/multivm/src/glue/types/zk_evm_1_4_1.rs @@ -2,8 +2,10 @@ use zk_evm_1_4_1::{ aux_structures::{LogQuery as LogQuery_1_4_1, Timestamp as Timestamp_1_4_1}, zkevm_opcode_defs::FarCallOpcode as FarCallOpcode_1_4_1, }; -use zksync_types::zk_evm_types::{FarCallOpcode, LogQuery, Timestamp}; -use zksync_utils::u256_to_h256; +use zksync_types::{ + u256_to_h256, + zk_evm_types::{FarCallOpcode, LogQuery, Timestamp}, +}; use crate::glue::{GlueFrom, GlueInto}; diff --git a/core/lib/multivm/src/glue/types/zk_evm_1_5_0.rs 
b/core/lib/multivm/src/glue/types/zk_evm_1_5_0.rs index eb1c8e1dd7e8..343843503bdd 100644 --- a/core/lib/multivm/src/glue/types/zk_evm_1_5_0.rs +++ b/core/lib/multivm/src/glue/types/zk_evm_1_5_0.rs @@ -1,4 +1,4 @@ -use zksync_utils::u256_to_h256; +use zksync_types::u256_to_h256; use crate::glue::{GlueFrom, GlueInto}; diff --git a/core/lib/multivm/src/lib.rs b/core/lib/multivm/src/lib.rs index 1cba2c0fb92b..fc4085d9b021 100644 --- a/core/lib/multivm/src/lib.rs +++ b/core/lib/multivm/src/lib.rs @@ -10,7 +10,7 @@ pub use zksync_vm_interface as interface; pub use crate::{ glue::{ history_mode::HistoryMode, - tracers::{MultiVMTracer, MultiVmTracerPointer}, + tracers::{MultiVmTracer, MultiVmTracerPointer}, }, versions::{ vm_1_3_2, vm_1_4_1, vm_1_4_2, vm_boojum_integration, vm_fast, vm_latest, vm_m5, vm_m6, diff --git a/core/lib/multivm/src/pubdata_builders/tests.rs b/core/lib/multivm/src/pubdata_builders/tests.rs index bc24b8e47346..b06cb9405aa7 100644 --- a/core/lib/multivm/src/pubdata_builders/tests.rs +++ b/core/lib/multivm/src/pubdata_builders/tests.rs @@ -1,8 +1,7 @@ use zksync_types::{ - writes::StateDiffRecord, Address, ProtocolVersionId, ACCOUNT_CODE_STORAGE_ADDRESS, - BOOTLOADER_ADDRESS, + u256_to_h256, writes::StateDiffRecord, Address, ProtocolVersionId, + ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS, }; -use zksync_utils::u256_to_h256; use super::{rollup::RollupPubdataBuilder, validium::ValidiumPubdataBuilder}; use crate::interface::pubdata::{L1MessengerL2ToL1Log, PubdataBuilder, PubdataInput}; diff --git a/core/lib/multivm/src/pubdata_builders/utils.rs b/core/lib/multivm/src/pubdata_builders/utils.rs index 57361a674fb7..83c9b9317640 100644 --- a/core/lib/multivm/src/pubdata_builders/utils.rs +++ b/core/lib/multivm/src/pubdata_builders/utils.rs @@ -1,6 +1,5 @@ use zksync_mini_merkle_tree::MiniMerkleTree; -use zksync_types::web3::keccak256; -use zksync_utils::bytecode::hash_bytecode; +use zksync_types::{bytecode::BytecodeHash, web3::keccak256}; use crate::interface::pubdata::L1MessengerL2ToL1Log; @@ -49,8 +48,9 @@ pub(crate) fn build_chained_bytecode_hash(published_bytecodes: &[Vec]) -> Ve let mut chained_bytecode_hash = vec![0u8; 32]; for bytecode in published_bytecodes { - let hash = hash_bytecode(bytecode).to_fixed_bytes(); - + let hash = BytecodeHash::for_bytecode(bytecode) + .value() + .to_fixed_bytes(); chained_bytecode_hash = keccak256(&[chained_bytecode_hash, hash.to_vec()].concat()).to_vec(); } diff --git a/core/lib/multivm/src/tracers/prestate_tracer/mod.rs b/core/lib/multivm/src/tracers/prestate_tracer/mod.rs index e8a7cc2cc420..363480c016bf 100644 --- a/core/lib/multivm/src/tracers/prestate_tracer/mod.rs +++ b/core/lib/multivm/src/tracers/prestate_tracer/mod.rs @@ -2,10 +2,9 @@ use std::{collections::HashMap, fmt, sync::Arc}; use once_cell::sync::OnceCell; use zksync_types::{ - get_code_key, get_nonce_key, web3::keccak256, AccountTreeId, Address, StorageKey, StorageValue, - H160, H256, L2_BASE_TOKEN_ADDRESS, U256, + address_to_h256, get_code_key, get_nonce_key, h256_to_u256, web3::keccak256, AccountTreeId, + Address, StorageKey, StorageValue, H160, H256, L2_BASE_TOKEN_ADDRESS, U256, }; -use zksync_utils::{address_to_h256, h256_to_u256}; use crate::interface::storage::{StoragePtr, WriteStorage}; diff --git a/core/lib/multivm/src/tracers/validator/mod.rs b/core/lib/multivm/src/tracers/validator/mod.rs index a095be9f3748..88249467a575 100644 --- a/core/lib/multivm/src/tracers/validator/mod.rs +++ b/core/lib/multivm/src/tracers/validator/mod.rs @@ -10,9 +10,9 @@ use 
zksync_system_constants::{ L2_BASE_TOKEN_ADDRESS, MSG_VALUE_SIMULATOR_ADDRESS, SYSTEM_CONTEXT_ADDRESS, }; use zksync_types::{ - vm::VmVersion, web3::keccak256, AccountTreeId, Address, StorageKey, H256, U256, + address_to_u256, u256_to_h256, vm::VmVersion, web3::keccak256, AccountTreeId, Address, + StorageKey, H256, U256, }; -use zksync_utils::{address_to_u256, be_bytes_to_safe_address, u256_to_h256}; use zksync_vm_interface::{ tracer::{TimestampAsserterParams, ValidationTraces}, L1BatchEnv, @@ -25,6 +25,7 @@ use crate::{ storage::{StoragePtr, WriteStorage}, tracer::{ValidationParams, ViolatedValidationRule}, }, + utils::bytecode::be_bytes_to_safe_address, }; mod types; diff --git a/core/lib/multivm/src/tracers/validator/vm_1_4_1/mod.rs b/core/lib/multivm/src/tracers/validator/vm_1_4_1/mod.rs index d1ddb2b44c80..3b5636c1c528 100644 --- a/core/lib/multivm/src/tracers/validator/vm_1_4_1/mod.rs +++ b/core/lib/multivm/src/tracers/validator/vm_1_4_1/mod.rs @@ -3,8 +3,9 @@ use zk_evm_1_4_1::{ zkevm_opcode_defs::{ContextOpcode, FarCallABI, LogOpcode, Opcode}, }; use zksync_system_constants::KECCAK256_PRECOMPILE_ADDRESS; -use zksync_types::{get_code_key, AccountTreeId, StorageKey, H256}; -use zksync_utils::{h256_to_account_address, u256_to_account_address, u256_to_h256}; +use zksync_types::{ + get_code_key, h256_to_address, u256_to_address, u256_to_h256, AccountTreeId, StorageKey, H256, +}; use crate::{ interface::{ @@ -48,7 +49,7 @@ impl ValidationTracer { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -115,7 +116,7 @@ impl ValidationTracer { let value = storage.borrow_mut().read_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_account_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git a/core/lib/multivm/src/tracers/validator/vm_1_4_2/mod.rs b/core/lib/multivm/src/tracers/validator/vm_1_4_2/mod.rs index a51644ff9ea2..0a48792aaa9e 100644 --- a/core/lib/multivm/src/tracers/validator/vm_1_4_2/mod.rs +++ b/core/lib/multivm/src/tracers/validator/vm_1_4_2/mod.rs @@ -3,8 +3,9 @@ use zk_evm_1_4_1::{ zkevm_opcode_defs::{ContextOpcode, FarCallABI, LogOpcode, Opcode}, }; use zksync_system_constants::KECCAK256_PRECOMPILE_ADDRESS; -use zksync_types::{get_code_key, AccountTreeId, StorageKey, H256}; -use zksync_utils::{h256_to_account_address, u256_to_account_address, u256_to_h256}; +use zksync_types::{ + get_code_key, h256_to_address, u256_to_address, u256_to_h256, AccountTreeId, StorageKey, H256, +}; use crate::{ interface::{ @@ -48,7 +49,7 @@ impl ValidationTracer { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -115,7 +116,7 @@ impl ValidationTracer { let value = storage.borrow_mut().read_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_account_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git 
a/core/lib/multivm/src/tracers/validator/vm_boojum_integration/mod.rs b/core/lib/multivm/src/tracers/validator/vm_boojum_integration/mod.rs index 7f9767a5e632..da6ffd4948cf 100644 --- a/core/lib/multivm/src/tracers/validator/vm_boojum_integration/mod.rs +++ b/core/lib/multivm/src/tracers/validator/vm_boojum_integration/mod.rs @@ -3,8 +3,9 @@ use zk_evm_1_4_0::{ zkevm_opcode_defs::{ContextOpcode, FarCallABI, LogOpcode, Opcode}, }; use zksync_system_constants::KECCAK256_PRECOMPILE_ADDRESS; -use zksync_types::{get_code_key, AccountTreeId, StorageKey, H256}; -use zksync_utils::{h256_to_account_address, u256_to_account_address, u256_to_h256}; +use zksync_types::{ + get_code_key, h256_to_address, u256_to_address, u256_to_h256, AccountTreeId, StorageKey, H256, +}; use crate::{ interface::{ @@ -48,7 +49,7 @@ impl ValidationTracer { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -115,7 +116,7 @@ impl ValidationTracer { let value = storage.borrow_mut().read_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_account_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git a/core/lib/multivm/src/tracers/validator/vm_latest/mod.rs b/core/lib/multivm/src/tracers/validator/vm_latest/mod.rs index d3dc7fd87c42..3c819384137f 100644 --- a/core/lib/multivm/src/tracers/validator/vm_latest/mod.rs +++ b/core/lib/multivm/src/tracers/validator/vm_latest/mod.rs @@ -3,8 +3,10 @@ use zk_evm_1_5_0::{ zkevm_opcode_defs::{ContextOpcode, FarCallABI, LogOpcode, Opcode}, }; use zksync_system_constants::KECCAK256_PRECOMPILE_ADDRESS; -use zksync_types::{get_code_key, AccountTreeId, StorageKey, H256, U256}; -use zksync_utils::{h256_to_account_address, u256_to_account_address, u256_to_h256}; +use zksync_types::{ + get_code_key, h256_to_address, u256_to_address, u256_to_h256, AccountTreeId, StorageKey, H256, + U256, +}; use crate::{ interface::{ @@ -48,7 +50,7 @@ impl ValidationTracer { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -161,7 +163,7 @@ impl ValidationTracer { let value = storage.borrow_mut().read_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_account_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git a/core/lib/multivm/src/tracers/validator/vm_refunds_enhancement/mod.rs b/core/lib/multivm/src/tracers/validator/vm_refunds_enhancement/mod.rs index 0badd7c58775..ea95c567181e 100644 --- a/core/lib/multivm/src/tracers/validator/vm_refunds_enhancement/mod.rs +++ b/core/lib/multivm/src/tracers/validator/vm_refunds_enhancement/mod.rs @@ -3,8 +3,9 @@ use zk_evm_1_3_3::{ zkevm_opcode_defs::{ContextOpcode, FarCallABI, LogOpcode, Opcode}, }; use zksync_system_constants::KECCAK256_PRECOMPILE_ADDRESS; -use zksync_types::{get_code_key, AccountTreeId, StorageKey, H256}; -use zksync_utils::{h256_to_account_address, u256_to_account_address, u256_to_h256}; 
+use zksync_types::{ + get_code_key, h256_to_address, u256_to_address, u256_to_h256, AccountTreeId, StorageKey, H256, +}; use crate::{ interface::{ @@ -48,7 +49,7 @@ impl<H: HistoryMode> ValidationTracer<H> { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -115,7 +116,7 @@ impl<H: HistoryMode> ValidationTracer<H> { let value = storage.borrow_mut().read_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_account_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git a/core/lib/multivm/src/tracers/validator/vm_virtual_blocks/mod.rs b/core/lib/multivm/src/tracers/validator/vm_virtual_blocks/mod.rs index 86a639915c9d..94f31ddf138d 100644 --- a/core/lib/multivm/src/tracers/validator/vm_virtual_blocks/mod.rs +++ b/core/lib/multivm/src/tracers/validator/vm_virtual_blocks/mod.rs @@ -3,8 +3,9 @@ use zk_evm_1_3_3::{ zkevm_opcode_defs::{ContextOpcode, FarCallABI, LogOpcode, Opcode}, }; use zksync_system_constants::KECCAK256_PRECOMPILE_ADDRESS; -use zksync_types::{get_code_key, AccountTreeId, StorageKey, H256}; -use zksync_utils::{h256_to_account_address, u256_to_account_address, u256_to_h256}; +use zksync_types::{ + get_code_key, h256_to_address, u256_to_address, u256_to_h256, AccountTreeId, StorageKey, H256, +}; use crate::{ interface::{ @@ -48,7 +49,7 @@ impl<H: HistoryMode> ValidationTracer<H> { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -115,7 +116,7 @@ impl<H: HistoryMode> ValidationTracer<H> { let value = storage.borrow_mut().read_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git a/core/lib/multivm/src/utils/bytecode.rs b/core/lib/multivm/src/utils/bytecode.rs index 260749b44f3c..f5dee805864e 100644 --- a/core/lib/multivm/src/utils/bytecode.rs +++ b/core/lib/multivm/src/utils/bytecode.rs @@ -1,10 +1,60 @@ use std::collections::HashMap; -use zksync_types::ethabi::{self, Token}; -use zksync_utils::bytecode::{hash_bytecode, validate_bytecode, InvalidBytecodeError}; +use zksync_types::{ + bytecode::{validate_bytecode, BytecodeHash, InvalidBytecodeError}, + ethabi::{self, Token}, + Address, H256, U256, +}; use crate::interface::CompressedBytecodeInfo; +pub(crate) fn be_chunks_to_h256_words(chunks: Vec<[u8; 32]>) -> Vec<H256> { + chunks.into_iter().map(|el| H256::from_slice(&el)).collect() +} + +pub(crate) fn be_words_to_bytes(words: &[U256]) -> Vec<u8> { + words + .iter() + .flat_map(|w| { + let mut bytes = [0u8; 32]; + w.to_big_endian(&mut bytes); + bytes + }) + .collect() +} + +pub(crate) fn bytes_to_be_words(bytes: &[u8]) -> Vec<U256> { + assert_eq!( + bytes.len() % 32, + 0, + "Bytes must be divisible by 32 to split into chunks" + ); + bytes.chunks(32).map(U256::from_big_endian).collect() +} + +pub(crate) fn be_bytes_to_safe_address(bytes: &[u8]) -> Option<Address> { + if bytes.len() < 20 { + return None; + } + + let (zero_bytes, address_bytes) = bytes.split_at(bytes.len() - 20); + + if zero_bytes.iter().any(|b| *b != 0) { + None + } else { + Some(Address::from_slice(address_bytes)) + } +} + +pub(crate) fn bytecode_len_in_words(bytecode_hash: &H256) -> u16 { + let bytes = bytecode_hash.as_bytes(); + u16::from_be_bytes([bytes[2], bytes[3]]) +} + +pub(crate) fn bytecode_len_in_bytes(bytecode_hash: &H256) -> u32 { + u32::from(bytecode_len_in_words(bytecode_hash)) * 32 +} + #[derive(Debug, thiserror::Error)] pub(crate) enum FailedToCompressBytecodeError { #[error("Number of unique 8-bytes bytecode chunks exceed the limit of 2^16 - 1")] @@ -87,7 +137,10 @@ pub(crate) fn compress( } pub(crate) fn encode_call(bytecode: &CompressedBytecodeInfo) -> Vec<u8> { - let mut bytecode_hash = hash_bytecode(&bytecode.original).as_bytes().to_vec(); + let mut bytecode_hash = BytecodeHash::for_bytecode(&bytecode.original) + .value() + .as_bytes() + .to_vec(); let empty_cell = [0_u8; 32]; bytecode_hash.extend_from_slice(&empty_cell); diff --git a/core/lib/multivm/src/utils/deduplicator.rs b/core/lib/multivm/src/utils/deduplicator.rs index e9a870e6901d..0cb4c3fa7cd8 100644 --- a/core/lib/multivm/src/utils/deduplicator.rs +++ b/core/lib/multivm/src/utils/deduplicator.rs @@ -1,10 +1,9 @@ use std::collections::HashMap; use zksync_types::{ - writes::compression::compress_with_best_strategy, StorageKey, StorageLogKind, + h256_to_u256, writes::compression::compress_with_best_strategy, StorageKey, StorageLogKind, StorageLogWithPreviousValue, H256, }; -use zksync_utils::h256_to_u256; use crate::interface::DeduplicatedWritesMetrics; @@ -211,8 +210,7 @@ impl StorageWritesDeduplicator { #[cfg(test)] mod tests { - use zksync_types::{AccountTreeId, StorageLog, H160, U256}; - use zksync_utils::u256_to_h256; + use zksync_types::{u256_to_h256, AccountTreeId, StorageLog, H160, U256}; use super::*; diff --git a/core/lib/multivm/src/utils/events.rs b/core/lib/multivm/src/utils/events.rs index d84651989e75..37124b822040 100644 --- a/core/lib/multivm/src/utils/events.rs +++ b/core/lib/multivm/src/utils/events.rs @@ -93,8 +93,7 @@ mod tests { use zksync_system_constants::{ BOOTLOADER_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L2_BASE_TOKEN_ADDRESS, }; - use zksync_types::{Address, L1BatchNumber}; - use zksync_utils::u256_to_h256; + use zksync_types::{u256_to_h256, Address, L1BatchNumber}; use super::*; diff --git a/core/lib/multivm/src/utils/mod.rs b/core/lib/multivm/src/utils/mod.rs index a55adb16c85a..4332c0327ff1 100644 --- a/core/lib/multivm/src/utils/mod.rs +++ b/core/lib/multivm/src/utils/mod.rs @@ -239,16 +239,16 @@ pub fn get_bootloader_encoding_space(version: VmVersion) -> u32 { VmVersion::Vm1_4_2 => crate::vm_1_4_2::constants::BOOTLOADER_TX_ENCODING_SPACE, VmVersion::Vm1_5_0SmallBootloaderMemory => { crate::vm_latest::constants::get_bootloader_tx_encoding_space( - crate::vm_latest::MultiVMSubversion::SmallBootloaderMemory, + crate::vm_latest::MultiVmSubversion::SmallBootloaderMemory, ) } VmVersion::Vm1_5_0IncreasedBootloaderMemory => { crate::vm_latest::constants::get_bootloader_tx_encoding_space( - crate::vm_latest::MultiVMSubversion::IncreasedBootloaderMemory, + crate::vm_latest::MultiVmSubversion::IncreasedBootloaderMemory, ) } VmVersion::VmGateway => crate::vm_latest::constants::get_bootloader_tx_encoding_space( - crate::vm_latest::MultiVMSubversion::Gateway, + crate::vm_latest::MultiVmSubversion::Gateway, ), } } @@ -394,16 +394,16 @@ pub fn get_used_bootloader_memory_bytes(version: 
VmVersion) -> usize { VmVersion::Vm1_4_2 => crate::vm_1_4_2::constants::USED_BOOTLOADER_MEMORY_BYTES, VmVersion::Vm1_5_0SmallBootloaderMemory => { crate::vm_latest::constants::get_used_bootloader_memory_bytes( - crate::vm_latest::MultiVMSubversion::SmallBootloaderMemory, + crate::vm_latest::MultiVmSubversion::SmallBootloaderMemory, ) } VmVersion::Vm1_5_0IncreasedBootloaderMemory => { crate::vm_latest::constants::get_used_bootloader_memory_bytes( - crate::vm_latest::MultiVMSubversion::IncreasedBootloaderMemory, + crate::vm_latest::MultiVmSubversion::IncreasedBootloaderMemory, ) } VmVersion::VmGateway => crate::vm_latest::constants::get_used_bootloader_memory_bytes( - crate::vm_latest::MultiVMSubversion::Gateway, + crate::vm_latest::MultiVmSubversion::Gateway, ), } } @@ -430,16 +430,16 @@ pub fn get_used_bootloader_memory_words(version: VmVersion) -> usize { VmVersion::Vm1_4_2 => crate::vm_1_4_2::constants::USED_BOOTLOADER_MEMORY_WORDS, VmVersion::Vm1_5_0SmallBootloaderMemory => { crate::vm_latest::constants::get_used_bootloader_memory_bytes( - crate::vm_latest::MultiVMSubversion::SmallBootloaderMemory, + crate::vm_latest::MultiVmSubversion::SmallBootloaderMemory, ) } VmVersion::Vm1_5_0IncreasedBootloaderMemory => { crate::vm_latest::constants::get_used_bootloader_memory_bytes( - crate::vm_latest::MultiVMSubversion::IncreasedBootloaderMemory, + crate::vm_latest::MultiVmSubversion::IncreasedBootloaderMemory, ) } VmVersion::VmGateway => crate::vm_latest::constants::get_used_bootloader_memory_bytes( - crate::vm_latest::MultiVMSubversion::Gateway, + crate::vm_latest::MultiVmSubversion::Gateway, ), } } diff --git a/core/lib/multivm/src/versions/shadow/mod.rs b/core/lib/multivm/src/versions/shadow/mod.rs index 42a0fbb1b8ba..a335d0fe5906 100644 --- a/core/lib/multivm/src/versions/shadow/mod.rs +++ b/core/lib/multivm/src/versions/shadow/mod.rs @@ -2,17 +2,11 @@ //! these tests are placed here. 
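For context on the `shadow` module whose diff begins above: its tests run the same workload through two VM implementations and compare outcomes. A toy sketch of that shape, with made-up `Vm`/`Tx` types standing in for the repo's actual interfaces:

```rust
use std::fmt::Debug;

// Illustrative stand-in for a transaction.
struct Tx;

// Illustrative stand-in for a VM implementation.
trait Vm {
    type Output: PartialEq + Debug;
    fn execute(&mut self, tx: &Tx) -> Self::Output;
}

/// Wraps a "main" VM and a "shadow" VM and cross-checks their outputs.
struct ShadowVm<M, S> {
    main: M,
    shadow: S,
}

impl<M, S> ShadowVm<M, S>
where
    M: Vm,
    S: Vm<Output = M::Output>,
{
    fn execute(&mut self, tx: &Tx) -> M::Output {
        let main_out = self.main.execute(tx);
        let shadow_out = self.shadow.execute(tx);
        // Any divergence indicates a bug in one of the implementations.
        assert_eq!(main_out, shadow_out, "shadow VM diverged from main VM");
        main_out
    }
}
```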
use assert_matches::assert_matches; -use ethabi::Contract; -use zksync_contracts::{ - get_loadnext_contract, load_contract, read_bytecode, - test_contracts::LoadnextContractExecutionParams, -}; -use zksync_test_account::{Account, TxType}; +use zksync_test_contracts::{Account, LoadnextContractExecutionParams, TestContract, TxType}; use zksync_types::{ block::L2BlockHasher, fee::Fee, AccountTreeId, Address, Execute, L1BatchNumber, L2BlockNumber, ProtocolVersionId, StorageKey, H256, U256, }; -use zksync_utils::bytecode::hash_bytecode; use crate::{ interface::{ @@ -61,13 +55,11 @@ struct Harness { alice: Account, bob: Account, storage_contract: ContractToDeploy, - storage_contract_abi: Contract, + storage_contract_abi: &'static ethabi::Contract, current_block: L2BlockEnv, } impl Harness { - const STORAGE_CONTRACT_PATH: &'static str = - "etc/contracts-test-data/artifacts-zk/contracts/storage/storage.sol/StorageTester.json"; const STORAGE_CONTRACT_ADDRESS: Address = Address::repeat_byte(23); fn new(l1_batch_env: &L1BatchEnv) -> Self { @@ -75,10 +67,10 @@ impl Harness { alice: Account::from_seed(0), bob: Account::from_seed(1), storage_contract: ContractToDeploy::new( - read_bytecode(Self::STORAGE_CONTRACT_PATH), + TestContract::storage_test().bytecode.to_vec(), Self::STORAGE_CONTRACT_ADDRESS, ), - storage_contract_abi: load_contract(Self::STORAGE_CONTRACT_PATH), + storage_contract_abi: &TestContract::storage_test().abi, current_block: l1_batch_env.first_l2_block, } } @@ -178,7 +170,7 @@ impl Harness { self.new_block(vm, &[out_of_gas_transfer.hash(), simple_write_tx.hash()]); let deploy_tx = self.alice.get_deploy_tx( - &get_loadnext_contract().bytecode, + TestContract::load_test().bytecode, Some(&[ethabi::Token::Uint(100.into())]), TxType::L2, ); @@ -207,7 +199,7 @@ where { let system_env = default_system_env(); let l1_batch_env = default_l1_batch(L1BatchNumber(1)); - let mut storage = InMemoryStorage::with_system_contracts(hash_bytecode); + let mut storage = InMemoryStorage::with_system_contracts(); let mut harness = Harness::new(&l1_batch_env); harness.setup_storage(&mut storage); @@ -231,7 +223,7 @@ fn sanity_check_harness_on_new_vm() { fn sanity_check_shadow_vm() { let system_env = default_system_env(); let l1_batch_env = default_l1_batch(L1BatchNumber(1)); - let mut storage = InMemoryStorage::with_system_contracts(hash_bytecode); + let mut storage = InMemoryStorage::with_system_contracts(); let mut harness = Harness::new(&l1_batch_env); harness.setup_storage(&mut storage); @@ -258,7 +250,7 @@ fn shadow_vm_basics() { pretty_assertions::assert_eq!(replayed_dump, dump); // Check that the VM executes identically when reading from the original storage and one restored from the dump. 
- let mut storage = InMemoryStorage::with_system_contracts(hash_bytecode); + let mut storage = InMemoryStorage::with_system_contracts(); harness.setup_storage(&mut storage); let storage = StorageView::new(storage).to_rc_ptr(); diff --git a/core/lib/multivm/src/versions/testonly/block_tip.rs b/core/lib/multivm/src/versions/testonly/block_tip.rs index 220653308a7e..efdf2e1b0cdf 100644 --- a/core/lib/multivm/src/versions/testonly/block_tip.rs +++ b/core/lib/multivm/src/versions/testonly/block_tip.rs @@ -4,14 +4,15 @@ use zksync_contracts::load_sys_contract; use zksync_system_constants::{ CONTRACT_FORCE_DEPLOYER_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, }; +use zksync_test_contracts::TestContract; use zksync_types::{ - commitment::SerializeCommitment, fee_model::BatchFeeInput, get_code_key, - l2_to_l1_log::L2ToL1Log, writes::StateDiffRecord, Address, Execute, H256, U256, + bytecode::BytecodeHash, commitment::SerializeCommitment, fee_model::BatchFeeInput, + get_code_key, l2_to_l1_log::L2ToL1Log, u256_to_h256, writes::StateDiffRecord, Address, Execute, + H256, U256, }; -use zksync_utils::{bytecode::hash_bytecode, u256_to_h256}; use super::{ - default_pubdata_builder, get_complex_upgrade_abi, get_empty_storage, read_complex_upgrade, + default_pubdata_builder, get_empty_storage, tester::{TestedVm, VmTesterBuilder}, }; use crate::{ @@ -41,7 +42,7 @@ struct MimicCallInfo { const CALLS_PER_TX: usize = 1_000; fn populate_mimic_calls(data: L1MessengerTestData) -> Vec<Vec<u8>> { - let complex_upgrade = get_complex_upgrade_abi(); + let complex_upgrade = TestContract::complex_upgrade(); let l1_messenger = load_sys_contract("L1Messenger"); let logs_mimic_calls = (0..data.l2_to_l1_logs).map(|i| MimicCallInfo { @@ -72,7 +73,9 @@ fn populate_mimic_calls(data: L1MessengerTestData) -> Vec<Vec<u8>> { data: l1_messenger .function("requestBytecodeL1Publication") .unwrap() - .encode_input(&[Token::FixedBytes(hash_bytecode(bytecode).0.to_vec())]) + .encode_input(&[Token::FixedBytes( + BytecodeHash::for_bytecode(bytecode).value().0.to_vec(), + )]) .unwrap(), }); @@ -91,7 +94,6 @@ fn populate_mimic_calls(data: L1MessengerTestData) -> Vec<Vec<u8>> { .map(|chunk| { complex_upgrade .function("mimicCalls") - .unwrap() .encode_input(&[Token::Array(chunk.collect_vec())]) .unwrap() }) @@ -113,14 +115,17 @@ struct StatisticsTagged { fn execute_test(test_data: L1MessengerTestData) -> TestStatistics { let mut storage = get_empty_storage(); - let complex_upgrade_code = read_complex_upgrade(); + let complex_upgrade_code = TestContract::complex_upgrade().bytecode.to_vec(); // For this test we'll just put the bytecode onto the force deployer address storage.set_value( get_code_key(&CONTRACT_FORCE_DEPLOYER_ADDRESS), - hash_bytecode(&complex_upgrade_code), + BytecodeHash::for_bytecode(&complex_upgrade_code).value(), + ); + storage.store_factory_dep( + BytecodeHash::for_bytecode(&complex_upgrade_code).value(), + complex_upgrade_code, ); - storage.store_factory_dep(hash_bytecode(&complex_upgrade_code), complex_upgrade_code); // We are measuring computational cost, so prices for pubdata don't matter, while they artificially dilute // the gas limit diff --git a/core/lib/multivm/src/versions/testonly/bytecode_publishing.rs b/core/lib/multivm/src/versions/testonly/bytecode_publishing.rs index 9da005b995d3..f60bc5594143 100644 --- a/core/lib/multivm/src/versions/testonly/bytecode_publishing.rs +++ b/core/lib/multivm/src/versions/testonly/bytecode_publishing.rs @@ -1,6 +1,6 @@ -use 
zksync_test_contracts::{TestContract, TxType}; -use super::{default_pubdata_builder, read_test_contract, tester::VmTesterBuilder, TestedVm}; +use super::{default_pubdata_builder, tester::VmTesterBuilder, TestedVm}; use crate::{ interface::{InspectExecutionMode, TxExecutionMode, VmEvent, VmInterfaceExt}, utils::bytecode, @@ -15,12 +15,12 @@ pub(crate) fn test_bytecode_publishing() { .with_rich_accounts(1) .build::(); - let counter = read_test_contract(); + let counter = TestContract::counter().bytecode; let account = &mut vm.rich_accounts[0]; - let compressed_bytecode = bytecode::compress(counter.clone()).unwrap().compressed; + let compressed_bytecode = bytecode::compress(counter.to_vec()).unwrap().compressed; - let tx = account.get_deploy_tx(&counter, None, TxType::L2).tx; + let tx = account.get_deploy_tx(counter, None, TxType::L2).tx; assert_eq!(tx.execute.factory_deps.len(), 1); // The deployed bytecode is the only dependency let push_result = vm.vm.push_transaction(tx); assert_eq!(push_result.compressed_bytecodes.len(), 1); diff --git a/core/lib/multivm/src/versions/testonly/code_oracle.rs b/core/lib/multivm/src/versions/testonly/code_oracle.rs index 767a294f44ab..e48b434403f2 100644 --- a/core/lib/multivm/src/versions/testonly/code_oracle.rs +++ b/core/lib/multivm/src/versions/testonly/code_oracle.rs @@ -1,13 +1,11 @@ use ethabi::Token; +use zksync_test_contracts::TestContract; use zksync_types::{ - get_known_code_key, web3::keccak256, Address, Execute, StorageLogWithPreviousValue, U256, + bytecode::BytecodeHash, get_known_code_key, h256_to_u256, u256_to_h256, web3::keccak256, + Address, Execute, StorageLogWithPreviousValue, U256, }; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256, u256_to_h256}; -use super::{ - get_empty_storage, load_precompiles_contract, read_precompiles_contract, read_test_contract, - tester::VmTesterBuilder, TestedVm, -}; +use super::{get_empty_storage, tester::VmTesterBuilder, TestedVm}; use crate::{ interface::{InspectExecutionMode, TxExecutionMode, VmInterfaceExt}, versions::testonly::ContractToDeploy, @@ -20,12 +18,12 @@ fn generate_large_bytecode() -> Vec { pub(crate) fn test_code_oracle() { let precompiles_contract_address = Address::repeat_byte(1); - let precompile_contract_bytecode = read_precompiles_contract(); + let precompile_contract_bytecode = TestContract::precompiles_test().bytecode.to_vec(); // Filling the zkevm bytecode - let normal_zkevm_bytecode = read_test_contract(); - let normal_zkevm_bytecode_hash = hash_bytecode(&normal_zkevm_bytecode); - let normal_zkevm_bytecode_keccak_hash = keccak256(&normal_zkevm_bytecode); + let normal_zkevm_bytecode = TestContract::counter().bytecode; + let normal_zkevm_bytecode_hash = BytecodeHash::for_bytecode(normal_zkevm_bytecode).value(); + let normal_zkevm_bytecode_keccak_hash = keccak256(normal_zkevm_bytecode); let mut storage = get_empty_storage(); storage.set_value( get_known_code_key(&normal_zkevm_bytecode_hash), @@ -45,10 +43,10 @@ pub(crate) fn test_code_oracle() { .with_storage(storage) .build::(); - let precompile_contract = load_precompiles_contract(); - let call_code_oracle_function = precompile_contract.function("callCodeOracle").unwrap(); + let precompile_contract = TestContract::precompiles_test(); + let call_code_oracle_function = precompile_contract.function("callCodeOracle"); - vm.vm.insert_bytecodes(&[normal_zkevm_bytecode.as_slice()]); + vm.vm.insert_bytecodes(&[normal_zkevm_bytecode]); let account = &mut vm.rich_accounts[0]; // Firstly, let's ensure that the contract works. 
@@ -111,10 +109,10 @@ fn find_code_oracle_cost_log( pub(crate) fn test_code_oracle_big_bytecode() { let precompiles_contract_address = Address::repeat_byte(1); - let precompile_contract_bytecode = read_precompiles_contract(); + let precompile_contract_bytecode = TestContract::precompiles_test().bytecode.to_vec(); let big_zkevm_bytecode = generate_large_bytecode(); - let big_zkevm_bytecode_hash = hash_bytecode(&big_zkevm_bytecode); + let big_zkevm_bytecode_hash = BytecodeHash::for_bytecode(&big_zkevm_bytecode).value(); let big_zkevm_bytecode_keccak_hash = keccak256(&big_zkevm_bytecode); let mut storage = get_empty_storage(); @@ -136,8 +134,8 @@ pub(crate) fn test_code_oracle_big_bytecode() { .with_storage(storage) .build::(); - let precompile_contract = load_precompiles_contract(); - let call_code_oracle_function = precompile_contract.function("callCodeOracle").unwrap(); + let precompile_contract = TestContract::precompiles_test(); + let call_code_oracle_function = precompile_contract.function("callCodeOracle"); vm.vm.insert_bytecodes(&[big_zkevm_bytecode.as_slice()]); @@ -169,19 +167,18 @@ pub(crate) fn test_code_oracle_big_bytecode() { pub(crate) fn test_refunds_in_code_oracle() { let precompiles_contract_address = Address::repeat_byte(1); - let precompile_contract_bytecode = read_precompiles_contract(); - let normal_zkevm_bytecode = read_test_contract(); - let normal_zkevm_bytecode_hash = hash_bytecode(&normal_zkevm_bytecode); - let normal_zkevm_bytecode_keccak_hash = keccak256(&normal_zkevm_bytecode); + let normal_zkevm_bytecode = TestContract::counter().bytecode; + let normal_zkevm_bytecode_hash = BytecodeHash::for_bytecode(normal_zkevm_bytecode).value(); + let normal_zkevm_bytecode_keccak_hash = keccak256(normal_zkevm_bytecode); let mut storage = get_empty_storage(); storage.set_value( get_known_code_key(&normal_zkevm_bytecode_hash), u256_to_h256(U256::one()), ); - let precompile_contract = load_precompiles_contract(); - let call_code_oracle_function = precompile_contract.function("callCodeOracle").unwrap(); + let precompile_contract = TestContract::precompiles_test(); + let call_code_oracle_function = precompile_contract.function("callCodeOracle"); // Execute code oracle twice with identical VM state that only differs in that the queried bytecode // is already decommitted the second time. 
The second call must consume less gas (`decommit` doesn't charge additional gas @@ -192,13 +189,13 @@ pub(crate) fn test_refunds_in_code_oracle() { .with_execution_mode(TxExecutionMode::VerifyExecute) .with_rich_accounts(1) .with_custom_contracts(vec![ContractToDeploy::new( - precompile_contract_bytecode.clone(), + TestContract::precompiles_test().bytecode.to_vec(), precompiles_contract_address, )]) .with_storage(storage.clone()) .build::(); - vm.vm.insert_bytecodes(&[normal_zkevm_bytecode.as_slice()]); + vm.vm.insert_bytecodes(&[normal_zkevm_bytecode]); let account = &mut vm.rich_accounts[0]; if decommit { diff --git a/core/lib/multivm/src/versions/testonly/default_aa.rs b/core/lib/multivm/src/versions/testonly/default_aa.rs index c69c00de4508..9255854e8703 100644 --- a/core/lib/multivm/src/versions/testonly/default_aa.rs +++ b/core/lib/multivm/src/versions/testonly/default_aa.rs @@ -1,13 +1,12 @@ -use zksync_test_account::{DeployContractsTx, TxType}; +use zksync_test_contracts::{DeployContractsTx, TestContract, TxType}; use zksync_types::{ - get_code_key, get_known_code_key, get_nonce_key, + get_code_key, get_known_code_key, get_nonce_key, h256_to_u256, system_contracts::{DEPLOYMENT_NONCE_INCREMENT, TX_NONCE_INCREMENT}, utils::storage_key_for_eth_balance, U256, }; -use zksync_utils::h256_to_u256; -use super::{default_pubdata_builder, read_test_contract, tester::VmTesterBuilder, TestedVm}; +use super::{default_pubdata_builder, tester::VmTesterBuilder, TestedVm}; use crate::{ interface::{InspectExecutionMode, TxExecutionMode, VmInterfaceExt}, vm_latest::utils::fee::get_batch_base_fee, @@ -22,13 +21,13 @@ pub(crate) fn test_default_aa_interaction() { .with_rich_accounts(1) .build::(); - let counter = read_test_contract(); + let counter = TestContract::counter().bytecode; let account = &mut vm.rich_accounts[0]; let DeployContractsTx { tx, bytecode_hash, address, - } = account.get_deploy_tx(&counter, None, TxType::L2); + } = account.get_deploy_tx(counter, None, TxType::L2); let maximal_fee = tx.gas_limit() * get_batch_base_fee(&vm.l1_batch_env); vm.vm.push_transaction(tx); @@ -36,7 +35,6 @@ pub(crate) fn test_default_aa_interaction() { assert!(!result.result.is_failed(), "Transaction wasn't successful"); vm.vm.finish_batch(default_pubdata_builder()); - vm.vm.get_current_execution_state(); // Both deployment and ordinary nonce should be incremented by one. 
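The nonce assertion above leans on the packed layout implied by the imported constants: assuming `TX_NONCE_INCREMENT` (1) and `DEPLOYMENT_NONCE_INCREMENT` (2^128) mean the tx nonce lives in the low 128 bits of the nonce slot and the deployment nonce in the high 128 bits, the decomposition looks roughly like this (sketch only, using `primitive_types::U256` as a stand-in for the repo's alias):

```rust
use primitive_types::U256;

/// Splits a raw nonce-holder slot value into (tx_nonce, deployment_nonce),
/// assuming the 2^128 packing described above.
fn decompose_raw_nonce(raw: U256) -> (U256, U256) {
    let deployment_nonce_increment = U256::one() << 128;
    (
        raw % deployment_nonce_increment, // low 128 bits: tx nonce
        raw / deployment_nonce_increment, // high 128 bits: deployment nonce
    )
}

fn main() {
    // One executed tx that performed one deployment => both parts equal 1.
    let raw = U256::one() + (U256::one() << 128);
    assert_eq!(decompose_raw_nonce(raw), (U256::one(), U256::one()));
}
```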
diff --git a/core/lib/multivm/src/versions/testonly/evm_emulator.rs b/core/lib/multivm/src/versions/testonly/evm_emulator.rs index a77274ec581c..b979efe360db 100644 --- a/core/lib/multivm/src/versions/testonly/evm_emulator.rs +++ b/core/lib/multivm/src/versions/testonly/evm_emulator.rs @@ -3,20 +3,17 @@ use std::collections::HashMap; use assert_matches::assert_matches; use ethabi::Token; use rand::{rngs::StdRng, Rng, SeedableRng}; -use zksync_contracts::{load_contract, read_bytecode, SystemContractCode}; +use zksync_contracts::SystemContractCode; use zksync_system_constants::{ CONTRACT_DEPLOYER_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L2_BASE_TOKEN_ADDRESS, }; -use zksync_test_account::TxType; +use zksync_test_contracts::{TestContract, TxType}; use zksync_types::{ - get_code_key, get_known_code_key, + bytecode::BytecodeHash, + get_code_key, get_known_code_key, h256_to_u256, utils::{key_for_eth_balance, storage_key_for_eth_balance}, AccountTreeId, Address, Execute, StorageKey, H256, U256, }; -use zksync_utils::{ - bytecode::{hash_bytecode, hash_evm_bytecode}, - bytes_to_be_words, h256_to_u256, -}; use super::{default_system_env, TestedVm, VmTester, VmTesterBuilder}; use crate::interface::{ @@ -24,18 +21,11 @@ use crate::interface::{ VmInterfaceExt, }; -const MOCK_DEPLOYER_PATH: &str = "etc/contracts-test-data/artifacts-zk/contracts/mock-evm/mock-evm.sol/MockContractDeployer.json"; -const MOCK_KNOWN_CODE_STORAGE_PATH: &str = "etc/contracts-test-data/artifacts-zk/contracts/mock-evm/mock-evm.sol/MockKnownCodeStorage.json"; -const MOCK_EMULATOR_PATH: &str = - "etc/contracts-test-data/artifacts-zk/contracts/mock-evm/mock-evm.sol/MockEvmEmulator.json"; -const RECURSIVE_CONTRACT_PATH: &str = "etc/contracts-test-data/artifacts-zk/contracts/mock-evm/mock-evm.sol/NativeRecursiveContract.json"; -const INCREMENTING_CONTRACT_PATH: &str = "etc/contracts-test-data/artifacts-zk/contracts/mock-evm/mock-evm.sol/IncrementingContract.json"; - fn override_system_contracts(storage: &mut InMemoryStorage) { - let mock_deployer = read_bytecode(MOCK_DEPLOYER_PATH); - let mock_deployer_hash = hash_bytecode(&mock_deployer); - let mock_known_code_storage = read_bytecode(MOCK_KNOWN_CODE_STORAGE_PATH); - let mock_known_code_storage_hash = hash_bytecode(&mock_known_code_storage); + let mock_deployer = TestContract::mock_deployer().bytecode.to_vec(); + let mock_deployer_hash = BytecodeHash::for_bytecode(&mock_deployer).value(); + let mock_known_code_storage = TestContract::mock_known_code_storage().bytecode.to_vec(); + let mock_known_code_storage_hash = BytecodeHash::for_bytecode(&mock_known_code_storage).value(); storage.set_value(get_code_key(&CONTRACT_DEPLOYER_ADDRESS), mock_deployer_hash); storage.set_value( @@ -65,7 +55,7 @@ impl EvmTestBuilder { fn new(deploy_emulator: bool, evm_contract_address: Address) -> Self { Self { deploy_emulator, - storage: InMemoryStorage::with_system_contracts(hash_bytecode), + storage: InMemoryStorage::with_system_contracts(), evm_contract_addresses: vec![evm_contract_address], } } @@ -81,12 +71,12 @@ impl EvmTestBuilder { } fn build(self) -> VmTester { - let mock_emulator = read_bytecode(MOCK_EMULATOR_PATH); + let mock_emulator = TestContract::mock_evm_emulator().bytecode.to_vec(); let mut storage = self.storage; let mut system_env = default_system_env(); if self.deploy_emulator { let evm_bytecode: Vec<_> = (0..32).collect(); - let evm_bytecode_hash = hash_evm_bytecode(&evm_bytecode); + let evm_bytecode_hash = BytecodeHash::for_evm_bytecode(&evm_bytecode).value(); storage.set_value( 
get_known_code_key(&evm_bytecode_hash), H256::from_low_u64_be(1), @@ -96,11 +86,11 @@ impl EvmTestBuilder { } system_env.base_system_smart_contracts.evm_emulator = Some(SystemContractCode { - hash: hash_bytecode(&mock_emulator), - code: bytes_to_be_words(mock_emulator), + hash: BytecodeHash::for_bytecode(&mock_emulator).value(), + code: mock_emulator, }); } else { - let emulator_hash = hash_bytecode(&mock_emulator); + let emulator_hash = BytecodeHash::for_bytecode(&mock_emulator).value(); storage.set_value(get_known_code_key(&emulator_hash), H256::from_low_u64_be(1)); storage.store_factory_dep(emulator_hash, mock_emulator); @@ -124,7 +114,7 @@ impl EvmTestBuilder { } pub(crate) fn test_tracing_evm_contract_deployment() { - let mut storage = InMemoryStorage::with_system_contracts(hash_bytecode); + let mut storage = InMemoryStorage::with_system_contracts(); override_system_contracts(&mut storage); let mut system_env = default_system_env(); @@ -141,7 +131,7 @@ pub(crate) fn test_tracing_evm_contract_deployment() { let args = [Token::Bytes((0..32).collect())]; let evm_bytecode = ethabi::encode(&args); - let expected_bytecode_hash = hash_evm_bytecode(&evm_bytecode); + let expected_bytecode_hash = BytecodeHash::for_evm_bytecode(&evm_bytecode).value(); let execute = Execute::for_deploy(expected_bytecode_hash, vec![0; 32], &args); let deploy_tx = account.get_l2_tx_for_execute(execute, None); let (_, vm_result) = vm @@ -158,7 +148,7 @@ pub(crate) fn test_tracing_evm_contract_deployment() { // "Deploy" a bytecode in another transaction and check that the first tx doesn't interfere with the returned `dynamic_factory_deps`. let args = [Token::Bytes((0..32).rev().collect())]; let evm_bytecode = ethabi::encode(&args); - let expected_bytecode_hash = hash_evm_bytecode(&evm_bytecode); + let expected_bytecode_hash = BytecodeHash::for_evm_bytecode(&evm_bytecode).value(); let execute = Execute::for_deploy(expected_bytecode_hash, vec![0; 32], &args); let deploy_tx = account.get_l2_tx_for_execute(execute, None); let (_, vm_result) = vm @@ -196,7 +186,6 @@ const RECIPIENT_ADDRESS: Address = Address::repeat_byte(0x12); /// `deploy_emulator = false` here and below tests the mock emulator as an ordinary contract (i.e., sanity-checks its logic). 
pub(crate) fn test_mock_emulator_with_payment(deploy_emulator: bool) { - let mock_emulator_abi = load_contract(MOCK_EMULATOR_PATH); let mut vm = EvmTestBuilder::new(deploy_emulator, RECIPIENT_ADDRESS).build::(); let mut current_balance = U256::zero(); @@ -204,7 +193,7 @@ pub(crate) fn test_mock_emulator_with_payment(deploy_emulator: boo let transferred_value = (1_000_000_000 * i).into(); let vm_result = test_payment( &mut vm, - &mock_emulator_abi, + &TestContract::mock_evm_emulator().abi, &mut current_balance, transferred_value, ); @@ -253,7 +242,7 @@ pub(crate) fn test_mock_emulator_with_recursion( deploy_emulator: bool, is_external: bool, ) { - let mock_emulator_abi = load_contract(MOCK_EMULATOR_PATH); + let mock_emulator_abi = &TestContract::mock_evm_emulator().abi; let recipient_address = Address::repeat_byte(0x12); let mut vm = EvmTestBuilder::new(deploy_emulator, recipient_address).build::(); let account = &mut vm.rich_accounts[0]; @@ -272,7 +261,7 @@ pub(crate) fn test_mock_emulator_with_recursion( } let factory_deps = if is_external { - vec![read_bytecode(RECURSIVE_CONTRACT_PATH)] + vec![TestContract::recursive_test().bytecode.to_vec()] } else { vec![] }; @@ -299,10 +288,8 @@ pub(crate) fn test_calling_to_mock_emulator_from_native_contract() let account = &mut vm.rich_accounts[0]; // Deploy a native contract. - let native_contract = read_bytecode(RECURSIVE_CONTRACT_PATH); - let native_contract_abi = load_contract(RECURSIVE_CONTRACT_PATH); let deploy_tx = account.get_deploy_tx( - &native_contract, + TestContract::recursive_test().bytecode, Some(&[Token::Address(recipient_address)]), TxType::L2, ); @@ -312,7 +299,7 @@ pub(crate) fn test_calling_to_mock_emulator_from_native_contract() assert!(!vm_result.result.is_failed(), "{:?}", vm_result.result); // Call from the native contract to the EVM emulator. 
- let test_fn = native_contract_abi.function("recurse").unwrap(); + let test_fn = TestContract::recursive_test().function("recurse"); let test_tx = account.get_l2_tx_for_execute( Execute { contract_address: Some(deploy_tx.address), @@ -335,9 +322,9 @@ pub(crate) fn test_mock_emulator_with_deployment(revert: bool) { .build::(); let account = &mut vm.rich_accounts[0]; - let mock_emulator_abi = load_contract(MOCK_EMULATOR_PATH); + let mock_emulator_abi = &TestContract::mock_evm_emulator().abi; let new_evm_bytecode = vec![0xfe; 96]; - let new_evm_bytecode_hash = hash_evm_bytecode(&new_evm_bytecode); + let new_evm_bytecode_hash = BytecodeHash::for_evm_bytecode(&new_evm_bytecode).value(); let test_fn = mock_emulator_abi.function("testDeploymentAndCall").unwrap(); let test_tx = account.get_l2_tx_for_execute( @@ -411,11 +398,11 @@ pub(crate) fn test_mock_emulator_with_recursive_deployment() { .build::(); let account = &mut vm.rich_accounts[0]; - let mock_emulator_abi = load_contract(MOCK_EMULATOR_PATH); + let mock_emulator_abi = &TestContract::mock_evm_emulator().abi; let bytecodes: HashMap<_, _> = (0_u8..10) .map(|byte| { let bytecode = vec![byte; 32]; - (hash_evm_bytecode(&bytecode), bytecode) + (BytecodeHash::for_evm_bytecode(&bytecode).value(), bytecode) }) .collect(); let test_fn = mock_emulator_abi @@ -457,11 +444,11 @@ fn test_mock_emulator_with_partial_reverts_and_rng(rng: &mut impl .build::(); let account = &mut vm.rich_accounts[0]; - let mock_emulator_abi = load_contract(MOCK_EMULATOR_PATH); + let mock_emulator_abi = &TestContract::mock_evm_emulator().abi; let all_bytecodes: HashMap<_, _> = (0_u8..10) .map(|_| { let bytecode = vec![rng.gen(); 32]; - (hash_evm_bytecode(&bytecode), bytecode) + (BytecodeHash::for_evm_bytecode(&bytecode).value(), bytecode) }) .collect(); let should_revert: Vec<_> = (0..10).map(|_| rng.gen::()).collect(); @@ -528,15 +515,14 @@ pub(crate) fn test_mock_emulator_with_delegate_call() { let account = &mut vm.rich_accounts[0]; // Deploy a native contract. - let native_contract = read_bytecode(INCREMENTING_CONTRACT_PATH); - let native_contract_abi = load_contract(INCREMENTING_CONTRACT_PATH); - let deploy_tx = account.get_deploy_tx(&native_contract, None, TxType::L2); + let deploy_tx = + account.get_deploy_tx(TestContract::increment_test().bytecode, None, TxType::L2); let (_, vm_result) = vm .vm .execute_transaction_with_bytecode_compression(deploy_tx.tx, true); assert!(!vm_result.result.is_failed(), "{:?}", vm_result.result); - let test_fn = native_contract_abi.function("testDelegateCall").unwrap(); + let test_fn = TestContract::increment_test().function("testDelegateCall"); // Delegate to the native contract from EVM. test_delegate_call(&mut vm, test_fn, evm_contract_address, deploy_tx.address); // Delegate to EVM from the native contract. @@ -600,15 +586,14 @@ pub(crate) fn test_mock_emulator_with_static_call() { let account = &mut vm.rich_accounts[0]; // Deploy a native contract. 
- let native_contract = read_bytecode(INCREMENTING_CONTRACT_PATH); - let native_contract_abi = load_contract(INCREMENTING_CONTRACT_PATH); - let deploy_tx = account.get_deploy_tx(&native_contract, None, TxType::L2); + let deploy_tx = + account.get_deploy_tx(TestContract::increment_test().bytecode, None, TxType::L2); let (_, vm_result) = vm .vm .execute_transaction_with_bytecode_compression(deploy_tx.tx, true); assert!(!vm_result.result.is_failed(), "{:?}", vm_result.result); - let test_fn = native_contract_abi.function("testStaticCall").unwrap(); + let test_fn = TestContract::increment_test().function("testStaticCall"); // Call to the native contract from EVM. test_static_call(&mut vm, test_fn, evm_contract_address, deploy_tx.address, 0); // Call to EVM from the native contract. diff --git a/core/lib/multivm/src/versions/testonly/gas_limit.rs b/core/lib/multivm/src/versions/testonly/gas_limit.rs index 5e31eb2b159d..789bfb97b217 100644 --- a/core/lib/multivm/src/versions/testonly/gas_limit.rs +++ b/core/lib/multivm/src/versions/testonly/gas_limit.rs @@ -1,4 +1,4 @@ -use zksync_test_account::Account; +use zksync_test_contracts::Account; use zksync_types::{fee::Fee, Execute}; use super::{tester::VmTesterBuilder, TestedVm}; diff --git a/core/lib/multivm/src/versions/testonly/get_used_contracts.rs b/core/lib/multivm/src/versions/testonly/get_used_contracts.rs index 9d0908807e21..7bfce535b44d 100644 --- a/core/lib/multivm/src/versions/testonly/get_used_contracts.rs +++ b/core/lib/multivm/src/versions/testonly/get_used_contracts.rs @@ -4,12 +4,12 @@ use assert_matches::assert_matches; use ethabi::Token; use zk_evm_1_3_1::zkevm_opcode_defs::decoding::{EncodingModeProduction, VmEncodingMode}; use zksync_system_constants::CONTRACT_DEPLOYER_ADDRESS; -use zksync_test_account::{Account, TxType}; -use zksync_types::{AccountTreeId, Address, Execute, StorageKey, H256, U256}; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256}; +use zksync_test_contracts::{Account, TestContract, TxType}; +use zksync_types::{ + bytecode::BytecodeHash, h256_to_u256, AccountTreeId, Address, Execute, StorageKey, H256, U256, +}; use super::{ - read_proxy_counter_contract, read_test_contract, tester::{VmTester, VmTesterBuilder}, TestedVm, }; @@ -32,9 +32,9 @@ pub(crate) fn test_get_used_contracts() { // create and push and execute some not-empty factory deps transaction with success status // to check that `get_decommitted_hashes()` updates - let contract_code = read_test_contract(); + let contract_code = TestContract::counter().bytecode; let account = &mut vm.rich_accounts[0]; - let tx = account.get_deploy_tx(&contract_code, None, TxType::L1 { serial_id: 0 }); + let tx = account.get_deploy_tx(contract_code, None, TxType::L1 { serial_id: 0 }); vm.vm.push_transaction(tx.tx.clone()); let result = vm.vm.execute(InspectExecutionMode::OneTx); assert!(!result.result.is_failed()); @@ -76,8 +76,7 @@ pub(crate) fn test_get_used_contracts() { assert!(res2.result.is_failed()); for factory_dep in tx2.execute.factory_deps { - let hash = hash_bytecode(&factory_dep); - let hash_to_u256 = h256_to_u256(hash); + let hash_to_u256 = BytecodeHash::for_bytecode(&factory_dep).value_u256(); assert!(vm.vm.known_bytecode_hashes().contains(&hash_to_u256)); assert!(!vm.vm.decommitted_hashes().contains(&hash_to_u256)); } @@ -86,7 +85,7 @@ pub(crate) fn test_get_used_contracts() { /// Counter test contract bytecode inflated by appending lots of `NOP` opcodes at the end. This leads to non-trivial /// decommitment cost (>10,000 gas). 
fn inflated_counter_bytecode() -> Vec { - let mut counter_bytecode = read_test_contract(); + let mut counter_bytecode = TestContract::counter().bytecode.to_vec(); counter_bytecode.extend( iter::repeat(EncodingModeProduction::nop_encoding().to_be_bytes()) .take(10_000) @@ -105,7 +104,7 @@ fn execute_proxy_counter( gas: u32, ) -> (VmTester, ProxyCounterData, VmExecutionResultAndLogs) { let counter_bytecode = inflated_counter_bytecode(); - let counter_bytecode_hash = h256_to_u256(hash_bytecode(&counter_bytecode)); + let counter_bytecode_hash = BytecodeHash::for_bytecode(&counter_bytecode).value_u256(); let counter_address = Address::repeat_byte(0x23); let mut vm = VmTesterBuilder::new() @@ -118,10 +117,9 @@ fn execute_proxy_counter( .with_rich_accounts(1) .build::(); - let (proxy_counter_bytecode, proxy_counter_abi) = read_proxy_counter_contract(); let account = &mut vm.rich_accounts[0]; let deploy_tx = account.get_deploy_tx( - &proxy_counter_bytecode, + TestContract::proxy_counter().bytecode, Some(&[Token::Address(counter_address)]), TxType::L2, ); @@ -137,7 +135,7 @@ fn execute_proxy_counter( "{decommitted_hashes:?}" ); - let increment = proxy_counter_abi.function("increment").unwrap(); + let increment = TestContract::proxy_counter().function("increment"); let increment_tx = account.get_l2_tx_for_execute( Execute { contract_address: Some(deploy_tx.address), @@ -181,8 +179,7 @@ pub(crate) fn test_get_used_contracts_with_out_of_gas_far_call() { // Execute another transaction with a successful far call and check that it's still charged for decommitment. let account = &mut vm.rich_accounts[0]; - let (_, proxy_counter_abi) = read_proxy_counter_contract(); - let increment = proxy_counter_abi.function("increment").unwrap(); + let increment = TestContract::proxy_counter().function("increment"); let increment_tx = account.get_l2_tx_for_execute( Execute { contract_address: Some(data.proxy_counter_address), diff --git a/core/lib/multivm/src/versions/testonly/is_write_initial.rs b/core/lib/multivm/src/versions/testonly/is_write_initial.rs index cac9be173639..9eb986549c52 100644 --- a/core/lib/multivm/src/versions/testonly/is_write_initial.rs +++ b/core/lib/multivm/src/versions/testonly/is_write_initial.rs @@ -1,7 +1,7 @@ -use zksync_test_account::TxType; +use zksync_test_contracts::{TestContract, TxType}; use zksync_types::get_nonce_key; -use super::{read_test_contract, tester::VmTesterBuilder, TestedVm}; +use super::{tester::VmTesterBuilder, TestedVm}; use crate::interface::{ storage::ReadStorage, InspectExecutionMode, TxExecutionMode, VmInterfaceExt, }; @@ -25,8 +25,9 @@ pub(crate) fn test_is_write_initial_behaviour() { .borrow_mut() .is_write_initial(&nonce_key)); - let contract_code = read_test_contract(); - let tx = account.get_deploy_tx(&contract_code, None, TxType::L2).tx; + let tx = account + .get_deploy_tx(TestContract::counter().bytecode, None, TxType::L2) + .tx; vm.vm.push_transaction(tx); vm.vm.execute(InspectExecutionMode::OneTx); diff --git a/core/lib/multivm/src/versions/testonly/l1_tx_execution.rs b/core/lib/multivm/src/versions/testonly/l1_tx_execution.rs index 37a2bf2bec20..4a39611dfd3c 100644 --- a/core/lib/multivm/src/versions/testonly/l1_tx_execution.rs +++ b/core/lib/multivm/src/versions/testonly/l1_tx_execution.rs @@ -2,17 +2,14 @@ use assert_matches::assert_matches; use ethabi::Token; use zksync_contracts::l1_messenger_contract; use zksync_system_constants::{BOOTLOADER_ADDRESS, L1_MESSENGER_ADDRESS}; -use zksync_test_account::TxType; +use zksync_test_contracts::{TestContract, 
TxType}; use zksync_types::{ - get_code_key, get_known_code_key, + get_code_key, get_known_code_key, h256_to_u256, l2_to_l1_log::{L2ToL1Log, UserL2ToL1Log}, - Address, Execute, ExecuteTransactionCommon, U256, + u256_to_h256, Address, Execute, ExecuteTransactionCommon, U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; -use super::{ - read_test_contract, tester::VmTesterBuilder, ContractToDeploy, TestedVm, BASE_SYSTEM_CONTRACTS, -}; +use super::{tester::VmTesterBuilder, ContractToDeploy, TestedVm, BASE_SYSTEM_CONTRACTS}; use crate::{ interface::{ ExecutionResult, InspectExecutionMode, TxExecutionMode, VmInterfaceExt, VmRevertReason, @@ -46,9 +43,12 @@ pub(crate) fn test_l1_tx_execution() { .with_rich_accounts(1) .build::(); - let contract_code = read_test_contract(); let account = &mut vm.rich_accounts[0]; - let deploy_tx = account.get_deploy_tx(&contract_code, None, TxType::L1 { serial_id: 1 }); + let deploy_tx = account.get_deploy_tx( + TestContract::counter().bytecode, + None, + TxType::L1 { serial_id: 1 }, + ); let tx_hash = deploy_tx.tx.hash(); let required_l2_to_l1_logs: Vec<_> = vec![L2ToL1Log { @@ -187,7 +187,7 @@ pub(crate) fn test_l1_tx_execution_high_gas_limit() { } pub(crate) fn test_l1_tx_execution_gas_estimation_with_low_gas() { - let counter_contract = read_test_contract(); + let counter_contract = TestContract::counter().bytecode.to_vec(); let counter_address = Address::repeat_byte(0x11); let mut vm = VmTesterBuilder::new() .with_empty_in_memory_storage() diff --git a/core/lib/multivm/src/versions/testonly/l2_blocks.rs b/core/lib/multivm/src/versions/testonly/l2_blocks.rs index 947d8b5859f8..0dfe600b73be 100644 --- a/core/lib/multivm/src/versions/testonly/l2_blocks.rs +++ b/core/lib/multivm/src/versions/testonly/l2_blocks.rs @@ -7,12 +7,12 @@ use assert_matches::assert_matches; use zksync_system_constants::REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_BYTE; use zksync_types::{ block::{pack_block_info, L2BlockHasher}, - AccountTreeId, Address, Execute, ExecuteTransactionCommon, L1BatchNumber, L1TxCommonData, - L2BlockNumber, ProtocolVersionId, StorageKey, Transaction, H256, SYSTEM_CONTEXT_ADDRESS, - SYSTEM_CONTEXT_BLOCK_INFO_POSITION, SYSTEM_CONTEXT_CURRENT_L2_BLOCK_INFO_POSITION, - SYSTEM_CONTEXT_CURRENT_TX_ROLLING_HASH_POSITION, U256, + h256_to_u256, u256_to_h256, AccountTreeId, Address, Execute, ExecuteTransactionCommon, + L1BatchNumber, L1TxCommonData, L2BlockNumber, ProtocolVersionId, StorageKey, Transaction, H256, + SYSTEM_CONTEXT_ADDRESS, SYSTEM_CONTEXT_BLOCK_INFO_POSITION, + SYSTEM_CONTEXT_CURRENT_L2_BLOCK_INFO_POSITION, SYSTEM_CONTEXT_CURRENT_TX_ROLLING_HASH_POSITION, + U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use super::{default_l1_batch, get_empty_storage, tester::VmTesterBuilder, TestedVm}; use crate::{ diff --git a/core/lib/multivm/src/versions/testonly/mod.rs b/core/lib/multivm/src/versions/testonly/mod.rs index 309c0edff583..38a09049b15a 100644 --- a/core/lib/multivm/src/versions/testonly/mod.rs +++ b/core/lib/multivm/src/versions/testonly/mod.rs @@ -11,18 +11,15 @@ use std::{collections::HashSet, rc::Rc}; -use ethabi::Contract; use once_cell::sync::Lazy; use zksync_contracts::{ - load_contract, read_bootloader_code, read_bytecode, read_zbin_bytecode, BaseSystemContracts, - SystemContractCode, + read_bootloader_code, read_zbin_bytecode, BaseSystemContracts, SystemContractCode, }; use zksync_types::{ - block::L2BlockHasher, fee_model::BatchFeeInput, get_code_key, get_is_account_key, - utils::storage_key_for_eth_balance, Address, L1BatchNumber, 
L2BlockNumber, L2ChainId, - ProtocolVersionId, U256, + block::L2BlockHasher, bytecode::BytecodeHash, fee_model::BatchFeeInput, get_code_key, + get_is_account_key, h256_to_u256, u256_to_h256, utils::storage_key_for_eth_balance, Address, + L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, U256, }; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256, u256_to_h256}; use zksync_vm_interface::{ pubdata::PubdataBuilder, L1BatchEnv, L2BlockEnv, SystemEnv, TxExecutionMode, }; @@ -62,59 +59,7 @@ static BASE_SYSTEM_CONTRACTS: Lazy<BaseSystemContracts> = Lazy::new(BaseSystemContracts::load_from_disk); fn get_empty_storage() -> InMemoryStorage { - InMemoryStorage::with_system_contracts(hash_bytecode) -} - -pub(crate) fn read_test_contract() -> Vec<u8> { - read_bytecode("etc/contracts-test-data/artifacts-zk/contracts/counter/counter.sol/Counter.json") -} - -fn get_complex_upgrade_abi() -> Contract { - load_contract( - "etc/contracts-test-data/artifacts-zk/contracts/complex-upgrade/complex-upgrade.sol/ComplexUpgrade.json" - ) -} - -fn read_complex_upgrade() -> Vec<u8> { - read_bytecode("etc/contracts-test-data/artifacts-zk/contracts/complex-upgrade/complex-upgrade.sol/ComplexUpgrade.json") -} - -fn read_precompiles_contract() -> Vec<u8> { - read_bytecode( - "etc/contracts-test-data/artifacts-zk/contracts/precompiles/precompiles.sol/Precompiles.json", - ) -} - -fn load_precompiles_contract() -> Contract { - load_contract( - "etc/contracts-test-data/artifacts-zk/contracts/precompiles/precompiles.sol/Precompiles.json", - ) -} - -fn read_proxy_counter_contract() -> (Vec<u8>, Contract) { - const PATH: &str = "etc/contracts-test-data/artifacts-zk/contracts/counter/proxy_counter.sol/ProxyCounter.json"; - (read_bytecode(PATH), load_contract(PATH)) -} - -fn read_nonce_holder_tester() -> Vec<u8> { - read_bytecode("etc/contracts-test-data/artifacts-zk/contracts/custom-account/nonce-holder-test.sol/NonceHolderTest.json") -} - -fn read_expensive_contract() -> (Vec<u8>, Contract) { - const PATH: &str = - "etc/contracts-test-data/artifacts-zk/contracts/expensive/expensive.sol/Expensive.json"; - (read_bytecode(PATH), load_contract(PATH)) -} - -fn read_many_owners_custom_account_contract() -> (Vec<u8>, Contract) { - let path = "etc/contracts-test-data/artifacts-zk/contracts/custom-account/many-owners-custom-account.sol/ManyOwnersCustomAccount.json"; - (read_bytecode(path), load_contract(path)) -} - -fn read_error_contract() -> Vec<u8> { - read_bytecode( - "etc/contracts-test-data/artifacts-zk/contracts/error/error.sol/SimpleRequire.json", - ) + InMemoryStorage::with_system_contracts() } @@ -123,17 +68,11 @@ pub(crate) fn read_max_depth_contract() -> Vec<u8> { ) } -pub(crate) fn read_simple_transfer_contract() -> Vec<u8> { - read_bytecode( - "etc/contracts-test-data/artifacts-zk/contracts/simple-transfer/simple-transfer.sol/SimpleTransfer.json", - ) -} - pub(crate) fn get_bootloader(test: &str) -> SystemContractCode { let bootloader_code = read_bootloader_code(test); - let bootloader_hash = hash_bytecode(&bootloader_code); + let bootloader_hash = BytecodeHash::for_bytecode(&bootloader_code).value(); SystemContractCode { - code: bytes_to_be_words(bootloader_code), + code: bootloader_code, hash: bootloader_hash, } } @@ -223,12 +162,13 @@ impl ContractToDeploy { pub fn insert(&self, storage: &mut InMemoryStorage) { let deployer_code_key = get_code_key(&self.address); - storage.set_value(deployer_code_key, hash_bytecode(&self.bytecode)); + let bytecode_hash =
BytecodeHash::for_bytecode(&self.bytecode).value(); + storage.set_value(deployer_code_key, bytecode_hash); if self.is_account { let is_account_key = get_is_account_key(&self.address); storage.set_value(is_account_key, u256_to_h256(1_u32.into())); } - storage.store_factory_dep(hash_bytecode(&self.bytecode), self.bytecode.clone()); + storage.store_factory_dep(bytecode_hash, self.bytecode.clone()); if self.is_funded { make_address_rich(storage, self.address); diff --git a/core/lib/multivm/src/versions/testonly/nonce_holder.rs b/core/lib/multivm/src/versions/testonly/nonce_holder.rs index 36f736c0bbe5..41d5202fbf15 100644 --- a/core/lib/multivm/src/versions/testonly/nonce_holder.rs +++ b/core/lib/multivm/src/versions/testonly/nonce_holder.rs @@ -1,7 +1,7 @@ -use zksync_test_account::Account; +use zksync_test_contracts::{Account, TestContract}; use zksync_types::{Execute, ExecuteTransactionCommon, Nonce}; -use super::{read_nonce_holder_tester, tester::VmTesterBuilder, ContractToDeploy, TestedVm}; +use super::{tester::VmTesterBuilder, ContractToDeploy, TestedVm}; use crate::interface::{ ExecutionResult, Halt, InspectExecutionMode, TxExecutionMode, TxRevertReason, VmInterfaceExt, VmRevertReason, @@ -80,7 +80,7 @@ pub(crate) fn test_nonce_holder<VM: TestedVm>() { let account_address = builder.rich_account(0).address; let mut vm = builder .with_custom_contracts(vec![ContractToDeploy::account( - read_nonce_holder_tester(), + TestContract::nonce_holder().bytecode.to_vec(), account_address, )]) .build::<VM>(); diff --git a/core/lib/multivm/src/versions/testonly/precompiles.rs b/core/lib/multivm/src/versions/testonly/precompiles.rs index 2e26dc134b07..e525bd627646 100644 --- a/core/lib/multivm/src/versions/testonly/precompiles.rs +++ b/core/lib/multivm/src/versions/testonly/precompiles.rs @@ -1,7 +1,8 @@ use circuit_sequencer_api_1_5_0::geometry_config::get_geometry_config; +use zksync_test_contracts::TestContract; use zksync_types::{Address, Execute}; -use super::{read_precompiles_contract, tester::VmTesterBuilder, TestedVm}; +use super::{tester::VmTesterBuilder, TestedVm}; use crate::{ interface::{InspectExecutionMode, TxExecutionMode, VmInterfaceExt}, versions::testonly::ContractToDeploy, @@ -10,7 +11,7 @@ use crate::{ pub(crate) fn test_keccak<VM: TestedVm>() { // Execute special transaction and check that at least 1000 keccak calls were made. - let contract = read_precompiles_contract(); + let contract = TestContract::precompiles_test().bytecode.to_vec(); let address = Address::repeat_byte(1); let mut vm = VmTesterBuilder::new() .with_empty_in_memory_storage() @@ -46,7 +47,7 @@ pub(crate) fn test_keccak<VM: TestedVm>() { pub(crate) fn test_sha256<VM: TestedVm>() { // Execute special transaction and check that at least 1000 `sha256` calls were made.
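// Editor's sketch (not part of this diff): these precompile tests derive the call count from the VM's circuit statistics together with the geometry config imported above; the exact field and method names below are assumptions for illustration only: // let result = vm.vm.execute(InspectExecutionMode::OneTx); // let sha_count = result.statistics.circuit_statistic.sha256 // * get_geometry_config().cycles_per_sha256_circuit as f32; // assert!(sha_count >= 1000.0, "{sha_count}");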
- let contract = read_precompiles_contract(); + let contract = TestContract::precompiles_test().bytecode.to_vec(); let address = Address::repeat_byte(1); let mut vm = VmTesterBuilder::new() .with_empty_in_memory_storage() diff --git a/core/lib/multivm/src/versions/testonly/refunds.rs b/core/lib/multivm/src/versions/testonly/refunds.rs index 874425fc435c..384a3edb7dbd 100644 --- a/core/lib/multivm/src/versions/testonly/refunds.rs +++ b/core/lib/multivm/src/versions/testonly/refunds.rs @@ -1,11 +1,8 @@ use ethabi::Token; -use zksync_test_account::TxType; +use zksync_test_contracts::{TestContract, TxType}; use zksync_types::{Address, Execute, U256}; -use super::{ - default_pubdata_builder, read_expensive_contract, read_test_contract, tester::VmTesterBuilder, - ContractToDeploy, TestedVm, -}; +use super::{default_pubdata_builder, tester::VmTesterBuilder, ContractToDeploy, TestedVm}; use crate::interface::{InspectExecutionMode, TxExecutionMode, VmInterfaceExt}; pub(crate) fn test_predetermined_refunded_gas<VM: TestedVm>() { @@ -19,10 +16,11 @@ .build::<VM>(); let l1_batch = vm.l1_batch_env.clone(); - let counter = read_test_contract(); let account = &mut vm.rich_accounts[0]; - let tx = account.get_deploy_tx(&counter, None, TxType::L2).tx; + let tx = account + .get_deploy_tx(TestContract::counter().bytecode, None, TxType::L2) + .tx; vm.vm.push_transaction(tx.clone()); let result = vm.vm.execute(InspectExecutionMode::OneTx); @@ -168,16 +166,16 @@ pub(crate) fn test_predetermined_refunded_gas<VM: TestedVm>() { pub(crate) fn test_negative_pubdata_for_transaction<VM: TestedVm>() { let expensive_contract_address = Address::repeat_byte(1); - let (expensive_contract_bytecode, expensive_contract) = read_expensive_contract(); - let expensive_function = expensive_contract.function("expensive").unwrap(); - let cleanup_function = expensive_contract.function("cleanUp").unwrap(); + let expensive_contract = TestContract::expensive(); + let expensive_function = expensive_contract.function("expensive"); + let cleanup_function = expensive_contract.function("cleanUp"); let mut vm = VmTesterBuilder::new() .with_empty_in_memory_storage() .with_execution_mode(TxExecutionMode::VerifyExecute) .with_rich_accounts(1) .with_custom_contracts(vec![ContractToDeploy::new( - expensive_contract_bytecode, + TestContract::expensive().bytecode.to_vec(), expensive_contract_address, )]) .build::<VM>(); diff --git a/core/lib/multivm/src/versions/testonly/require_eip712.rs b/core/lib/multivm/src/versions/testonly/require_eip712.rs index e789fbda2902..7a934c570aea 100644 --- a/core/lib/multivm/src/versions/testonly/require_eip712.rs +++ b/core/lib/multivm/src/versions/testonly/require_eip712.rs @@ -1,13 +1,12 @@ use ethabi::Token; use zksync_eth_signer::TransactionParameters; +use zksync_test_contracts::TestContract; use zksync_types::{ fee::Fee, l2::L2Tx, transaction_request::TransactionRequest, Address, Eip712Domain, Execute, L2ChainId, Nonce, Transaction, U256, }; -use super::{ - read_many_owners_custom_account_contract, tester::VmTesterBuilder, ContractToDeploy, TestedVm, -}; +use super::{tester::VmTesterBuilder, ContractToDeploy, TestedVm}; use crate::interface::{InspectExecutionMode, TxExecutionMode, VmInterfaceExt}; /// This test deploys 'buggy' account abstraction code, and then tries accessing it both with legacy @@ -21,7 +20,7 @@ pub(crate) fn test_require_eip712<VM: TestedVm>() { let aa_address = Address::repeat_byte(0x10); let beneficiary_address = Address::repeat_byte(0x20); - let (bytecode, contract) =
read_many_owners_custom_account_contract(); + let bytecode = TestContract::many_owners().bytecode.to_vec(); let mut vm = VmTesterBuilder::new() .with_empty_in_memory_storage() .with_custom_contracts(vec![ @@ -36,7 +35,7 @@ pub(crate) fn test_require_eip712<VM: TestedVm>() { // First, let's set the owners of the AA account to the `private_address`. // (so that messages signed by `private_address`, are authorized to act on behalf of the AA account). - let set_owners_function = contract.function("setOwners").unwrap(); + let set_owners_function = TestContract::many_owners().function("setOwners"); let encoded_input = set_owners_function .encode_input(&[Token::Array(vec![Token::Address(private_account.address)])]) .unwrap(); diff --git a/core/lib/multivm/src/versions/testonly/rollbacks.rs b/core/lib/multivm/src/versions/testonly/rollbacks.rs index cab3427899ea..9a825c08d49b 100644 --- a/core/lib/multivm/src/versions/testonly/rollbacks.rs +++ b/core/lib/multivm/src/versions/testonly/rollbacks.rs @@ -2,12 +2,12 @@ use std::collections::HashMap; use assert_matches::assert_matches; use ethabi::Token; -use zksync_contracts::{get_loadnext_contract, test_contracts::LoadnextContractExecutionParams}; -use zksync_test_account::{DeployContractsTx, TxType}; +use zksync_test_contracts::{ + DeployContractsTx, LoadnextContractExecutionParams, TestContract, TxType, +}; use zksync_types::{Address, Execute, Nonce, U256}; use super::{ - read_test_contract, tester::{TransactionTestInfo, TxModifier, VmTesterBuilder}, ContractToDeploy, TestedVm, }; @@ -21,10 +21,10 @@ pub(crate) fn test_vm_rollbacks<VM: TestedVm>() { .build::<VM>(); let mut account = vm.rich_accounts[0].clone(); - let counter = read_test_contract(); - let tx_0 = account.get_deploy_tx(&counter, None, TxType::L2).tx; - let tx_1 = account.get_deploy_tx(&counter, None, TxType::L2).tx; - let tx_2 = account.get_deploy_tx(&counter, None, TxType::L2).tx; + let counter = TestContract::counter().bytecode; + let tx_0 = account.get_deploy_tx(counter, None, TxType::L2).tx; + let tx_1 = account.get_deploy_tx(counter, None, TxType::L2).tx; + let tx_2 = account.get_deploy_tx(counter, None, TxType::L2).tx; let result_without_rollbacks = vm.execute_and_verify_txs(&vec![ TransactionTestInfo::new_processed(tx_0.clone(), false), @@ -87,16 +87,16 @@ pub(crate) fn test_vm_loadnext_rollbacks<VM: TestedVm>() { .build::<VM>(); let mut account = vm.rich_accounts[0].clone(); - let loadnext_contract = get_loadnext_contract(); + let loadnext_contract = TestContract::load_test(); let loadnext_constructor_data = &[Token::Uint(U256::from(100))]; let DeployContractsTx { tx: loadnext_deploy_tx, address, ..
} = account.get_deploy_tx_with_factory_deps( - &loadnext_contract.bytecode, + loadnext_contract.bytecode, Some(loadnext_constructor_data), - loadnext_contract.factory_deps.clone(), + loadnext_contract.factory_deps(), TxType::L2, ); @@ -105,7 +105,8 @@ pub(crate) fn test_vm_loadnext_rollbacks<VM: TestedVm>() { contract_address: Some(address), calldata: LoadnextContractExecutionParams { reads: 100, - writes: 100, + initial_writes: 100, + repeated_writes: 100, events: 100, hashes: 500, recursive_calls: 10, @@ -123,7 +124,8 @@ contract_address: Some(address), calldata: LoadnextContractExecutionParams { reads: 100, - writes: 100, + initial_writes: 100, + repeated_writes: 100, events: 100, hashes: 500, recursive_calls: 10, @@ -174,7 +176,7 @@ pub(crate) fn test_vm_loadnext_rollbacks<VM: TestedVm>() { } pub(crate) fn test_rollback_in_call_mode<VM: TestedVm>() { - let counter_bytecode = read_test_contract(); + let counter_bytecode = TestContract::counter().bytecode.to_vec(); let counter_address = Address::repeat_byte(1); let mut vm = VmTesterBuilder::new() diff --git a/core/lib/multivm/src/versions/testonly/secp256r1.rs b/core/lib/multivm/src/versions/testonly/secp256r1.rs index 37d428f82101..8a6077ab522f 100644 --- a/core/lib/multivm/src/versions/testonly/secp256r1.rs +++ b/core/lib/multivm/src/versions/testonly/secp256r1.rs @@ -1,7 +1,6 @@ use zk_evm_1_5_0::zkevm_opcode_defs::p256; use zksync_system_constants::P256VERIFY_PRECOMPILE_ADDRESS; -use zksync_types::{web3::keccak256, Execute, H256, U256}; -use zksync_utils::h256_to_u256; +use zksync_types::{h256_to_u256, web3::keccak256, Execute, H256, U256}; use super::{tester::VmTesterBuilder, TestedVm}; use crate::interface::{ExecutionResult, InspectExecutionMode, TxExecutionMode, VmInterfaceExt}; diff --git a/core/lib/multivm/src/versions/testonly/simple_execution.rs b/core/lib/multivm/src/versions/testonly/simple_execution.rs index 96239fb362d2..13dd7d617d82 100644 --- a/core/lib/multivm/src/versions/testonly/simple_execution.rs +++ b/core/lib/multivm/src/versions/testonly/simple_execution.rs @@ -1,5 +1,5 @@ use assert_matches::assert_matches; -use zksync_test_account::TxType; +use zksync_test_contracts::TxType; use super::{default_pubdata_builder, tester::VmTesterBuilder, TestedVm}; use crate::interface::{ExecutionResult, InspectExecutionMode, VmInterfaceExt}; diff --git a/core/lib/multivm/src/versions/testonly/storage.rs b/core/lib/multivm/src/versions/testonly/storage.rs index efe7be1edbd1..d57acc37944a 100644 --- a/core/lib/multivm/src/versions/testonly/storage.rs +++ b/core/lib/multivm/src/versions/testonly/storage.rs @@ -1,15 +1,12 @@ use ethabi::Token; -use zksync_contracts::{load_contract, read_bytecode}; +use zksync_test_contracts::TestContract; use zksync_types::{Address, Execute, U256}; use super::{tester::VmTesterBuilder, ContractToDeploy, TestedVm}; use crate::interface::{InspectExecutionMode, TxExecutionMode, VmInterfaceExt}; fn test_storage<VM: TestedVm>(first_tx_calldata: Vec<u8>, second_tx_calldata: Vec<u8>) -> u32 { - let bytecode = read_bytecode( - "etc/contracts-test-data/artifacts-zk/contracts/storage/storage.sol/StorageTester.json", - ); - + let bytecode = TestContract::storage_test().bytecode.to_vec(); let test_contract_address = Address::repeat_byte(1); // In this test, we aim to test whether a simple account interaction (without any fee logic) @@ -69,32 +66,23 @@ fn test_storage_one_tx<VM: TestedVm>(second_tx_calldata: Vec<u8>) -> u32 { } pub(crate) fn test_storage_behavior<VM: TestedVm>() { - let contract = load_contract(
"etc/contracts-test-data/artifacts-zk/contracts/storage/storage.sol/StorageTester.json", - ); + let contract = TestContract::storage_test(); // In all of the tests below we provide the first tx to ensure that the tracers will not include // the statistics from the start of the bootloader and will only include those for the transaction itself. let base_pubdata = test_storage_one_tx::(vec![]); - let simple_test_pubdata = test_storage_one_tx::( - contract - .function("simpleWrite") - .unwrap() - .encode_input(&[]) - .unwrap(), - ); + let simple_test_pubdata = + test_storage_one_tx::(contract.function("simpleWrite").encode_input(&[]).unwrap()); let resetting_write_pubdata = test_storage_one_tx::( contract .function("resettingWrite") - .unwrap() .encode_input(&[]) .unwrap(), ); let resetting_write_via_revert_pubdata = test_storage_one_tx::( contract .function("resettingWriteViaRevert") - .unwrap() .encode_input(&[]) .unwrap(), ); @@ -105,19 +93,15 @@ pub(crate) fn test_storage_behavior() { } pub(crate) fn test_transient_storage_behavior() { - let contract = load_contract( - "etc/contracts-test-data/artifacts-zk/contracts/storage/storage.sol/StorageTester.json", - ); + let contract = TestContract::storage_test(); let first_tstore_test = contract .function("testTransientStore") - .unwrap() .encode_input(&[]) .unwrap(); // Second transaction checks that, as expected, the transient storage is cleared after the first transaction. let second_tstore_test = contract .function("assertTValue") - .unwrap() .encode_input(&[Token::Uint(U256::zero())]) .unwrap(); diff --git a/core/lib/multivm/src/versions/testonly/tester/mod.rs b/core/lib/multivm/src/versions/testonly/tester/mod.rs index 716b9386235f..32499e409d82 100644 --- a/core/lib/multivm/src/versions/testonly/tester/mod.rs +++ b/core/lib/multivm/src/versions/testonly/tester/mod.rs @@ -1,7 +1,7 @@ use std::{collections::HashSet, fmt, rc::Rc}; use zksync_contracts::BaseSystemContracts; -use zksync_test_account::{Account, TxType}; +use zksync_test_contracts::{Account, TestContract, TxType}; use zksync_types::{ utils::{deployed_address_create, storage_key_for_eth_balance}, writes::StateDiffRecord, @@ -13,7 +13,7 @@ use zksync_vm_interface::{ }; pub(crate) use self::transaction_test_info::{ExpectedError, TransactionTestInfo, TxModifier}; -use super::{get_empty_storage, read_test_contract}; +use super::get_empty_storage; use crate::{ interface::{ storage::{InMemoryStorage, StoragePtr, StorageView}, @@ -39,9 +39,9 @@ pub(crate) struct VmTester { impl VmTester { pub(crate) fn deploy_test_contract(&mut self) { - let contract = read_test_contract(); + let contract = TestContract::counter().bytecode; let account = &mut self.rich_accounts[0]; - let tx = account.get_deploy_tx(&contract, None, TxType::L2).tx; + let tx = account.get_deploy_tx(contract, None, TxType::L2).tx; let nonce = tx.nonce().unwrap().0.into(); self.vm.push_transaction(tx); self.vm.execute(InspectExecutionMode::OneTx); diff --git a/core/lib/multivm/src/versions/testonly/tracing_execution_error.rs b/core/lib/multivm/src/versions/testonly/tracing_execution_error.rs index e87e6eb7c06a..14b4cb4873bb 100644 --- a/core/lib/multivm/src/versions/testonly/tracing_execution_error.rs +++ b/core/lib/multivm/src/versions/testonly/tracing_execution_error.rs @@ -1,43 +1,30 @@ -use zksync_contracts::load_contract; +use zksync_test_contracts::TestContract; use zksync_types::{Address, Execute}; -use super::{ - read_error_contract, tester::VmTesterBuilder, ContractToDeploy, TestedVm, BASE_SYSTEM_CONTRACTS, -}; +use 
super::{tester::VmTesterBuilder, ContractToDeploy, TestedVm, BASE_SYSTEM_CONTRACTS}; use crate::{ interface::{TxExecutionMode, TxRevertReason, VmRevertReason}, versions::testonly::tester::{ExpectedError, TransactionTestInfo}, }; -fn get_execute_error_calldata() -> Vec<u8> { - let test_contract = load_contract( - "etc/contracts-test-data/artifacts-zk/contracts/error/error.sol/SimpleRequire.json", - ); - let function = test_contract.function("require_short").unwrap(); - function - .encode_input(&[]) - .expect("failed to encode parameters") -} - pub(crate) fn test_tracing_of_execution_errors<VM: TestedVm>() { let contract_address = Address::repeat_byte(1); + let bytecode = TestContract::reverts_test().bytecode.to_vec(); let mut vm = VmTesterBuilder::new() .with_empty_in_memory_storage() .with_base_system_smart_contracts(BASE_SYSTEM_CONTRACTS.clone()) - .with_custom_contracts(vec![ContractToDeploy::new( - read_error_contract(), - contract_address, - )]) + .with_custom_contracts(vec![ContractToDeploy::new(bytecode, contract_address)]) .with_execution_mode(TxExecutionMode::VerifyExecute) .with_rich_accounts(1) .build::<VM>(); let account = &mut vm.rich_accounts[0]; + let require_fn = TestContract::reverts_test().function("require_short"); let tx = account.get_l2_tx_for_execute( Execute { contract_address: Some(contract_address), - calldata: get_execute_error_calldata(), + calldata: require_fn.encode_input(&[]).unwrap(), value: Default::default(), factory_deps: vec![], }, diff --git a/core/lib/multivm/src/versions/testonly/transfer.rs b/core/lib/multivm/src/versions/testonly/transfer.rs index 3572adba147c..1f504b382882 100644 --- a/core/lib/multivm/src/versions/testonly/transfer.rs +++ b/core/lib/multivm/src/versions/testonly/transfer.rs @@ -1,7 +1,6 @@ use ethabi::Token; -use zksync_contracts::{load_contract, read_bytecode}; -use zksync_types::{utils::storage_key_for_eth_balance, Address, Execute, U256}; -use zksync_utils::u256_to_h256; +use zksync_test_contracts::TestContract; +use zksync_types::{u256_to_h256, utils::storage_key_for_eth_balance, Address, Execute, U256}; use super::{ default_pubdata_builder, get_empty_storage, tester::VmTesterBuilder, ContractToDeploy, TestedVm, @@ -14,33 +13,22 @@ enum TestOptions { } fn test_send_or_transfer<VM: TestedVm>(test_option: TestOptions) { - let test_bytecode = read_bytecode( - "etc/contracts-test-data/artifacts-zk/contracts/transfer/transfer.sol/TransferTest.json", - ); - let recipient_bytecode = read_bytecode( - "etc/contracts-test-data/artifacts-zk/contracts/transfer/transfer.sol/Recipient.json", - ); - let test_abi = load_contract( - "etc/contracts-test-data/artifacts-zk/contracts/transfer/transfer.sol/TransferTest.json", - ); - + let test_contract = TestContract::transfer_test(); let test_contract_address = Address::repeat_byte(1); let recipient_address = Address::repeat_byte(2); let (value, calldata) = match test_option { TestOptions::Send(value) => ( value, - test_abi + test_contract .function("send") - .unwrap() .encode_input(&[Token::Address(recipient_address), Token::Uint(value)]) .unwrap(), ), TestOptions::Transfer(value) => ( value, - test_abi + test_contract .function("transfer") - .unwrap() .encode_input(&[Token::Address(recipient_address), Token::Uint(value)]) .unwrap(), ), @@ -57,8 +45,14 @@ fn test_send_or_transfer<VM: TestedVm>(test_option: TestOptions) { .with_execution_mode(TxExecutionMode::VerifyExecute) .with_rich_accounts(1) .with_custom_contracts(vec![ - ContractToDeploy::new(test_bytecode, test_contract_address), - ContractToDeploy::new(recipient_bytecode,
recipient_address), + ContractToDeploy::new( + TestContract::transfer_test().bytecode.to_vec(), + test_contract_address, + ), + ContractToDeploy::new( + TestContract::transfer_recipient().bytecode.to_vec(), + recipient_address, + ), ]) .build::<VM>(); @@ -98,28 +92,16 @@ pub(crate) fn test_send_and_transfer<VM: TestedVm>() { } fn test_reentrancy_protection_send_or_transfer<VM: TestedVm>(test_option: TestOptions) { - let test_bytecode = read_bytecode( - "etc/contracts-test-data/artifacts-zk/contracts/transfer/transfer.sol/TransferTest.json", - ); - let reentrant_recipient_bytecode = read_bytecode( - "etc/contracts-test-data/artifacts-zk/contracts/transfer/transfer.sol/ReentrantRecipient.json", - ); - let test_abi = load_contract( - "etc/contracts-test-data/artifacts-zk/contracts/transfer/transfer.sol/TransferTest.json", - ); - let reentrant_recipient_abi = load_contract( - "etc/contracts-test-data/artifacts-zk/contracts/transfer/transfer.sol/ReentrantRecipient.json", - ); - + let test_contract = TestContract::transfer_test(); + let reentrant_recipient_contract = TestContract::reentrant_recipient(); let test_contract_address = Address::repeat_byte(1); let reentrant_recipient_address = Address::repeat_byte(2); let (value, calldata) = match test_option { TestOptions::Send(value) => ( value, - test_abi + test_contract .function("send") - .unwrap() .encode_input(&[ Token::Address(reentrant_recipient_address), Token::Uint(value), @@ -128,9 +110,8 @@ fn test_reentrancy_protection_send_or_transfer(test_option: TestOp ), TestOptions::Transfer(value) => ( value, - test_abi + test_contract .function("transfer") - .unwrap() .encode_input(&[ Token::Address(reentrant_recipient_address), Token::Uint(value), @@ -144,8 +125,14 @@ fn test_reentrancy_protection_send_or_transfer(test_option: TestOp .with_execution_mode(TxExecutionMode::VerifyExecute) .with_rich_accounts(1) .with_custom_contracts(vec![ - ContractToDeploy::new(test_bytecode, test_contract_address), - ContractToDeploy::new(reentrant_recipient_bytecode, reentrant_recipient_address), + ContractToDeploy::new( + TestContract::transfer_test().bytecode.to_vec(), + test_contract_address, + ), + ContractToDeploy::new( + TestContract::reentrant_recipient().bytecode.to_vec(), + reentrant_recipient_address, + ), ]) .build::<VM>(); @@ -154,9 +141,8 @@ fn test_reentrancy_protection_send_or_transfer(test_option: TestOp let tx1 = account.get_l2_tx_for_execute( Execute { contract_address: Some(reentrant_recipient_address), - calldata: reentrant_recipient_abi + calldata: reentrant_recipient_contract .function("setX") - .unwrap() .encode_input(&[]) .unwrap(), value: U256::from(1), diff --git a/core/lib/multivm/src/versions/testonly/upgrade.rs b/core/lib/multivm/src/versions/testonly/upgrade.rs index 359f19faedb2..323abf280c7f 100644 --- a/core/lib/multivm/src/versions/testonly/upgrade.rs +++ b/core/lib/multivm/src/versions/testonly/upgrade.rs @@ -1,19 +1,16 @@ -use zksync_contracts::{deployer_contract, load_sys_contract, read_bytecode}; -use zksync_test_account::TxType; +use zksync_contracts::{deployer_contract, load_sys_contract}; +use zksync_test_contracts::{TestContract, TxType}; use zksync_types::{ + bytecode::BytecodeHash, ethabi::{Contract, Token}, - get_code_key, get_known_code_key, + get_code_key, get_known_code_key, h256_to_u256, protocol_upgrade::ProtocolUpgradeTxCommonData, - Address, Execute, ExecuteTransactionCommon, Transaction, COMPLEX_UPGRADER_ADDRESS, - CONTRACT_DEPLOYER_ADDRESS, CONTRACT_FORCE_DEPLOYER_ADDRESS, H256, + u256_to_h256, Address, Execute, ExecuteTransactionCommon,
Transaction, + COMPLEX_UPGRADER_ADDRESS, CONTRACT_DEPLOYER_ADDRESS, CONTRACT_FORCE_DEPLOYER_ADDRESS, H256, REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_BYTE, U256, }; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256, u256_to_h256}; -use super::{ - get_complex_upgrade_abi, get_empty_storage, read_complex_upgrade, read_test_contract, - tester::VmTesterBuilder, TestedVm, -}; +use super::{get_empty_storage, tester::VmTesterBuilder, TestedVm}; use crate::interface::{ ExecutionResult, Halt, InspectExecutionMode, TxExecutionMode, VmInterfaceExt, }; @@ -23,7 +20,7 @@ use crate::interface::{ /// - If present, this transaction must be the first one in block pub(crate) fn test_protocol_upgrade_is_first<VM: TestedVm>() { let mut storage = get_empty_storage(); - let bytecode_hash = hash_bytecode(&read_test_contract()); + let bytecode_hash = BytecodeHash::for_bytecode(TestContract::counter().bytecode).value(); storage.set_value(get_known_code_key(&bytecode_hash), u256_to_h256(1.into())); let mut vm = VmTesterBuilder::new() @@ -61,7 +58,11 @@ pub(crate) fn test_protocol_upgrade_is_first<VM: TestedVm>() { }]); let normal_l1_transaction = vm.rich_accounts[0] - .get_deploy_tx(&read_test_contract(), None, TxType::L1 { serial_id: 0 }) + .get_deploy_tx( + TestContract::counter().bytecode, + None, + TxType::L1 { serial_id: 0 }, + ) .tx; let expected_error = @@ -111,7 +112,7 @@ pub(crate) fn test_protocol_upgrade_is_first<VM: TestedVm>() { /// In this test we try to test how force deployments could be done via protocol upgrade transactions. pub(crate) fn test_force_deploy_upgrade<VM: TestedVm>() { let mut storage = get_empty_storage(); - let bytecode_hash = hash_bytecode(&read_test_contract()); + let bytecode_hash = BytecodeHash::for_bytecode(TestContract::counter().bytecode).value(); let known_code_key = get_known_code_key(&bytecode_hash); // It is generally expected that all the keys will be set as known prior to the protocol upgrade. storage.set_value(known_code_key, u256_to_h256(1.into())); @@ -156,8 +157,10 @@ pub(crate) fn test_force_deploy_upgrade<VM: TestedVm>() { /// Here we show how the work with the complex upgrader could be done. pub(crate) fn test_complex_upgrader<VM: TestedVm>() { let mut storage = get_empty_storage(); - let bytecode_hash = hash_bytecode(&read_complex_upgrade()); - let msg_sender_test_hash = hash_bytecode(&read_msg_sender_test()); + let upgrade_bytecode = TestContract::complex_upgrade().bytecode.to_vec(); + let bytecode_hash = BytecodeHash::for_bytecode(&upgrade_bytecode).value(); + let msg_sender_test_bytecode = TestContract::msg_sender_test().bytecode.to_vec(); + let msg_sender_test_hash = BytecodeHash::for_bytecode(&msg_sender_test_bytecode).value(); // Let's assume that the bytecode for the implementation of the complex upgrade // is already deployed in some address in user space let upgrade_impl = Address::repeat_byte(1); @@ -168,8 +171,8 @@ pub(crate) fn test_complex_upgrader<VM: TestedVm>() { u256_to_h256(1.into()), ); storage.set_value(account_code_key, bytecode_hash); - storage.store_factory_dep(bytecode_hash, read_complex_upgrade()); - storage.store_factory_dep(msg_sender_test_hash, read_msg_sender_test()); + storage.store_factory_dep(bytecode_hash, upgrade_bytecode); + storage.store_factory_dep(msg_sender_test_hash, msg_sender_test_bytecode); let mut vm = VmTesterBuilder::new() .with_storage(storage) @@ -268,16 +271,15 @@ fn get_forced_deploy_tx(deployment: &[ForceDeployment]) -> Transaction { // Returns the transaction that performs a complex protocol upgrade.
// The first param is the address of the implementation of the complex upgrade // in user-space, while the next 3 params are params of the implementation itself -// For the explanation for the parameters, please refer to: -// etc/contracts-test-data/complex-upgrade/complex-upgrade.sol +// For the explanation for the parameters, please refer to the contract source code. fn get_complex_upgrade_tx( implementation_address: Address, address1: Address, address2: Address, bytecode_hash: H256, ) -> Transaction { - let impl_contract = get_complex_upgrade_abi(); - let impl_function = impl_contract.function("someComplexUpgrade").unwrap(); + let impl_contract = TestContract::complex_upgrade(); + let impl_function = impl_contract.function("someComplexUpgrade"); let impl_calldata = impl_function .encode_input(&[ Token::Address(address1), @@ -315,10 +317,6 @@ fn get_complex_upgrade_tx( } } -fn read_msg_sender_test() -> Vec<u8> { - read_bytecode("etc/contracts-test-data/artifacts-zk/contracts/complex-upgrade/msg-sender.sol/MsgSenderTest.json") -} - fn get_complex_upgrader_abi() -> Contract { load_sys_contract("ComplexUpgrader") } diff --git a/core/lib/multivm/src/versions/vm_1_3_2/events.rs b/core/lib/multivm/src/versions/vm_1_3_2/events.rs index 7b1f03c8ac99..0e62312185a2 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/events.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_3_3::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub struct SolidityLikeEvent { @@ -135,7 +134,7 @@ pub fn merge_events(events: Vec<EventMessage>) -> Vec<SolidityLikeEvent> { .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // The events writer events where the first topic is the actual address of the event and the rest of the topics are real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_1_3_2/history_recorder.rs b/core/lib/multivm/src/versions/vm_1_3_2/history_recorder.rs index 2912fad2841d..bfd33b4b355e 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/history_recorder.rs @@ -1,8 +1,7 @@ use std::{collections::HashMap, fmt::Debug, hash::Hash}; use zk_evm_1_3_3::{aux_structures::Timestamp, vm_state::PrimitiveValue, zkevm_opcode_defs}; -use zksync_types::{StorageKey, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, U256}; use crate::interface::storage::{StoragePtr, WriteStorage}; diff --git a/core/lib/multivm/src/versions/vm_1_3_2/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_1_3_2/oracles/decommitter.rs index e9a85f8ba4b1..779fc126e72c 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/oracles/decommitter.rs @@ -6,12 +6,12 @@ use zk_evm_1_3_3::{ DecommittmentQuery, MemoryIndex, MemoryLocation, MemoryPage, MemoryQuery, Timestamp, }, }; -use zksync_types::U256; -use zksync_utils::{bytecode::bytecode_len_in_words, bytes_to_be_words,
u256_to_h256}; +use zksync_types::{u256_to_h256, U256}; use super::OracleWithHistory; use crate::{ interface::storage::{StoragePtr, WriteStorage}, + utils::bytecode::{bytecode_len_in_words, bytes_to_be_words}, vm_1_3_2::history_recorder::{HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory}, }; @@ -59,7 +59,7 @@ impl DecommitterOracle .load_factory_dep(u256_to_h256(hash)) .expect("Trying to decode unexisting hash"); - let value = bytes_to_be_words(value); + let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_1_3_2/oracles/storage.rs b/core/lib/multivm/src/versions/vm_1_3_2/oracles/storage.rs index ac4cc3df1706..e3614cbd471c 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/oracles/storage.rs @@ -6,10 +6,9 @@ use zk_evm_1_3_3::{ zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, }; use zksync_types::{ - utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, StorageLogKind, - BOOTLOADER_ADDRESS, U256, + u256_to_h256, utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, + StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use super::OracleWithHistory; use crate::{ diff --git a/core/lib/multivm/src/versions/vm_1_3_2/oracles/tracer/utils.rs b/core/lib/multivm/src/versions/vm_1_3_2/oracles/tracer/utils.rs index 86ed02365a94..ef2d4f0b5769 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/oracles/tracer/utils.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/oracles/tracer/utils.rs @@ -9,8 +9,7 @@ use zksync_system_constants::{ ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, SHA256_PRECOMPILE_ADDRESS, }; -use zksync_types::U256; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, U256}; use crate::vm_1_3_2::{ history_recorder::HistoryMode, diff --git a/core/lib/multivm/src/versions/vm_1_3_2/oracles/tracer/validation.rs b/core/lib/multivm/src/versions/vm_1_3_2/oracles/tracer/validation.rs index f52b6b8940db..fbb6795d89a3 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/oracles/tracer/validation.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/oracles/tracer/validation.rs @@ -11,13 +11,14 @@ use zksync_system_constants::{ KECCAK256_PRECOMPILE_ADDRESS, L2_BASE_TOKEN_ADDRESS, MSG_VALUE_SIMULATOR_ADDRESS, SYSTEM_CONTEXT_ADDRESS, }; -use zksync_types::{get_code_key, web3::keccak256, AccountTreeId, Address, StorageKey, H256, U256}; -use zksync_utils::{ - be_bytes_to_safe_address, h256_to_account_address, u256_to_account_address, u256_to_h256, +use zksync_types::{ + get_code_key, h256_to_address, u256_to_address, u256_to_h256, web3::keccak256, AccountTreeId, + Address, StorageKey, H256, U256, }; use crate::{ interface::storage::{StoragePtr, WriteStorage}, + utils::bytecode::be_bytes_to_safe_address, vm_1_3_2::{ errors::VmRevertReasonParsingResult, history_recorder::HistoryMode, @@ -242,7 +243,7 @@ impl ValidationTracer { // The user is allowed to touch its own slots or slots semantically related to him. 
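// Editor's note: per the check below, a slot counts as the user's own if the touched contract is the user account itself, if the slot key equals the user's address (the common mapping-keyed-by-address layout, e.g. a token balance slot), or if the slot was previously registered in `auxilary_allowed_slots` as a trusted slot.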
let valid_users_slot = address == self.user_address - || u256_to_account_address(&key) == self.user_address + || u256_to_address(&key) == self.user_address || self.auxilary_allowed_slots.contains(&u256_to_h256(key)); if valid_users_slot { return true; @@ -309,7 +310,7 @@ impl ValidationTracer { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -376,7 +377,7 @@ impl ValidationTracer { let value = self.storage.borrow_mut().read_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_account_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git a/core/lib/multivm/src/versions/vm_1_3_2/pubdata_utils.rs b/core/lib/multivm/src/versions/vm_1_3_2/pubdata_utils.rs index d88ee70991bc..3c10bd8c48be 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/pubdata_utils.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/pubdata_utils.rs @@ -3,10 +3,10 @@ use std::collections::HashMap; use circuit_sequencer_api_1_3_3::sort_storage_access::sort_storage_access_queries; use zk_evm_1_3_3::aux_structures::Timestamp; use zksync_types::{StorageKey, PUBLISH_BYTECODE_OVERHEAD, SYSTEM_CONTEXT_ADDRESS}; -use zksync_utils::bytecode::bytecode_len_in_bytes; use crate::{ interface::{storage::WriteStorage, VmEvent}, + utils::bytecode::bytecode_len_in_bytes, vm_1_3_2::{history_recorder::HistoryMode, oracles::storage::storage_key_of_log, VmInstance}, }; @@ -30,9 +30,7 @@ impl VmInstance { let published_bytecode_bytes: u32 = VmEvent::extract_published_bytecodes(&events) .iter() - .map(|bytecodehash| { - bytecode_len_in_bytes(*bytecodehash) as u32 + PUBLISH_BYTECODE_OVERHEAD - }) + .map(|bytecode_hash| bytecode_len_in_bytes(bytecode_hash) + PUBLISH_BYTECODE_OVERHEAD) .sum(); storage_writes_pubdata_published diff --git a/core/lib/multivm/src/versions/vm_1_3_2/refunds.rs b/core/lib/multivm/src/versions/vm_1_3_2/refunds.rs index 163992516d27..b0d70c3522c4 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/refunds.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/refunds.rs @@ -1,6 +1,5 @@ use zk_evm_1_3_3::aux_structures::Timestamp; -use zksync_types::U256; -use zksync_utils::ceil_div_u256; +use zksync_types::{ceil_div_u256, U256}; use crate::{ interface::storage::WriteStorage, diff --git a/core/lib/multivm/src/versions/vm_1_3_2/test_utils.rs b/core/lib/multivm/src/versions/vm_1_3_2/test_utils.rs index 34c70e0f9c45..ac6ce7fcdfcf 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/test_utils.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/test_utils.rs @@ -12,13 +12,13 @@ use itertools::Itertools; use zk_evm_1_3_3::{aux_structures::Timestamp, vm_state::VmLocalState}; use zksync_contracts::deployer_contract; use zksync_types::{ + address_to_h256, + bytecode::BytecodeHash, ethabi::{Address, Token}, + h256_to_address, u256_to_h256, web3::keccak256, Execute, Nonce, StorageKey, StorageValue, CONTRACT_DEPLOYER_ADDRESS, H256, U256, }; -use zksync_utils::{ - address_to_h256, bytecode::hash_bytecode, h256_to_account_address, u256_to_h256, -}; use crate::interface::storage::WriteStorage; /// The tests here help us with the testing the VM @@ -145,7 +145,7 @@ pub fn get_create_execute(code: &[u8], calldata: &[u8]) -> Execute { let params = [ Token::FixedBytes(vec![0u8; 
32]), - Token::FixedBytes(hash_bytecode(code).0.to_vec()), + Token::FixedBytes(BytecodeHash::for_bytecode(code).value().0.to_vec()), Token::Bytes(calldata.to_vec()), ]; let calldata = contract_function @@ -174,7 +174,7 @@ pub fn get_create_zksync_address(sender_address: Address, sender_nonce: Nonce) - let hash = keccak256(&digest); - h256_to_account_address(&H256(hash)) + h256_to_address(&H256(hash)) } pub fn verify_required_storage( diff --git a/core/lib/multivm/src/versions/vm_1_3_2/transaction_data.rs b/core/lib/multivm/src/versions/vm_1_3_2/transaction_data.rs index 0285320daa30..c2dfe97ed076 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/transaction_data.rs @@ -1,19 +1,23 @@ use zk_evm_1_3_3::zkevm_opcode_defs::system_params::MAX_TX_ERGS_LIMIT; use zksync_types::{ + address_to_h256, + bytecode::BytecodeHash, + ceil_div_u256, ethabi::{encode, Address, Token}, fee::encoding_len, + h256_to_u256, l1::is_l1_tx_type, l2::TransactionType, ExecuteTransactionCommon, Transaction, MAX_L2_TX_GAS_LIMIT, U256, }; -use zksync_utils::{ - address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, ceil_div_u256, h256_to_u256, -}; use super::vm_with_bootloader::MAX_TXS_IN_BLOCK; -use crate::vm_1_3_2::vm_with_bootloader::{ - BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, - MAX_GAS_PER_PUBDATA_BYTE, +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_1_3_2::vm_with_bootloader::{ + BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, + MAX_GAS_PER_PUBDATA_BYTE, + }, }; // This structure represents the data that is used by @@ -191,16 +195,13 @@ impl TransactionData { let factory_deps_hashes = self .factory_deps .iter() - .map(|dep| h256_to_u256(hash_bytecode(dep))) + .map(|dep| BytecodeHash::for_bytecode(dep).value_u256()) .collect(); self.abi_encode_with_custom_factory_deps(factory_deps_hashes) } pub fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn effective_gas_price_per_pubdata(&self, block_gas_price_per_pubdata: u32) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_1_3_2/utils.rs b/core/lib/multivm/src/versions/vm_1_3_2/utils.rs index 7870b1ff7443..5c72ba204d89 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/utils.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/utils.rs @@ -7,8 +7,7 @@ use zk_evm_1_3_3::{ }; use zksync_contracts::BaseSystemContracts; use zksync_system_constants::ZKPORTER_IS_AVAILABLE; -use zksync_types::{Address, StorageLogKind, H160, MAX_L2_TX_GAS_LIMIT, U256}; -use zksync_utils::h256_to_u256; +use zksync_types::{h256_to_u256, Address, StorageLogKind, H160, MAX_L2_TX_GAS_LIMIT, U256}; use crate::{ interface::storage::WriteStorage, diff --git a/core/lib/multivm/src/versions/vm_1_3_2/vm.rs b/core/lib/multivm/src/versions/vm_1_3_2/vm.rs index d9768652c2f3..05902b736fbd 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/vm.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/vm.rs @@ -1,7 +1,6 @@ use std::{collections::HashSet, rc::Rc}; -use zksync_types::Transaction; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256}; +use zksync_types::{bytecode::BytecodeHash, h256_to_u256, Transaction}; use zksync_vm_interface::{pubdata::PubdataBuilder, InspectExecutionMode}; use crate::{ @@ -119,7 +118,7 @@ impl VmInterface for Vm { let mut deps_hashes = HashSet::with_capacity(deps.len()); let mut bytecode_hashes = vec![]; let 
filtered_deps = deps.iter().filter_map(|bytecode| { - let bytecode_hash = hash_bytecode(bytecode); + let bytecode_hash = BytecodeHash::for_bytecode(bytecode).value(); let is_known = !deps_hashes.insert(bytecode_hash) || self.vm.is_bytecode_known(&bytecode_hash); diff --git a/core/lib/multivm/src/versions/vm_1_3_2/vm_with_bootloader.rs b/core/lib/multivm/src/versions/vm_1_3_2/vm_with_bootloader.rs index fd4d483fba5e..ca9ba097d472 100644 --- a/core/lib/multivm/src/versions/vm_1_3_2/vm_with_bootloader.rs +++ b/core/lib/multivm/src/versions/vm_1_3_2/vm_with_bootloader.rs @@ -15,16 +15,14 @@ use zk_evm_1_3_3::{ use zksync_contracts::BaseSystemContracts; use zksync_system_constants::MAX_L2_TX_GAS_LIMIT; use zksync_types::{ - fee_model::L1PeggedBatchFeeModelInput, l1::is_l1_tx_type, Address, Transaction, - BOOTLOADER_ADDRESS, L1_GAS_PER_PUBDATA_BYTE, MAX_NEW_FACTORY_DEPS, U256, -}; -use zksync_utils::{ - address_to_u256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256, misc::ceil_div, + address_to_u256, bytecode::BytecodeHash, fee_model::L1PeggedBatchFeeModelInput, h256_to_u256, + l1::is_l1_tx_type, Address, Transaction, BOOTLOADER_ADDRESS, L1_GAS_PER_PUBDATA_BYTE, + MAX_NEW_FACTORY_DEPS, U256, }; use crate::{ interface::{storage::WriteStorage, CompressedBytecodeInfo, L1BatchEnv}, - utils::bytecode, + utils::{bytecode, bytecode::bytes_to_be_words}, vm_1_3_2::{ bootloader_state::BootloaderState, history_recorder::HistoryMode, @@ -84,8 +82,11 @@ pub(crate) fn eth_price_per_pubdata_byte(l1_gas_price: u64) -> u64 { pub fn base_fee_to_gas_per_pubdata(l1_gas_price: u64, base_fee: u64) -> u64 { let eth_price_per_pubdata_byte = eth_price_per_pubdata_byte(l1_gas_price); - - ceil_div(eth_price_per_pubdata_byte, base_fee) + if eth_price_per_pubdata_byte == 0 { + 0 + } else { + eth_price_per_pubdata_byte.div_ceil(base_fee) + } } pub(crate) fn derive_base_fee_and_gas_per_pubdata( @@ -102,7 +103,7 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while compensating us for it. 
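// Editor's worked example (illustrative numbers, not part of this diff): with eth_price_per_pubdata_byte = 500_000 and MAX_GAS_PER_PUBDATA_BYTE = 20, the pubdata-driven floor is ceil(500_000 / 20) = 25_000, so base_fee = max(fair_l2_gas_price, 25_000); at that base fee, spending MAX_GAS_PER_PUBDATA_BYTE gas always covers at least one published pubdata byte.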
let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(eth_price_per_pubdata_byte, MAX_GAS_PER_PUBDATA_BYTE), + eth_price_per_pubdata_byte.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); ( @@ -391,7 +392,7 @@ pub fn init_vm_inner( oracle_tools.decommittment_processor.populate( vec![( h256_to_u256(base_system_contract.default_aa.hash), - base_system_contract.default_aa.code.clone(), + bytes_to_be_words(&base_system_contract.default_aa.code), )], Timestamp(0), ); @@ -399,7 +400,7 @@ pub fn init_vm_inner( oracle_tools.memory.populate( vec![( BOOTLOADER_CODE_PAGE, - base_system_contract.bootloader.code.clone(), + bytes_to_be_words(&base_system_contract.bootloader.code), )], Timestamp(0), ); @@ -517,7 +518,7 @@ pub fn push_raw_transaction_to_bootloader_memory PrimitiveValue { } pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { - let bytecode_hash = hash_bytecode(&bytecode); - let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_hash = BytecodeHash::for_bytecode(&bytecode).value_u256(); + let bytecode_words = bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } diff --git a/core/lib/multivm/src/versions/vm_1_4_1/bootloader_state/l2_block.rs b/core/lib/multivm/src/versions/vm_1_4_1/bootloader_state/l2_block.rs index d3c428ab282b..a5157e323408 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/bootloader_state/l2_block.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/bootloader_state/l2_block.rs @@ -1,7 +1,6 @@ use std::cmp::Ordering; -use zksync_types::{L2BlockNumber, H256}; -use zksync_utils::concat_and_hash; +use zksync_types::{web3::keccak256_concat, L2BlockNumber, H256}; use crate::{ interface::{L2Block, L2BlockEnv}, @@ -53,7 +52,7 @@ impl BootloaderL2Block { } fn update_rolling_hash(&mut self, tx_hash: H256) { - self.txs_rolling_hash = concat_and_hash(self.txs_rolling_hash, tx_hash) + self.txs_rolling_hash = keccak256_concat(self.txs_rolling_hash, tx_hash) } pub(crate) fn interim_version(&self) -> BootloaderL2Block { diff --git a/core/lib/multivm/src/versions/vm_1_4_1/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_1_4_1/bootloader_state/utils.rs index 1acf75b27e1b..33b15e68005b 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/bootloader_state/utils.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/bootloader_state/utils.rs @@ -1,5 +1,4 @@ -use zksync_types::{ethabi, U256}; -use zksync_utils::{bytes_to_be_words, h256_to_u256}; +use zksync_types::{ethabi, h256_to_u256, U256}; use super::tx::BootloaderTx; use crate::{ @@ -25,8 +24,7 @@ pub(super) fn get_memory_for_compressed_bytecodes( .iter() .flat_map(bytecode::encode_call) .collect(); - - bytes_to_be_words(memory_addition) + bytecode::bytes_to_be_words(&memory_addition) } #[allow(clippy::too_many_arguments)] diff --git a/core/lib/multivm/src/versions/vm_1_4_1/implementation/bytecode.rs b/core/lib/multivm/src/versions/vm_1_4_1/implementation/bytecode.rs index 5f24f2465a32..0278e239522b 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/implementation/bytecode.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/implementation/bytecode.rs @@ -1,6 +1,5 @@ use itertools::Itertools; -use zksync_types::U256; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words}; +use zksync_types::{bytecode::BytecodeHash, U256}; use crate::{ interface::{ @@ -25,18 +24,15 @@ impl Vm { .storage .get_ptr() .borrow_mut() - .is_bytecode_known(&hash_bytecode(&info.original)) + 
.is_bytecode_known(&BytecodeHash::for_bytecode(&info.original).value()) }) } } /// Converts bytecode to tokens and hashes it. pub(crate) fn bytecode_to_factory_dep(bytecode: Vec<u8>) -> (U256, Vec<U256>) { - let bytecode_hash = hash_bytecode(&bytecode); - let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_hash = BytecodeHash::for_bytecode(&bytecode).value_u256(); + let bytecode_words = bytecode::bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } @@ -49,7 +45,11 @@ pub(crate) fn compress_bytecodes<S: WriteStorage>( .enumerate() .sorted_by_key(|(_idx, dep)| *dep) .dedup_by(|x, y| x.1 == y.1) - .filter(|(_idx, dep)| !storage.borrow_mut().is_bytecode_known(&hash_bytecode(dep))) + .filter(|(_idx, dep)| { + !storage + .borrow_mut() + .is_bytecode_known(&BytecodeHash::for_bytecode(dep).value()) + }) .sorted_by_key(|(idx, _dep)| *idx) .filter_map(|(_idx, dep)| bytecode::compress(dep.clone()).ok()) .collect() diff --git a/core/lib/multivm/src/versions/vm_1_4_1/old_vm/events.rs b/core/lib/multivm/src/versions/vm_1_4_1/old_vm/events.rs index ffa4b4d50b8e..bc5befe3810c 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/old_vm/events.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/old_vm/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_4_1::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub(crate) struct SolidityLikeEvent { @@ -135,7 +134,7 @@ pub(crate) fn merge_events(events: Vec<EventMessage>) -> Vec<SolidityLikeEvent> .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // The events writer events where the first topic is the actual address of the event and the rest of the topics are real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_1_4_1/old_vm/history_recorder.rs b/core/lib/multivm/src/versions/vm_1_4_1/old_vm/history_recorder.rs index c9d899742202..bfd7b9130f50 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/old_vm/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/old_vm/history_recorder.rs @@ -5,8 +5,7 @@ use zk_evm_1_4_1::{ vm_state::PrimitiveValue, zkevm_opcode_defs::{self}, }; -use zksync_types::{StorageKey, H256, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, H256, U256}; use crate::interface::storage::{StoragePtr, WriteStorage}; diff --git a/core/lib/multivm/src/versions/vm_1_4_1/old_vm/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_1_4_1/old_vm/oracles/decommitter.rs index 636a4058a037..0fe3efa30b68 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/old_vm/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/old_vm/oracles/decommitter.rs @@ -6,16 +6,17 @@ use zk_evm_1_4_1::{ DecommittmentQuery, MemoryIndex, MemoryLocation, MemoryPage, MemoryQuery, Timestamp, }, }; -use zksync_types::U256; -use zksync_utils::{bytecode::bytecode_len_in_words, bytes_to_be_words, u256_to_h256}; +use zksync_types::{u256_to_h256, U256}; use super::OracleWithHistory; use crate::{
interface::storage::{ReadStorage, StoragePtr}, + utils::bytecode::{bytecode_len_in_words, bytes_to_be_words}, vm_1_4_1::old_vm::history_recorder::{ HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory, }, }; + /// The main job of the DecommiterOracle is to implement the DecommittmentProcessor trait - that is /// used by the VM to 'load' bytecodes into memory. #[derive(Debug)] @@ -60,7 +61,7 @@ impl DecommitterOracle { .load_factory_dep(u256_to_h256(hash)) .expect("Trying to decode unexisting hash"); - let value = bytes_to_be_words(value); + let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_1_4_1/oracles/storage.rs b/core/lib/multivm/src/versions/vm_1_4_1/oracles/storage.rs index 3debfd1ca627..921e9b81f71f 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/oracles/storage.rs @@ -6,6 +6,7 @@ use zk_evm_1_4_1::{ zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, }; use zksync_types::{ + u256_to_h256, utils::storage_key_for_eth_balance, writes::{ compression::compress_with_best_strategy, BYTES_PER_DERIVED_KEY, @@ -13,7 +14,6 @@ use zksync_types::{ }, AccountTreeId, Address, StorageKey, StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use crate::{ glue::GlueInto, diff --git a/core/lib/multivm/src/versions/vm_1_4_1/tracers/pubdata_tracer.rs b/core/lib/multivm/src/versions/vm_1_4_1/tracers/pubdata_tracer.rs index 6f927c5c99a8..a51c5ce46197 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/tracers/pubdata_tracer.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/tracers/pubdata_tracer.rs @@ -5,8 +5,10 @@ use zk_evm_1_4_1::{ aux_structures::Timestamp, tracing::{BeforeExecutionData, VmLocalStateData}, }; -use zksync_types::{writes::StateDiffRecord, AccountTreeId, StorageKey, L1_MESSENGER_ADDRESS}; -use zksync_utils::{h256_to_u256, u256_to_bytes_be, u256_to_h256}; +use zksync_types::{ + h256_to_u256, u256_to_h256, writes::StateDiffRecord, AccountTreeId, StorageKey, + L1_MESSENGER_ADDRESS, +}; use crate::{ interface::{ @@ -16,9 +18,12 @@ use crate::{ L1BatchEnv, VmEvent, VmExecutionMode, }, tracers::dynamic::vm_1_4_1::DynTracer, - utils::events::{ - extract_bytecode_publication_requests_from_l1_messenger, - extract_l2tol1logs_from_l1_messenger, + utils::{ + bytecode::be_words_to_bytes, + events::{ + extract_bytecode_publication_requests_from_l1_messenger, + extract_l2tol1logs_from_l1_messenger, + }, }, vm_1_4_1::{ bootloader_state::{utils::apply_pubdata_to_memory, BootloaderState}, @@ -100,15 +105,13 @@ impl PubdataTracer { bytecode_publication_requests .iter() .map(|bytecode_publication_request| { - state + let bytecode_words = state .decommittment_processor .known_bytecodes .inner() .get(&h256_to_u256(bytecode_publication_request.bytecode_hash)) - .unwrap() - .iter() - .flat_map(u256_to_bytes_be) - .collect() + .unwrap(); + be_words_to_bytes(bytecode_words) }) .collect() } diff --git a/core/lib/multivm/src/versions/vm_1_4_1/tracers/refunds.rs b/core/lib/multivm/src/versions/vm_1_4_1/tracers/refunds.rs index 2586d8d7f873..dc945e183a8f 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/tracers/refunds.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/tracers/refunds.rs @@ -8,8 +8,9 @@ use zk_evm_1_4_1::{ zkevm_opcode_defs::system_params::L1_MESSAGE_PUBDATA_BYTES, }; use zksync_system_constants::{PUBLISH_BYTECODE_OVERHEAD, SYSTEM_CONTEXT_ADDRESS}; -use 
zksync_types::{l2_to_l1_log::L2ToL1Log, L1BatchNumber, H256, U256}; -use zksync_utils::{bytecode::bytecode_len_in_bytes, ceil_div_u256, u256_to_h256}; +use zksync_types::{ + ceil_div_u256, l2_to_l1_log::L2ToL1Log, u256_to_h256, L1BatchNumber, H256, U256, +}; use crate::{ interface::{ @@ -18,6 +19,7 @@ use crate::{ L1BatchEnv, Refunds, VmEvent, }, tracers::dynamic::vm_1_4_1::DynTracer, + utils::bytecode::bytecode_len_in_bytes, vm_1_4_1::{ bootloader_state::BootloaderState, constants::{BOOTLOADER_HEAP_PAGE, OPERATOR_REFUNDS_OFFSET, TX_GAS_LIMIT_OFFSET}, @@ -348,7 +350,7 @@ pub(crate) fn pubdata_published( let published_bytecode_bytes: u32 = VmEvent::extract_published_bytecodes(&events) .iter() - .map(|bytecodehash| bytecode_len_in_bytes(*bytecodehash) as u32 + PUBLISH_BYTECODE_OVERHEAD) + .map(|bytecode_hash| bytecode_len_in_bytes(bytecode_hash) + PUBLISH_BYTECODE_OVERHEAD) .sum(); storage_writes_pubdata_published diff --git a/core/lib/multivm/src/versions/vm_1_4_1/tracers/utils.rs b/core/lib/multivm/src/versions/vm_1_4_1/tracers/utils.rs index 7b24e482b72d..536ea79e22f9 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/tracers/utils.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/tracers/utils.rs @@ -9,8 +9,7 @@ use zksync_system_constants::{ ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, SHA256_PRECOMPILE_ADDRESS, }; -use zksync_types::U256; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, U256}; use crate::vm_1_4_1::{ constants::{ diff --git a/core/lib/multivm/src/versions/vm_1_4_1/types/internals/pubdata.rs b/core/lib/multivm/src/versions/vm_1_4_1/types/internals/pubdata.rs index c1ca93152a03..f938696297b5 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/types/internals/pubdata.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/types/internals/pubdata.rs @@ -64,7 +64,7 @@ impl PubdataInput { #[cfg(test)] mod tests { use zksync_system_constants::{ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS}; - use zksync_utils::u256_to_h256; + use zksync_types::u256_to_h256; use super::*; diff --git a/core/lib/multivm/src/versions/vm_1_4_1/types/internals/transaction_data.rs b/core/lib/multivm/src/versions/vm_1_4_1/types/internals/transaction_data.rs index f7384da76d0d..af9a93f647a2 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/types/internals/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/types/internals/transaction_data.rs @@ -1,19 +1,24 @@ use std::convert::TryInto; use zksync_types::{ + address_to_h256, + bytecode::BytecodeHash, ethabi::{encode, Address, Token}, fee::{encoding_len, Fee}, + h256_to_u256, l1::is_l1_tx_type, l2::{L2Tx, TransactionType}, transaction_request::{PaymasterParams, TransactionRequest}, web3::Bytes, Execute, ExecuteTransactionCommon, L2ChainId, L2TxCommonData, Nonce, Transaction, H256, U256, }; -use zksync_utils::{address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256}; -use crate::vm_1_4_1::{ - constants::{L1_TX_TYPE, MAX_GAS_PER_PUBDATA_BYTE, PRIORITY_TX_MAX_GAS_LIMIT}, - utils::overhead::derive_overhead, +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_1_4_1::{ + constants::{L1_TX_TYPE, MAX_GAS_PER_PUBDATA_BYTE, PRIORITY_TX_MAX_GAS_LIMIT}, + utils::overhead::derive_overhead, + }, }; /// This structure represents the data that is used by @@ -190,16 +195,13 @@ impl TransactionData { let factory_deps_hashes = self .factory_deps .iter() - .map(|dep| h256_to_u256(hash_bytecode(dep))) + .map(|dep| BytecodeHash::for_bytecode(dep).value_u256()) 
.collect(); self.abi_encode_with_custom_factory_deps(factory_deps_hashes) } pub(crate) fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn overhead_gas(&self) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_1_4_1/types/internals/vm_state.rs b/core/lib/multivm/src/versions/vm_1_4_1/types/internals/vm_state.rs index b91733c7ca14..9c3ecd9741a3 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/types/internals/vm_state.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/types/internals/vm_state.rs @@ -11,14 +11,14 @@ use zk_evm_1_4_1::{ }, }; use zksync_system_constants::BOOTLOADER_ADDRESS; -use zksync_types::{block::L2BlockHasher, Address, L2BlockNumber}; -use zksync_utils::h256_to_u256; +use zksync_types::{block::L2BlockHasher, h256_to_u256, Address, L2BlockNumber}; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, L1BatchEnv, L2Block, SystemEnv, }, + utils::bytecode::bytes_to_be_words, vm_1_4_1::{ bootloader_state::BootloaderState, constants::BOOTLOADER_HEAP_PAGE, @@ -89,11 +89,7 @@ pub(crate) fn new_vm_state( decommittment_processor.populate( vec![( h256_to_u256(system_env.base_system_smart_contracts.default_aa.hash), - system_env - .base_system_smart_contracts - .default_aa - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.default_aa.code), )], Timestamp(0), ); @@ -101,11 +97,7 @@ pub(crate) fn new_vm_state( memory.populate( vec![( BOOTLOADER_CODE_PAGE, - system_env - .base_system_smart_contracts - .bootloader - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.bootloader.code), )], Timestamp(0), ); diff --git a/core/lib/multivm/src/versions/vm_1_4_1/types/l1_batch.rs b/core/lib/multivm/src/versions/vm_1_4_1/types/l1_batch.rs index ca2f0688154b..31807cb66cc1 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/types/l1_batch.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/types/l1_batch.rs @@ -1,5 +1,4 @@ -use zksync_types::U256; -use zksync_utils::{address_to_u256, h256_to_u256}; +use zksync_types::{address_to_u256, h256_to_u256, U256}; use crate::{interface::L1BatchEnv, vm_1_4_1::utils::fee::get_batch_base_fee}; diff --git a/core/lib/multivm/src/versions/vm_1_4_1/utils/fee.rs b/core/lib/multivm/src/versions/vm_1_4_1/utils/fee.rs index b5d4cc971b9e..7f214b457317 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/utils/fee.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/utils/fee.rs @@ -1,6 +1,5 @@ //! Utility functions for vm use zksync_types::fee_model::PubdataIndependentBatchFeeModelInput; -use zksync_utils::ceil_div; use crate::{interface::L1BatchEnv, vm_1_4_1::constants::MAX_GAS_PER_PUBDATA_BYTE}; @@ -18,11 +17,14 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while compensating us for it. 
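The fee hunk that follows (mirrored for the other VM versions further down) swaps the retired `zksync_utils::ceil_div` helper for the standard library's `u64::div_ceil` and adds an explicit zero branch. A minimal sketch of the resulting derivation, assuming the old helper short-circuited a zero numerator before dividing (so a bare `div_ceil` would panic when both prices are zero):

```rust
/// Sketch of `derive_base_fee_and_gas_per_pubdata` after the change.
/// `max_gas_per_pubdata` stands in for the version-specific
/// `MAX_GAS_PER_PUBDATA_BYTE` constant.
fn derive_base_fee_and_gas_per_pubdata(
    fair_pubdata_price: u64,
    fair_l2_gas_price: u64,
    max_gas_per_pubdata: u64,
) -> (u64, u64) {
    let base_fee = fair_l2_gas_price.max(fair_pubdata_price.div_ceil(max_gas_per_pubdata));
    let gas_per_pubdata = if fair_pubdata_price == 0 {
        // Preserves the old `ceil_div(0, base_fee)` result even when
        // `base_fee` is also zero, where `div_ceil` would divide by zero.
        0
    } else {
        // Here `base_fee >= div_ceil(fair_pubdata_price, _) >= 1`, so the
        // division is safe.
        fair_pubdata_price.div_ceil(base_fee)
    };
    (base_fee, gas_per_pubdata)
}
```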
let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(fair_pubdata_price, MAX_GAS_PER_PUBDATA_BYTE), + fair_pubdata_price.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); - let gas_per_pubdata = ceil_div(fair_pubdata_price, base_fee); - + let gas_per_pubdata = if fair_pubdata_price == 0 { + 0 + } else { + fair_pubdata_price.div_ceil(base_fee) + }; (base_fee, gas_per_pubdata) } diff --git a/core/lib/multivm/src/versions/vm_1_4_1/utils/l2_blocks.rs b/core/lib/multivm/src/versions/vm_1_4_1/utils/l2_blocks.rs index ff5536ae0b97..1095abd82db1 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/utils/l2_blocks.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/utils/l2_blocks.rs @@ -4,9 +4,9 @@ use zksync_system_constants::{ SYSTEM_CONTEXT_STORED_L2_BLOCK_HASHES, }; use zksync_types::{ - block::unpack_block_info, web3::keccak256, AccountTreeId, L2BlockNumber, StorageKey, H256, U256, + block::unpack_block_info, h256_to_u256, u256_to_h256, web3::keccak256, AccountTreeId, + L2BlockNumber, StorageKey, H256, U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use crate::interface::{ storage::{ReadStorage, StoragePtr}, diff --git a/core/lib/multivm/src/versions/vm_1_4_2/bootloader_state/l2_block.rs b/core/lib/multivm/src/versions/vm_1_4_2/bootloader_state/l2_block.rs index d151e3078b4a..a6376852fb28 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/bootloader_state/l2_block.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/bootloader_state/l2_block.rs @@ -1,7 +1,6 @@ use std::cmp::Ordering; -use zksync_types::{L2BlockNumber, H256}; -use zksync_utils::concat_and_hash; +use zksync_types::{web3::keccak256_concat, L2BlockNumber, H256}; use crate::{ interface::{L2Block, L2BlockEnv}, @@ -53,7 +52,7 @@ impl BootloaderL2Block { } fn update_rolling_hash(&mut self, tx_hash: H256) { - self.txs_rolling_hash = concat_and_hash(self.txs_rolling_hash, tx_hash) + self.txs_rolling_hash = keccak256_concat(self.txs_rolling_hash, tx_hash) } pub(crate) fn interim_version(&self) -> BootloaderL2Block { diff --git a/core/lib/multivm/src/versions/vm_1_4_2/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_1_4_2/bootloader_state/utils.rs index 182f6eff4414..8b367c5c5cae 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/bootloader_state/utils.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/bootloader_state/utils.rs @@ -1,5 +1,4 @@ -use zksync_types::{ethabi, U256}; -use zksync_utils::{bytes_to_be_words, h256_to_u256}; +use zksync_types::{ethabi, h256_to_u256, U256}; use super::tx::BootloaderTx; use crate::{ @@ -25,8 +24,7 @@ pub(super) fn get_memory_for_compressed_bytecodes( .iter() .flat_map(bytecode::encode_call) .collect(); - - bytes_to_be_words(memory_addition) + bytecode::bytes_to_be_words(&memory_addition) } #[allow(clippy::too_many_arguments)] diff --git a/core/lib/multivm/src/versions/vm_1_4_2/implementation/bytecode.rs b/core/lib/multivm/src/versions/vm_1_4_2/implementation/bytecode.rs index 1033fff90e46..f81deff48c25 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/implementation/bytecode.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/implementation/bytecode.rs @@ -1,13 +1,12 @@ use itertools::Itertools; -use zksync_types::U256; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words}; +use zksync_types::{bytecode::BytecodeHash, U256}; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, CompressedBytecodeInfo, }, - utils::bytecode, + utils::{bytecode, bytecode::bytes_to_be_words}, vm_1_4_2::Vm, HistoryMode, }; @@ -25,18 +24,15 @@ impl Vm { .storage .get_ptr() .borrow_mut() - 
.is_bytecode_known(&hash_bytecode(&info.original)) + .is_bytecode_known(&BytecodeHash::for_bytecode(&info.original).value()) }) } } /// Converts bytecode to tokens and hashes it. pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { - let bytecode_hash = hash_bytecode(&bytecode); - let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_hash = BytecodeHash::for_bytecode(&bytecode).value_u256(); + let bytecode_words = bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } @@ -49,7 +45,11 @@ pub(crate) fn compress_bytecodes( .enumerate() .sorted_by_key(|(_idx, dep)| *dep) .dedup_by(|x, y| x.1 == y.1) - .filter(|(_idx, dep)| !storage.borrow_mut().is_bytecode_known(&hash_bytecode(dep))) + .filter(|(_idx, dep)| { + !storage + .borrow_mut() + .is_bytecode_known(&BytecodeHash::for_bytecode(dep).value()) + }) .sorted_by_key(|(idx, _dep)| *idx) .filter_map(|(_idx, dep)| bytecode::compress(dep.clone()).ok()) .collect() diff --git a/core/lib/multivm/src/versions/vm_1_4_2/old_vm/events.rs b/core/lib/multivm/src/versions/vm_1_4_2/old_vm/events.rs index ffa4b4d50b8e..bc5befe3810c 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/old_vm/events.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/old_vm/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_4_1::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub(crate) struct SolidityLikeEvent { @@ -135,7 +134,7 @@ pub(crate) fn merge_events(events: Vec) -> Vec .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // The events writer events where the first topic is the actual address of the event and the rest of the topics are real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_1_4_2/old_vm/history_recorder.rs b/core/lib/multivm/src/versions/vm_1_4_2/old_vm/history_recorder.rs index d8d32a2b6c50..9e562de59866 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/old_vm/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/old_vm/history_recorder.rs @@ -5,8 +5,7 @@ use zk_evm_1_4_1::{ vm_state::PrimitiveValue, zkevm_opcode_defs::{self}, }; -use zksync_types::{StorageKey, H256, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, H256, U256}; use crate::interface::storage::{StoragePtr, WriteStorage}; diff --git a/core/lib/multivm/src/versions/vm_1_4_2/old_vm/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_1_4_2/old_vm/oracles/decommitter.rs index 706e70d4b116..9122a10c9266 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/old_vm/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/old_vm/oracles/decommitter.rs @@ -6,16 +6,17 @@ use zk_evm_1_4_1::{ DecommittmentQuery, MemoryIndex, MemoryLocation, MemoryPage, MemoryQuery, Timestamp, }, }; -use zksync_types::U256; -use zksync_utils::{bytecode::bytecode_len_in_words, bytes_to_be_words, u256_to_h256}; +use zksync_types::{u256_to_h256, U256}; use 
super::OracleWithHistory; use crate::{ interface::storage::{ReadStorage, StoragePtr}, + utils::bytecode::{bytecode_len_in_words, bytes_to_be_words}, vm_1_4_2::old_vm::history_recorder::{ HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory, }, }; + /// The main job of the DecommiterOracle is to implement the DecommittmentProcessor trait - that is /// used by the VM to 'load' bytecodes into memory. #[derive(Debug)] @@ -60,7 +61,7 @@ impl DecommitterOracle { .load_factory_dep(u256_to_h256(hash)) .expect("Trying to decode unexisting hash"); - let value = bytes_to_be_words(value); + let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_1_4_2/oracles/storage.rs b/core/lib/multivm/src/versions/vm_1_4_2/oracles/storage.rs index e8d387621907..170bed0eed5d 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/oracles/storage.rs @@ -6,6 +6,7 @@ use zk_evm_1_4_1::{ zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, }; use zksync_types::{ + u256_to_h256, utils::storage_key_for_eth_balance, writes::{ compression::compress_with_best_strategy, BYTES_PER_DERIVED_KEY, @@ -13,7 +14,6 @@ use zksync_types::{ }, AccountTreeId, Address, StorageKey, StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use crate::{ glue::GlueInto, diff --git a/core/lib/multivm/src/versions/vm_1_4_2/tracers/pubdata_tracer.rs b/core/lib/multivm/src/versions/vm_1_4_2/tracers/pubdata_tracer.rs index 6c4f737f9e94..58318f5d845e 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/tracers/pubdata_tracer.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/tracers/pubdata_tracer.rs @@ -5,8 +5,10 @@ use zk_evm_1_4_1::{ aux_structures::Timestamp, tracing::{BeforeExecutionData, VmLocalStateData}, }; -use zksync_types::{writes::StateDiffRecord, AccountTreeId, StorageKey, L1_MESSENGER_ADDRESS}; -use zksync_utils::{h256_to_u256, u256_to_bytes_be, u256_to_h256}; +use zksync_types::{ + h256_to_u256, u256_to_h256, writes::StateDiffRecord, AccountTreeId, StorageKey, + L1_MESSENGER_ADDRESS, +}; use crate::{ interface::{ @@ -16,9 +18,12 @@ use crate::{ L1BatchEnv, VmEvent, VmExecutionMode, }, tracers::dynamic::vm_1_4_1::DynTracer, - utils::events::{ - extract_bytecode_publication_requests_from_l1_messenger, - extract_l2tol1logs_from_l1_messenger, + utils::{ + bytecode::be_words_to_bytes, + events::{ + extract_bytecode_publication_requests_from_l1_messenger, + extract_l2tol1logs_from_l1_messenger, + }, }, vm_1_4_2::{ bootloader_state::{utils::apply_pubdata_to_memory, BootloaderState}, @@ -120,15 +125,13 @@ impl PubdataTracer { bytecode_publication_requests .iter() .map(|bytecode_publication_request| { - state + let bytecode_words = state .decommittment_processor .known_bytecodes .inner() .get(&h256_to_u256(bytecode_publication_request.bytecode_hash)) - .unwrap() - .iter() - .flat_map(u256_to_bytes_be) - .collect() + .unwrap(); + be_words_to_bytes(bytecode_words) }) .collect() } diff --git a/core/lib/multivm/src/versions/vm_1_4_2/tracers/refunds.rs b/core/lib/multivm/src/versions/vm_1_4_2/tracers/refunds.rs index 0da5736bf955..324cad02b4eb 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/tracers/refunds.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/tracers/refunds.rs @@ -8,8 +8,9 @@ use zk_evm_1_4_1::{ zkevm_opcode_defs::system_params::L1_MESSAGE_PUBDATA_BYTES, }; use zksync_system_constants::{PUBLISH_BYTECODE_OVERHEAD, SYSTEM_CONTEXT_ADDRESS}; 
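The refunds hunk below relocates `bytecode_len_in_bytes` from `zksync_utils::bytecode` into the crate-local `utils::bytecode` module; the call site now passes the hash by reference and no longer needs the `as u32` cast. A sketch of what such helpers reduce to, under the assumption that the versioned bytecode-hash layout is unchanged (bytes 2..4 hold the length in 32-byte words, big-endian):

```rust
use zksync_types::H256;

// Hypothetical reimplementation for illustration only; the real helpers
// live in `crate::utils::bytecode`.
fn bytecode_len_in_words(hash: &H256) -> u16 {
    // Bytes 2..4 of a versioned bytecode hash encode the word count.
    u16::from_be_bytes([hash[2], hash[3]])
}

fn bytecode_len_in_bytes(hash: &H256) -> u32 {
    u32::from(bytecode_len_in_words(hash)) * 32
}
```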
-use zksync_types::{l2_to_l1_log::L2ToL1Log, L1BatchNumber, H256, U256}; -use zksync_utils::{bytecode::bytecode_len_in_bytes, ceil_div_u256, u256_to_h256}; +use zksync_types::{ + ceil_div_u256, l2_to_l1_log::L2ToL1Log, u256_to_h256, L1BatchNumber, H256, U256, +}; use crate::{ interface::{ @@ -18,6 +19,7 @@ use crate::{ L1BatchEnv, Refunds, VmEvent, }, tracers::dynamic::vm_1_4_1::DynTracer, + utils::bytecode::bytecode_len_in_bytes, vm_1_4_2::{ bootloader_state::BootloaderState, constants::{BOOTLOADER_HEAP_PAGE, OPERATOR_REFUNDS_OFFSET, TX_GAS_LIMIT_OFFSET}, @@ -348,7 +350,7 @@ pub(crate) fn pubdata_published( let published_bytecode_bytes: u32 = VmEvent::extract_published_bytecodes(&events) .iter() - .map(|bytecodehash| bytecode_len_in_bytes(*bytecodehash) as u32 + PUBLISH_BYTECODE_OVERHEAD) + .map(|bytecode_hash| bytecode_len_in_bytes(bytecode_hash) + PUBLISH_BYTECODE_OVERHEAD) .sum(); storage_writes_pubdata_published diff --git a/core/lib/multivm/src/versions/vm_1_4_2/tracers/utils.rs b/core/lib/multivm/src/versions/vm_1_4_2/tracers/utils.rs index 5832241d262d..2caf7b060563 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/tracers/utils.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/tracers/utils.rs @@ -9,8 +9,7 @@ use zksync_system_constants::{ ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, SHA256_PRECOMPILE_ADDRESS, }; -use zksync_types::U256; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, U256}; use crate::vm_1_4_2::{ constants::{ diff --git a/core/lib/multivm/src/versions/vm_1_4_2/types/internals/pubdata.rs b/core/lib/multivm/src/versions/vm_1_4_2/types/internals/pubdata.rs index c1ca93152a03..f938696297b5 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/types/internals/pubdata.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/types/internals/pubdata.rs @@ -64,7 +64,7 @@ impl PubdataInput { #[cfg(test)] mod tests { use zksync_system_constants::{ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS}; - use zksync_utils::u256_to_h256; + use zksync_types::u256_to_h256; use super::*; diff --git a/core/lib/multivm/src/versions/vm_1_4_2/types/internals/transaction_data.rs b/core/lib/multivm/src/versions/vm_1_4_2/types/internals/transaction_data.rs index 38280aa80513..e0f113f8a7ff 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/types/internals/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/types/internals/transaction_data.rs @@ -1,19 +1,24 @@ use std::convert::TryInto; use zksync_types::{ + address_to_h256, + bytecode::BytecodeHash, ethabi::{encode, Address, Token}, fee::{encoding_len, Fee}, + h256_to_u256, l1::is_l1_tx_type, l2::{L2Tx, TransactionType}, transaction_request::{PaymasterParams, TransactionRequest}, web3::Bytes, Execute, ExecuteTransactionCommon, L2ChainId, L2TxCommonData, Nonce, Transaction, H256, U256, }; -use zksync_utils::{address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256}; -use crate::vm_1_4_2::{ - constants::{L1_TX_TYPE, MAX_GAS_PER_PUBDATA_BYTE, PRIORITY_TX_MAX_GAS_LIMIT}, - utils::overhead::derive_overhead, +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_1_4_2::{ + constants::{L1_TX_TYPE, MAX_GAS_PER_PUBDATA_BYTE, PRIORITY_TX_MAX_GAS_LIMIT}, + utils::overhead::derive_overhead, + }, }; /// This structure represents the data that is used by @@ -190,16 +195,13 @@ impl TransactionData { let factory_deps_hashes = self .factory_deps .iter() - .map(|dep| h256_to_u256(hash_bytecode(dep))) + .map(|dep| 
BytecodeHash::for_bytecode(dep).value_u256()) .collect(); self.abi_encode_with_custom_factory_deps(factory_deps_hashes) } pub(crate) fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn overhead_gas(&self) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_1_4_2/types/internals/vm_state.rs b/core/lib/multivm/src/versions/vm_1_4_2/types/internals/vm_state.rs index 87630a1ff372..52a0dc61d740 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/types/internals/vm_state.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/types/internals/vm_state.rs @@ -11,14 +11,14 @@ use zk_evm_1_4_1::{ }, }; use zksync_system_constants::BOOTLOADER_ADDRESS; -use zksync_types::{block::L2BlockHasher, Address, L2BlockNumber}; -use zksync_utils::h256_to_u256; +use zksync_types::{block::L2BlockHasher, h256_to_u256, Address, L2BlockNumber}; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, L1BatchEnv, L2Block, SystemEnv, }, + utils::bytecode::bytes_to_be_words, vm_1_4_2::{ bootloader_state::BootloaderState, constants::BOOTLOADER_HEAP_PAGE, @@ -89,11 +89,7 @@ pub(crate) fn new_vm_state( decommittment_processor.populate( vec![( h256_to_u256(system_env.base_system_smart_contracts.default_aa.hash), - system_env - .base_system_smart_contracts - .default_aa - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.default_aa.code), )], Timestamp(0), ); @@ -101,11 +97,7 @@ pub(crate) fn new_vm_state( memory.populate( vec![( BOOTLOADER_CODE_PAGE, - system_env - .base_system_smart_contracts - .bootloader - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.bootloader.code), )], Timestamp(0), ); diff --git a/core/lib/multivm/src/versions/vm_1_4_2/types/l1_batch.rs b/core/lib/multivm/src/versions/vm_1_4_2/types/l1_batch.rs index b3a54c410f4d..d2233a515eab 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/types/l1_batch.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/types/l1_batch.rs @@ -1,5 +1,4 @@ -use zksync_types::U256; -use zksync_utils::{address_to_u256, h256_to_u256}; +use zksync_types::{address_to_u256, h256_to_u256, U256}; use crate::{interface::L1BatchEnv, vm_1_4_2::utils::fee::get_batch_base_fee}; diff --git a/core/lib/multivm/src/versions/vm_1_4_2/utils/fee.rs b/core/lib/multivm/src/versions/vm_1_4_2/utils/fee.rs index 11f8b6b6c427..b01b18716836 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/utils/fee.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/utils/fee.rs @@ -1,6 +1,5 @@ //! Utility functions for vm use zksync_types::fee_model::PubdataIndependentBatchFeeModelInput; -use zksync_utils::ceil_div; use crate::{interface::L1BatchEnv, vm_1_4_2::constants::MAX_GAS_PER_PUBDATA_BYTE}; @@ -18,11 +17,14 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while compensating us for it. 
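The `into_tokens` hunks (repeated for each VM version in this patch) drop the caller-side `assert!(bytes.len() % 32 == 0)` and hand the check to the relocated `bytes_to_be_words`, which now borrows its input; likewise, `new_vm_state` now packs the byte-oriented base-system contract code on the fly instead of cloning pre-packed words. A sketch of the pair, assuming the divisibility assert moved inside the helper:

```rust
use zksync_types::U256;

// Packs a 32-byte-aligned buffer into big-endian U256 words.
fn bytes_to_be_words(bytes: &[u8]) -> Vec<U256> {
    assert_eq!(bytes.len() % 32, 0, "length must be a multiple of 32");
    bytes.chunks(32).map(U256::from_big_endian).collect()
}

// The inverse, used by the pubdata tracers above to recover published
// bytecodes from the decommitter's word-oriented storage.
fn be_words_to_bytes(words: &[U256]) -> Vec<u8> {
    words
        .iter()
        .flat_map(|word| {
            let mut bytes = [0u8; 32];
            word.to_big_endian(&mut bytes);
            bytes
        })
        .collect()
}
```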
let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(fair_pubdata_price, MAX_GAS_PER_PUBDATA_BYTE), + fair_pubdata_price.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); - let gas_per_pubdata = ceil_div(fair_pubdata_price, base_fee); - + let gas_per_pubdata = if fair_pubdata_price == 0 { + 0 + } else { + fair_pubdata_price.div_ceil(base_fee) + }; (base_fee, gas_per_pubdata) } diff --git a/core/lib/multivm/src/versions/vm_1_4_2/utils/l2_blocks.rs b/core/lib/multivm/src/versions/vm_1_4_2/utils/l2_blocks.rs index ff5536ae0b97..1095abd82db1 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/utils/l2_blocks.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/utils/l2_blocks.rs @@ -4,9 +4,9 @@ use zksync_system_constants::{ SYSTEM_CONTEXT_STORED_L2_BLOCK_HASHES, }; use zksync_types::{ - block::unpack_block_info, web3::keccak256, AccountTreeId, L2BlockNumber, StorageKey, H256, U256, + block::unpack_block_info, h256_to_u256, u256_to_h256, web3::keccak256, AccountTreeId, + L2BlockNumber, StorageKey, H256, U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use crate::interface::{ storage::{ReadStorage, StoragePtr}, diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/bootloader_state/l2_block.rs b/core/lib/multivm/src/versions/vm_boojum_integration/bootloader_state/l2_block.rs index 47bbbb5bae64..501207e52bd9 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/bootloader_state/l2_block.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/bootloader_state/l2_block.rs @@ -1,7 +1,6 @@ use std::cmp::Ordering; -use zksync_types::{L2BlockNumber, H256}; -use zksync_utils::concat_and_hash; +use zksync_types::{web3::keccak256_concat, L2BlockNumber, H256}; use crate::{ interface::{L2Block, L2BlockEnv}, @@ -53,7 +52,7 @@ impl BootloaderL2Block { } fn update_rolling_hash(&mut self, tx_hash: H256) { - self.txs_rolling_hash = concat_and_hash(self.txs_rolling_hash, tx_hash) + self.txs_rolling_hash = keccak256_concat(self.txs_rolling_hash, tx_hash) } pub(crate) fn interim_version(&self) -> BootloaderL2Block { diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_boojum_integration/bootloader_state/utils.rs index c97d3ff30e49..6605bea1f6b5 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/bootloader_state/utils.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/bootloader_state/utils.rs @@ -1,5 +1,4 @@ -use zksync_types::{ethabi, U256}; -use zksync_utils::{bytes_to_be_words, h256_to_u256}; +use zksync_types::{ethabi, h256_to_u256, U256}; use super::tx::BootloaderTx; use crate::{ @@ -25,8 +24,7 @@ pub(super) fn get_memory_for_compressed_bytecodes( .iter() .flat_map(bytecode::encode_call) .collect(); - - bytes_to_be_words(memory_addition) + bytecode::bytes_to_be_words(&memory_addition) } #[allow(clippy::too_many_arguments)] diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/implementation/bytecode.rs b/core/lib/multivm/src/versions/vm_boojum_integration/implementation/bytecode.rs index 2d6f081a1886..42507a589e50 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/implementation/bytecode.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/implementation/bytecode.rs @@ -1,13 +1,12 @@ use itertools::Itertools; -use zksync_types::U256; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words}; +use zksync_types::{bytecode::BytecodeHash, U256}; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, CompressedBytecodeInfo, }, - utils::bytecode, + 
utils::{bytecode, bytecode::bytes_to_be_words}, vm_boojum_integration::Vm, HistoryMode, }; @@ -25,18 +24,15 @@ impl Vm { .storage .get_ptr() .borrow_mut() - .is_bytecode_known(&hash_bytecode(&info.original)) + .is_bytecode_known(&BytecodeHash::for_bytecode(&info.original).value()) }) } } /// Converts bytecode to tokens and hashes it. pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { - let bytecode_hash = hash_bytecode(&bytecode); - let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_hash = BytecodeHash::for_bytecode(&bytecode).value_u256(); + let bytecode_words = bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } @@ -49,7 +45,11 @@ pub(crate) fn compress_bytecodes( .enumerate() .sorted_by_key(|(_idx, dep)| *dep) .dedup_by(|x, y| x.1 == y.1) - .filter(|(_idx, dep)| !storage.borrow_mut().is_bytecode_known(&hash_bytecode(dep))) + .filter(|(_idx, dep)| { + !storage + .borrow_mut() + .is_bytecode_known(&BytecodeHash::for_bytecode(dep).value()) + }) .sorted_by_key(|(idx, _dep)| *idx) .filter_map(|(_idx, dep)| bytecode::compress(dep.clone()).ok()) .collect() diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/events.rs b/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/events.rs index 1e95d0bc8f35..48db28747bef 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/events.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_4_0::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub(crate) struct SolidityLikeEvent { @@ -135,7 +134,7 @@ pub(crate) fn merge_events(events: Vec) -> Vec .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // The events writer events where the first topic is the actual address of the event and the rest of the topics are real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/history_recorder.rs b/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/history_recorder.rs index 704a774893d3..19da0ffda77c 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/history_recorder.rs @@ -5,8 +5,7 @@ use zk_evm_1_4_0::{ vm_state::PrimitiveValue, zkevm_opcode_defs::{self}, }; -use zksync_types::{StorageKey, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, U256}; use crate::interface::storage::{StoragePtr, WriteStorage}; diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/oracles/decommitter.rs index eb7db7097920..804bd7179781 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/old_vm/oracles/decommitter.rs 
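The `implementation/bytecode.rs` hunks (this one and its vm_1_4_1/vm_1_4_2 twins above) replace the free `hash_bytecode` function with the `BytecodeHash` builder from `zksync_types`. Its accessors, as exercised throughout this patch:

```rust
use zksync_types::bytecode::BytecodeHash;

fn demo() {
    // EraVM bytecode: length must be a multiple of 32 (with an odd word
    // count), or hashing panics.
    let bytecode = vec![0u8; 32];
    let as_h256 = BytecodeHash::for_bytecode(&bytecode).value(); // H256, fed to `is_bytecode_known`
    let as_u256 = BytecodeHash::for_bytecode(&bytecode).value_u256(); // U256, used for factory-dep hashes
    // EVM bytecode published via `publishEVMBytecode` gets its own variant
    // (see the `vm_fast` deploy-tracer hunk further down):
    let evm_hash = BytecodeHash::for_evm_bytecode(&[0xfe]).value_u256();
    let _ = (as_h256, as_u256, evm_hash);
}
```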
@@ -6,12 +6,12 @@ use zk_evm_1_4_0::{ DecommittmentQuery, MemoryIndex, MemoryLocation, MemoryPage, MemoryQuery, Timestamp, }, }; -use zksync_types::U256; -use zksync_utils::{bytecode::bytecode_len_in_words, bytes_to_be_words, u256_to_h256}; +use zksync_types::{u256_to_h256, U256}; use super::OracleWithHistory; use crate::{ interface::storage::{ReadStorage, StoragePtr}, + utils::bytecode::{bytecode_len_in_words, bytes_to_be_words}, vm_boojum_integration::old_vm::history_recorder::{ HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory, }, @@ -61,7 +61,7 @@ impl DecommitterOracle { .load_factory_dep(u256_to_h256(hash)) .expect("Trying to decode unexisting hash"); - let value = bytes_to_be_words(value); + let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/oracles/storage.rs b/core/lib/multivm/src/versions/vm_boojum_integration/oracles/storage.rs index acdfbaaa42e0..b5fc1c5b92f8 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/oracles/storage.rs @@ -6,6 +6,7 @@ use zk_evm_1_4_0::{ zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, }; use zksync_types::{ + u256_to_h256, utils::storage_key_for_eth_balance, writes::{ compression::compress_with_best_strategy, BYTES_PER_DERIVED_KEY, @@ -13,7 +14,6 @@ use zksync_types::{ }, AccountTreeId, Address, StorageKey, StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use crate::{ interface::storage::{StoragePtr, WriteStorage}, diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/tracers/pubdata_tracer.rs b/core/lib/multivm/src/versions/vm_boojum_integration/tracers/pubdata_tracer.rs index 2f7d141cb0a7..6396d143b401 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/tracers/pubdata_tracer.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/tracers/pubdata_tracer.rs @@ -5,8 +5,10 @@ use zk_evm_1_4_0::{ aux_structures::Timestamp, tracing::{BeforeExecutionData, VmLocalStateData}, }; -use zksync_types::{writes::StateDiffRecord, AccountTreeId, StorageKey, L1_MESSENGER_ADDRESS}; -use zksync_utils::{h256_to_u256, u256_to_bytes_be, u256_to_h256}; +use zksync_types::{ + h256_to_u256, u256_to_h256, writes::StateDiffRecord, AccountTreeId, StorageKey, + L1_MESSENGER_ADDRESS, +}; use crate::{ interface::{ @@ -16,9 +18,12 @@ use crate::{ L1BatchEnv, VmEvent, VmExecutionMode, }, tracers::dynamic::vm_1_4_0::DynTracer, - utils::events::{ - extract_bytecode_publication_requests_from_l1_messenger, - extract_l2tol1logs_from_l1_messenger, + utils::{ + bytecode::be_words_to_bytes, + events::{ + extract_bytecode_publication_requests_from_l1_messenger, + extract_l2tol1logs_from_l1_messenger, + }, }, vm_boojum_integration::{ bootloader_state::{utils::apply_pubdata_to_memory, BootloaderState}, @@ -98,15 +103,13 @@ impl PubdataTracer { bytecode_publication_requests .iter() .map(|bytecode_publication_request| { - state + let bytecode_words = state .decommittment_processor .known_bytecodes .inner() .get(&h256_to_u256(bytecode_publication_request.bytecode_hash)) - .unwrap() - .iter() - .flat_map(u256_to_bytes_be) - .collect() + .unwrap(); + be_words_to_bytes(bytecode_words) }) .collect() } diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/tracers/refunds.rs b/core/lib/multivm/src/versions/vm_boojum_integration/tracers/refunds.rs index ffbb1d80a80e..682cbda5252a 100644 --- 
a/core/lib/multivm/src/versions/vm_boojum_integration/tracers/refunds.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/tracers/refunds.rs @@ -7,8 +7,7 @@ use zk_evm_1_4_0::{ vm_state::VmLocalState, }; use zksync_system_constants::{PUBLISH_BYTECODE_OVERHEAD, SYSTEM_CONTEXT_ADDRESS}; -use zksync_types::{l2_to_l1_log::L2ToL1Log, L1BatchNumber, U256}; -use zksync_utils::{bytecode::bytecode_len_in_bytes, ceil_div_u256, u256_to_h256}; +use zksync_types::{ceil_div_u256, l2_to_l1_log::L2ToL1Log, u256_to_h256, L1BatchNumber, U256}; use crate::{ interface::{ @@ -17,6 +16,7 @@ use crate::{ L1BatchEnv, Refunds, VmEvent, }, tracers::dynamic::vm_1_4_0::DynTracer, + utils::bytecode::bytecode_len_in_bytes, vm_boojum_integration::{ bootloader_state::BootloaderState, constants::{BOOTLOADER_HEAP_PAGE, OPERATOR_REFUNDS_OFFSET, TX_GAS_LIMIT_OFFSET}, @@ -339,7 +339,7 @@ pub(crate) fn pubdata_published( let published_bytecode_bytes: u32 = VmEvent::extract_published_bytecodes(&events) .iter() - .map(|bytecodehash| bytecode_len_in_bytes(*bytecodehash) as u32 + PUBLISH_BYTECODE_OVERHEAD) + .map(|bytecode_hash| bytecode_len_in_bytes(bytecode_hash) + PUBLISH_BYTECODE_OVERHEAD) .sum(); storage_writes_pubdata_published diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/tracers/utils.rs b/core/lib/multivm/src/versions/vm_boojum_integration/tracers/utils.rs index aafdab9ee428..e916d6e0e66c 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/tracers/utils.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/tracers/utils.rs @@ -9,8 +9,7 @@ use zksync_system_constants::{ ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, SHA256_PRECOMPILE_ADDRESS, }; -use zksync_types::U256; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, U256}; use crate::vm_boojum_integration::{ constants::{ diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/pubdata.rs b/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/pubdata.rs index 152ccad2fbcb..cb400ab5fa7d 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/pubdata.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/pubdata.rs @@ -64,7 +64,7 @@ impl PubdataInput { #[cfg(test)] mod tests { use zksync_system_constants::{ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS}; - use zksync_utils::u256_to_h256; + use zksync_types::u256_to_h256; use super::*; diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/transaction_data.rs b/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/transaction_data.rs index 8bf575effe06..9011fa486da2 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/transaction_data.rs @@ -1,19 +1,24 @@ use std::convert::TryInto; use zksync_types::{ + address_to_h256, + bytecode::BytecodeHash, ethabi::{encode, Address, Token}, fee::{encoding_len, Fee}, + h256_to_u256, l1::is_l1_tx_type, l2::{L2Tx, TransactionType}, transaction_request::{PaymasterParams, TransactionRequest}, web3::Bytes, Execute, ExecuteTransactionCommon, L2ChainId, L2TxCommonData, Nonce, Transaction, H256, U256, }; -use zksync_utils::{address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256}; -use crate::vm_boojum_integration::{ - constants::MAX_GAS_PER_PUBDATA_BYTE, - utils::overhead::{get_amortized_overhead, 
OverheadCoefficients}, +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_boojum_integration::{ + constants::MAX_GAS_PER_PUBDATA_BYTE, + utils::overhead::{get_amortized_overhead, OverheadCoefficients}, + }, }; /// This structure represents the data that is used by @@ -190,16 +195,13 @@ impl TransactionData { let factory_deps_hashes = self .factory_deps .iter() - .map(|dep| h256_to_u256(hash_bytecode(dep))) + .map(|dep| BytecodeHash::for_bytecode(dep).value_u256()) .collect(); self.abi_encode_with_custom_factory_deps(factory_deps_hashes) } pub(crate) fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn effective_gas_price_per_pubdata(&self, block_gas_price_per_pubdata: u32) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/vm_state.rs b/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/vm_state.rs index 5b6b9b2eca17..dc41926c4485 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/vm_state.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/types/internals/vm_state.rs @@ -11,14 +11,14 @@ use zk_evm_1_4_0::{ }, }; use zksync_system_constants::BOOTLOADER_ADDRESS; -use zksync_types::{block::L2BlockHasher, Address, L2BlockNumber}; -use zksync_utils::h256_to_u256; +use zksync_types::{block::L2BlockHasher, h256_to_u256, Address, L2BlockNumber}; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, L1BatchEnv, L2Block, SystemEnv, }, + utils::bytecode::bytes_to_be_words, vm_boojum_integration::{ bootloader_state::BootloaderState, constants::BOOTLOADER_HEAP_PAGE, @@ -89,11 +89,7 @@ pub(crate) fn new_vm_state( decommittment_processor.populate( vec![( h256_to_u256(system_env.base_system_smart_contracts.default_aa.hash), - system_env - .base_system_smart_contracts - .default_aa - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.default_aa.code), )], Timestamp(0), ); @@ -101,11 +97,7 @@ pub(crate) fn new_vm_state( memory.populate( vec![( BOOTLOADER_CODE_PAGE, - system_env - .base_system_smart_contracts - .bootloader - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.bootloader.code), )], Timestamp(0), ); diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/types/l1_batch.rs b/core/lib/multivm/src/versions/vm_boojum_integration/types/l1_batch.rs index 386dc040099b..91082e98f9d1 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/types/l1_batch.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/types/l1_batch.rs @@ -1,5 +1,4 @@ -use zksync_types::U256; -use zksync_utils::{address_to_u256, h256_to_u256}; +use zksync_types::{address_to_u256, h256_to_u256, U256}; use crate::{interface::L1BatchEnv, vm_boojum_integration::utils::fee::get_batch_base_fee}; diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/utils/fee.rs b/core/lib/multivm/src/versions/vm_boojum_integration/utils/fee.rs index 8e785775697a..6fa1a38828e0 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/utils/fee.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/utils/fee.rs @@ -1,6 +1,5 @@ //! 
Utility functions for vm use zksync_types::fee_model::L1PeggedBatchFeeModelInput; -use zksync_utils::ceil_div; use crate::{ interface::L1BatchEnv, @@ -12,8 +11,11 @@ use crate::{ /// Calculates the amount of gas required to publish one byte of pubdata pub fn base_fee_to_gas_per_pubdata(l1_gas_price: u64, base_fee: u64) -> u64 { let eth_price_per_pubdata_byte = eth_price_per_pubdata_byte(l1_gas_price); - - ceil_div(eth_price_per_pubdata_byte, base_fee) + if eth_price_per_pubdata_byte == 0 { + 0 + } else { + eth_price_per_pubdata_byte.div_ceil(base_fee) + } } /// Calculates the base fee and gas per pubdata for the given L1 gas price. @@ -30,7 +32,7 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while compensating us for it. let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(eth_price_per_pubdata_byte, MAX_GAS_PER_PUBDATA_BYTE), + eth_price_per_pubdata_byte.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); ( diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/utils/l2_blocks.rs b/core/lib/multivm/src/versions/vm_boojum_integration/utils/l2_blocks.rs index ff5536ae0b97..1095abd82db1 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/utils/l2_blocks.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/utils/l2_blocks.rs @@ -4,9 +4,9 @@ use zksync_system_constants::{ SYSTEM_CONTEXT_STORED_L2_BLOCK_HASHES, }; use zksync_types::{ - block::unpack_block_info, web3::keccak256, AccountTreeId, L2BlockNumber, StorageKey, H256, U256, + block::unpack_block_info, h256_to_u256, u256_to_h256, web3::keccak256, AccountTreeId, + L2BlockNumber, StorageKey, H256, U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use crate::interface::{ storage::{ReadStorage, StoragePtr}, diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/utils/overhead.rs b/core/lib/multivm/src/versions/vm_boojum_integration/utils/overhead.rs index 02fe0b8b3000..c6d299075f2a 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/utils/overhead.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/utils/overhead.rs @@ -1,7 +1,6 @@ use zk_evm_1_4_0::zkevm_opcode_defs::system_params::MAX_TX_ERGS_LIMIT; use zksync_system_constants::MAX_L2_TX_GAS_LIMIT; -use zksync_types::{l1::is_l1_tx_type, U256}; -use zksync_utils::ceil_div_u256; +use zksync_types::{ceil_div_u256, l1::is_l1_tx_type, U256}; use crate::vm_boojum_integration::constants::{ BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, MAX_TXS_IN_BLOCK, diff --git a/core/lib/multivm/src/versions/vm_fast/bootloader_state/l2_block.rs b/core/lib/multivm/src/versions/vm_fast/bootloader_state/l2_block.rs index adb406eec789..4f05ef30a46d 100644 --- a/core/lib/multivm/src/versions/vm_fast/bootloader_state/l2_block.rs +++ b/core/lib/multivm/src/versions/vm_fast/bootloader_state/l2_block.rs @@ -1,7 +1,6 @@ use std::cmp::Ordering; -use zksync_types::{L2BlockNumber, H256}; -use zksync_utils::concat_and_hash; +use zksync_types::{web3::keccak256_concat, L2BlockNumber, H256}; use super::{snapshot::L2BlockSnapshot, tx::BootloaderTx}; use crate::{ @@ -51,7 +50,7 @@ impl BootloaderL2Block { } fn update_rolling_hash(&mut self, tx_hash: H256) { - self.txs_rolling_hash = concat_and_hash(self.txs_rolling_hash, tx_hash) + self.txs_rolling_hash = keccak256_concat(self.txs_rolling_hash, tx_hash) } pub(crate) fn make_snapshot(&self) -> L2BlockSnapshot { diff --git a/core/lib/multivm/src/versions/vm_fast/bootloader_state/state.rs b/core/lib/multivm/src/versions/vm_fast/bootloader_state/state.rs 
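The `l2_block.rs` hunks above (vm_1_4_2, vm_boojum_integration, vm_fast) all retire `zksync_utils::concat_and_hash` in favor of `web3::keccak256_concat` re-exported through `zksync_types`. Both names describe the same rolling-hash update, keccak256 over the 64-byte concatenation; a sketch assuming that equivalence:

```rust
use zksync_types::{web3::keccak256, H256};

// Equivalent of `keccak256_concat(rolling, tx_hash)` as used in
// `update_rolling_hash`.
fn keccak256_concat_sketch(rolling: H256, tx_hash: H256) -> H256 {
    let mut buf = [0u8; 64];
    buf[..32].copy_from_slice(rolling.as_bytes());
    buf[32..].copy_from_slice(tx_hash.as_bytes());
    H256(keccak256(&buf))
}
```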
index 15b4daf02a77..e104eba6ef4f 100644 --- a/core/lib/multivm/src/versions/vm_fast/bootloader_state/state.rs +++ b/core/lib/multivm/src/versions/vm_fast/bootloader_state/state.rs @@ -1,7 +1,7 @@ use std::cmp::Ordering; use once_cell::sync::OnceCell; -use zksync_types::{L2ChainId, U256}; +use zksync_types::{L2ChainId, ProtocolVersionId, U256}; use super::{ l2_block::BootloaderL2Block, @@ -10,8 +10,11 @@ use super::{ BootloaderStateSnapshot, }; use crate::{ - interface::{BootloaderMemory, CompressedBytecodeInfo, L2BlockEnv, TxExecutionMode}, - versions::vm_fast::{pubdata::PubdataInput, transaction_data::TransactionData}, + interface::{ + pubdata::{PubdataBuilder, PubdataInput}, + BootloaderMemory, CompressedBytecodeInfo, L2BlockEnv, TxExecutionMode, + }, + versions::vm_fast::transaction_data::TransactionData, vm_latest::{constants::TX_DESCRIPTION_OFFSET, utils::l2_blocks::assert_next_block}, }; @@ -42,6 +45,8 @@ pub struct BootloaderState { free_tx_offset: usize, /// Information about the pubdata that will be needed to supply to the L1Messenger pubdata_information: OnceCell, + /// Protocol version. + protocol_version: ProtocolVersionId, } impl BootloaderState { @@ -49,6 +54,7 @@ impl BootloaderState { execution_mode: TxExecutionMode, initial_memory: BootloaderMemory, first_l2_block: L2BlockEnv, + protocol_version: ProtocolVersionId, ) -> Self { let l2_block = BootloaderL2Block::new(first_l2_block, 0); Self { @@ -59,6 +65,7 @@ impl BootloaderState { execution_mode, free_tx_offset: 0, pubdata_information: Default::default(), + protocol_version, } } @@ -139,12 +146,23 @@ impl BootloaderState { .expect("Pubdata information is not set") } + pub(crate) fn settlement_layer_pubdata(&self, pubdata_builder: &dyn PubdataBuilder) -> Vec { + let pubdata_information = self + .pubdata_information + .get() + .expect("Pubdata information is not set"); + pubdata_builder.settlement_layer_pubdata(pubdata_information, self.protocol_version) + } + fn last_mut_l2_block(&mut self) -> &mut BootloaderL2Block { self.l2_blocks.last_mut().unwrap() } /// Apply all bootloader transaction to the initial memory - pub(crate) fn bootloader_memory(&self) -> BootloaderMemory { + pub(crate) fn bootloader_memory( + &self, + pubdata_builder: &dyn PubdataBuilder, + ) -> BootloaderMemory { let mut initial_memory = self.initial_memory.clone(); let mut offset = 0; let mut compressed_bytecodes_offset = 0; @@ -172,11 +190,15 @@ impl BootloaderState { let pubdata_information = self .pubdata_information - .clone() - .into_inner() + .get() .expect("Empty pubdata information"); - apply_pubdata_to_memory(&mut initial_memory, pubdata_information); + apply_pubdata_to_memory( + &mut initial_memory, + pubdata_builder, + pubdata_information, + self.protocol_version, + ); initial_memory } diff --git a/core/lib/multivm/src/versions/vm_fast/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_fast/bootloader_state/utils.rs index 770f232019bf..9eb55d794235 100644 --- a/core/lib/multivm/src/versions/vm_fast/bootloader_state/utils.rs +++ b/core/lib/multivm/src/versions/vm_fast/bootloader_state/utils.rs @@ -1,11 +1,12 @@ -use zksync_types::{ethabi, U256}; -use zksync_utils::{bytes_to_be_words, h256_to_u256}; +use zksync_types::{ethabi, h256_to_u256, ProtocolVersionId, U256}; use super::{l2_block::BootloaderL2Block, tx::BootloaderTx}; use crate::{ - interface::{BootloaderMemory, CompressedBytecodeInfo, TxExecutionMode}, + interface::{ + pubdata::{PubdataBuilder, PubdataInput}, + BootloaderMemory, CompressedBytecodeInfo, TxExecutionMode, + }, 
utils::bytecode, - versions::vm_fast::pubdata::PubdataInput, vm_latest::constants::{ BOOTLOADER_TX_DESCRIPTION_OFFSET, BOOTLOADER_TX_DESCRIPTION_SIZE, COMPRESSED_BYTECODES_OFFSET, OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_OFFSET, @@ -22,8 +23,7 @@ pub(super) fn get_memory_for_compressed_bytecodes( .iter() .flat_map(bytecode::encode_call) .collect(); - - bytes_to_be_words(memory_addition) + bytecode::bytes_to_be_words(&memory_addition) } #[allow(clippy::too_many_arguments)] @@ -120,26 +120,54 @@ fn apply_l2_block_inner( ]) } +fn bootloader_memory_input( + pubdata_builder: &dyn PubdataBuilder, + input: &PubdataInput, + protocol_version: ProtocolVersionId, +) -> Vec { + let l2_da_validator_address = pubdata_builder.l2_da_validator(); + let operator_input = pubdata_builder.l1_messenger_operator_input(input, protocol_version); + ethabi::encode(&[ + ethabi::Token::Address(l2_da_validator_address), + ethabi::Token::Bytes(operator_input), + ]) +} + pub(crate) fn apply_pubdata_to_memory( memory: &mut BootloaderMemory, - pubdata_information: PubdataInput, + pubdata_builder: &dyn PubdataBuilder, + pubdata_information: &PubdataInput, + protocol_version: ProtocolVersionId, ) { - // Skipping two slots as they will be filled by the bootloader itself: - // - One slot is for the selector of the call to the L1Messenger. - // - The other slot is for the 0x20 offset for the calldata. - let l1_messenger_pubdata_start_slot = OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_OFFSET + 2; - - // Need to skip first word as it represents array offset - // while bootloader expects only [len || data] - let pubdata = ethabi::encode(&[ethabi::Token::Bytes( - pubdata_information.build_pubdata(true), - )])[32..] - .to_vec(); - - assert!( - pubdata.len() / 32 <= OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_SLOTS - 2, - "The encoded pubdata is too big" - ); + let (l1_messenger_pubdata_start_slot, pubdata) = if protocol_version.is_pre_gateway() { + // Skipping two slots as they will be filled by the bootloader itself: + // - One slot is for the selector of the call to the L1Messenger. + // - The other slot is for the 0x20 offset for the calldata. + let l1_messenger_pubdata_start_slot = OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_OFFSET + 2; + // Need to skip first word as it represents array offset + // while bootloader expects only [len || data] + let pubdata = ethabi::encode(&[ethabi::Token::Bytes( + pubdata_builder.l1_messenger_operator_input(pubdata_information, protocol_version), + )])[32..] + .to_vec(); + assert!( + pubdata.len() / 32 <= OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_SLOTS - 2, + "The encoded pubdata is too big" + ); + (l1_messenger_pubdata_start_slot, pubdata) + } else { + // Skipping the first slot as it will be filled by the bootloader itself: + // It is for the selector of the call to the L1Messenger. 
+ let l1_messenger_pubdata_start_slot = OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_OFFSET + 1; + let pubdata = + bootloader_memory_input(pubdata_builder, pubdata_information, protocol_version); + assert!( + // Note that unlike the previous version, the difference is `1`, since now it also includes the offset + pubdata.len() / 32 < OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_SLOTS, + "The encoded pubdata is too big" + ); + (l1_messenger_pubdata_start_slot, pubdata) + }; pubdata .chunks(32) diff --git a/core/lib/multivm/src/versions/vm_fast/bytecode.rs b/core/lib/multivm/src/versions/vm_fast/bytecode.rs index b75e33a21b05..4dc52951c16c 100644 --- a/core/lib/multivm/src/versions/vm_fast/bytecode.rs +++ b/core/lib/multivm/src/versions/vm_fast/bytecode.rs @@ -1,6 +1,5 @@ use itertools::Itertools; -use zksync_types::H256; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256}; +use zksync_types::{bytecode::BytecodeHash, h256_to_u256, H256}; use super::Vm; use crate::{ @@ -15,7 +14,7 @@ impl Vm { .get_last_tx_compressed_bytecodes() .iter() .any(|info| { - let hash_bytecode = hash_bytecode(&info.original); + let hash_bytecode = BytecodeHash::for_bytecode(&info.original).value(); let is_bytecode_known = self.world.storage.is_bytecode_known(&hash_bytecode); let is_bytecode_known_cache = self @@ -36,7 +35,7 @@ pub(crate) fn compress_bytecodes( .enumerate() .sorted_by_key(|(_idx, dep)| *dep) .dedup_by(|x, y| x.1 == y.1) - .filter(|(_idx, dep)| !is_bytecode_known(hash_bytecode(dep))) + .filter(|(_idx, dep)| !is_bytecode_known(BytecodeHash::for_bytecode(dep).value())) .sorted_by_key(|(idx, _dep)| *idx) .filter_map(|(_idx, dep)| bytecode::compress(dep.clone()).ok()) .collect() diff --git a/core/lib/multivm/src/versions/vm_fast/events.rs b/core/lib/multivm/src/versions/vm_fast/events.rs index 294e8adce32b..4fb26d306897 100644 --- a/core/lib/multivm/src/versions/vm_fast/events.rs +++ b/core/lib/multivm/src/versions/vm_fast/events.rs @@ -1,5 +1,4 @@ -use zksync_types::{L1BatchNumber, H256}; -use zksync_utils::h256_to_account_address; +use zksync_types::{h256_to_address, L1BatchNumber, H256}; use zksync_vm2::interface::Event; use crate::interface::VmEvent; @@ -16,7 +15,7 @@ impl EventAccumulator { fn into_vm_event(self, block_number: L1BatchNumber) -> VmEvent { VmEvent { location: (block_number, self.tx_number_in_block as u32), - address: h256_to_account_address(&H256(self.topics[0])), + address: h256_to_address(&H256(self.topics[0])), indexed_topics: self.topics[1..].iter().map(H256::from).collect(), value: self.data, } diff --git a/core/lib/multivm/src/versions/vm_fast/evm_deploy_tracer.rs b/core/lib/multivm/src/versions/vm_fast/evm_deploy_tracer.rs index 62aba8df5b9b..c443c99ccf9a 100644 --- a/core/lib/multivm/src/versions/vm_fast/evm_deploy_tracer.rs +++ b/core/lib/multivm/src/versions/vm_fast/evm_deploy_tracer.rs @@ -3,8 +3,7 @@ use std::{cell::RefCell, collections::HashMap, rc::Rc}; use zksync_system_constants::{CONTRACT_DEPLOYER_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS}; -use zksync_types::U256; -use zksync_utils::{bytecode::hash_evm_bytecode, h256_to_u256}; +use zksync_types::{bytecode::BytecodeHash, U256}; use zksync_vm2::interface::{ CallframeInterface, CallingMode, GlobalStateInterface, Opcode, OpcodeType, ShouldStop, Tracer, }; @@ -66,7 +65,8 @@ impl EvmDeployTracer { Ok(decoded) => { // `unwrap`s should be safe since the function signature is checked above. 
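The `apply_pubdata_to_memory` hunk above now branches on the protocol version: pre-gateway it keeps the old layout (two bootloader-reserved slots, `[len || data]` with the leading ABI offset word stripped), while post-gateway it reserves a single selector slot and writes the full `(l2DaValidator, operatorInput)` encoding with its offset word kept, hence the off-by-one difference between the two size asserts. A sketch of just the layout arithmetic, with `offset` standing in for `OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_OFFSET` and the operator input supplied by the `PubdataBuilder`:

```rust
use zksync_types::{ethabi, Address};

/// Returns the first memory slot to fill and the bytes to lay out there.
fn pubdata_memory_layout(
    pre_gateway: bool,
    operator_input: Vec<u8>,
    l2_da_validator: Address,
    offset: usize,
) -> (usize, Vec<u8>) {
    if pre_gateway {
        // Two slots are filled by the bootloader itself (call selector and
        // the 0x20 calldata offset); the leading ABI offset word is cut so
        // only `[len || data]` remains.
        let bytes = ethabi::encode(&[ethabi::Token::Bytes(operator_input)])[32..].to_vec();
        (offset + 2, bytes)
    } else {
        // Only the selector slot is skipped; the encoding now also carries
        // the L2 DA validator address and keeps its leading offset word.
        let bytes = ethabi::encode(&[
            ethabi::Token::Address(l2_da_validator),
            ethabi::Token::Bytes(operator_input),
        ]);
        (offset + 1, bytes)
    }
}
```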
let published_bytecode = decoded.into_iter().next().unwrap().into_bytes().unwrap(); - let bytecode_hash = h256_to_u256(hash_evm_bytecode(&published_bytecode)); + let bytecode_hash = + BytecodeHash::for_evm_bytecode(&published_bytecode).value_u256(); self.bytecodes.insert(bytecode_hash, published_bytecode); } Err(err) => tracing::error!("Unable to decode `publishEVMBytecode` call: {err}"), diff --git a/core/lib/multivm/src/versions/vm_fast/glue.rs b/core/lib/multivm/src/versions/vm_fast/glue.rs index c2d38f351c04..f1a43d557358 100644 --- a/core/lib/multivm/src/versions/vm_fast/glue.rs +++ b/core/lib/multivm/src/versions/vm_fast/glue.rs @@ -1,5 +1,7 @@ -use zksync_types::l2_to_l1_log::{L2ToL1Log, SystemL2ToL1Log}; -use zksync_utils::u256_to_h256; +use zksync_types::{ + l2_to_l1_log::{L2ToL1Log, SystemL2ToL1Log}, + u256_to_h256, +}; use zksync_vm2::interface; use crate::glue::GlueFrom; diff --git a/core/lib/multivm/src/versions/vm_fast/initial_bootloader_memory.rs b/core/lib/multivm/src/versions/vm_fast/initial_bootloader_memory.rs index b3bf15cb1be5..89b22d328ac5 100644 --- a/core/lib/multivm/src/versions/vm_fast/initial_bootloader_memory.rs +++ b/core/lib/multivm/src/versions/vm_fast/initial_bootloader_memory.rs @@ -1,5 +1,4 @@ -use zksync_types::U256; -use zksync_utils::{address_to_u256, h256_to_u256}; +use zksync_types::{address_to_u256, h256_to_u256, U256}; use crate::{interface::L1BatchEnv, vm_latest::utils::fee::get_batch_base_fee}; diff --git a/core/lib/multivm/src/versions/vm_fast/mod.rs b/core/lib/multivm/src/versions/vm_fast/mod.rs index de6e7bd4ef6a..840653b63b08 100644 --- a/core/lib/multivm/src/versions/vm_fast/mod.rs +++ b/core/lib/multivm/src/versions/vm_fast/mod.rs @@ -1,5 +1,6 @@ pub use zksync_vm2::interface; +pub(crate) use self::version::FastVmVersion; pub use self::vm::Vm; mod bootloader_state; @@ -10,10 +11,10 @@ mod evm_deploy_tracer; mod glue; mod hook; mod initial_bootloader_memory; -mod pubdata; mod refund; #[cfg(test)] mod tests; mod transaction_data; mod utils; +mod version; mod vm; diff --git a/core/lib/multivm/src/versions/vm_fast/pubdata.rs b/core/lib/multivm/src/versions/vm_fast/pubdata.rs deleted file mode 100644 index c1ca93152a03..000000000000 --- a/core/lib/multivm/src/versions/vm_fast/pubdata.rs +++ /dev/null @@ -1,123 +0,0 @@ -use zksync_types::writes::{compress_state_diffs, StateDiffRecord}; - -use crate::interface::pubdata::L1MessengerL2ToL1Log; - -/// Struct based on which the pubdata blob is formed -#[derive(Debug, Clone, Default)] -pub(crate) struct PubdataInput { - pub(crate) user_logs: Vec, - pub(crate) l2_to_l1_messages: Vec>, - pub(crate) published_bytecodes: Vec>, - pub(crate) state_diffs: Vec, -} - -impl PubdataInput { - pub(crate) fn build_pubdata(self, with_uncompressed_state_diffs: bool) -> Vec { - let mut l1_messenger_pubdata = vec![]; - - let PubdataInput { - user_logs, - l2_to_l1_messages, - published_bytecodes, - state_diffs, - } = self; - - // Encoding user L2->L1 logs. - // Format: `[(numberOfL2ToL1Logs as u32) || l2tol1logs[1] || ... || l2tol1logs[n]]` - l1_messenger_pubdata.extend((user_logs.len() as u32).to_be_bytes()); - for l2tol1log in user_logs { - l1_messenger_pubdata.extend(l2tol1log.packed_encoding()); - } - - // Encoding L2->L1 messages - // Format: `[(numberOfMessages as u32) || (messages[1].len() as u32) || messages[1] || ... 
|| (messages[n].len() as u32) || messages[n]]` - l1_messenger_pubdata.extend((l2_to_l1_messages.len() as u32).to_be_bytes()); - for message in l2_to_l1_messages { - l1_messenger_pubdata.extend((message.len() as u32).to_be_bytes()); - l1_messenger_pubdata.extend(message); - } - - // Encoding bytecodes - // Format: `[(numberOfBytecodes as u32) || (bytecodes[1].len() as u32) || bytecodes[1] || ... || (bytecodes[n].len() as u32) || bytecodes[n]]` - l1_messenger_pubdata.extend((published_bytecodes.len() as u32).to_be_bytes()); - for bytecode in published_bytecodes { - l1_messenger_pubdata.extend((bytecode.len() as u32).to_be_bytes()); - l1_messenger_pubdata.extend(bytecode); - } - - // Encoding state diffs - // Format: `[size of compressed state diffs u32 || compressed state diffs || (# state diffs: intial + repeated) as u32 || sorted state diffs by ]` - let state_diffs_compressed = compress_state_diffs(state_diffs.clone()); - l1_messenger_pubdata.extend(state_diffs_compressed); - - if with_uncompressed_state_diffs { - l1_messenger_pubdata.extend((state_diffs.len() as u32).to_be_bytes()); - for state_diff in state_diffs { - l1_messenger_pubdata.extend(state_diff.encode_padded()); - } - } - - l1_messenger_pubdata - } -} - -#[cfg(test)] -mod tests { - use zksync_system_constants::{ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS}; - use zksync_utils::u256_to_h256; - - use super::*; - - #[test] - fn test_basic_pubdata_building() { - // Just using some constant addresses for tests - let addr1 = BOOTLOADER_ADDRESS; - let addr2 = ACCOUNT_CODE_STORAGE_ADDRESS; - - let user_logs = vec![L1MessengerL2ToL1Log { - l2_shard_id: 0, - is_service: false, - tx_number_in_block: 0, - sender: addr1, - key: 1.into(), - value: 128.into(), - }]; - - let l2_to_l1_messages = vec![hex::decode("deadbeef").unwrap()]; - - let published_bytecodes = vec![hex::decode("aaaabbbb").unwrap()]; - - // For covering more cases, we have two state diffs: - // One with enumeration index present (and so it is a repeated write) and the one without it. 
- let state_diffs = vec![ - StateDiffRecord { - address: addr2, - key: 155.into(), - derived_key: u256_to_h256(125.into()).0, - enumeration_index: 12, - initial_value: 11.into(), - final_value: 12.into(), - }, - StateDiffRecord { - address: addr2, - key: 156.into(), - derived_key: u256_to_h256(126.into()).0, - enumeration_index: 0, - initial_value: 0.into(), - final_value: 14.into(), - }, - ]; - - let input = PubdataInput { - user_logs, - l2_to_l1_messages, - published_bytecodes, - state_diffs, - }; - - let pubdata = - ethabi::encode(&[ethabi::Token::Bytes(input.build_pubdata(true))])[32..].to_vec(); - - assert_eq!(hex::encode(pubdata), "00000000000000000000000000000000000000000000000000000000000002c700000001000000000000000000000000000000000000000000008001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000800000000100000004deadbeef0000000100000004aaaabbbb0100002a040001000000000000000000000000000000000000000000000000000000000000007e090e0000000c0901000000020000000000000000000000000000000000008002000000000000000000000000000000000000000000000000000000000000009b000000000000000000000000000000000000000000000000000000000000007d000000000000000c000000000000000000000000000000000000000000000000000000000000000b000000000000000000000000000000000000000000000000000000000000000c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008002000000000000000000000000000000000000000000000000000000000000009c000000000000000000000000000000000000000000000000000000000000007e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"); - } -} diff --git a/core/lib/multivm/src/versions/vm_fast/refund.rs b/core/lib/multivm/src/versions/vm_fast/refund.rs index 05648acddcfe..13637ff97122 100644 --- a/core/lib/multivm/src/versions/vm_fast/refund.rs +++ b/core/lib/multivm/src/versions/vm_fast/refund.rs @@ -1,5 +1,4 @@ -use zksync_types::{H256, U256}; -use zksync_utils::ceil_div_u256; +use zksync_types::{ceil_div_u256, H256, U256}; use crate::{interface::L1BatchEnv, vm_latest::utils::fee::get_batch_base_fee}; diff --git a/core/lib/multivm/src/versions/vm_fast/tests/mod.rs b/core/lib/multivm/src/versions/vm_fast/tests/mod.rs index 27192f46d8dd..0a26e895b5a7 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/mod.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/mod.rs @@ -1,7 +1,8 @@ use std::{any::Any, collections::HashSet, fmt, rc::Rc}; -use zksync_types::{writes::StateDiffRecord, StorageKey, Transaction, H160, H256, U256}; -use zksync_utils::h256_to_u256; +use zksync_types::{ + h256_to_u256, writes::StateDiffRecord, StorageKey, Transaction, H160, H256, U256, +}; use zksync_vm2::interface::{Event, HeapId, StateInterface}; use zksync_vm_interface::{ pubdata::PubdataBuilder, storage::ReadStorage, CurrentExecutionState, L2BlockEnv, @@ -111,7 +112,7 @@ impl TestedVm for Vm> { } fn finish_batch_without_pubdata(&mut self) -> VmExecutionResultAndLogs { - self.inspect_inner(&mut 
Default::default(), VmExecutionMode::Batch) + self.inspect_inner(&mut Default::default(), VmExecutionMode::Batch, None) } fn insert_bytecodes(&mut self, bytecodes: &[&[u8]]) { diff --git a/core/lib/multivm/src/versions/vm_fast/transaction_data.rs b/core/lib/multivm/src/versions/vm_fast/transaction_data.rs index 2ec86eb3ceaf..02697beee341 100644 --- a/core/lib/multivm/src/versions/vm_fast/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_fast/transaction_data.rs @@ -1,19 +1,24 @@ use std::convert::TryInto; use zksync_types::{ + address_to_h256, + bytecode::BytecodeHash, ethabi::{encode, Address, Token}, fee::{encoding_len, Fee}, + h256_to_u256, l1::is_l1_tx_type, l2::{L2Tx, TransactionType}, transaction_request::{PaymasterParams, TransactionRequest}, web3::Bytes, Execute, ExecuteTransactionCommon, L2ChainId, L2TxCommonData, Nonce, Transaction, H256, U256, }; -use zksync_utils::{address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256}; -use crate::vm_latest::{ - constants::{MAX_GAS_PER_PUBDATA_BYTE, TX_MAX_COMPUTE_GAS_LIMIT}, - utils::overhead::derive_overhead, +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_latest::{ + constants::{MAX_GAS_PER_PUBDATA_BYTE, TX_MAX_COMPUTE_GAS_LIMIT}, + utils::overhead::derive_overhead, + }, }; /// This structure represents the data that is used by @@ -190,16 +195,13 @@ impl TransactionData { let factory_deps_hashes = self .factory_deps .iter() - .map(|dep| h256_to_u256(hash_bytecode(dep))) + .map(|dep| BytecodeHash::for_bytecode(dep).value_u256()) .collect(); self.abi_encode_with_custom_factory_deps(factory_deps_hashes) } pub(crate) fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn overhead_gas(&self) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_fast/version.rs b/core/lib/multivm/src/versions/vm_fast/version.rs new file mode 100644 index 000000000000..8da180d8ba59 --- /dev/null +++ b/core/lib/multivm/src/versions/vm_fast/version.rs @@ -0,0 +1,28 @@ +use crate::{vm_latest::MultiVmSubversion, VmVersion}; + +#[derive(Debug, Copy, Clone)] +pub(crate) enum FastVmVersion { + IncreasedBootloaderMemory, + Gateway, +} + +impl From<FastVmVersion> for MultiVmSubversion { + fn from(value: FastVmVersion) -> Self { + match value { + FastVmVersion::IncreasedBootloaderMemory => Self::IncreasedBootloaderMemory, + FastVmVersion::Gateway => Self::Gateway, + } + } +} + +impl TryFrom<VmVersion> for FastVmVersion { + type Error = (); + + fn try_from(value: VmVersion) -> Result<Self, Self::Error> { + match value { + VmVersion::Vm1_5_0IncreasedBootloaderMemory => Ok(Self::IncreasedBootloaderMemory), + VmVersion::VmGateway => Ok(Self::Gateway), + _ => Err(()), + } + } +} diff --git a/core/lib/multivm/src/versions/vm_fast/vm.rs b/core/lib/multivm/src/versions/vm_fast/vm.rs index d18f7b91f323..c935b1c0e7f5 100644 --- a/core/lib/multivm/src/versions/vm_fast/vm.rs +++ b/core/lib/multivm/src/versions/vm_fast/vm.rs @@ -5,8 +5,11 @@ use zk_evm_1_5_0::{ }; use zksync_contracts::SystemContractCode; use zksync_types::{ + bytecode::BytecodeHash, + h256_to_u256, l1::is_l1_tx_type, l2_to_l1_log::UserL2ToL1Log, + u256_to_h256, utils::key_for_eth_balance, writes::{ compression::compress_with_best_strategy, StateDiffRecord, BYTES_PER_DERIVED_KEY, @@ -16,12 +19,10 @@ use zksync_types::{ Transaction, BOOTLOADER_ADDRESS, H160, H256, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, L2_BASE_TOKEN_ADDRESS, U256, }; -use zksync_utils::{bytecode::hash_bytecode,
h256_to_u256, u256_to_h256}; use zksync_vm2::{ interface::{CallframeInterface, HeapId, StateInterface, Tracer}, ExecutionEnd, FatPointer, Program, Settings, StorageSlot, VirtualMachine, }; -use zksync_vm_interface::{pubdata::PubdataBuilder, InspectExecutionMode}; use super::{ bootloader_state::{BootloaderState, BootloaderStateSnapshot}, @@ -35,32 +36,28 @@ use super::{ use crate::{ glue::GlueInto, interface::{ + pubdata::{PubdataBuilder, PubdataInput}, storage::{ImmutableStorageView, ReadStorage, StoragePtr, StorageView}, BytecodeCompressionError, BytecodeCompressionResult, CurrentExecutionState, - ExecutionResult, FinishedL1Batch, Halt, L1BatchEnv, L2BlockEnv, PushTransactionResult, - Refunds, SystemEnv, TxRevertReason, VmEvent, VmExecutionLogs, VmExecutionMode, - VmExecutionResultAndLogs, VmExecutionStatistics, VmFactory, VmInterface, + ExecutionResult, FinishedL1Batch, Halt, InspectExecutionMode, L1BatchEnv, L2BlockEnv, + PushTransactionResult, Refunds, SystemEnv, TxRevertReason, VmEvent, VmExecutionLogs, + VmExecutionMode, VmExecutionResultAndLogs, VmExecutionStatistics, VmFactory, VmInterface, VmInterfaceHistoryEnabled, VmRevertReason, VmTrackingContracts, }, - is_supported_by_fast_vm, utils::events::extract_l2tol1logs_from_l1_messenger, vm_fast::{ bootloader_state::utils::{apply_l2_block, apply_pubdata_to_memory}, events::merge_events, - pubdata::PubdataInput, refund::compute_refund, + version::FastVmVersion, }, - vm_latest::{ - constants::{ - get_result_success_first_slot, get_vm_hook_params_start_position, get_vm_hook_position, - OPERATOR_REFUNDS_OFFSET, TX_GAS_LIMIT_OFFSET, VM_HOOK_PARAMS_COUNT, - }, - MultiVMSubversion, + vm_latest::constants::{ + get_result_success_first_slot, get_vm_hook_params_start_position, get_vm_hook_position, + OPERATOR_REFUNDS_OFFSET, TX_GAS_LIMIT_OFFSET, VM_HOOK_PARAMS_COUNT, }, + VmVersion, }; -const VM_VERSION: MultiVMSubversion = MultiVMSubversion::IncreasedBootloaderMemory; - type FullTracer = ((Tr, CircuitsTracer), EvmDeployTracer); #[derive(Debug)] @@ -101,17 +98,21 @@ pub struct Vm { pub(super) batch_env: L1BatchEnv, pub(super) system_env: SystemEnv, snapshot: Option, + vm_version: FastVmVersion, #[cfg(test)] enforced_state_diffs: Option>, } impl Vm { pub fn custom(batch_env: L1BatchEnv, system_env: SystemEnv, storage: S) -> Self { - assert!( - is_supported_by_fast_vm(system_env.version), - "Protocol version {:?} is not supported by fast VM", - system_env.version - ); + let vm_version: FastVmVersion = VmVersion::from(system_env.version) + .try_into() + .unwrap_or_else(|_| { + panic!( + "Protocol version {:?} is not supported by fast VM", + system_env.version + ) + }); let default_aa_code_hash = system_env.base_system_smart_contracts.default_aa.hash; let evm_emulator_hash = system_env @@ -145,7 +146,7 @@ impl Vm { Settings { default_aa_code_hash: default_aa_code_hash.into(), evm_interpreter_code_hash: evm_emulator_hash.into(), - hook_address: get_vm_hook_position(VM_VERSION) * 32, + hook_address: get_vm_hook_position(vm_version.into()) * 32, }, ); @@ -165,10 +166,12 @@ impl Vm { system_env.execution_mode, bootloader_memory.clone(), batch_env.first_l2_block, + system_env.version, ), system_env, batch_env, snapshot: None, + vm_version, #[cfg(test)] enforced_state_diffs: None, }; @@ -181,6 +184,7 @@ impl Vm { execution_mode: VmExecutionMode, tracer: &mut FullTracer, track_refunds: bool, + pubdata_builder: Option<&dyn PubdataBuilder>, ) -> VmRunResult { let mut refunds = Refunds { gas_refunded: 0, @@ -351,15 +355,19 @@ impl Vm { state_diffs: 
self.compute_state_diffs(), }; - // Save the pubdata for the future initial bootloader memory building - self.bootloader_state - .set_pubdata_input(pubdata_input.clone()); - // Apply the pubdata to the current memory let mut memory_to_apply = vec![]; - apply_pubdata_to_memory(&mut memory_to_apply, pubdata_input); + apply_pubdata_to_memory( + &mut memory_to_apply, + pubdata_builder.expect("`pubdata_builder` is required to finish batch"), + &pubdata_input, + self.system_env.version, + ); self.write_to_bootloader_heap(memory_to_apply); + + // Save the pubdata for the future initial bootloader memory building + self.bootloader_state.set_pubdata_input(pubdata_input); } Hook::PaymasterValidationEntered | Hook::ValidationStepEnded => { /* unused */ } @@ -384,8 +392,8 @@ impl Vm { } fn get_hook_params(&self) -> [U256; 3] { - (get_vm_hook_params_start_position(VM_VERSION) - ..get_vm_hook_params_start_position(VM_VERSION) + VM_HOOK_PARAMS_COUNT) + (get_vm_hook_params_start_position(self.vm_version.into()) + ..get_vm_hook_params_start_position(self.vm_version.into()) + VM_HOOK_PARAMS_COUNT) .map(|word| self.read_word_from_bootloader_heap(word as usize)) .collect::>() .try_into() @@ -394,7 +402,7 @@ impl Vm { fn get_tx_result(&self) -> U256 { let tx_idx = self.bootloader_state.current_tx(); - let slot = get_result_success_first_slot(VM_VERSION) as usize + tx_idx; + let slot = get_result_success_first_slot(self.vm_version.into()) as usize + tx_idx; self.read_word_from_bootloader_heap(slot) } @@ -455,7 +463,7 @@ impl Vm { pub(crate) fn insert_bytecodes<'a>(&mut self, bytecodes: impl IntoIterator) { for code in bytecodes { - let hash = h256_to_u256(hash_bytecode(code)); + let hash = BytecodeHash::for_bytecode(code).value_u256(); self.world.bytecode_cache.insert(hash, code.into()); } } @@ -576,6 +584,7 @@ impl Vm { &mut self, tracer: &mut Tr, execution_mode: VmExecutionMode, + pubdata_builder: Option<&dyn PubdataBuilder>, ) -> VmExecutionResultAndLogs { let mut track_refunds = false; if matches!(execution_mode, VmExecutionMode::OneTx) { @@ -591,7 +600,12 @@ impl Vm { (mem::take(tracer), CircuitsTracer::default()), EvmDeployTracer::new(self.world.dynamic_bytecodes.clone()), ); - let result = self.run(execution_mode, &mut full_tracer, track_refunds); + let result = self.run( + execution_mode, + &mut full_tracer, + track_refunds, + pubdata_builder, + ); let ((external_tracer, circuits_tracer), _) = full_tracer; *tracer = external_tracer; // place the tracer back @@ -710,7 +724,7 @@ impl VmInterface for Vm { tracer: &mut Self::TracerDispatcher, execution_mode: InspectExecutionMode, ) -> VmExecutionResultAndLogs { - self.inspect_inner(tracer, execution_mode.into()) + self.inspect_inner(tracer, execution_mode.into(), None) } fn inspect_transaction_with_bytecode_compression( @@ -737,19 +751,23 @@ impl VmInterface for Vm { self.bootloader_state.start_new_l2_block(l2_block_env) } - fn finish_batch(&mut self, _pubdata_builder: Rc) -> FinishedL1Batch { - let result = self.inspect_inner(&mut Tr::default(), VmExecutionMode::Batch); + fn finish_batch(&mut self, pubdata_builder: Rc) -> FinishedL1Batch { + let result = self.inspect_inner( + &mut Tr::default(), + VmExecutionMode::Batch, + Some(pubdata_builder.as_ref()), + ); let execution_state = self.get_current_execution_state(); - let bootloader_memory = self.bootloader_state.bootloader_memory(); + let bootloader_memory = self + .bootloader_state + .bootloader_memory(pubdata_builder.as_ref()); FinishedL1Batch { block_tip_execution_result: result, final_execution_state: 
execution_state, final_bootloader_memory: Some(bootloader_memory), pubdata_input: Some( self.bootloader_state - .get_pubdata_information() - .clone() - .build_pubdata(false), + .settlement_layer_pubdata(pubdata_builder.as_ref()), ), state_diffs: Some( self.bootloader_state @@ -845,7 +863,7 @@ impl World { ) -> (U256, Program) { ( h256_to_u256(code.hash), - Program::from_words(code.code.clone(), is_bootloader), + Program::new(&code.code, is_bootloader), ) } diff --git a/core/lib/multivm/src/versions/vm_latest/bootloader_state/l2_block.rs b/core/lib/multivm/src/versions/vm_latest/bootloader_state/l2_block.rs index 103c5d16540e..95502b8dc60c 100644 --- a/core/lib/multivm/src/versions/vm_latest/bootloader_state/l2_block.rs +++ b/core/lib/multivm/src/versions/vm_latest/bootloader_state/l2_block.rs @@ -1,7 +1,6 @@ use std::cmp::Ordering; -use zksync_types::{L2BlockNumber, H256}; -use zksync_utils::concat_and_hash; +use zksync_types::{web3::keccak256_concat, L2BlockNumber, H256}; use crate::{ interface::{L2Block, L2BlockEnv}, @@ -53,7 +52,7 @@ impl BootloaderL2Block { } fn update_rolling_hash(&mut self, tx_hash: H256) { - self.txs_rolling_hash = concat_and_hash(self.txs_rolling_hash, tx_hash) + self.txs_rolling_hash = keccak256_concat(self.txs_rolling_hash, tx_hash) } pub(crate) fn make_snapshot(&self) -> L2BlockSnapshot { diff --git a/core/lib/multivm/src/versions/vm_latest/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_latest/bootloader_state/utils.rs index c409bda35c1d..58dc20346a6f 100644 --- a/core/lib/multivm/src/versions/vm_latest/bootloader_state/utils.rs +++ b/core/lib/multivm/src/versions/vm_latest/bootloader_state/utils.rs @@ -1,5 +1,4 @@ -use zksync_types::{ethabi, ProtocolVersionId, U256}; -use zksync_utils::{bytes_to_be_words, h256_to_u256}; +use zksync_types::{ethabi, h256_to_u256, ProtocolVersionId, U256}; use super::tx::BootloaderTx; use crate::{ @@ -27,8 +26,7 @@ pub(super) fn get_memory_for_compressed_bytecodes( .iter() .flat_map(bytecode::encode_call) .collect(); - - bytes_to_be_words(memory_addition) + bytecode::bytes_to_be_words(&memory_addition) } #[allow(clippy::too_many_arguments)] diff --git a/core/lib/multivm/src/versions/vm_latest/constants.rs b/core/lib/multivm/src/versions/vm_latest/constants.rs index c047e6ffa3b0..c95771f9e849 100644 --- a/core/lib/multivm/src/versions/vm_latest/constants.rs +++ b/core/lib/multivm/src/versions/vm_latest/constants.rs @@ -5,7 +5,7 @@ pub use zk_evm_1_5_0::zkevm_opcode_defs::system_params::{ }; use zksync_system_constants::MAX_NEW_FACTORY_DEPS; -use super::vm::MultiVMSubversion; +use super::vm::MultiVmSubversion; use crate::vm_latest::old_vm::utils::heap_page_from_base; /// The amount of ergs to be reserved at the end of the batch to ensure that it has enough ergs to verify compression, etc. @@ -22,15 +22,15 @@ pub(crate) const MAX_BASE_LAYER_CIRCUITS: usize = 34100; /// the requirements on RAM. /// In this version of the VM, the amount of used bootloader memory has increased from `30_000_000` to `59_000_000` bytes, /// and then to `63_800_000` in a subsequent upgrade.
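For orientation, the word-count constants defined just below derive directly from these byte figures, since bootloader memory is addressed in 32-byte words. A quick standalone check; the two subtracted offsets are illustrative placeholders, not the real values from `constants.rs`:

```rust
fn main() {
    // Byte figures from the doc comment above.
    let small: usize = 59_000_000;
    let increased: usize = 63_800_000;

    // get_used_bootloader_memory_words is bytes / 32.
    assert_eq!(small / 32, 1_843_750);
    assert_eq!(increased / 32, 1_993_750);

    // The tx encoding space is whatever remains after the fixed
    // descriptor area; both constants here are made-up placeholders.
    let tx_description_offset: usize = 1_000_000; // placeholder
    let max_txs_in_batch: usize = 10_000; // placeholder
    let encoding_space = (increased / 32 - tx_description_offset - max_txs_in_batch) as u32;
    println!("tx encoding space: {encoding_space} words");
}
```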
-pub(crate) const fn get_used_bootloader_memory_bytes(subversion: MultiVMSubversion) -> usize { +pub(crate) const fn get_used_bootloader_memory_bytes(subversion: MultiVmSubversion) -> usize { match subversion { - MultiVMSubversion::SmallBootloaderMemory => 59_000_000, - MultiVMSubversion::IncreasedBootloaderMemory => 63_800_000, - MultiVMSubversion::Gateway => 63_800_000, + MultiVmSubversion::SmallBootloaderMemory => 59_000_000, + MultiVmSubversion::IncreasedBootloaderMemory => 63_800_000, + MultiVmSubversion::Gateway => 63_800_000, } } -pub(crate) const fn get_used_bootloader_memory_words(subversion: MultiVMSubversion) -> usize { +pub(crate) const fn get_used_bootloader_memory_words(subversion: MultiVmSubversion) -> usize { get_used_bootloader_memory_bytes(subversion) / 32 } @@ -105,7 +105,7 @@ pub(crate) const BOOTLOADER_TX_DESCRIPTION_OFFSET: usize = OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_OFFSET + OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_SLOTS; /// The size of the bootloader memory dedicated to the encodings of transactions -pub(crate) const fn get_bootloader_tx_encoding_space(subversion: MultiVMSubversion) -> u32 { +pub(crate) const fn get_bootloader_tx_encoding_space(subversion: MultiVmSubversion) -> u32 { (get_used_bootloader_memory_words(subversion) - TX_DESCRIPTION_OFFSET - MAX_TXS_IN_BATCH) as u32 } @@ -129,21 +129,21 @@ pub const BOOTLOADER_HEAP_PAGE: u32 = heap_page_from_base(MemoryPage(INITIAL_BAS /// So the layout looks like this: /// `[param 0][param 1][param 2][vmhook opcode]` pub const VM_HOOK_PARAMS_COUNT: u32 = 3; -pub(crate) const fn get_vm_hook_position(subversion: MultiVMSubversion) -> u32 { +pub(crate) const fn get_vm_hook_position(subversion: MultiVmSubversion) -> u32 { get_result_success_first_slot(subversion) - 1 } -pub(crate) const fn get_vm_hook_params_start_position(subversion: MultiVMSubversion) -> u32 { +pub(crate) const fn get_vm_hook_params_start_position(subversion: MultiVmSubversion) -> u32 { get_vm_hook_position(subversion) - VM_HOOK_PARAMS_COUNT } /// Method that provides the start position of the vm hook in the memory for the latest version of v1.5.0. /// This method is used only in `test_infra` in the bootloader tests and that's why it should be exposed. 
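The hook-related constants above are pure offset arithmetic: the hook word sits one slot below the result-success area, and its three parameters sit immediately before it. A small sketch of that layout (the concrete slot number is made up for illustration):

```rust
const VM_HOOK_PARAMS_COUNT: u32 = 3;

fn main() {
    // Stand-in for get_result_success_first_slot(subversion).
    let result_success_first_slot: u32 = 1_993_437; // illustrative
    let vm_hook_position = result_success_first_slot - 1;
    let params_start = vm_hook_position - VM_HOOK_PARAMS_COUNT;

    // Layout: [param 0][param 1][param 2][vmhook opcode]
    let param_slots: Vec<u32> = (params_start..vm_hook_position).collect();
    assert_eq!(param_slots.len(), VM_HOOK_PARAMS_COUNT as usize);
    println!("hook at {vm_hook_position}, params at {param_slots:?}");
}
```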
pub const fn get_vm_hook_start_position_latest() -> u32 { - get_vm_hook_params_start_position(MultiVMSubversion::IncreasedBootloaderMemory) + get_vm_hook_params_start_position(MultiVmSubversion::IncreasedBootloaderMemory) } /// Arbitrary space in memory closer to the end of the page -pub(crate) const fn get_result_success_first_slot(subversion: MultiVMSubversion) -> u32 { +pub(crate) const fn get_result_success_first_slot(subversion: MultiVmSubversion) -> u32 { ((get_used_bootloader_memory_bytes(subversion) as u32) - (MAX_TXS_IN_BATCH as u32) * 32) / 32 } diff --git a/core/lib/multivm/src/versions/vm_latest/implementation/bytecode.rs b/core/lib/multivm/src/versions/vm_latest/implementation/bytecode.rs index 2cd98c8e58a3..655f55bc8fc2 100644 --- a/core/lib/multivm/src/versions/vm_latest/implementation/bytecode.rs +++ b/core/lib/multivm/src/versions/vm_latest/implementation/bytecode.rs @@ -1,13 +1,12 @@ use itertools::Itertools; -use zksync_types::U256; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words}; +use zksync_types::{bytecode::BytecodeHash, U256}; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, CompressedBytecodeInfo, }, - utils::bytecode, + utils::{bytecode, bytecode::bytes_to_be_words}, vm_latest::Vm, HistoryMode, }; @@ -25,18 +24,15 @@ impl Vm { .storage .get_ptr() .borrow_mut() - .is_bytecode_known(&hash_bytecode(&info.original)) + .is_bytecode_known(&BytecodeHash::for_bytecode(&info.original).value()) }) } } /// Converts bytecode to tokens and hashes it. pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { - let bytecode_hash = hash_bytecode(&bytecode); - let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_hash = BytecodeHash::for_bytecode(&bytecode).value_u256(); + let bytecode_words = bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } @@ -49,7 +45,11 @@ pub(crate) fn compress_bytecodes( .enumerate() .sorted_by_key(|(_idx, dep)| *dep) .dedup_by(|x, y| x.1 == y.1) - .filter(|(_idx, dep)| !storage.borrow_mut().is_bytecode_known(&hash_bytecode(dep))) + .filter(|(_idx, dep)| { + !storage + .borrow_mut() + .is_bytecode_known(&BytecodeHash::for_bytecode(dep).value()) + }) .sorted_by_key(|(idx, _dep)| *idx) .filter_map(|(_idx, dep)| bytecode::compress(dep.clone()).ok()) .collect() diff --git a/core/lib/multivm/src/versions/vm_latest/mod.rs b/core/lib/multivm/src/versions/vm_latest/mod.rs index 211c527c3816..46f8db789ddc 100644 --- a/core/lib/multivm/src/versions/vm_latest/mod.rs +++ b/core/lib/multivm/src/versions/vm_latest/mod.rs @@ -1,4 +1,4 @@ -pub(crate) use self::vm::MultiVMSubversion; +pub(crate) use self::vm::MultiVmSubversion; pub use self::{ bootloader_state::BootloaderState, old_vm::{ diff --git a/core/lib/multivm/src/versions/vm_latest/old_vm/events.rs b/core/lib/multivm/src/versions/vm_latest/old_vm/events.rs index fd6f393155d7..bded254c7fcc 100644 --- a/core/lib/multivm/src/versions/vm_latest/old_vm/events.rs +++ b/core/lib/multivm/src/versions/vm_latest/old_vm/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_5_0::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub(crate) struct 
SolidityLikeEvent { @@ -135,7 +134,7 @@ pub(crate) fn merge_events(events: Vec<EventMessage>) -> Vec<SolidityLikeEvent> .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // For events from the event writer, the first topic is the actual address of the event emitter and the rest of the topics are the real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_latest/old_vm/history_recorder.rs b/core/lib/multivm/src/versions/vm_latest/old_vm/history_recorder.rs index e7277f38289d..9dac6480dc57 100644 --- a/core/lib/multivm/src/versions/vm_latest/old_vm/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_latest/old_vm/history_recorder.rs @@ -5,8 +5,7 @@ use zk_evm_1_5_0::{ vm_state::PrimitiveValue, zkevm_opcode_defs::{self}, }; -use zksync_types::{StorageKey, H256, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, H256, U256}; use crate::interface::storage::{StoragePtr, WriteStorage}; diff --git a/core/lib/multivm/src/versions/vm_latest/old_vm/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_latest/old_vm/oracles/decommitter.rs index 507e3d8c7598..1afa9b483ec5 100644 --- a/core/lib/multivm/src/versions/vm_latest/old_vm/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_latest/old_vm/oracles/decommitter.rs @@ -10,12 +10,12 @@ use zk_evm_1_5_0::{ }, zkevm_opcode_defs::{VersionedHashHeader, VersionedHashNormalizedPreimage}, }; -use zksync_types::{H256, U256}; -use zksync_utils::{bytes_to_be_words, h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, H256, U256}; use super::OracleWithHistory; use crate::{ interface::storage::{ReadStorage, StoragePtr}, + utils::bytecode::bytes_to_be_words, vm_latest::old_vm::history_recorder::{ HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory, }, @@ -69,7 +69,7 @@ impl DecommitterOracle { .load_factory_dep(u256_to_h256(hash)) .unwrap_or_else(|| panic!("Trying to decommit nonexistent hash: {}", hash)); - let value = bytes_to_be_words(value); + let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_latest/oracles/storage.rs b/core/lib/multivm/src/versions/vm_latest/oracles/storage.rs index 9c7b68c1ad51..242cdc6a2239 100644 --- a/core/lib/multivm/src/versions/vm_latest/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_latest/oracles/storage.rs @@ -10,6 +10,7 @@ use zk_evm_1_5_0::{ }, }; use zksync_types::{ + h256_to_u256, u256_to_h256, utils::storage_key_for_eth_balance, writes::{ compression::compress_with_best_strategy, BYTES_PER_DERIVED_KEY, }, AccountTreeId, Address, StorageKey, StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use crate::{ glue::GlueInto, @@ -620,8 +620,7 @@ fn get_pubdata_price_bytes(initial_value: U256, final_value: U256, is_initial: b #[cfg(test)] mod tests { - use zksync_types::H256; - use zksync_utils::h256_to_u256; + use zksync_types::{h256_to_u256, H256}; use super::*; use crate::interface::storage::{InMemoryStorage, StorageView}; diff --git a/core/lib/multivm/src/versions/vm_latest/tests/call_tracer.rs b/core/lib/multivm/src/versions/vm_latest/tests/call_tracer.rs index b502ea50b1af..c8f623478569 100644 --- a/core/lib/multivm/src/versions/vm_latest/tests/call_tracer.rs
+++ b/core/lib/multivm/src/versions/vm_latest/tests/call_tracer.rs @@ -1,15 +1,14 @@ use std::sync::Arc; use once_cell::sync::OnceCell; +use zksync_test_contracts::TestContract; use zksync_types::{Address, Execute}; use super::TestedLatestVm; use crate::{ interface::{InspectExecutionMode, TxExecutionMode, VmInterface}, tracers::CallTracer, - versions::testonly::{ - read_max_depth_contract, read_test_contract, ContractToDeploy, VmTesterBuilder, - }, + versions::testonly::{read_max_depth_contract, ContractToDeploy, VmTesterBuilder}, vm_latest::{constants::BATCH_COMPUTATIONAL_GAS_LIMIT, ToTracerPointer}, }; @@ -50,8 +49,8 @@ fn test_max_depth() { #[test] fn test_basic_behavior() { - let contract = read_test_contract(); - let address = Address::random(); + let contract = TestContract::counter().bytecode.to_vec(); + let address = Address::repeat_byte(1); let mut vm = VmTesterBuilder::new() .with_empty_in_memory_storage() .with_rich_accounts(1) diff --git a/core/lib/multivm/src/versions/vm_latest/tests/constants.rs b/core/lib/multivm/src/versions/vm_latest/tests/constants.rs index 3b75bfd6d36b..8ee62650ca77 100644 --- a/core/lib/multivm/src/versions/vm_latest/tests/constants.rs +++ b/core/lib/multivm/src/versions/vm_latest/tests/constants.rs @@ -3,7 +3,7 @@ #[test] fn test_that_bootloader_encoding_space_is_large_enoguh() { let encoding_space = crate::vm_latest::constants::get_bootloader_tx_encoding_space( - crate::vm_latest::MultiVMSubversion::latest(), + crate::vm_latest::MultiVmSubversion::latest(), ); assert!(encoding_space >= 330000, "Bootloader tx space is too small"); } diff --git a/core/lib/multivm/src/versions/vm_latest/tests/mod.rs b/core/lib/multivm/src/versions/vm_latest/tests/mod.rs index 96d59f208b03..b059c9716d89 100644 --- a/core/lib/multivm/src/versions/vm_latest/tests/mod.rs +++ b/core/lib/multivm/src/versions/vm_latest/tests/mod.rs @@ -8,8 +8,10 @@ use zk_evm_1_5_0::{ vm_state::VmLocalState, zkevm_opcode_defs::{ContractCodeSha256Format, VersionedHashLen32}, }; -use zksync_types::{writes::StateDiffRecord, StorageKey, StorageValue, Transaction, H256, U256}; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256}; +use zksync_types::{ + bytecode::BytecodeHash, writes::StateDiffRecord, StorageKey, StorageValue, Transaction, H256, + U256, +}; use zksync_vm_interface::pubdata::PubdataBuilder; use super::{HistoryEnabled, Vm}; @@ -18,6 +20,7 @@ use crate::{ storage::{InMemoryStorage, ReadStorage, StorageView, WriteStorage}, CurrentExecutionState, L2BlockEnv, VmExecutionMode, VmExecutionResultAndLogs, }, + utils::bytecode::bytes_to_be_words, versions::testonly::{filter_out_base_system_contracts, TestedVm}, vm_latest::{ constants::BOOTLOADER_HEAP_PAGE, @@ -88,7 +91,7 @@ impl TestedVm for TestedLatestVm { self.batch_env.clone(), VmExecutionMode::Batch, diffs, - crate::vm_latest::MultiVMSubversion::latest(), + crate::vm_latest::MultiVmSubversion::latest(), Some(pubdata_builder), ); self.inspect_inner( @@ -110,9 +113,9 @@ impl TestedVm for TestedLatestVm { let bytecodes = bytecodes .iter() .map(|&bytecode| { - let hash = hash_bytecode(bytecode); - let words = bytes_to_be_words(bytecode.to_vec()); - (h256_to_u256(hash), words) + let hash = BytecodeHash::for_bytecode(bytecode).value_u256(); + let words = bytes_to_be_words(bytecode); + (hash, words) }) .collect(); self.state diff --git a/core/lib/multivm/src/versions/vm_latest/tests/prestate_tracer.rs b/core/lib/multivm/src/versions/vm_latest/tests/prestate_tracer.rs index 7028f7a89711..8dce2765233c 100644 --- 
a/core/lib/multivm/src/versions/vm_latest/tests/prestate_tracer.rs +++ b/core/lib/multivm/src/versions/vm_latest/tests/prestate_tracer.rs @@ -1,14 +1,14 @@ use std::sync::Arc; use once_cell::sync::OnceCell; -use zksync_test_account::TxType; +use zksync_test_contracts::{TestContract, TxType}; use zksync_types::{utils::deployed_address_create, Execute, U256}; use super::TestedLatestVm; use crate::{ interface::{InspectExecutionMode, TxExecutionMode, VmInterface, VmInterfaceExt}, tracers::PrestateTracer, - versions::testonly::{read_simple_transfer_contract, VmTesterBuilder}, + versions::testonly::VmTesterBuilder, vm_latest::{constants::BATCH_COMPUTATIONAL_GAS_LIMIT, ToTracerPointer}, }; @@ -56,9 +56,9 @@ fn test_prestate_tracer_diff_mode() { .with_bootloader_gas_limit(BATCH_COMPUTATIONAL_GAS_LIMIT) .with_execution_mode(TxExecutionMode::VerifyExecute) .build::(); - let contract = read_simple_transfer_contract(); + let contract = TestContract::simple_transfer().bytecode; let account = &mut vm.rich_accounts[0]; - let tx = account.get_deploy_tx(&contract, None, TxType::L2).tx; + let tx = account.get_deploy_tx(contract, None, TxType::L2).tx; let nonce = tx.nonce().unwrap().0.into(); vm.vm.push_transaction(tx); vm.vm.execute(InspectExecutionMode::OneTx); @@ -66,7 +66,7 @@ fn test_prestate_tracer_diff_mode() { vm.test_contract = Some(deployed_address); // Deploy a second copy of the contract to see its appearance in the pre-state - let tx2 = account.get_deploy_tx(&contract, None, TxType::L2).tx; + let tx2 = account.get_deploy_tx(contract, None, TxType::L2).tx; let nonce2 = tx2.nonce().unwrap().0.into(); vm.vm.push_transaction(tx2); vm.vm.execute(InspectExecutionMode::OneTx); diff --git a/core/lib/multivm/src/versions/vm_latest/tests/rollbacks.rs b/core/lib/multivm/src/versions/vm_latest/tests/rollbacks.rs index de674498427d..f126a7f8fbdd 100644 --- a/core/lib/multivm/src/versions/vm_latest/tests/rollbacks.rs +++ b/core/lib/multivm/src/versions/vm_latest/tests/rollbacks.rs @@ -1,6 +1,7 @@ use ethabi::Token; -use zksync_contracts::{get_loadnext_contract, test_contracts::LoadnextContractExecutionParams}; -use zksync_test_account::{DeployContractsTx, TxType}; +use zksync_test_contracts::{ + DeployContractsTx, LoadnextContractExecutionParams, TestContract, TxType, +}; use zksync_types::{get_nonce_key, U256}; use zksync_vm_interface::InspectExecutionMode; @@ -58,7 +59,7 @@ impl VmTracer for MaxRecursionTracer { } #[test] -fn test_layered_rollback() { +fn layered_rollback() { // This test checks that the layered rollbacks work correctly, i.e. // the rollback by the operator will always revert all the changes @@ -69,14 +70,13 @@ fn test_layered_rollback() { .build::(); let account = &mut vm.rich_accounts[0]; - let loadnext_contract = get_loadnext_contract().bytecode; let DeployContractsTx { tx: deploy_tx, address, .. 
} = account.get_deploy_tx( - &loadnext_contract, + TestContract::load_test().bytecode, Some(&[Token::Uint(0.into())]), TxType::L2, ); @@ -87,7 +87,7 @@ fn test_layered_rollback() { let loadnext_transaction = account.get_loadnext_transaction( address, LoadnextContractExecutionParams { - writes: 1, + initial_writes: 1, recursive_calls: 20, ..LoadnextContractExecutionParams::empty() }, diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/default_tracers.rs b/core/lib/multivm/src/versions/vm_latest/tracers/default_tracers.rs index 7156acce152e..8755b98ddb8c 100755 --- a/core/lib/multivm/src/versions/vm_latest/tracers/default_tracers.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/default_tracers.rs @@ -32,7 +32,7 @@ use crate::{ CircuitsTracer, RefundsTracer, ResultTracer, }, types::internals::ZkSyncVmState, - vm::MultiVMSubversion, + vm::MultiVmSubversion, VmTracer, }, }; @@ -65,7 +65,7 @@ pub struct DefaultExecutionTracer { pub(crate) circuits_tracer: CircuitsTracer, // This tracer is responsible for handling EVM deployments and providing the data to the code decommitter. pub(crate) evm_deploy_tracer: Option>, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, storage: StoragePtr, _phantom: PhantomData, } @@ -80,7 +80,7 @@ impl DefaultExecutionTracer { storage: StoragePtr, refund_tracer: Option>, pubdata_tracer: Option>, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, ) -> Self { Self { tx_has_been_processed: false, diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs b/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs index 61c8ef0b5abf..2e6ab8089eb0 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs @@ -7,14 +7,18 @@ use zk_evm_1_5_0::{ FarCallOpcode, FatPointer, Opcode, CALL_IMPLICIT_CALLDATA_FAT_PTR_REGISTER, }, }; -use zksync_types::{CONTRACT_DEPLOYER_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS}; -use zksync_utils::{bytecode::hash_evm_bytecode, bytes_to_be_words, h256_to_u256}; -use zksync_vm_interface::storage::StoragePtr; +use zksync_types::{ + bytecode::BytecodeHash, CONTRACT_DEPLOYER_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, +}; use super::{traits::VmTracer, utils::read_pointer}; use crate::{ - interface::{storage::WriteStorage, tracer::TracerExecutionStatus}, + interface::{ + storage::{StoragePtr, WriteStorage}, + tracer::TracerExecutionStatus, + }, tracers::dynamic::vm_1_5_0::DynTracer, + utils::bytecode::bytes_to_be_words, vm_latest::{BootloaderState, HistoryMode, SimpleMemory, ZkSyncVmState}, }; @@ -91,8 +95,8 @@ impl VmTracer for EvmDeployTracer { ) -> TracerExecutionStatus { let timestamp = Timestamp(state.local_state.timestamp); for published_bytecode in mem::take(&mut self.pending_bytecodes) { - let hash = h256_to_u256(hash_evm_bytecode(&published_bytecode)); - let as_words = bytes_to_be_words(published_bytecode); + let hash = BytecodeHash::for_evm_bytecode(&published_bytecode).value_u256(); + let as_words = bytes_to_be_words(&published_bytecode); state .decommittment_processor .insert_dynamic_bytecode(hash, as_words, timestamp); diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/pubdata_tracer.rs b/core/lib/multivm/src/versions/vm_latest/tracers/pubdata_tracer.rs index 998e8a13ad25..3698914630dd 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/pubdata_tracer.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/pubdata_tracer.rs @@ -5,8 +5,10 @@ use 
zk_evm_1_5_0::{ aux_structures::Timestamp, tracing::{BeforeExecutionData, VmLocalStateData}, }; -use zksync_types::{writes::StateDiffRecord, AccountTreeId, StorageKey, L1_MESSENGER_ADDRESS}; -use zksync_utils::{h256_to_u256, u256_to_bytes_be, u256_to_h256}; +use zksync_types::{ + h256_to_u256, u256_to_h256, writes::StateDiffRecord, AccountTreeId, StorageKey, + L1_MESSENGER_ADDRESS, +}; use zksync_vm_interface::pubdata::PubdataBuilder; use crate::{ @@ -17,9 +19,12 @@ use crate::{ L1BatchEnv, VmEvent, VmExecutionMode, }, tracers::dynamic::vm_1_5_0::DynTracer, - utils::events::{ - extract_bytecode_publication_requests_from_l1_messenger, - extract_l2tol1logs_from_l1_messenger, + utils::{ + bytecode::be_words_to_bytes, + events::{ + extract_bytecode_publication_requests_from_l1_messenger, + extract_l2tol1logs_from_l1_messenger, + }, }, vm_latest::{ bootloader_state::{utils::apply_pubdata_to_memory, BootloaderState}, @@ -28,7 +33,7 @@ use crate::{ tracers::{traits::VmTracer, utils::VmHook}, types::internals::ZkSyncVmState, utils::logs::collect_events_and_l1_system_logs_after_timestamp, - vm::MultiVMSubversion, + vm::MultiVmSubversion, StorageOracle, }, }; @@ -42,7 +47,7 @@ pub(crate) struct PubdataTracer { // For testing purposes it might be helpful to supply an exact set of state diffs to be provided // to the L1Messenger. enforced_state_diffs: Option>, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, pubdata_builder: Option>, _phantom_data: PhantomData, } @@ -51,7 +56,7 @@ impl PubdataTracer { pub(crate) fn new( l1_batch_env: L1BatchEnv, execution_mode: VmExecutionMode, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, pubdata_builder: Option>, ) -> Self { Self { @@ -72,7 +77,7 @@ impl PubdataTracer { l1_batch_env: L1BatchEnv, execution_mode: VmExecutionMode, forced_state_diffs: Vec, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, pubdata_builder: Option>, ) -> Self { Self { @@ -132,15 +137,13 @@ impl PubdataTracer { bytecode_publication_requests .iter() .map(|bytecode_publication_request| { - state + let bytecode_words = state .decommittment_processor .known_bytecodes .inner() .get(&h256_to_u256(bytecode_publication_request.bytecode_hash)) - .unwrap() - .iter() - .flat_map(u256_to_bytes_be) - .collect() + .unwrap(); + be_words_to_bytes(bytecode_words) }) .collect() } diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/refunds.rs b/core/lib/multivm/src/versions/vm_latest/tracers/refunds.rs index 78826a16313d..6ef251c2db98 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/refunds.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/refunds.rs @@ -5,8 +5,7 @@ use zk_evm_1_5_0::{ aux_structures::Timestamp, tracing::{BeforeExecutionData, VmLocalStateData}, }; -use zksync_types::{H256, U256}; -use zksync_utils::ceil_div_u256; +use zksync_types::{ceil_div_u256, H256, U256}; use crate::{ interface::{ @@ -25,7 +24,7 @@ use crate::{ }, types::internals::ZkSyncVmState, utils::fee::get_batch_base_fee, - vm::MultiVMSubversion, + vm::MultiVmSubversion, }, }; @@ -51,12 +50,12 @@ pub(crate) struct RefundsTracer { spent_pubdata_counter_before: u32, l1_batch: L1BatchEnv, pubdata_published: u32, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, _phantom: PhantomData, } impl RefundsTracer { - pub(crate) fn new(l1_batch: L1BatchEnv, subversion: MultiVMSubversion) -> Self { + pub(crate) fn new(l1_batch: L1BatchEnv, subversion: MultiVmSubversion) -> Self { Self { pending_refund_request: None, refund_gas: 0, diff --git 
a/core/lib/multivm/src/versions/vm_latest/tracers/result_tracer.rs b/core/lib/multivm/src/versions/vm_latest/tracers/result_tracer.rs index 0687c8393c62..80a3147f65d2 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/result_tracer.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/result_tracer.rs @@ -23,7 +23,7 @@ use crate::{ utils::{get_vm_hook_params, read_pointer, VmHook}, }, types::internals::ZkSyncVmState, - vm::MultiVMSubversion, + vm::MultiVmSubversion, BootloaderState, HistoryMode, SimpleMemory, }, }; @@ -102,7 +102,7 @@ pub(crate) struct ResultTracer { execution_mode: VmExecutionMode, far_call_tracker: FarCallTracker, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, pub(crate) tx_finished_in_one_tx_mode: bool, @@ -110,7 +110,7 @@ pub(crate) struct ResultTracer { } impl ResultTracer { - pub(crate) fn new(execution_mode: VmExecutionMode, subversion: MultiVMSubversion) -> Self { + pub(crate) fn new(execution_mode: VmExecutionMode, subversion: MultiVmSubversion) -> Self { Self { result: None, bootloader_out_of_gas: false, @@ -336,7 +336,7 @@ impl ResultTracer { pub(crate) fn tx_has_failed( state: &ZkSyncVmState, tx_id: u32, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, ) -> bool { let mem_slot = get_result_success_first_slot(subversion) + tx_id; let mem_value = state diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs b/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs index 0a11f5d3f849..6f81a3ac8de5 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs @@ -9,8 +9,7 @@ use zksync_system_constants::{ ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, SECP256R1_VERIFY_PRECOMPILE_ADDRESS, SHA256_PRECOMPILE_ADDRESS, }; -use zksync_types::U256; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, U256}; use crate::vm_latest::{ constants::{ @@ -22,7 +21,7 @@ use crate::vm_latest::{ memory::SimpleMemory, utils::{aux_heap_page_from_base, heap_page_from_base}, }, - vm::MultiVMSubversion, + vm::MultiVmSubversion, }; #[derive(Clone, Debug, Copy)] @@ -48,7 +47,7 @@ impl VmHook { pub(crate) fn from_opcode_memory( state: &VmLocalStateData<'_>, data: &BeforeExecutionData, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, ) -> Self { let opcode_variant = data.opcode.variant; let heap_page = @@ -90,7 +89,7 @@ impl VmHook { pub(crate) fn get_debug_log( state: &VmLocalStateData<'_>, memory: &SimpleMemory, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, ) -> String { let vm_hook_params: Vec<_> = get_vm_hook_params(memory, subversion) .into_iter() @@ -162,7 +161,7 @@ pub(crate) fn print_debug_if_needed( state: &VmLocalStateData<'_>, memory: &SimpleMemory, latest_returndata_ptr: Option, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, ) { let log = match hook { VmHook::DebugLog => get_debug_log(state, memory, subversion), @@ -211,7 +210,7 @@ pub(crate) fn get_calldata_page_via_abi(far_call_abi: &FarCallABI, base_page: Me } pub(crate) fn get_vm_hook_params( memory: &SimpleMemory, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, ) -> Vec { let start_position = get_vm_hook_params_start_position(subversion); memory.dump_page_content_as_u256_words( diff --git a/core/lib/multivm/src/versions/vm_latest/types/internals/transaction_data.rs b/core/lib/multivm/src/versions/vm_latest/types/internals/transaction_data.rs index 90948f2f89fd..33f923414eb3 100644 --- 
a/core/lib/multivm/src/versions/vm_latest/types/internals/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_latest/types/internals/transaction_data.rs @@ -1,19 +1,24 @@ use std::convert::TryInto; use zksync_types::{ + address_to_h256, + bytecode::BytecodeHash, ethabi::{encode, Address, Token}, fee::{encoding_len, Fee}, + h256_to_u256, l1::is_l1_tx_type, l2::{L2Tx, TransactionType}, transaction_request::{PaymasterParams, TransactionRequest}, web3::Bytes, Execute, ExecuteTransactionCommon, L2ChainId, L2TxCommonData, Nonce, Transaction, H256, U256, }; -use zksync_utils::{address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256}; -use crate::vm_latest::{ - constants::{MAX_GAS_PER_PUBDATA_BYTE, TX_MAX_COMPUTE_GAS_LIMIT}, - utils::overhead::derive_overhead, +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_latest::{ + constants::{MAX_GAS_PER_PUBDATA_BYTE, TX_MAX_COMPUTE_GAS_LIMIT}, + utils::overhead::derive_overhead, + }, }; /// This structure represents the data that is used by @@ -203,16 +208,13 @@ impl TransactionData { let factory_deps_hashes = self .factory_deps .iter() - .map(|dep| h256_to_u256(hash_bytecode(dep))) + .map(|dep| BytecodeHash::for_bytecode(dep).value_u256()) .collect(); self.abi_encode_with_custom_factory_deps(factory_deps_hashes) } pub(crate) fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn overhead_gas(&self) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs b/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs index 90bb0c610e2c..03f306f36c52 100644 --- a/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs +++ b/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs @@ -11,14 +11,14 @@ use zk_evm_1_5_0::{ }, }; use zksync_system_constants::BOOTLOADER_ADDRESS; -use zksync_types::{block::L2BlockHasher, Address, L2BlockNumber}; -use zksync_utils::h256_to_u256; +use zksync_types::{block::L2BlockHasher, h256_to_u256, Address, L2BlockNumber}; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, L1BatchEnv, L2Block, SystemEnv, }, + utils::bytecode::bytes_to_be_words, vm_latest::{ bootloader_state::BootloaderState, constants::BOOTLOADER_HEAP_PAGE, @@ -88,25 +88,20 @@ pub(crate) fn new_vm_state( DecommitterOracle::new(storage); let mut initial_bytecodes = vec![( h256_to_u256(system_env.base_system_smart_contracts.default_aa.hash), - system_env - .base_system_smart_contracts - .default_aa - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.default_aa.code), )]; if let Some(evm_emulator) = &system_env.base_system_smart_contracts.evm_emulator { - initial_bytecodes.push((h256_to_u256(evm_emulator.hash), evm_emulator.code.clone())); + initial_bytecodes.push(( + h256_to_u256(evm_emulator.hash), + bytes_to_be_words(&evm_emulator.code), + )); } decommittment_processor.populate(initial_bytecodes, Timestamp(0)); memory.populate( vec![( BOOTLOADER_CODE_PAGE, - system_env - .base_system_smart_contracts - .bootloader - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.bootloader.code), )], Timestamp(0), ); diff --git a/core/lib/multivm/src/versions/vm_latest/types/l1_batch.rs b/core/lib/multivm/src/versions/vm_latest/types/l1_batch.rs index b3bf15cb1be5..89b22d328ac5 100644 --- a/core/lib/multivm/src/versions/vm_latest/types/l1_batch.rs +++ 
b/core/lib/multivm/src/versions/vm_latest/types/l1_batch.rs @@ -1,5 +1,4 @@ -use zksync_types::U256; -use zksync_utils::{address_to_u256, h256_to_u256}; +use zksync_types::{address_to_u256, h256_to_u256, U256}; use crate::{interface::L1BatchEnv, vm_latest::utils::fee::get_batch_base_fee}; diff --git a/core/lib/multivm/src/versions/vm_latest/utils/fee.rs b/core/lib/multivm/src/versions/vm_latest/utils/fee.rs index 666fcca87e12..58b457dce68a 100644 --- a/core/lib/multivm/src/versions/vm_latest/utils/fee.rs +++ b/core/lib/multivm/src/versions/vm_latest/utils/fee.rs @@ -1,6 +1,5 @@ //! Utility functions for vm use zksync_types::fee_model::PubdataIndependentBatchFeeModelInput; -use zksync_utils::ceil_div; use crate::{interface::L1BatchEnv, vm_latest::constants::MAX_GAS_PER_PUBDATA_BYTE}; @@ -18,11 +17,14 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while compensating us for it. let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(fair_pubdata_price, MAX_GAS_PER_PUBDATA_BYTE), + fair_pubdata_price.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); - let gas_per_pubdata = ceil_div(fair_pubdata_price, base_fee); - + let gas_per_pubdata = if fair_pubdata_price == 0 { + 0 + } else { + fair_pubdata_price.div_ceil(base_fee) + }; (base_fee, gas_per_pubdata) } diff --git a/core/lib/multivm/src/versions/vm_latest/utils/l2_blocks.rs b/core/lib/multivm/src/versions/vm_latest/utils/l2_blocks.rs index 59d3eb0ef0fc..840f1687ccfa 100644 --- a/core/lib/multivm/src/versions/vm_latest/utils/l2_blocks.rs +++ b/core/lib/multivm/src/versions/vm_latest/utils/l2_blocks.rs @@ -4,9 +4,9 @@ use zksync_system_constants::{ SYSTEM_CONTEXT_STORED_L2_BLOCK_HASHES, }; use zksync_types::{ - block::unpack_block_info, web3::keccak256, AccountTreeId, L2BlockNumber, StorageKey, H256, U256, + block::unpack_block_info, h256_to_u256, u256_to_h256, web3::keccak256, AccountTreeId, + L2BlockNumber, StorageKey, H256, U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use crate::interface::{ storage::{ReadStorage, StoragePtr}, diff --git a/core/lib/multivm/src/versions/vm_latest/vm.rs b/core/lib/multivm/src/versions/vm_latest/vm.rs index ff90eb14ee42..ada20af9fa3c 100644 --- a/core/lib/multivm/src/versions/vm_latest/vm.rs +++ b/core/lib/multivm/src/versions/vm_latest/vm.rs @@ -2,11 +2,12 @@ use std::{collections::HashMap, rc::Rc}; use circuit_sequencer_api_1_5_0::sort_storage_access::sort_storage_access_queries; use zksync_types::{ + h256_to_u256, l2_to_l1_log::{SystemL2ToL1Log, UserL2ToL1Log}, + u256_to_h256, vm::VmVersion, Transaction, H256, }; -use zksync_utils::{be_words_to_bytes, h256_to_u256, u256_to_h256}; use zksync_vm_interface::{pubdata::PubdataBuilder, InspectExecutionMode}; use crate::{ @@ -18,7 +19,7 @@ use crate::{ VmExecutionResultAndLogs, VmFactory, VmInterface, VmInterfaceHistoryEnabled, VmTrackingContracts, }, - utils::events::extract_l2tol1logs_from_l1_messenger, + utils::{bytecode::be_words_to_bytes, events::extract_l2tol1logs_from_l1_messenger}, vm_latest::{ bootloader_state::BootloaderState, old_vm::{events::merge_events, history_recorder::HistoryEnabled}, @@ -34,7 +35,7 @@ use crate::{ /// version was released with increased bootloader memory. The version with the small bootloader memory /// is available only on internal staging environments. #[derive(Debug, Copy, Clone)] -pub(crate) enum MultiVMSubversion { +pub(crate) enum MultiVmSubversion { /// The initial version of v1.5.0, available only on staging environments. 
SmallBootloaderMemory, /// The final correct version of v1.5.0 @@ -43,7 +44,7 @@ pub(crate) enum MultiVMSubversion { Gateway, } -impl MultiVMSubversion { +impl MultiVmSubversion { #[cfg(test)] pub(crate) fn latest() -> Self { Self::IncreasedBootloaderMemory @@ -52,7 +53,7 @@ impl MultiVMSubversion { #[derive(Debug)] pub(crate) struct VmVersionIsNotVm150Error; -impl TryFrom<VmVersion> for MultiVMSubversion { +impl TryFrom<VmVersion> for MultiVmSubversion { type Error = VmVersionIsNotVm150Error; fn try_from(value: VmVersion) -> Result<Self, Self::Error> { match value { @@ -76,7 +77,7 @@ pub struct Vm { pub(crate) batch_env: L1BatchEnv, // Snapshots for the current run pub(crate) snapshots: Vec, - pub(crate) subversion: MultiVMSubversion, + pub(crate) subversion: MultiVmSubversion, _phantom: std::marker::PhantomData, } @@ -246,7 +247,7 @@ impl Vm { batch_env: L1BatchEnv, system_env: SystemEnv, storage: StoragePtr, - subversion: MultiVMSubversion, + subversion: MultiVmSubversion, ) -> Self { let (state, bootloader_state) = new_vm_state(storage.clone(), &system_env, &batch_env); Self { diff --git a/core/lib/multivm/src/versions/vm_m5/events.rs b/core/lib/multivm/src/versions/vm_m5/events.rs index a444ad37feb5..659b41cc2060 100644 --- a/core/lib/multivm/src/versions/vm_m5/events.rs +++ b/core/lib/multivm/src/versions/vm_m5/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_3_1::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub struct SolidityLikeEvent { @@ -135,7 +134,7 @@ pub fn merge_events(events: Vec<EventMessage>) -> Vec<SolidityLikeEvent> { .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // For events from the event writer, the first topic is the actual address of the event emitter and the rest of the topics are the real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_m5/history_recorder.rs b/core/lib/multivm/src/versions/vm_m5/history_recorder.rs index f744be32d0bf..f7923e42b667 100644 --- a/core/lib/multivm/src/versions/vm_m5/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_m5/history_recorder.rs @@ -9,8 +9,7 @@ use zk_evm_1_3_1::{ vm_state::PrimitiveValue, zkevm_opcode_defs::{self}, }; -use zksync_types::{StorageKey, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, U256}; use crate::vm_m5::storage::{Storage, StoragePtr}; diff --git a/core/lib/multivm/src/versions/vm_m5/oracle_tools.rs b/core/lib/multivm/src/versions/vm_m5/oracle_tools.rs index 32930f31cd71..f430ad346387 100644 --- a/core/lib/multivm/src/versions/vm_m5/oracle_tools.rs +++ b/core/lib/multivm/src/versions/vm_m5/oracle_tools.rs @@ -10,7 +10,7 @@ use crate::vm_m5::{ storage::StorageOracle, }, storage::{Storage, StoragePtr}, - vm_instance::MultiVMSubversion, + vm_instance::MultiVmSubversion, }; #[derive(Debug)] @@ -25,7 +25,7 @@ pub struct OracleTools { } impl OracleTools { - pub fn new(storage_pointer: StoragePtr, refund_state: MultiVMSubversion) -> Self { + pub fn new(storage_pointer: StoragePtr, refund_state: MultiVmSubversion) -> Self { Self { storage:
StorageOracle::new(storage_pointer.clone(), refund_state), memory: SimpleMemory::default(), diff --git a/core/lib/multivm/src/versions/vm_m5/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_m5/oracles/decommitter.rs index bc43c72966ea..7e2264201e11 100644 --- a/core/lib/multivm/src/versions/vm_m5/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_m5/oracles/decommitter.rs @@ -6,13 +6,15 @@ use zk_evm_1_3_1::{ DecommittmentQuery, MemoryIndex, MemoryLocation, MemoryPage, MemoryQuery, Timestamp, }, }; -use zksync_types::U256; -use zksync_utils::{bytecode::bytecode_len_in_words, bytes_to_be_words, u256_to_h256}; +use zksync_types::{u256_to_h256, U256}; use super::OracleWithHistory; -use crate::vm_m5::{ - history_recorder::HistoryRecorder, - storage::{Storage, StoragePtr}, +use crate::{ + utils::bytecode::{bytecode_len_in_words, bytes_to_be_words}, + vm_m5::{ + history_recorder::HistoryRecorder, + storage::{Storage, StoragePtr}, + }, }; #[derive(Debug)] @@ -53,7 +55,7 @@ impl DecommitterOracle { .load_factory_dep(u256_to_h256(hash)) .expect("Trying to decode nonexistent hash"); - let value = bytes_to_be_words(value); + let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_m5/oracles/storage.rs b/core/lib/multivm/src/versions/vm_m5/oracles/storage.rs index 7ccfdf2f30c7..90bd9cfaab69 100644 --- a/core/lib/multivm/src/versions/vm_m5/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_m5/oracles/storage.rs @@ -7,10 +7,9 @@ use zk_evm_1_3_1::{ zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, }; use zksync_types::{ - utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, StorageLogKind, - BOOTLOADER_ADDRESS, U256, + u256_to_h256, utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, + StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use super::OracleWithHistory; use crate::vm_m5::{ @@ -19,7 +18,7 @@ use crate::vm_m5::{ }, storage::{Storage, StoragePtr}, utils::StorageLogQuery, - vm_instance::MultiVMSubversion, + vm_instance::MultiVmSubversion, }; // While the storage does not support different shards, it was decided to write the @@ -46,7 +45,7 @@ pub struct StorageOracle { // to cover this slot.
pub paid_changes: HistoryRecorder<HashMap<StorageKey, u32>>, - pub refund_state: MultiVMSubversion, + pub refund_state: MultiVmSubversion, } impl OracleWithHistory for StorageOracle { @@ -64,7 +63,7 @@ impl OracleWithHistory for StorageOracle { } impl StorageOracle { - pub fn new(storage: StoragePtr, refund_state: MultiVMSubversion) -> Self { + pub fn new(storage: StoragePtr, refund_state: MultiVmSubversion) -> Self { Self { storage: HistoryRecorder::from_inner(StorageWrapper::new(storage)), frames_stack: Default::default(), @@ -75,10 +74,10 @@ impl StorageOracle { fn is_storage_key_free(&self, key: &StorageKey) -> bool { match self.refund_state { - MultiVMSubversion::V1 => { + MultiVmSubversion::V1 => { key.address() == &zksync_system_constants::SYSTEM_CONTEXT_ADDRESS } - MultiVMSubversion::V2 => { + MultiVmSubversion::V2 => { key.address() == &zksync_system_constants::SYSTEM_CONTEXT_ADDRESS || *key == storage_key_for_eth_balance(&BOOTLOADER_ADDRESS) } diff --git a/core/lib/multivm/src/versions/vm_m5/oracles/tracer.rs b/core/lib/multivm/src/versions/vm_m5/oracles/tracer.rs index 45f8ed88f834..ea92307d1224 100644 --- a/core/lib/multivm/src/versions/vm_m5/oracles/tracer.rs +++ b/core/lib/multivm/src/versions/vm_m5/oracles/tracer.rs @@ -16,22 +16,23 @@ use zk_evm_1_3_1::{ }, }; use zksync_types::{ - get_code_key, web3::keccak256, AccountTreeId, Address, StorageKey, - ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS, CONTRACT_DEPLOYER_ADDRESS, H256, - KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, - L2_BASE_TOKEN_ADDRESS, MSG_VALUE_SIMULATOR_ADDRESS, SYSTEM_CONTEXT_ADDRESS, U256, -}; -use zksync_utils::{ - be_bytes_to_safe_address, h256_to_account_address, u256_to_account_address, u256_to_h256, + get_code_key, h256_to_address, u256_to_address, u256_to_h256, web3::keccak256, AccountTreeId, + Address, StorageKey, ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS, + CONTRACT_DEPLOYER_ADDRESS, H256, KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, + L1_MESSENGER_ADDRESS, L2_BASE_TOKEN_ADDRESS, MSG_VALUE_SIMULATOR_ADDRESS, + SYSTEM_CONTEXT_ADDRESS, U256, }; -use crate::vm_m5::{ - errors::VmRevertReasonParsingResult, - memory::SimpleMemory, - storage::{Storage, StoragePtr}, - utils::{aux_heap_page_from_base, heap_page_from_base}, - vm_instance::{get_vm_hook_params, VM_HOOK_POSITION}, - vm_with_bootloader::BOOTLOADER_HEAP_PAGE, +use crate::{ + utils::bytecode::be_bytes_to_safe_address, + vm_m5::{ + errors::VmRevertReasonParsingResult, + memory::SimpleMemory, + storage::{Storage, StoragePtr}, + utils::{aux_heap_page_from_base, heap_page_from_base}, + vm_instance::{get_vm_hook_params, VM_HOOK_POSITION}, + vm_with_bootloader::BOOTLOADER_HEAP_PAGE, + }, }; pub trait ExecutionEndTracer: Tracer { @@ -322,7 +323,7 @@ impl ValidationTracer { // The user is allowed to touch their own slots or slots semantically related to them.
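// A slot counts as the user's own if the touched account is the user itself,
// if the slot key encodes the user's address (the `u256_to_address(&key)` check below),
// or if it was explicitly whitelisted via `auxilary_allowed_slots`.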
let valid_users_slot = address == self.user_address - || u256_to_account_address(&key) == self.user_address + || u256_to_address(&key) == self.user_address || self.auxilary_allowed_slots.contains(&u256_to_h256(key)); if valid_users_slot { return true; @@ -383,7 +384,7 @@ impl ValidationTracer { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -450,7 +451,7 @@ impl ValidationTracer { let value = self.storage.borrow_mut().get_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_account_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git a/core/lib/multivm/src/versions/vm_m5/pubdata_utils.rs b/core/lib/multivm/src/versions/vm_m5/pubdata_utils.rs index 1fd8c2460930..8eca2ef5cd86 100644 --- a/core/lib/multivm/src/versions/vm_m5/pubdata_utils.rs +++ b/core/lib/multivm/src/versions/vm_m5/pubdata_utils.rs @@ -4,11 +4,11 @@ use circuit_sequencer_api_1_3_3::sort_storage_access::sort_storage_access_querie use itertools::Itertools; use zk_evm_1_3_1::aux_structures::{LogQuery, Timestamp}; use zksync_types::{StorageKey, PUBLISH_BYTECODE_OVERHEAD, SYSTEM_CONTEXT_ADDRESS}; -use zksync_utils::bytecode::bytecode_len_in_bytes; use crate::{ glue::GlueInto, interface::VmEvent, + utils::bytecode::bytecode_len_in_bytes, vm_m5::{ oracles::storage::storage_key_of_log, storage::Storage, utils::collect_storage_log_queries_after_timestamp, vm_instance::VmInstance, @@ -35,9 +35,7 @@ impl VmInstance { let published_bytecode_bytes: u32 = VmEvent::extract_published_bytecodes(&events) .iter() - .map(|bytecodehash| { - bytecode_len_in_bytes(*bytecodehash) as u32 + PUBLISH_BYTECODE_OVERHEAD - }) + .map(|bytecode_hash| bytecode_len_in_bytes(bytecode_hash) + PUBLISH_BYTECODE_OVERHEAD) .sum(); storage_writes_pubdata_published diff --git a/core/lib/multivm/src/versions/vm_m5/refunds.rs b/core/lib/multivm/src/versions/vm_m5/refunds.rs index fd4e2788f035..8b0d3e5d84c4 100644 --- a/core/lib/multivm/src/versions/vm_m5/refunds.rs +++ b/core/lib/multivm/src/versions/vm_m5/refunds.rs @@ -1,6 +1,5 @@ use zk_evm_1_3_1::aux_structures::Timestamp; -use zksync_types::U256; -use zksync_utils::ceil_div_u256; +use zksync_types::{ceil_div_u256, U256}; use crate::vm_m5::{ storage::Storage, diff --git a/core/lib/multivm/src/versions/vm_m5/test_utils.rs b/core/lib/multivm/src/versions/vm_m5/test_utils.rs index d7c0dfb9f6d0..e0e377e85971 100644 --- a/core/lib/multivm/src/versions/vm_m5/test_utils.rs +++ b/core/lib/multivm/src/versions/vm_m5/test_utils.rs @@ -14,13 +14,13 @@ use zk_evm_1_3_1::{ }; use zksync_contracts::deployer_contract; use zksync_types::{ + address_to_h256, + bytecode::BytecodeHash, ethabi::{Address, Token}, + h256_to_address, u256_to_h256, web3::keccak256, Execute, Nonce, StorageKey, StorageValue, CONTRACT_DEPLOYER_ADDRESS, H256, U256, }; -use zksync_utils::{ - address_to_h256, bytecode::hash_bytecode, h256_to_account_address, u256_to_h256, -}; use super::utils::StorageLogQuery; use crate::vm_m5::{ @@ -143,7 +143,7 @@ pub fn get_create_execute(code: &[u8], calldata: &[u8]) -> Execute { let params = [ Token::FixedBytes(vec![0u8; 32]), - Token::FixedBytes(hash_bytecode(code).0.to_vec()), + 
Token::FixedBytes(BytecodeHash::for_bytecode(code).value().0.to_vec()), Token::Bytes(calldata.to_vec()), ]; let calldata = contract_function @@ -172,5 +172,5 @@ pub fn get_create_zksync_address(sender_address: Address, sender_nonce: Nonce) - let hash = keccak256(&digest); - h256_to_account_address(&H256(hash)) + h256_to_address(&H256(hash)) } diff --git a/core/lib/multivm/src/versions/vm_m5/transaction_data.rs b/core/lib/multivm/src/versions/vm_m5/transaction_data.rs index b64e3f770185..236c4c3d4122 100644 --- a/core/lib/multivm/src/versions/vm_m5/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_m5/transaction_data.rs @@ -1,17 +1,21 @@ use zk_evm_1_3_1::zkevm_opcode_defs::system_params::{MAX_PUBDATA_PER_BLOCK, MAX_TX_ERGS_LIMIT}; use zksync_types::{ + address_to_h256, + bytecode::BytecodeHash, + ceil_div_u256, ethabi::{encode, Address, Token}, fee::encoding_len, + h256_to_u256, l2::TransactionType, ExecuteTransactionCommon, Transaction, U256, }; -use zksync_utils::{ - address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, ceil_div_u256, h256_to_u256, -}; use super::vm_with_bootloader::MAX_GAS_PER_PUBDATA_BYTE; -use crate::vm_m5::vm_with_bootloader::{ - BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, MAX_TXS_IN_BLOCK, +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_m5::vm_with_bootloader::{ + BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, MAX_TXS_IN_BLOCK, + }, }; const L1_TX_TYPE: u8 = 255; @@ -165,16 +169,13 @@ impl TransactionData { let factory_deps_hashes = self .factory_deps .iter() - .map(|dep| h256_to_u256(hash_bytecode(dep))) + .map(|dep| BytecodeHash::for_bytecode(dep).value_u256()) .collect(); self.abi_encode_with_custom_factory_deps(factory_deps_hashes) } pub fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub fn overhead_gas(&self) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_m5/utils.rs b/core/lib/multivm/src/versions/vm_m5/utils.rs index a38618395b1f..de8c746bfb80 100644 --- a/core/lib/multivm/src/versions/vm_m5/utils.rs +++ b/core/lib/multivm/src/versions/vm_m5/utils.rs @@ -7,8 +7,7 @@ use zk_evm_1_3_1::{ }; use zksync_contracts::BaseSystemContracts; use zksync_system_constants::ZKPORTER_IS_AVAILABLE; -use zksync_types::{Address, StorageLogKind, H160, MAX_L2_TX_GAS_LIMIT, U256}; -use zksync_utils::h256_to_u256; +use zksync_types::{h256_to_u256, Address, StorageLogKind, H160, MAX_L2_TX_GAS_LIMIT, U256}; use crate::{ glue::GlueInto, diff --git a/core/lib/multivm/src/versions/vm_m5/vm.rs b/core/lib/multivm/src/versions/vm_m5/vm.rs index 55afeed17cd1..bd104b868401 100644 --- a/core/lib/multivm/src/versions/vm_m5/vm.rs +++ b/core/lib/multivm/src/versions/vm_m5/vm.rs @@ -1,7 +1,6 @@ use std::rc::Rc; -use zksync_types::{vm::VmVersion, Transaction}; -use zksync_utils::h256_to_u256; +use zksync_types::{h256_to_u256, vm::VmVersion, Transaction}; use zksync_vm_interface::{pubdata::PubdataBuilder, InspectExecutionMode}; use crate::{ @@ -13,7 +12,7 @@ use crate::{ }, vm_m5::{ storage::Storage, - vm_instance::{MultiVMSubversion, VmInstance}, + vm_instance::{MultiVmSubversion, VmInstance}, }, }; @@ -29,7 +28,7 @@ impl Vm { batch_env: L1BatchEnv, system_env: SystemEnv, storage: StoragePtr, - vm_sub_version: MultiVMSubversion, + vm_sub_version: MultiVmSubversion, ) -> Self { let oracle_tools = crate::vm_m5::OracleTools::new(storage.clone(), vm_sub_version); let block_properties = 
zk_evm_1_3_1::block_properties::BlockProperties { @@ -128,8 +127,8 @@ impl VmFactory for Vm { fn new(batch_env: L1BatchEnv, system_env: SystemEnv, storage: StoragePtr) -> Self { let vm_version: VmVersion = system_env.version.into(); let vm_sub_version = match vm_version { - VmVersion::M5WithoutRefunds => MultiVMSubversion::V1, - VmVersion::M5WithRefunds => MultiVMSubversion::V2, + VmVersion::M5WithoutRefunds => MultiVmSubversion::V1, + VmVersion::M5WithRefunds => MultiVmSubversion::V2, _ => panic!("Unsupported protocol version for vm_m5: {:?}", vm_version), }; Self::new_with_subversion(batch_env, system_env, storage, vm_sub_version) diff --git a/core/lib/multivm/src/versions/vm_m5/vm_instance.rs b/core/lib/multivm/src/versions/vm_m5/vm_instance.rs index 4a96c4a750cc..94b86bce7ea7 100644 --- a/core/lib/multivm/src/versions/vm_m5/vm_instance.rs +++ b/core/lib/multivm/src/versions/vm_m5/vm_instance.rs @@ -81,7 +81,7 @@ pub(crate) fn get_vm_hook_params(memory: &SimpleMemory) -> Vec { /// /// This enum allows to execute blocks with the same VM but different support for refunds. #[derive(Debug, Copy, Clone)] -pub enum MultiVMSubversion { +pub enum MultiVmSubversion { /// Initial VM M5 version, refunds are fully disabled. V1, /// Refunds were enabled. ETH balance for bootloader address was marked as a free slot. @@ -99,7 +99,7 @@ pub struct VmInstance { pub snapshots: Vec, /// MultiVM-specific addition. See enum doc-comment for details. - pub(crate) refund_state: MultiVMSubversion, + pub(crate) refund_state: MultiVmSubversion, } /// This structure stores data that accumulates during the VM run. @@ -560,12 +560,12 @@ impl VmInstance { let refund_to_propose; let refund_slot; match self.refund_state { - MultiVMSubversion::V1 => { + MultiVmSubversion::V1 => { refund_to_propose = bootloader_refund; refund_slot = OPERATOR_REFUNDS_OFFSET + self.bootloader_state.tx_to_execute() - 1; } - MultiVMSubversion::V2 => { + MultiVmSubversion::V2 => { let gas_spent_on_pubdata = tracer .gas_spent_on_pubdata(&self.state.local_state) - spent_pubdata_counter_before; diff --git a/core/lib/multivm/src/versions/vm_m5/vm_with_bootloader.rs b/core/lib/multivm/src/versions/vm_m5/vm_with_bootloader.rs index cd2979db5e57..0a7df48df80f 100644 --- a/core/lib/multivm/src/versions/vm_m5/vm_with_bootloader.rs +++ b/core/lib/multivm/src/versions/vm_m5/vm_with_bootloader.rs @@ -14,15 +14,13 @@ use zk_evm_1_3_1::{ use zksync_contracts::BaseSystemContracts; use zksync_system_constants::MAX_L2_TX_GAS_LIMIT; use zksync_types::{ - fee_model::L1PeggedBatchFeeModelInput, Address, Transaction, BOOTLOADER_ADDRESS, - L1_GAS_PER_PUBDATA_BYTE, MAX_NEW_FACTORY_DEPS, U256, -}; -use zksync_utils::{ - address_to_u256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256, misc::ceil_div, + address_to_u256, bytecode::BytecodeHash, fee_model::L1PeggedBatchFeeModelInput, h256_to_u256, + Address, Transaction, BOOTLOADER_ADDRESS, L1_GAS_PER_PUBDATA_BYTE, MAX_NEW_FACTORY_DEPS, U256, }; use crate::{ interface::L1BatchEnv, + utils::bytecode::bytes_to_be_words, vm_m5::{ bootloader_state::BootloaderState, oracles::OracleWithHistory, @@ -31,7 +29,7 @@ use crate::{ utils::{ code_page_candidate_from_base, heap_page_from_base, BLOCK_GAS_LIMIT, INITIAL_BASE_PAGE, }, - vm_instance::{MultiVMSubversion, VmInstance, ZkSyncVmState}, + vm_instance::{MultiVmSubversion, VmInstance, ZkSyncVmState}, OracleTools, }, }; @@ -73,8 +71,11 @@ pub(crate) fn eth_price_per_pubdata_byte(l1_gas_price: u64) -> u64 { pub(crate) fn base_fee_to_gas_per_pubdata(l1_gas_price: u64, 
base_fee: u64) -> u64 { let eth_price_per_pubdata_byte = eth_price_per_pubdata_byte(l1_gas_price); - - ceil_div(eth_price_per_pubdata_byte, base_fee) + if eth_price_per_pubdata_byte == 0 { + 0 + } else { + eth_price_per_pubdata_byte.div_ceil(base_fee) + } } pub(crate) fn derive_base_fee_and_gas_per_pubdata( @@ -91,7 +92,7 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while compensating us for it. let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(eth_price_per_pubdata_byte, MAX_GAS_PER_PUBDATA_BYTE), + eth_price_per_pubdata_byte.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); ( @@ -220,7 +221,7 @@ impl Default for TxExecutionMode { } pub fn init_vm( - refund_state: MultiVMSubversion, + refund_state: MultiVmSubversion, oracle_tools: OracleTools, block_context: BlockContextMode, block_properties: BlockProperties, @@ -239,7 +240,7 @@ pub fn init_vm( } pub fn init_vm_with_gas_limit( - refund_state: MultiVMSubversion, + refund_state: MultiVmSubversion, oracle_tools: OracleTools, block_context: BlockContextMode, block_properties: BlockProperties, @@ -336,7 +337,7 @@ impl BlockContextMode { // This method accepts a custom bootloader code. // It should be used only in tests. pub fn init_vm_inner( - refund_state: MultiVMSubversion, + refund_state: MultiVmSubversion, mut oracle_tools: OracleTools, block_context: BlockContextMode, block_properties: BlockProperties, @@ -347,7 +348,7 @@ pub fn init_vm_inner( oracle_tools.decommittment_processor.populate( vec![( h256_to_u256(base_system_contract.default_aa.hash), - base_system_contract.default_aa.code.clone(), + bytes_to_be_words(&base_system_contract.default_aa.code), )], Timestamp(0), ); @@ -355,7 +356,7 @@ pub fn init_vm_inner( oracle_tools.memory.populate( vec![( BOOTLOADER_CODE_PAGE, - base_system_contract.bootloader.code.clone(), + bytes_to_be_words(&base_system_contract.bootloader.code), )], Timestamp(0), ); @@ -583,11 +584,8 @@ fn formal_calldata_abi() -> PrimitiveValue { } pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { - let bytecode_hash = hash_bytecode(&bytecode); - let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_hash = BytecodeHash::for_bytecode(&bytecode).value_u256(); + let bytecode_words = bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } diff --git a/core/lib/multivm/src/versions/vm_m6/events.rs b/core/lib/multivm/src/versions/vm_m6/events.rs index a444ad37feb5..659b41cc2060 100644 --- a/core/lib/multivm/src/versions/vm_m6/events.rs +++ b/core/lib/multivm/src/versions/vm_m6/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_3_1::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub struct SolidityLikeEvent { @@ -135,7 +134,7 @@ pub fn merge_events(events: Vec) -> Vec { .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // The events writer events where the first topic is the actual address of the event and the rest of the topics are real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); 
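// The renamed helper's assumed semantics (matching the old h256_to_account_address it
// replaces): an address is the low 20 bytes of a 32-byte big-endian word, which is how
// the event writer smuggles the emitting contract's address through the first topic.
fn h256_to_address(word: &[u8; 32]) -> [u8; 20] {
    let mut address = [0u8; 20];
    address.copy_from_slice(&word[12..]); // drop the 12 high-order zero bytes
    address
}

fn main() {
    let mut topic = [0u8; 32];
    topic[12..].copy_from_slice(&[0xaa; 20]);
    assert_eq!(h256_to_address(&topic), [0xaa; 20]);
}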
SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_m6/history_recorder.rs b/core/lib/multivm/src/versions/vm_m6/history_recorder.rs index 63dc9be4933a..5f7a116c62ac 100644 --- a/core/lib/multivm/src/versions/vm_m6/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_m6/history_recorder.rs @@ -9,8 +9,7 @@ use zk_evm_1_3_1::{ vm_state::PrimitiveValue, zkevm_opcode_defs::{self}, }; -use zksync_types::{StorageKey, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, U256}; use crate::vm_m6::storage::{Storage, StoragePtr}; diff --git a/core/lib/multivm/src/versions/vm_m6/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_m6/oracles/decommitter.rs index fe59580e2ce9..5bd33d6d49c1 100644 --- a/core/lib/multivm/src/versions/vm_m6/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_m6/oracles/decommitter.rs @@ -6,13 +6,15 @@ use zk_evm_1_3_1::{ DecommittmentQuery, MemoryIndex, MemoryLocation, MemoryPage, MemoryQuery, Timestamp, }, }; -use zksync_types::U256; -use zksync_utils::{bytecode::bytecode_len_in_words, bytes_to_be_words, u256_to_h256}; +use zksync_types::{u256_to_h256, U256}; use super::OracleWithHistory; -use crate::vm_m6::{ - history_recorder::{HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory}, - storage::{Storage, StoragePtr}, +use crate::{ + utils::bytecode::{bytecode_len_in_words, bytes_to_be_words}, + vm_m6::{ + history_recorder::{HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory}, + storage::{Storage, StoragePtr}, + }, }; /// The main job of the DecommiterOracle is to implement the DecommitmentProcessor trait - that is @@ -59,7 +61,7 @@ impl DecommitterOracle { .load_factory_dep(u256_to_h256(hash)) .expect("Trying to decode unexisting hash"); - let value = bytes_to_be_words(value); + let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_m6/oracles/storage.rs b/core/lib/multivm/src/versions/vm_m6/oracles/storage.rs index 5393b9e48169..7a59754140c9 100644 --- a/core/lib/multivm/src/versions/vm_m6/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_m6/oracles/storage.rs @@ -6,10 +6,9 @@ use zk_evm_1_3_1::{ zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, }; use zksync_types::{ - utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, StorageLogKind, - BOOTLOADER_ADDRESS, U256, + u256_to_h256, utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, + StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use super::OracleWithHistory; use crate::vm_m6::{ diff --git a/core/lib/multivm/src/versions/vm_m6/oracles/tracer/utils.rs b/core/lib/multivm/src/versions/vm_m6/oracles/tracer/utils.rs index 4d963d08952d..9b94ec9de84f 100644 --- a/core/lib/multivm/src/versions/vm_m6/oracles/tracer/utils.rs +++ b/core/lib/multivm/src/versions/vm_m6/oracles/tracer/utils.rs @@ -9,8 +9,7 @@ use zksync_system_constants::{ ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, SHA256_PRECOMPILE_ADDRESS, }; -use zksync_types::U256; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, U256}; use crate::vm_m6::{ history_recorder::HistoryMode, diff --git a/core/lib/multivm/src/versions/vm_m6/oracles/tracer/validation.rs b/core/lib/multivm/src/versions/vm_m6/oracles/tracer/validation.rs index f046ba5befe9..e6b040b93f5d 100644 --- 
a/core/lib/multivm/src/versions/vm_m6/oracles/tracer/validation.rs +++ b/core/lib/multivm/src/versions/vm_m6/oracles/tracer/validation.rs @@ -11,22 +11,25 @@ use zksync_system_constants::{ KECCAK256_PRECOMPILE_ADDRESS, L2_BASE_TOKEN_ADDRESS, MSG_VALUE_SIMULATOR_ADDRESS, SYSTEM_CONTEXT_ADDRESS, }; -use zksync_types::{get_code_key, web3::keccak256, AccountTreeId, Address, StorageKey, H256, U256}; -use zksync_utils::{ - be_bytes_to_safe_address, h256_to_account_address, u256_to_account_address, u256_to_h256, +use zksync_types::{ + get_code_key, h256_to_address, u256_to_address, u256_to_h256, web3::keccak256, AccountTreeId, + Address, StorageKey, H256, U256, }; -use crate::vm_m6::{ - errors::VmRevertReasonParsingResult, - history_recorder::HistoryMode, - memory::SimpleMemory, - oracles::tracer::{ - utils::{ - computational_gas_price, get_calldata_page_via_abi, print_debug_if_needed, VmHook, +use crate::{ + utils::bytecode::be_bytes_to_safe_address, + vm_m6::{ + errors::VmRevertReasonParsingResult, + history_recorder::HistoryMode, + memory::SimpleMemory, + oracles::tracer::{ + utils::{ + computational_gas_price, get_calldata_page_via_abi, print_debug_if_needed, VmHook, + }, + ExecutionEndTracer, PendingRefundTracer, PubdataSpentTracer, StorageInvocationTracer, }, - ExecutionEndTracer, PendingRefundTracer, PubdataSpentTracer, StorageInvocationTracer, + storage::{Storage, StoragePtr}, }, - storage::{Storage, StoragePtr}, }; #[derive(Debug, Clone, Eq, PartialEq, Copy)] @@ -252,7 +255,7 @@ impl ValidationTracer { // The user is allowed to touch its own slots or slots semantically related to him. let valid_users_slot = address == self.user_address - || u256_to_account_address(&key) == self.user_address + || u256_to_address(&key) == self.user_address || self.auxilary_allowed_slots.contains(&u256_to_h256(key)); if valid_users_slot { return true; @@ -319,7 +322,7 @@ impl ValidationTracer { let packed_abi = data.src0_value.value; let call_destination_value = data.src1_value.value; - let called_address = u256_to_account_address(&call_destination_value); + let called_address = u256_to_address(&call_destination_value); let far_call_abi = FarCallABI::from_u256(packed_abi); if called_address == KECCAK256_PRECOMPILE_ADDRESS @@ -386,7 +389,7 @@ impl ValidationTracer { let value = self.storage.borrow_mut().get_value(&storage_key); return Ok(NewTrustedValidationItems { - new_trusted_addresses: vec![h256_to_account_address(&value)], + new_trusted_addresses: vec![h256_to_address(&value)], ..Default::default() }); } diff --git a/core/lib/multivm/src/versions/vm_m6/pubdata_utils.rs b/core/lib/multivm/src/versions/vm_m6/pubdata_utils.rs index 196883e1c936..97bf290a2162 100644 --- a/core/lib/multivm/src/versions/vm_m6/pubdata_utils.rs +++ b/core/lib/multivm/src/versions/vm_m6/pubdata_utils.rs @@ -4,11 +4,11 @@ use circuit_sequencer_api_1_3_3::sort_storage_access::sort_storage_access_querie use itertools::Itertools; use zk_evm_1_3_1::aux_structures::{LogQuery, Timestamp}; use zksync_types::{StorageKey, PUBLISH_BYTECODE_OVERHEAD, SYSTEM_CONTEXT_ADDRESS}; -use zksync_utils::bytecode::bytecode_len_in_bytes; use crate::{ glue::GlueInto, interface::VmEvent, + utils::bytecode::bytecode_len_in_bytes, vm_m6::{ history_recorder::HistoryMode, oracles::storage::storage_key_of_log, storage::Storage, utils::collect_storage_log_queries_after_timestamp, VmInstance, @@ -35,9 +35,7 @@ impl VmInstance { let published_bytecode_bytes: u32 = VmEvent::extract_published_bytecodes(&events) .iter() - .map(|bytecodehash| { - 
bytecode_len_in_bytes(*bytecodehash) as u32 + PUBLISH_BYTECODE_OVERHEAD - }) + .map(|bytecode_hash| bytecode_len_in_bytes(bytecode_hash) + PUBLISH_BYTECODE_OVERHEAD) .sum(); storage_writes_pubdata_published diff --git a/core/lib/multivm/src/versions/vm_m6/refunds.rs b/core/lib/multivm/src/versions/vm_m6/refunds.rs index 406bf380a0b2..f98c84409410 100644 --- a/core/lib/multivm/src/versions/vm_m6/refunds.rs +++ b/core/lib/multivm/src/versions/vm_m6/refunds.rs @@ -1,6 +1,5 @@ use zk_evm_1_3_1::aux_structures::Timestamp; -use zksync_types::U256; -use zksync_utils::ceil_div_u256; +use zksync_types::{ceil_div_u256, U256}; use crate::vm_m6::{ history_recorder::HistoryMode, diff --git a/core/lib/multivm/src/versions/vm_m6/test_utils.rs b/core/lib/multivm/src/versions/vm_m6/test_utils.rs index 4bd39bc56dd4..0debd8dea568 100644 --- a/core/lib/multivm/src/versions/vm_m6/test_utils.rs +++ b/core/lib/multivm/src/versions/vm_m6/test_utils.rs @@ -12,13 +12,13 @@ use itertools::Itertools; use zk_evm_1_3_1::{aux_structures::Timestamp, vm_state::VmLocalState}; use zksync_contracts::deployer_contract; use zksync_types::{ + address_to_h256, + bytecode::BytecodeHash, ethabi::{Address, Token}, + h256_to_address, u256_to_h256, web3::keccak256, Execute, Nonce, StorageKey, StorageValue, CONTRACT_DEPLOYER_ADDRESS, H256, U256, }; -use zksync_utils::{ - address_to_h256, bytecode::hash_bytecode, h256_to_account_address, u256_to_h256, -}; use super::utils::StorageLogQuery; use crate::vm_m6::{ @@ -143,7 +143,7 @@ pub fn get_create_execute(code: &[u8], calldata: &[u8]) -> Execute { let params = [ Token::FixedBytes(vec![0u8; 32]), - Token::FixedBytes(hash_bytecode(code).0.to_vec()), + Token::FixedBytes(BytecodeHash::for_bytecode(code).value().0.to_vec()), Token::Bytes(calldata.to_vec()), ]; let calldata = contract_function @@ -172,5 +172,5 @@ pub fn get_create_zksync_address(sender_address: Address, sender_nonce: Nonce) - let hash = keccak256(&digest); - h256_to_account_address(&H256(hash)) + h256_to_address(&H256(hash)) } diff --git a/core/lib/multivm/src/versions/vm_m6/transaction_data.rs b/core/lib/multivm/src/versions/vm_m6/transaction_data.rs index a8f80ea3255e..d0835b233009 100644 --- a/core/lib/multivm/src/versions/vm_m6/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_m6/transaction_data.rs @@ -1,18 +1,22 @@ use zk_evm_1_3_1::zkevm_opcode_defs::system_params::MAX_TX_ERGS_LIMIT; use zksync_types::{ + address_to_h256, + bytecode::BytecodeHash, + ceil_div_u256, ethabi::{encode, Address, Token}, fee::encoding_len, + h256_to_u256, l1::is_l1_tx_type, l2::TransactionType, ExecuteTransactionCommon, Transaction, MAX_L2_TX_GAS_LIMIT, U256, }; -use zksync_utils::{ - address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, ceil_div_u256, h256_to_u256, -}; use super::vm_with_bootloader::{MAX_GAS_PER_PUBDATA_BYTE, MAX_TXS_IN_BLOCK}; -use crate::vm_m6::vm_with_bootloader::{ - BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_m6::vm_with_bootloader::{ + BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, + }, }; pub(crate) const L1_TX_TYPE: u8 = 255; @@ -192,16 +196,13 @@ impl TransactionData { let factory_deps_hashes = self .factory_deps .iter() - .map(|dep| h256_to_u256(hash_bytecode(dep))) + .map(|dep| BytecodeHash::for_bytecode(dep).value_u256()) .collect(); self.abi_encode_with_custom_factory_deps(factory_deps_hashes) } pub fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 
32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn effective_gas_price_per_pubdata(&self, block_gas_price_per_pubdata: u32) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_m6/utils.rs b/core/lib/multivm/src/versions/vm_m6/utils.rs index 912a30a4eafc..a9304f5cd525 100644 --- a/core/lib/multivm/src/versions/vm_m6/utils.rs +++ b/core/lib/multivm/src/versions/vm_m6/utils.rs @@ -7,8 +7,7 @@ use zk_evm_1_3_1::{ }; use zksync_contracts::BaseSystemContracts; use zksync_system_constants::ZKPORTER_IS_AVAILABLE; -use zksync_types::{Address, StorageLogKind, H160, MAX_L2_TX_GAS_LIMIT, U256}; -use zksync_utils::h256_to_u256; +use zksync_types::{h256_to_u256, Address, StorageLogKind, H160, MAX_L2_TX_GAS_LIMIT, U256}; use crate::{ glue::GlueInto, diff --git a/core/lib/multivm/src/versions/vm_m6/vm.rs b/core/lib/multivm/src/versions/vm_m6/vm.rs index 4c67a2184180..ff089ba902dd 100644 --- a/core/lib/multivm/src/versions/vm_m6/vm.rs +++ b/core/lib/multivm/src/versions/vm_m6/vm.rs @@ -1,7 +1,6 @@ use std::{collections::HashSet, rc::Rc}; -use zksync_types::{vm::VmVersion, Transaction}; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256}; +use zksync_types::{bytecode::BytecodeHash, h256_to_u256, vm::VmVersion, Transaction}; use zksync_vm_interface::{pubdata::PubdataBuilder, InspectExecutionMode}; use crate::{ @@ -14,7 +13,7 @@ use crate::{ }, tracers::old::TracerDispatcher, utils::bytecode, - vm_m6::{storage::Storage, vm_instance::MultiVMSubversion, VmInstance}, + vm_m6::{storage::Storage, vm_instance::MultiVmSubversion, VmInstance}, }; #[derive(Debug)] @@ -28,7 +27,7 @@ impl Vm { batch_env: L1BatchEnv, system_env: SystemEnv, storage: StoragePtr, - vm_sub_version: MultiVMSubversion, + vm_sub_version: MultiVmSubversion, ) -> Self { let oracle_tools = crate::vm_m6::OracleTools::new(storage.clone(), H::VmM6Mode::default()); let block_properties = zk_evm_1_3_1::block_properties::BlockProperties { @@ -143,7 +142,7 @@ impl VmInterface for Vm { let mut deps_hashes = HashSet::with_capacity(deps.len()); let mut bytecode_hashes = vec![]; let filtered_deps = deps.iter().filter_map(|bytecode| { - let bytecode_hash = hash_bytecode(bytecode); + let bytecode_hash = BytecodeHash::for_bytecode(bytecode).value(); let is_known = !deps_hashes.insert(bytecode_hash) || self.vm.is_bytecode_exists(&bytecode_hash); @@ -220,8 +219,8 @@ impl VmFactory for Vm { fn new(batch_env: L1BatchEnv, system_env: SystemEnv, storage: StoragePtr) -> Self { let vm_version: VmVersion = system_env.version.into(); let vm_sub_version = match vm_version { - VmVersion::M6Initial => MultiVMSubversion::V1, - VmVersion::M6BugWithCompressionFixed => MultiVMSubversion::V2, + VmVersion::M6Initial => MultiVmSubversion::V1, + VmVersion::M6BugWithCompressionFixed => MultiVmSubversion::V2, _ => panic!("Unsupported protocol version for vm_m6: {:?}", vm_version), }; Self::new_with_subversion(batch_env, system_env, storage, vm_sub_version) diff --git a/core/lib/multivm/src/versions/vm_m6/vm_instance.rs b/core/lib/multivm/src/versions/vm_m6/vm_instance.rs index d6c418da4c20..29ef17aa4bc7 100644 --- a/core/lib/multivm/src/versions/vm_m6/vm_instance.rs +++ b/core/lib/multivm/src/versions/vm_m6/vm_instance.rs @@ -82,7 +82,7 @@ pub(crate) fn get_vm_hook_params(memory: &SimpleMemory) -> Ve /// /// This enum allows to execute blocks with the same VM but different support for refunds. #[derive(Debug, Copy, Clone)] -pub enum MultiVMSubversion { +pub enum MultiVmSubversion { /// Initial VM M6 version. 
V1, /// Bug with code compression was fixed. @@ -98,7 +98,7 @@ pub struct VmInstance { pub(crate) bootloader_state: BootloaderState, pub snapshots: Vec, - pub vm_subversion: MultiVMSubversion, + pub vm_subversion: MultiVmSubversion, } /// This structure stores data that accumulates during the VM run. diff --git a/core/lib/multivm/src/versions/vm_m6/vm_with_bootloader.rs b/core/lib/multivm/src/versions/vm_m6/vm_with_bootloader.rs index ae44e721b0d7..ff83abc45fcf 100644 --- a/core/lib/multivm/src/versions/vm_m6/vm_with_bootloader.rs +++ b/core/lib/multivm/src/versions/vm_m6/vm_with_bootloader.rs @@ -14,16 +14,13 @@ use zk_evm_1_3_1::{ use zksync_contracts::BaseSystemContracts; use zksync_system_constants::MAX_L2_TX_GAS_LIMIT; use zksync_types::{ - fee_model::L1PeggedBatchFeeModelInput, Address, Transaction, BOOTLOADER_ADDRESS, - L1_GAS_PER_PUBDATA_BYTE, MAX_NEW_FACTORY_DEPS, U256, -}; -use zksync_utils::{ - address_to_u256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256, misc::ceil_div, + address_to_u256, bytecode::BytecodeHash, fee_model::L1PeggedBatchFeeModelInput, h256_to_u256, + Address, Transaction, BOOTLOADER_ADDRESS, L1_GAS_PER_PUBDATA_BYTE, MAX_NEW_FACTORY_DEPS, U256, }; use crate::{ interface::{CompressedBytecodeInfo, L1BatchEnv}, - utils::bytecode, + utils::{bytecode, bytecode::bytes_to_be_words}, vm_m6::{ bootloader_state::BootloaderState, history_recorder::HistoryMode, @@ -32,7 +29,7 @@ use crate::{ utils::{ code_page_candidate_from_base, heap_page_from_base, BLOCK_GAS_LIMIT, INITIAL_BASE_PAGE, }, - vm_instance::{MultiVMSubversion, ZkSyncVmState}, + vm_instance::{MultiVmSubversion, ZkSyncVmState}, OracleTools, VmInstance, }, }; @@ -84,8 +81,11 @@ pub(crate) fn eth_price_per_pubdata_byte(l1_gas_price: u64) -> u64 { pub(crate) fn base_fee_to_gas_per_pubdata(l1_gas_price: u64, base_fee: u64) -> u64 { let eth_price_per_pubdata_byte = eth_price_per_pubdata_byte(l1_gas_price); - - ceil_div(eth_price_per_pubdata_byte, base_fee) + if eth_price_per_pubdata_byte == 0 { + 0 + } else { + eth_price_per_pubdata_byte.div_ceil(base_fee) + } } pub(crate) fn derive_base_fee_and_gas_per_pubdata( @@ -102,7 +102,7 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while compensating us for it. let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(eth_price_per_pubdata_byte, MAX_GAS_PER_PUBDATA_BYTE), + eth_price_per_pubdata_byte.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); ( @@ -269,7 +269,7 @@ impl Default for TxExecutionMode { } pub fn init_vm( - vm_subversion: MultiVMSubversion, + vm_subversion: MultiVmSubversion, oracle_tools: OracleTools, block_context: BlockContextMode, block_properties: BlockProperties, @@ -288,7 +288,7 @@ pub fn init_vm( } pub fn init_vm_with_gas_limit( - vm_subversion: MultiVMSubversion, + vm_subversion: MultiVmSubversion, oracle_tools: OracleTools, block_context: BlockContextMode, block_properties: BlockProperties, @@ -385,7 +385,7 @@ impl BlockContextMode { // This method accepts a custom bootloader code. // It should be used only in tests. 
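// Why the replacement for ceil_div grows a zero check: the old zksync_utils helper
// (roughly `if a == 0 { 0 } else { (a - 1) / b + 1 }`, as far as this editor recalls)
// never divided a zero numerator, so ceil_div(0, 0) == 0, while std's u64::div_ceil
// divides unconditionally and would panic on base_fee == 0. The guard preserves the
// legacy behavior. Signature simplified here to take the per-byte pubdata price
// directly instead of deriving it from the L1 gas price:
fn base_fee_to_gas_per_pubdata(eth_price_per_pubdata_byte: u64, base_fee: u64) -> u64 {
    if eth_price_per_pubdata_byte == 0 {
        0
    } else {
        eth_price_per_pubdata_byte.div_ceil(base_fee)
    }
}

fn main() {
    assert_eq!(base_fee_to_gas_per_pubdata(0, 0), 0); // old ceil_div(0, 0) behavior
    assert_eq!(base_fee_to_gas_per_pubdata(17, 5), 4); // 17 / 5 rounded up
    assert_eq!(base_fee_to_gas_per_pubdata(15, 5), 3); // exact division unchanged
}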
pub fn init_vm_inner( - vm_subversion: MultiVMSubversion, + vm_subversion: MultiVmSubversion, mut oracle_tools: OracleTools, block_context: BlockContextMode, block_properties: BlockProperties, @@ -396,7 +396,7 @@ pub fn init_vm_inner( oracle_tools.decommittment_processor.populate( vec![( h256_to_u256(base_system_contract.default_aa.hash), - base_system_contract.default_aa.code.clone(), + bytes_to_be_words(&base_system_contract.default_aa.code), )], Timestamp(0), ); @@ -404,7 +404,7 @@ pub fn init_vm_inner( oracle_tools.memory.populate( vec![( BOOTLOADER_CODE_PAGE, - base_system_contract.bootloader.code.clone(), + bytes_to_be_words(&base_system_contract.bootloader.code), )], Timestamp(0), ); @@ -433,7 +433,7 @@ fn bootloader_initial_memory(block_properties: &BlockContextMode) -> Vec<(usize, } pub fn get_bootloader_memory( - vm_subversion: MultiVMSubversion, + vm_subversion: MultiVmSubversion, txs: Vec, predefined_refunds: Vec, predefined_compressed_bytecodes: Vec>, @@ -441,14 +441,14 @@ pub fn get_bootloader_memory( block_context: BlockContextMode, ) -> Vec<(usize, U256)> { match vm_subversion { - MultiVMSubversion::V1 => get_bootloader_memory_v1( + MultiVmSubversion::V1 => get_bootloader_memory_v1( txs, predefined_refunds, predefined_compressed_bytecodes, execution_mode, block_context, ), - MultiVMSubversion::V2 => get_bootloader_memory_v2( + MultiVmSubversion::V2 => get_bootloader_memory_v2( txs, predefined_refunds, predefined_compressed_bytecodes, @@ -575,14 +575,14 @@ pub fn push_raw_transaction_to_bootloader_memory( explicit_compressed_bytecodes: Option>, ) -> Vec { match vm.vm_subversion { - MultiVMSubversion::V1 => push_raw_transaction_to_bootloader_memory_v1( + MultiVmSubversion::V1 => push_raw_transaction_to_bootloader_memory_v1( vm, tx, execution_mode, predefined_overhead, explicit_compressed_bytecodes, ), - MultiVMSubversion::V2 => push_raw_transaction_to_bootloader_memory_v2( + MultiVmSubversion::V2 => push_raw_transaction_to_bootloader_memory_v2( vm, tx, execution_mode, @@ -619,7 +619,7 @@ fn push_raw_transaction_to_bootloader_memory_v1( tx.factory_deps .iter() .filter_map(|bytecode| { - if vm.is_bytecode_exists(&hash_bytecode(bytecode)) { + if vm.is_bytecode_exists(&BytecodeHash::for_bytecode(bytecode).value()) { return None; } bytecode::compress(bytecode.clone()).ok() @@ -691,7 +691,7 @@ fn push_raw_transaction_to_bootloader_memory_v2( tx.factory_deps .iter() .filter_map(|bytecode| { - if vm.is_bytecode_exists(&hash_bytecode(bytecode)) { + if vm.is_bytecode_exists(&BytecodeHash::for_bytecode(bytecode).value()) { return None; } bytecode::compress(bytecode.clone()).ok() @@ -821,7 +821,7 @@ pub(crate) fn get_bootloader_memory_for_encoded_tx( .flat_map(bytecode::encode_call) .collect(); - let memory_addition = bytes_to_be_words(memory_addition); + let memory_addition = bytes_to_be_words(&memory_addition); memory.extend( (compressed_bytecodes_offset..compressed_bytecodes_offset + memory_addition.len()) @@ -904,11 +904,8 @@ fn formal_calldata_abi() -> PrimitiveValue { } pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { - let bytecode_hash = hash_bytecode(&bytecode); - let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_hash = BytecodeHash::for_bytecode(&bytecode).value_u256(); + let bytecode_words = bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/bootloader_state/l2_block.rs 
b/core/lib/multivm/src/versions/vm_refunds_enhancement/bootloader_state/l2_block.rs index e8cabebc9f7c..3bc669105b05 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/bootloader_state/l2_block.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/bootloader_state/l2_block.rs @@ -1,7 +1,6 @@ use std::cmp::Ordering; -use zksync_types::{L2BlockNumber, H256}; -use zksync_utils::concat_and_hash; +use zksync_types::{web3::keccak256_concat, L2BlockNumber, H256}; use crate::{ interface::{L2Block, L2BlockEnv}, @@ -53,7 +52,7 @@ impl BootloaderL2Block { } fn update_rolling_hash(&mut self, tx_hash: H256) { - self.txs_rolling_hash = concat_and_hash(self.txs_rolling_hash, tx_hash) + self.txs_rolling_hash = keccak256_concat(self.txs_rolling_hash, tx_hash) } pub(crate) fn interim_version(&self) -> BootloaderL2Block { diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/bootloader_state/utils.rs index 14c895d7a0b4..a05dc1ae2430 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/bootloader_state/utils.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/bootloader_state/utils.rs @@ -1,10 +1,9 @@ -use zksync_types::U256; -use zksync_utils::{bytes_to_be_words, h256_to_u256}; +use zksync_types::{h256_to_u256, U256}; use super::tx::BootloaderTx; use crate::{ interface::{BootloaderMemory, CompressedBytecodeInfo, TxExecutionMode}, - utils::bytecode, + utils::{bytecode, bytecode::bytes_to_be_words}, vm_refunds_enhancement::{ bootloader_state::l2_block::BootloaderL2Block, constants::{ @@ -23,8 +22,7 @@ pub(super) fn get_memory_for_compressed_bytecodes( .iter() .flat_map(bytecode::encode_call) .collect(); - - bytes_to_be_words(memory_addition) + bytes_to_be_words(&memory_addition) } #[allow(clippy::too_many_arguments)] diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/implementation/bytecode.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/implementation/bytecode.rs index f7ab9ae8b517..38cfaa124b16 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/implementation/bytecode.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/implementation/bytecode.rs @@ -1,13 +1,12 @@ use itertools::Itertools; -use zksync_types::U256; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words}; +use zksync_types::{bytecode::BytecodeHash, U256}; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, CompressedBytecodeInfo, }, - utils::bytecode, + utils::{bytecode, bytecode::bytes_to_be_words}, vm_refunds_enhancement::Vm, HistoryMode, }; @@ -25,18 +24,15 @@ impl Vm { .storage .get_ptr() .borrow_mut() - .is_bytecode_known(&hash_bytecode(&info.original)) + .is_bytecode_known(&BytecodeHash::for_bytecode(&info.original).value()) }) } } /// Converts bytecode to tokens and hashes it. 
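// The new zksync_types::bytecode::BytecodeHash wrapper replaces the free function
// hash_bytecode, collapsing the old two-step "hash, then U256::from_big_endian" into
// for_bytecode(..).value_u256(). A toy stand-in with the same call-site shape as this
// diff uses; the real versioned, sha256-based hashing is deliberately not reproduced:
#[derive(Clone, Copy)]
struct BytecodeHash([u8; 32]);

impl BytecodeHash {
    fn for_bytecode(bytecode: &[u8]) -> Self {
        // Stand-in only: the real hash is assumed to pack a version marker and the
        // bytecode length in 32-byte words into the top bytes, then a truncated digest.
        let mut hash = [0u8; 32];
        hash[2..4].copy_from_slice(&((bytecode.len() / 32) as u16).to_be_bytes());
        Self(hash)
    }
    fn value(self) -> [u8; 32] {
        self.0 // H256 in the real API
    }
    fn value_u256(self) -> [u8; 32] {
        self.0 // U256::from_big_endian(&self.0) in the real API
    }
}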
pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { - let bytecode_hash = hash_bytecode(&bytecode); - let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_hash = BytecodeHash::for_bytecode(&bytecode).value_u256(); + let bytecode_words = bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } @@ -49,7 +45,11 @@ pub(crate) fn compress_bytecodes( .enumerate() .sorted_by_key(|(_idx, dep)| *dep) .dedup_by(|x, y| x.1 == y.1) - .filter(|(_idx, dep)| !storage.borrow_mut().is_bytecode_known(&hash_bytecode(dep))) + .filter(|(_idx, dep)| { + !storage + .borrow_mut() + .is_bytecode_known(&BytecodeHash::for_bytecode(dep).value()) + }) .sorted_by_key(|(idx, _dep)| *idx) .filter_map(|(_idx, dep)| bytecode::compress(dep.clone()).ok()) .collect() diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/events.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/events.rs index 52a4ed8a2876..05ec6557e905 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/events.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_3_3::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub(crate) struct SolidityLikeEvent { @@ -135,7 +134,7 @@ pub(crate) fn merge_events(events: Vec) -> Vec .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // The events writer events where the first topic is the actual address of the event and the rest of the topics are real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/history_recorder.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/history_recorder.rs index 8af2c42db957..d25d2a57259d 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/history_recorder.rs @@ -5,8 +5,7 @@ use zk_evm_1_3_3::{ vm_state::PrimitiveValue, zkevm_opcode_defs::{self}, }; -use zksync_types::{StorageKey, H256, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, H256, U256}; use crate::interface::storage::{StoragePtr, WriteStorage}; diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/oracles/decommitter.rs index ccc8d9052b7e..b0ce7edbc95d 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/old_vm/oracles/decommitter.rs @@ -6,12 +6,12 @@ use zk_evm_1_3_3::{ DecommittmentQuery, MemoryIndex, MemoryLocation, MemoryPage, MemoryQuery, Timestamp, }, }; -use zksync_types::U256; -use zksync_utils::{bytecode::bytecode_len_in_words, bytes_to_be_words, u256_to_h256}; +use zksync_types::{u256_to_h256, U256}; use super::OracleWithHistory; 
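// The relocated bytes_to_be_words helper (now in crate::utils::bytecode) packs a
// bytecode into 32-byte big-endian words. Assumed behavior, inferred from the deleted
// call sites: the `len % 32 == 0` assert that into_tokens used to perform presumably
// lives with the helper now, and taking &[u8] instead of Vec<u8> lets callers such as
// the decommitter keep ownership of the bytes they just loaded. Sketched on [u8; 32]
// instead of U256 to stay dependency-free:
fn bytes_to_be_words(bytes: &[u8]) -> Vec<[u8; 32]> {
    assert_eq!(bytes.len() % 32, 0, "bytecode length must be a multiple of 32");
    bytes
        .chunks_exact(32)
        .map(|chunk| {
            let mut word = [0u8; 32];
            word.copy_from_slice(chunk); // the real helper reads this as a BE U256
            word
        })
        .collect()
}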
use crate::{ interface::storage::{ReadStorage, StoragePtr}, + utils::bytecode::{bytecode_len_in_words, bytes_to_be_words}, vm_refunds_enhancement::old_vm::history_recorder::{ HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory, }, @@ -61,7 +61,7 @@ impl DecommitterOracle { .load_factory_dep(u256_to_h256(hash)) .expect("Trying to decode unexisting hash"); - let value = bytes_to_be_words(value); + let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/oracles/storage.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/oracles/storage.rs index a9c5b71e782e..73a5d610bc26 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/oracles/storage.rs @@ -6,10 +6,9 @@ use zk_evm_1_3_3::{ zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, }; use zksync_types::{ - utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, StorageLogKind, - BOOTLOADER_ADDRESS, U256, + u256_to_h256, utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, + StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use crate::{ glue::GlueInto, diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/tracers/refunds.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/tracers/refunds.rs index 0dbf5a3cbf40..777f0d51460f 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/tracers/refunds.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/tracers/refunds.rs @@ -5,8 +5,7 @@ use zk_evm_1_3_3::{ vm_state::VmLocalState, }; use zksync_system_constants::{PUBLISH_BYTECODE_OVERHEAD, SYSTEM_CONTEXT_ADDRESS}; -use zksync_types::{l2_to_l1_log::L2ToL1Log, L1BatchNumber, U256}; -use zksync_utils::{bytecode::bytecode_len_in_bytes, ceil_div_u256, u256_to_h256}; +use zksync_types::{ceil_div_u256, l2_to_l1_log::L2ToL1Log, u256_to_h256, L1BatchNumber, U256}; use crate::{ interface::{ @@ -15,6 +14,7 @@ use crate::{ L1BatchEnv, Refunds, VmEvent, }, tracers::dynamic::vm_1_3_3::DynTracer, + utils::bytecode::bytecode_len_in_bytes, vm_refunds_enhancement::{ bootloader_state::BootloaderState, constants::{BOOTLOADER_HEAP_PAGE, OPERATOR_REFUNDS_OFFSET, TX_GAS_LIMIT_OFFSET}, @@ -335,7 +335,7 @@ pub(crate) fn pubdata_published( let published_bytecode_bytes: u32 = VmEvent::extract_published_bytecodes(&events) .iter() - .map(|bytecodehash| bytecode_len_in_bytes(*bytecodehash) as u32 + PUBLISH_BYTECODE_OVERHEAD) + .map(|bytecode_hash| bytecode_len_in_bytes(bytecode_hash) + PUBLISH_BYTECODE_OVERHEAD) .sum(); storage_writes_pubdata_published diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/tracers/utils.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/tracers/utils.rs index 1d3e9a272764..d744261e4f48 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/tracers/utils.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/tracers/utils.rs @@ -9,8 +9,7 @@ use zksync_system_constants::{ ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, SHA256_PRECOMPILE_ADDRESS, }; -use zksync_types::U256; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, U256}; use crate::vm_refunds_enhancement::{ constants::{ diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/types/internals/transaction_data.rs 
b/core/lib/multivm/src/versions/vm_refunds_enhancement/types/internals/transaction_data.rs index 22ab09296c91..64802d74c878 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/types/internals/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/types/internals/transaction_data.rs @@ -1,19 +1,24 @@ use std::convert::TryInto; use zksync_types::{ + address_to_h256, + bytecode::BytecodeHash, ethabi::{encode, Address, Token}, fee::{encoding_len, Fee}, + h256_to_u256, l1::is_l1_tx_type, l2::{L2Tx, TransactionType}, transaction_request::{PaymasterParams, TransactionRequest}, web3::Bytes, Execute, ExecuteTransactionCommon, L2ChainId, L2TxCommonData, Nonce, Transaction, H256, U256, }; -use zksync_utils::{address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256}; -use crate::vm_refunds_enhancement::{ - constants::MAX_GAS_PER_PUBDATA_BYTE, - utils::overhead::{get_amortized_overhead, OverheadCoefficients}, +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_refunds_enhancement::{ + constants::MAX_GAS_PER_PUBDATA_BYTE, + utils::overhead::{get_amortized_overhead, OverheadCoefficients}, + }, }; /// This structure represents the data that is used by @@ -190,16 +195,13 @@ impl TransactionData { let factory_deps_hashes = self .factory_deps .iter() - .map(|dep| h256_to_u256(hash_bytecode(dep))) + .map(|dep| BytecodeHash::for_bytecode(dep).value_u256()) .collect(); self.abi_encode_with_custom_factory_deps(factory_deps_hashes) } pub(crate) fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn effective_gas_price_per_pubdata(&self, block_gas_price_per_pubdata: u32) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/types/internals/vm_state.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/types/internals/vm_state.rs index 22f92891e40a..6776bc37c9d5 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/types/internals/vm_state.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/types/internals/vm_state.rs @@ -11,14 +11,14 @@ use zk_evm_1_3_3::{ }, }; use zksync_system_constants::BOOTLOADER_ADDRESS; -use zksync_types::{block::L2BlockHasher, Address, L2BlockNumber}; -use zksync_utils::h256_to_u256; +use zksync_types::{block::L2BlockHasher, h256_to_u256, Address, L2BlockNumber}; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, L1BatchEnv, L2Block, SystemEnv, }, + utils::bytecode::bytes_to_be_words, vm_refunds_enhancement::{ bootloader_state::BootloaderState, constants::BOOTLOADER_HEAP_PAGE, @@ -89,11 +89,7 @@ pub(crate) fn new_vm_state( decommittment_processor.populate( vec![( h256_to_u256(system_env.base_system_smart_contracts.default_aa.hash), - system_env - .base_system_smart_contracts - .default_aa - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.default_aa.code), )], Timestamp(0), ); @@ -101,11 +97,7 @@ pub(crate) fn new_vm_state( memory.populate( vec![( BOOTLOADER_CODE_PAGE, - system_env - .base_system_smart_contracts - .bootloader - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.bootloader.code), )], Timestamp(0), ); diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/types/l1_batch.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/types/l1_batch.rs index b449165be348..58419acbe60a 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/types/l1_batch.rs +++ 
b/core/lib/multivm/src/versions/vm_refunds_enhancement/types/l1_batch.rs @@ -1,5 +1,4 @@ -use zksync_types::U256; -use zksync_utils::{address_to_u256, h256_to_u256}; +use zksync_types::{address_to_u256, h256_to_u256, U256}; use crate::{interface::L1BatchEnv, vm_refunds_enhancement::utils::fee::get_batch_base_fee}; diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/fee.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/fee.rs index f7203b57b4c4..8bd06c7faa6b 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/fee.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/fee.rs @@ -1,6 +1,5 @@ //! Utility functions for vm use zksync_types::fee_model::L1PeggedBatchFeeModelInput; -use zksync_utils::ceil_div; use crate::{ interface::L1BatchEnv, @@ -12,8 +11,11 @@ use crate::{ /// Calculates the amount of gas required to publish one byte of pubdata pub(crate) fn base_fee_to_gas_per_pubdata(l1_gas_price: u64, base_fee: u64) -> u64 { let eth_price_per_pubdata_byte = eth_price_per_pubdata_byte(l1_gas_price); - - ceil_div(eth_price_per_pubdata_byte, base_fee) + if eth_price_per_pubdata_byte == 0 { + 0 + } else { + eth_price_per_pubdata_byte.div_ceil(base_fee) + } } /// Calculates the base fee and gas per pubdata for the given L1 gas price. @@ -30,7 +32,7 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while compensating us for it. let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(eth_price_per_pubdata_byte, MAX_GAS_PER_PUBDATA_BYTE), + eth_price_per_pubdata_byte.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); ( diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/l2_blocks.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/l2_blocks.rs index ff5536ae0b97..1095abd82db1 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/l2_blocks.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/l2_blocks.rs @@ -4,9 +4,9 @@ use zksync_system_constants::{ SYSTEM_CONTEXT_STORED_L2_BLOCK_HASHES, }; use zksync_types::{ - block::unpack_block_info, web3::keccak256, AccountTreeId, L2BlockNumber, StorageKey, H256, U256, + block::unpack_block_info, h256_to_u256, u256_to_h256, web3::keccak256, AccountTreeId, + L2BlockNumber, StorageKey, H256, U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use crate::interface::{ storage::{ReadStorage, StoragePtr}, diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/overhead.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/overhead.rs index af25c4b4d7c4..efcee968db40 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/overhead.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/utils/overhead.rs @@ -1,7 +1,6 @@ use zk_evm_1_3_3::zkevm_opcode_defs::system_params::MAX_TX_ERGS_LIMIT; use zksync_system_constants::MAX_L2_TX_GAS_LIMIT; -use zksync_types::{l1::is_l1_tx_type, U256}; -use zksync_utils::ceil_div_u256; +use zksync_types::{ceil_div_u256, l1::is_l1_tx_type, U256}; use crate::vm_refunds_enhancement::constants::{ BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, MAX_TXS_IN_BLOCK, diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/bootloader_state/l2_block.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/bootloader_state/l2_block.rs index 197ecbff5896..d100b17c7c08 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/bootloader_state/l2_block.rs +++ 
b/core/lib/multivm/src/versions/vm_virtual_blocks/bootloader_state/l2_block.rs @@ -1,7 +1,6 @@ use std::cmp::Ordering; -use zksync_types::{L2BlockNumber, H256}; -use zksync_utils::concat_and_hash; +use zksync_types::{web3::keccak256_concat, L2BlockNumber, H256}; use crate::{ interface::{L2Block, L2BlockEnv}, @@ -53,7 +52,7 @@ impl BootloaderL2Block { } fn update_rolling_hash(&mut self, tx_hash: H256) { - self.txs_rolling_hash = concat_and_hash(self.txs_rolling_hash, tx_hash) + self.txs_rolling_hash = keccak256_concat(self.txs_rolling_hash, tx_hash) } pub(crate) fn interim_version(&self) -> BootloaderL2Block { diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/bootloader_state/utils.rs index 3e2474835fa4..4c33aeb6e147 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/bootloader_state/utils.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/bootloader_state/utils.rs @@ -1,5 +1,4 @@ -use zksync_types::U256; -use zksync_utils::{bytes_to_be_words, h256_to_u256}; +use zksync_types::{h256_to_u256, U256}; use super::tx::BootloaderTx; use crate::{ @@ -23,8 +22,7 @@ pub(super) fn get_memory_for_compressed_bytecodes( .iter() .flat_map(bytecode::encode_call) .collect(); - - bytes_to_be_words(memory_addition) + bytecode::bytes_to_be_words(&memory_addition) } #[allow(clippy::too_many_arguments)] diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/implementation/bytecode.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/implementation/bytecode.rs index d5f2b50b83fc..828b1c961708 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/implementation/bytecode.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/implementation/bytecode.rs @@ -1,13 +1,12 @@ use itertools::Itertools; -use zksync_types::U256; -use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words}; +use zksync_types::{bytecode::BytecodeHash, U256}; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, CompressedBytecodeInfo, }, - utils::bytecode, + utils::{bytecode, bytecode::bytes_to_be_words}, vm_virtual_blocks::Vm, HistoryMode, }; @@ -25,18 +24,15 @@ impl Vm { .storage .get_ptr() .borrow_mut() - .is_bytecode_known(&hash_bytecode(&info.original)) + .is_bytecode_known(&BytecodeHash::for_bytecode(&info.original).value()) }) } } /// Converts bytecode to tokens and hashes it. 
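// keccak256_concat (the zksync_types::web3 rename of the old concat_and_hash helper,
// swapped in by the l2_block.rs hunks above) folds each executed transaction into the
// block's rolling hash as keccak256(prev_rolling_hash || tx_hash), so the final value
// commits to the ordered list of tx hashes. Assumed semantics, sketched with the
// third-party sha3 crate:
use sha3::{Digest, Keccak256};

fn keccak256_concat(prev: [u8; 32], tx_hash: [u8; 32]) -> [u8; 32] {
    let mut hasher = Keccak256::new();
    hasher.update(prev);
    hasher.update(tx_hash);
    hasher.finalize().into()
}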
pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { - let bytecode_hash = hash_bytecode(&bytecode); - let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); - - let bytecode_words = bytes_to_be_words(bytecode); - + let bytecode_hash = BytecodeHash::for_bytecode(&bytecode).value_u256(); + let bytecode_words = bytes_to_be_words(&bytecode); (bytecode_hash, bytecode_words) } @@ -49,7 +45,11 @@ pub(crate) fn compress_bytecodes( .enumerate() .sorted_by_key(|(_idx, dep)| *dep) .dedup_by(|x, y| x.1 == y.1) - .filter(|(_idx, dep)| !storage.borrow_mut().is_bytecode_known(&hash_bytecode(dep))) + .filter(|(_idx, dep)| { + !storage + .borrow_mut() + .is_bytecode_known(&BytecodeHash::for_bytecode(dep).value()) + }) .sorted_by_key(|(idx, _dep)| *idx) .filter_map(|(_idx, dep)| bytecode::compress(dep.clone()).ok()) .collect() diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/events.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/events.rs index 52a4ed8a2876..05ec6557e905 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/events.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/events.rs @@ -1,8 +1,7 @@ use zk_evm_1_3_3::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; -use zksync_types::{L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use zksync_utils::{be_chunks_to_h256_words, h256_to_account_address}; +use zksync_types::{h256_to_address, L1BatchNumber, EVENT_WRITER_ADDRESS, H256}; -use crate::interface::VmEvent; +use crate::{interface::VmEvent, utils::bytecode::be_chunks_to_h256_words}; #[derive(Clone)] pub(crate) struct SolidityLikeEvent { @@ -135,7 +134,7 @@ pub(crate) fn merge_events(events: Vec) -> Vec .filter(|e| e.address == EVENT_WRITER_ADDRESS) .map(|event| { // The events writer events where the first topic is the actual address of the event and the rest of the topics are real topics - let address = h256_to_account_address(&H256(event.topics[0])); + let address = h256_to_address(&H256(event.topics[0])); let topics = event.topics.into_iter().skip(1).collect(); SolidityLikeEvent { diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/history_recorder.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/history_recorder.rs index cbd4dc0ed738..111a337bf449 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/history_recorder.rs @@ -5,8 +5,7 @@ use zk_evm_1_3_3::{ vm_state::PrimitiveValue, zkevm_opcode_defs::{self}, }; -use zksync_types::{StorageKey, H256, U256}; -use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_types::{h256_to_u256, u256_to_h256, StorageKey, H256, U256}; use crate::interface::storage::{StoragePtr, WriteStorage}; diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/oracles/decommitter.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/oracles/decommitter.rs index 3c8d72b0b33a..a432e782f658 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/oracles/decommitter.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/oracles/decommitter.rs @@ -6,12 +6,12 @@ use zk_evm_1_3_3::{ DecommittmentQuery, MemoryIndex, MemoryLocation, MemoryPage, MemoryQuery, Timestamp, }, }; -use zksync_types::U256; -use zksync_utils::{bytecode::bytecode_len_in_words, bytes_to_be_words, u256_to_h256}; +use zksync_types::{u256_to_h256, U256}; use super::OracleWithHistory; use crate::{ interface::storage::{ReadStorage, StoragePtr}, 
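// The compress_bytecodes pipeline above, restated on plain types to make the intent
// visible: collapse duplicate factory deps, skip bytecodes that storage already knows,
// restore the original submission order, and drop deps whose compression fails.
// is_known and compress are stand-ins for the storage query and bytecode::compress;
// the itertools adaptors are the same ones the hunk uses.
use itertools::Itertools;

fn compress_bytecodes(
    deps: &[Vec<u8>],
    is_known: impl Fn(&[u8]) -> bool,
    compress: impl Fn(&[u8]) -> Option<Vec<u8>>,
) -> Vec<Vec<u8>> {
    deps.iter()
        .enumerate()
        .sorted_by_key(|(_idx, dep)| *dep) // group identical bytecodes together...
        .dedup_by(|x, y| x.1 == y.1) // ...so duplicates collapse to a single entry
        .filter(|(_idx, dep)| !is_known(dep)) // known bytecodes need no republishing
        .sorted_by_key(|(idx, _dep)| *idx) // back to original submission order
        .filter_map(|(_idx, dep)| compress(dep))
        .collect()
}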
+ utils::bytecode::{bytecode_len_in_words, bytes_to_be_words}, vm_virtual_blocks::old_vm::history_recorder::{ HistoryEnabled, HistoryMode, HistoryRecorder, WithHistory, }, @@ -61,7 +61,7 @@ impl DecommitterOracle { .load_factory_dep(u256_to_h256(hash)) .expect("Trying to decode unexisting hash"); - let value = bytes_to_be_words(value); + let value = bytes_to_be_words(&value); self.known_bytecodes.insert(hash, value.clone(), timestamp); value } diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/oracles/storage.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/oracles/storage.rs index defbad70f1a9..0b3a590d8d18 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/old_vm/oracles/storage.rs @@ -6,10 +6,9 @@ use zk_evm_1_3_3::{ zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, }; use zksync_types::{ - utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, StorageLogKind, - BOOTLOADER_ADDRESS, U256, + u256_to_h256, utils::storage_key_for_eth_balance, AccountTreeId, Address, StorageKey, + StorageLogKind, BOOTLOADER_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use super::OracleWithHistory; use crate::{ diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/tracers/refunds.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/tracers/refunds.rs index a2ca08a7ef96..59aa837cd8fb 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/tracers/refunds.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/tracers/refunds.rs @@ -8,8 +8,9 @@ use zk_evm_1_3_3::{ vm_state::VmLocalState, }; use zksync_system_constants::{PUBLISH_BYTECODE_OVERHEAD, SYSTEM_CONTEXT_ADDRESS}; -use zksync_types::{l2_to_l1_log::L2ToL1Log, L1BatchNumber, StorageKey, U256}; -use zksync_utils::{bytecode::bytecode_len_in_bytes, ceil_div_u256, u256_to_h256}; +use zksync_types::{ + ceil_div_u256, l2_to_l1_log::L2ToL1Log, u256_to_h256, L1BatchNumber, StorageKey, U256, +}; use crate::{ interface::{ @@ -17,6 +18,7 @@ use crate::{ L1BatchEnv, Refunds, VmEvent, VmExecutionResultAndLogs, }, tracers::dynamic::vm_1_3_3::DynTracer, + utils::bytecode::bytecode_len_in_bytes, vm_virtual_blocks::{ bootloader_state::BootloaderState, constants::{BOOTLOADER_HEAP_PAGE, OPERATOR_REFUNDS_OFFSET, TX_GAS_LIMIT_OFFSET}, @@ -327,7 +329,7 @@ pub(crate) fn pubdata_published( let published_bytecode_bytes: u32 = VmEvent::extract_published_bytecodes(&events) .iter() - .map(|bytecodehash| bytecode_len_in_bytes(*bytecodehash) as u32 + PUBLISH_BYTECODE_OVERHEAD) + .map(|bytecode_hash| bytecode_len_in_bytes(bytecode_hash) + PUBLISH_BYTECODE_OVERHEAD) .sum(); storage_writes_pubdata_published diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/tracers/utils.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/tracers/utils.rs index ef8219ec2b4d..6db2bac819df 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/tracers/utils.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/tracers/utils.rs @@ -9,8 +9,7 @@ use zksync_system_constants::{ ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, SHA256_PRECOMPILE_ADDRESS, }; -use zksync_types::U256; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, U256}; use crate::vm_virtual_blocks::{ constants::{ diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/types/internals/transaction_data.rs 
b/core/lib/multivm/src/versions/vm_virtual_blocks/types/internals/transaction_data.rs index c96004163a65..d13304c93285 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/types/internals/transaction_data.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/types/internals/transaction_data.rs @@ -1,19 +1,24 @@ use std::convert::TryInto; use zksync_types::{ + address_to_h256, + bytecode::BytecodeHash, ethabi::{encode, Address, Token}, fee::{encoding_len, Fee}, + h256_to_u256, l1::is_l1_tx_type, l2::{L2Tx, TransactionType}, transaction_request::{PaymasterParams, TransactionRequest}, web3::Bytes, Execute, ExecuteTransactionCommon, L2ChainId, L2TxCommonData, Nonce, Transaction, H256, U256, }; -use zksync_utils::{address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256}; -use crate::vm_virtual_blocks::{ - constants::MAX_GAS_PER_PUBDATA_BYTE, - utils::overhead::{get_amortized_overhead, OverheadCoefficients}, +use crate::{ + utils::bytecode::bytes_to_be_words, + vm_virtual_blocks::{ + constants::MAX_GAS_PER_PUBDATA_BYTE, + utils::overhead::{get_amortized_overhead, OverheadCoefficients}, + }, }; /// This structure represents the data that is used by @@ -190,16 +195,13 @@ impl TransactionData { let factory_deps_hashes = self .factory_deps .iter() - .map(|dep| h256_to_u256(hash_bytecode(dep))) + .map(|dep| BytecodeHash::for_bytecode(dep).value_u256()) .collect(); self.abi_encode_with_custom_factory_deps(factory_deps_hashes) } pub(crate) fn into_tokens(self) -> Vec { - let bytes = self.abi_encode(); - assert!(bytes.len() % 32 == 0); - - bytes_to_be_words(bytes) + bytes_to_be_words(&self.abi_encode()) } pub(crate) fn effective_gas_price_per_pubdata(&self, block_gas_price_per_pubdata: u32) -> u32 { diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/types/internals/vm_state.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/types/internals/vm_state.rs index d26acc4e9301..d1509bd016d8 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/types/internals/vm_state.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/types/internals/vm_state.rs @@ -11,14 +11,14 @@ use zk_evm_1_3_3::{ }, }; use zksync_system_constants::BOOTLOADER_ADDRESS; -use zksync_types::{block::L2BlockHasher, Address, L2BlockNumber}; -use zksync_utils::h256_to_u256; +use zksync_types::{block::L2BlockHasher, h256_to_u256, Address, L2BlockNumber}; use crate::{ interface::{ storage::{StoragePtr, WriteStorage}, L1BatchEnv, L2Block, SystemEnv, }, + utils::bytecode::bytes_to_be_words, vm_virtual_blocks::{ bootloader_state::BootloaderState, constants::BOOTLOADER_HEAP_PAGE, @@ -89,11 +89,7 @@ pub(crate) fn new_vm_state( decommittment_processor.populate( vec![( h256_to_u256(system_env.base_system_smart_contracts.default_aa.hash), - system_env - .base_system_smart_contracts - .default_aa - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.default_aa.code), )], Timestamp(0), ); @@ -101,11 +97,7 @@ pub(crate) fn new_vm_state( memory.populate( vec![( BOOTLOADER_CODE_PAGE, - system_env - .base_system_smart_contracts - .bootloader - .code - .clone(), + bytes_to_be_words(&system_env.base_system_smart_contracts.bootloader.code), )], Timestamp(0), ); diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/types/l1_batch_env.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/types/l1_batch_env.rs index f86d8749c9ed..08fe00741189 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/types/l1_batch_env.rs +++ 
b/core/lib/multivm/src/versions/vm_virtual_blocks/types/l1_batch_env.rs @@ -1,5 +1,4 @@ -use zksync_types::U256; -use zksync_utils::{address_to_u256, h256_to_u256}; +use zksync_types::{address_to_u256, h256_to_u256, U256}; use crate::{interface::L1BatchEnv, vm_virtual_blocks::utils::fee::get_batch_base_fee}; diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/utils/fee.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/utils/fee.rs index a53951a851e1..e9d46570983d 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/utils/fee.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/utils/fee.rs @@ -1,6 +1,5 @@ //! Utility functions for vm use zksync_types::fee_model::L1PeggedBatchFeeModelInput; -use zksync_utils::ceil_div; use crate::{ interface::L1BatchEnv, @@ -12,8 +11,11 @@ use crate::{ /// Calculates the amount of gas required to publish one byte of pubdata pub(crate) fn base_fee_to_gas_per_pubdata(l1_gas_price: u64, base_fee: u64) -> u64 { let eth_price_per_pubdata_byte = eth_price_per_pubdata_byte(l1_gas_price); - - ceil_div(eth_price_per_pubdata_byte, base_fee) + if eth_price_per_pubdata_byte == 0 { + 0 + } else { + eth_price_per_pubdata_byte.div_ceil(base_fee) + } } /// Calculates the base fee and gas per pubdata for the given L1 gas price. @@ -31,7 +33,7 @@ pub(crate) fn derive_base_fee_and_gas_per_pubdata( // publish enough public data while compensating us for it. let base_fee = std::cmp::max( fair_l2_gas_price, - ceil_div(eth_price_per_pubdata_byte, MAX_GAS_PER_PUBDATA_BYTE), + eth_price_per_pubdata_byte.div_ceil(MAX_GAS_PER_PUBDATA_BYTE), ); ( diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/utils/l2_blocks.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/utils/l2_blocks.rs index ff5536ae0b97..1095abd82db1 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/utils/l2_blocks.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/utils/l2_blocks.rs @@ -4,9 +4,9 @@ use zksync_system_constants::{ SYSTEM_CONTEXT_STORED_L2_BLOCK_HASHES, }; use zksync_types::{ - block::unpack_block_info, web3::keccak256, AccountTreeId, L2BlockNumber, StorageKey, H256, U256, + block::unpack_block_info, h256_to_u256, u256_to_h256, web3::keccak256, AccountTreeId, + L2BlockNumber, StorageKey, H256, U256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; use crate::interface::{ storage::{ReadStorage, StoragePtr}, diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/utils/overhead.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/utils/overhead.rs index cba4700002bb..6c79c05bc5b2 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/utils/overhead.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/utils/overhead.rs @@ -1,7 +1,6 @@ use zk_evm_1_3_3::zkevm_opcode_defs::system_params::MAX_TX_ERGS_LIMIT; use zksync_system_constants::MAX_L2_TX_GAS_LIMIT; -use zksync_types::{l1::is_l1_tx_type, U256}; -use zksync_utils::ceil_div_u256; +use zksync_types::{ceil_div_u256, l1::is_l1_tx_type, U256}; use crate::vm_virtual_blocks::constants::{ BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, MAX_TXS_IN_BLOCK, diff --git a/core/lib/multivm/src/vm_instance.rs b/core/lib/multivm/src/vm_instance.rs index e2f72bd24113..9de99a7eb116 100644 --- a/core/lib/multivm/src/vm_instance.rs +++ b/core/lib/multivm/src/vm_instance.rs @@ -14,6 +14,7 @@ use crate::{ VmMemoryMetrics, }, tracers::TracerDispatcher, + vm_fast::FastVmVersion, vm_latest::HistoryEnabled, }; @@ -132,7 +133,7 @@ impl LegacyVmInstance { l1_batch_env, system_env, 
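A note on the `fee.rs` hunk above: the removed `zksync_utils::ceil_div(a, b)` returned `0` for `a == 0` without ever dividing, so it silently tolerated `b == 0`; `u64::div_ceil` would panic in that case, which is why the explicit zero check was added. A hedged sketch of the equivalence (the legacy body is a reconstruction, not verbatim):

```rust
/// Presumed shape of the removed `zksync_utils::ceil_div` helper.
fn ceil_div_legacy(a: u64, b: u64) -> u64 {
    if a == 0 {
        0 // no division happens, so b == 0 is tolerated here
    } else {
        (a - 1) / b + 1
    }
}

#[test]
fn div_ceil_matches_legacy_for_nonzero_divisors() {
    for (a, b) in [(0u64, 1u64), (1, 1), (7, 2), (10, 5)] {
        assert_eq!(a.div_ceil(b), ceil_div_legacy(a, b));
    }
}
```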
storage_view, - crate::vm_m5::vm_instance::MultiVMSubversion::V1, + crate::vm_m5::vm_instance::MultiVmSubversion::V1, ); Self::VmM5(vm) } @@ -141,7 +142,7 @@ impl LegacyVmInstance { l1_batch_env, system_env, storage_view, - crate::vm_m5::vm_instance::MultiVMSubversion::V2, + crate::vm_m5::vm_instance::MultiVmSubversion::V2, ); Self::VmM5(vm) } @@ -150,7 +151,7 @@ impl LegacyVmInstance { l1_batch_env, system_env, storage_view, - crate::vm_m6::vm_instance::MultiVMSubversion::V1, + crate::vm_m6::vm_instance::MultiVmSubversion::V1, ); Self::VmM6(vm) } @@ -159,7 +160,7 @@ impl LegacyVmInstance { l1_batch_env, system_env, storage_view, - crate::vm_m6::vm_instance::MultiVMSubversion::V2, + crate::vm_m6::vm_instance::MultiVmSubversion::V2, ); Self::VmM6(vm) } @@ -194,7 +195,7 @@ impl LegacyVmInstance { l1_batch_env, system_env, storage_view, - crate::vm_latest::MultiVMSubversion::SmallBootloaderMemory, + crate::vm_latest::MultiVmSubversion::SmallBootloaderMemory, ); Self::Vm1_5_0(vm) } @@ -203,7 +204,7 @@ impl LegacyVmInstance { l1_batch_env, system_env, storage_view, - crate::vm_latest::MultiVMSubversion::IncreasedBootloaderMemory, + crate::vm_latest::MultiVmSubversion::IncreasedBootloaderMemory, ); Self::Vm1_5_0(vm) } @@ -212,7 +213,7 @@ impl LegacyVmInstance { l1_batch_env, system_env, storage_view, - crate::vm_latest::MultiVMSubversion::Gateway, + crate::vm_latest::MultiVmSubversion::Gateway, ); Self::Vm1_5_0(vm) } @@ -340,8 +341,5 @@ impl FastVmInstance { /// Checks whether the protocol version is supported by the fast VM. pub fn is_supported_by_fast_vm(protocol_version: ProtocolVersionId) -> bool { - matches!( - protocol_version.into(), - VmVersion::Vm1_5_0IncreasedBootloaderMemory - ) + FastVmVersion::try_from(VmVersion::from(protocol_version)).is_ok() } diff --git a/core/lib/object_store/src/retries.rs b/core/lib/object_store/src/retries.rs index 2cccbb17c2bb..16d2c1cd55f1 100644 --- a/core/lib/object_store/src/retries.rs +++ b/core/lib/object_store/src/retries.rs @@ -53,7 +53,6 @@ impl Request<'_> { backoff_secs *= 2; } Err(err) => { - tracing::warn!(%err, "Failed request with a fatal error"); break Err(err); } } diff --git a/core/lib/protobuf_config/src/da_dispatcher.rs b/core/lib/protobuf_config/src/da_dispatcher.rs index d77073bd32cf..e85ff5ae76ed 100644 --- a/core/lib/protobuf_config/src/da_dispatcher.rs +++ b/core/lib/protobuf_config/src/da_dispatcher.rs @@ -12,6 +12,7 @@ impl ProtoRepr for proto::DataAvailabilityDispatcher { max_rows_to_dispatch: self.max_rows_to_dispatch, max_retries: self.max_retries.map(|x| x as u16), use_dummy_inclusion_data: self.use_dummy_inclusion_data, + max_concurrent_requests: self.max_concurrent_requests, }) } @@ -21,6 +22,7 @@ impl ProtoRepr for proto::DataAvailabilityDispatcher { max_rows_to_dispatch: this.max_rows_to_dispatch, max_retries: this.max_retries.map(Into::into), use_dummy_inclusion_data: this.use_dummy_inclusion_data, + max_concurrent_requests: this.max_concurrent_requests, } } } diff --git a/core/lib/protobuf_config/src/experimental.rs b/core/lib/protobuf_config/src/experimental.rs index 750dc7b04f01..8dfbf413d5a1 100644 --- a/core/lib/protobuf_config/src/experimental.rs +++ b/core/lib/protobuf_config/src/experimental.rs @@ -30,13 +30,12 @@ impl ProtoRepr for proto::Db { .map(|count| NonZeroU32::new(count).context("cannot be 0")) .transpose() .context("state_keeper_db_max_open_files")?, - protective_reads_persistence_enabled: self - .reads_persistence_enabled - .unwrap_or_default(), + protective_reads_persistence_enabled: 
self.reads_persistence_enabled.unwrap_or(false), processing_delay_ms: self.processing_delay_ms.unwrap_or_default(), include_indices_and_filters_in_block_cache: self .include_indices_and_filters_in_block_cache - .unwrap_or_default(), + .unwrap_or(false), + merkle_tree_repair_stale_keys: self.merkle_tree_repair_stale_keys.unwrap_or(false), }) } @@ -55,6 +54,7 @@ impl ProtoRepr for proto::Db { include_indices_and_filters_in_block_cache: Some( this.include_indices_and_filters_in_block_cache, ), + merkle_tree_repair_stale_keys: Some(this.merkle_tree_repair_stale_keys), } } } diff --git a/core/lib/protobuf_config/src/proof_data_handler.rs b/core/lib/protobuf_config/src/proof_data_handler.rs index c01e163bd771..92a9c90bbb64 100644 --- a/core/lib/protobuf_config/src/proof_data_handler.rs +++ b/core/lib/protobuf_config/src/proof_data_handler.rs @@ -29,6 +29,12 @@ impl ProtoRepr for proto::ProofDataHandler { .unwrap_or_else( configs::TeeConfig::default_tee_proof_generation_timeout_in_secs, ), + tee_batch_permanently_ignored_timeout_in_hours: self + .tee_batch_permanently_ignored_timeout_in_hours + .map(|x| x as u16) + .unwrap_or_else( + configs::TeeConfig::default_tee_batch_permanently_ignored_timeout_in_hours, + ), }, }) } @@ -42,6 +48,11 @@ impl ProtoRepr for proto::ProofDataHandler { tee_proof_generation_timeout_in_secs: Some( this.tee_config.tee_proof_generation_timeout_in_secs.into(), ), + tee_batch_permanently_ignored_timeout_in_hours: Some( + this.tee_config + .tee_batch_permanently_ignored_timeout_in_hours + .into(), + ), } } } diff --git a/core/lib/protobuf_config/src/proto/config/da_dispatcher.proto b/core/lib/protobuf_config/src/proto/config/da_dispatcher.proto index dd366bd5b925..d6329d14b281 100644 --- a/core/lib/protobuf_config/src/proto/config/da_dispatcher.proto +++ b/core/lib/protobuf_config/src/proto/config/da_dispatcher.proto @@ -7,4 +7,5 @@ message DataAvailabilityDispatcher { optional uint32 max_rows_to_dispatch = 2; optional uint32 max_retries = 3; optional bool use_dummy_inclusion_data = 4; + optional uint32 max_concurrent_requests = 5; } diff --git a/core/lib/protobuf_config/src/proto/config/experimental.proto b/core/lib/protobuf_config/src/proto/config/experimental.proto index 87af8d3835c6..22de076ece27 100644 --- a/core/lib/protobuf_config/src/proto/config/experimental.proto +++ b/core/lib/protobuf_config/src/proto/config/experimental.proto @@ -10,7 +10,8 @@ message DB { optional uint32 state_keeper_db_max_open_files = 2; // optional optional bool reads_persistence_enabled = 3; optional uint64 processing_delay_ms = 4; - optional bool include_indices_and_filters_in_block_cache = 5; + optional bool include_indices_and_filters_in_block_cache = 5; // optional; defaults to false + optional bool merkle_tree_repair_stale_keys = 6; // optional; defaults to false } // Experimental part of the Snapshot recovery configuration. 
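The `experimental` config hunks above switch from `unwrap_or_default()` to an explicit `unwrap_or(false)` so the default is visible at the call site and matches the `// optional; defaults to false` annotations in the proto file. A minimal standalone sketch of the read/build round trip used by these `ProtoRepr` impls (function names are illustrative):

```rust
// Reading: an absent optional proto field falls back to the documented default.
fn read_flag(raw: Option<bool>) -> bool {
    raw.unwrap_or(false)
}

// Building: the parsed value is always materialized explicitly.
fn build_flag(parsed: bool) -> Option<bool> {
    Some(parsed)
}

#[test]
fn flag_round_trips() {
    assert!(read_flag(build_flag(true)));
    assert!(!read_flag(None)); // the default kicks in
}
```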
diff --git a/core/lib/protobuf_config/src/proto/config/prover.proto b/core/lib/protobuf_config/src/proto/config/prover.proto index 392834d25f3d..64735713fcab 100644 --- a/core/lib/protobuf_config/src/proto/config/prover.proto +++ b/core/lib/protobuf_config/src/proto/config/prover.proto @@ -110,4 +110,5 @@ message ProofDataHandler { optional bool tee_support = 3; // optional optional uint64 first_tee_processed_batch = 4; // optional optional uint32 tee_proof_generation_timeout_in_secs = 5; // optional + optional uint32 tee_batch_permanently_ignored_timeout_in_hours = 6; // optional } diff --git a/core/lib/snapshots_applier/Cargo.toml b/core/lib/snapshots_applier/Cargo.toml index 4ab0c86843ef..d107aac6d4c6 100644 --- a/core/lib/snapshots_applier/Cargo.toml +++ b/core/lib/snapshots_applier/Cargo.toml @@ -17,7 +17,6 @@ zksync_health_check.workspace = true zksync_types.workspace = true zksync_object_store.workspace = true zksync_web3_decl.workspace = true -zksync_utils.workspace = true vise.workspace = true diff --git a/core/lib/snapshots_applier/src/lib.rs b/core/lib/snapshots_applier/src/lib.rs index b4d24a0b1851..2c68b56ca5c6 100644 --- a/core/lib/snapshots_applier/src/lib.rs +++ b/core/lib/snapshots_applier/src/lib.rs @@ -13,6 +13,7 @@ use zksync_health_check::{Health, HealthStatus, HealthUpdater, ReactiveHealthChe use zksync_object_store::{ObjectStore, ObjectStoreError}; use zksync_types::{ api, + bytecode::BytecodeHash, snapshots::{ SnapshotFactoryDependencies, SnapshotHeader, SnapshotRecoveryStatus, SnapshotStorageLog, SnapshotStorageLogsChunk, SnapshotStorageLogsStorageKey, SnapshotVersion, @@ -20,7 +21,6 @@ use zksync_types::{ tokens::TokenInfo, L1BatchNumber, L2BlockNumber, StorageKey, H256, }; -use zksync_utils::bytecode::hash_bytecode; use zksync_web3_decl::{ client::{DynClient, L2}, error::{ClientRpcContext, EnrichedClientError, EnrichedClientResult}, @@ -800,9 +800,15 @@ impl<'a> SnapshotsApplier<'a> { // in underlying query, see `https://www.postgresql.org/docs/current/limits.html` // there were around 100 thousand contracts on mainnet, where this issue first manifested for chunk in factory_deps.factory_deps.chunks(1000) { + // TODO: bytecode hashing is ambiguous with EVM bytecodes let chunk_deps_hashmap: HashMap> = chunk .iter() - .map(|dep| (hash_bytecode(&dep.bytecode.0), dep.bytecode.0.clone())) + .map(|dep| { + ( + BytecodeHash::for_bytecode(&dep.bytecode.0).value(), + dep.bytecode.0.clone(), + ) + }) .collect(); storage .factory_deps_dal() diff --git a/core/lib/state/Cargo.toml b/core/lib/state/Cargo.toml index dd56368f3d2e..ced06de1a8e8 100644 --- a/core/lib/state/Cargo.toml +++ b/core/lib/state/Cargo.toml @@ -14,7 +14,6 @@ categories.workspace = true vise.workspace = true zksync_dal.workspace = true zksync_types.workspace = true -zksync_utils.workspace = true zksync_shared_metrics.workspace = true zksync_storage.workspace = true zksync_vm_interface.workspace = true diff --git a/core/lib/state/src/storage_factory/mod.rs b/core/lib/state/src/storage_factory/mod.rs index 0b514f8f9644..be7e20c5f83d 100644 --- a/core/lib/state/src/storage_factory/mod.rs +++ b/core/lib/state/src/storage_factory/mod.rs @@ -5,8 +5,7 @@ use async_trait::async_trait; use tokio::{runtime::Handle, sync::watch}; use zksync_dal::{Connection, ConnectionPool, Core, CoreDal}; use zksync_storage::RocksDB; -use zksync_types::{L1BatchNumber, StorageKey, StorageValue, H256}; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, L1BatchNumber, StorageKey, StorageValue, H256}; use 
zksync_vm_interface::storage::{ReadStorage, StorageSnapshot}; use self::metrics::{SnapshotStage, SNAPSHOT_METRICS}; @@ -201,10 +200,7 @@ impl CommonStorage<'static> { let factory_deps = bytecodes .into_iter() - .map(|(hash_u256, words)| { - let bytes: Vec = words.into_iter().flatten().collect(); - (u256_to_h256(hash_u256), bytes) - }) + .map(|(hash_u256, bytes)| (u256_to_h256(hash_u256), bytes)) .collect(); let storage = previous_values.into_iter().map(|(key, prev_value)| { diff --git a/core/lib/tee_verifier/Cargo.toml b/core/lib/tee_verifier/Cargo.toml index 331c47e365eb..289803fb5a89 100644 --- a/core/lib/tee_verifier/Cargo.toml +++ b/core/lib/tee_verifier/Cargo.toml @@ -17,7 +17,6 @@ zksync_merkle_tree.workspace = true zksync_multivm.workspace = true zksync_prover_interface.workspace = true zksync_types.workspace = true -zksync_utils.workspace = true anyhow.workspace = true once_cell.workspace = true diff --git a/core/lib/tee_verifier/src/lib.rs b/core/lib/tee_verifier/src/lib.rs index 140085dbb9fe..8e8362b57f4b 100644 --- a/core/lib/tee_verifier/src/lib.rs +++ b/core/lib/tee_verifier/src/lib.rs @@ -23,10 +23,9 @@ use zksync_prover_interface::inputs::{ StorageLogMetadata, V1TeeVerifierInput, WitnessInputMerklePaths, }; use zksync_types::{ - block::L2BlockExecutionData, commitment::PubdataParams, L1BatchNumber, StorageLog, - StorageValue, Transaction, H256, + block::L2BlockExecutionData, commitment::PubdataParams, u256_to_h256, L1BatchNumber, + StorageLog, StorageValue, Transaction, H256, }; -use zksync_utils::u256_to_h256; /// A structure to hold the result of verification. pub struct VerificationResult { @@ -305,7 +304,6 @@ mod tests { use zksync_contracts::{BaseSystemContracts, SystemContractCode}; use zksync_multivm::interface::{L1BatchEnv, SystemEnv, TxExecutionMode}; use zksync_prover_interface::inputs::{TeeVerifierInput, VMRunWitnessInputData}; - use zksync_types::U256; use super::*; @@ -345,11 +343,11 @@ mod tests { version: Default::default(), base_system_smart_contracts: BaseSystemContracts { bootloader: SystemContractCode { - code: vec![U256([1; 4])], + code: vec![1; 32], hash: H256([1; 32]), }, default_aa: SystemContractCode { - code: vec![U256([1; 4])], + code: vec![1; 32], hash: H256([1; 32]), }, evm_emulator: None, diff --git a/core/tests/test_account/Cargo.toml b/core/lib/test_contracts/Cargo.toml similarity index 61% rename from core/tests/test_account/Cargo.toml rename to core/lib/test_contracts/Cargo.toml index 0dda4f8ac777..d9df995b7fa8 100644 --- a/core/tests/test_account/Cargo.toml +++ b/core/lib/test_contracts/Cargo.toml @@ -1,6 +1,6 @@ [package] -name = "zksync_test_account" -description = "ZKsync test account for writing unit tests" +name = "zksync_test_contracts" +description = "ZKsync test contracts for writing unit tests" version.workspace = true edition.workspace = true authors.workspace = true @@ -13,10 +13,15 @@ categories.workspace = true [dependencies] zksync_types.workspace = true zksync_system_constants.workspace = true -zksync_utils.workspace = true zksync_eth_signer.workspace = true -zksync_contracts.workspace = true hex.workspace = true +once_cell.workspace = true ethabi.workspace = true rand.workspace = true +serde.workspace = true +serde_json.workspace = true + +[build-dependencies] +serde_json.workspace = true +foundry-compilers.workspace = true diff --git a/core/lib/test_contracts/README.md b/core/lib/test_contracts/README.md new file mode 100644 index 000000000000..2c5515269d42 --- /dev/null +++ b/core/lib/test_contracts/README.md @@ -0,0 +1,16 
@@ +# ZKsync Era Test Contracts + +This library exposes contracts used in the ZKsync Era codebase for unit testing. + +## Contents + +Some of the commonly used contracts included in this crate are: + +- [`LoadnextContract`](contracts/loadnext/loadnext_contract.sol): Emulates various kinds of load (storage reads / writes, hashing, emitting events, deploying contracts, etc.). Used in load testing. +- [`Counter`](contracts/counter/counter.sol): Simple stateful counter. Can be used to test "cheap" transactions and reverts. + +## Building + +Building the library relies on `foundry-compilers`; it doesn't require any external tools. If there are any issues during the build, it may be useful +to inspect build artifacts, which are located in one of the `target/{debug,release}/build/zksync_test_contracts-$random_numbers` directories. diff --git a/core/lib/test_contracts/build.rs b/core/lib/test_contracts/build.rs new file mode 100644 index 000000000000..64825e18d404 --- /dev/null +++ b/core/lib/test_contracts/build.rs @@ -0,0 +1,143 @@ +use std::{ + collections::{HashMap, HashSet}, + env, + fs::File, + io::{BufWriter, Write}, + path::{Path, PathBuf}, +}; + +use foundry_compilers::{ + artifacts::{ + zksolc::output_selection::{FileOutputSelection, OutputSelection, OutputSelectionFlag}, + Remapping, + }, + solc, + zksolc::{ + settings::{Optimizer, ZkSolcError, ZkSolcWarning}, + ZkSettings, ZkSolcCompiler, ZkSolcSettings, + }, + zksync, + zksync::artifact_output::zk::{ZkArtifactOutput, ZkContractArtifact}, + ArtifactId, ProjectBuilder, ProjectPathsConfig, +}; + +#[derive(Debug)] +struct ContractEntry { + abi: String, + bytecode: Vec<u8>, +} + +impl ContractEntry { + fn new(artifact: ZkContractArtifact) -> Option<Self> { + let abi = artifact.abi.expect("no ABI"); + let abi = serde_json::to_string(&abi).expect("cannot serialize ABI to string"); + let bytecode = artifact.bytecode?; // Bytecode is `None` for interfaces + let bytecode = bytecode + .object + .into_bytes() + .expect("bytecode is not fully compiled") + .into(); + Some(Self { abi, bytecode }) + } +} + +fn save_artifacts( + output: &mut impl Write, + artifacts: impl Iterator<Item = (ArtifactId, ZkContractArtifact)>, +) { + let source_dir = Path::new(env!("CARGO_MANIFEST_DIR")).join("contracts"); + let mut modules = HashMap::<_, HashMap<_, _>>::new(); + + for (id, artifact) in artifacts { + let Ok(path_in_sources) = id.source.strip_prefix(&source_dir) else { + continue; // The artifact doesn't correspond to a source contract + }; + let contract_dir = path_in_sources.iter().next().expect("no dir"); + let module_name = contract_dir + .to_str() + .expect("contract dir is not UTF-8") + .replace('-', "_"); + if let Some(entry) = ContractEntry::new(artifact) { + modules + .entry(module_name) + .or_default() + .insert(id.name, entry); + } + } + + for (module_name, module_entries) in modules { + writeln!(output, "pub(crate) mod {module_name} {{").unwrap(); + for (contract_name, entry) in module_entries { + writeln!( + output, + " pub(crate) const {contract_name}: crate::contracts::RawContract = crate::contracts::RawContract {{" + ) + .unwrap(); + writeln!(output, " abi: r#\"{}\"#,", entry.abi).unwrap(); // ABI shouldn't include '"#' combinations for this to work + writeln!(output, " bytecode: &{:?},", entry.bytecode).unwrap(); + writeln!(output, " }};").unwrap(); + } + writeln!(output, "}}").unwrap(); + } +}
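For orientation, a hedged sketch of what a module emitted by `save_artifacts` into `raw_contracts.rs` could look like (the contract name, ABI fragment, and bytes are illustrative, not real build output):

```rust
pub(crate) mod counter {
    pub(crate) const Counter: crate::contracts::RawContract = crate::contracts::RawContract {
        abi: r#"[{"inputs":[],"name":"get","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"}]"#,
        bytecode: &[0, 2, 0, 0, 0, 1],
    };
}
```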
+ +/// `zksolc` compiler settings. +fn compiler_settings() -> ZkSolcSettings { + ZkSolcSettings { + cli_settings: solc::CliSettings::default(), + settings: ZkSettings { + // The optimizer must be enabled; otherwise, system calls work incorrectly (root cause unclear) + optimizer: Optimizer { + enabled: Some(true), + ..Optimizer::default() + }, + // Required by the optimizer + via_ir: Some(true), + output_selection: OutputSelection { + all: FileOutputSelection { + per_file: HashSet::from([OutputSelectionFlag::ABI]), + per_contract: HashSet::from([OutputSelectionFlag::ABI]), + }, + }, + enable_eravm_extensions: true, + suppressed_errors: HashSet::from([ZkSolcError::SendTransfer]), + suppressed_warnings: HashSet::from([ZkSolcWarning::TxOrigin]), + ..ZkSettings::default() + }, + } +} + +fn main() { + let settings = compiler_settings(); + let temp_dir = PathBuf::from(env::var("OUT_DIR").expect("no `OUT_DIR` provided")); + let paths = ProjectPathsConfig::builder() + .sources(Path::new(env!("CARGO_MANIFEST_DIR")).join("contracts")) + .remapping(Remapping { + context: None, + name: "@openzeppelin/contracts".into(), + path: format!( + "{}/contract-libs/openzeppelin-contracts-v4/contracts", + env!("CARGO_MANIFEST_DIR") + ), + }) + .artifacts(temp_dir.join("artifacts")) + .cache(temp_dir.join("cache")) + .build() + .unwrap(); + + let project = ProjectBuilder::<ZkSolcCompiler, _>::new(ZkArtifactOutput::default()) + .paths(paths) + .settings(settings) + .build(ZkSolcCompiler::default()) + .unwrap(); + let output = zksync::project_compile(&project).unwrap(); + output.assert_success(); + + let module_path = temp_dir.join("raw_contracts.rs"); + let module = File::create(&module_path).expect("failed creating output Rust module"); + let mut module = BufWriter::new(module); + save_artifacts(&mut module, output.into_artifacts()); + + // Tell Cargo to rerun this build script if any source file changes.
+ project.rerun_if_sources_changed(); +} diff --git a/core/lib/test_contracts/contract-libs/openzeppelin-contracts-v4 b/core/lib/test_contracts/contract-libs/openzeppelin-contracts-v4 new file mode 120000 index 000000000000..ec18125715f9 --- /dev/null +++ b/core/lib/test_contracts/contract-libs/openzeppelin-contracts-v4 @@ -0,0 +1 @@ +../../../../contracts/l1-contracts/lib/openzeppelin-contracts-v4 \ No newline at end of file diff --git a/etc/contracts-test-data/contracts/complex-upgrade/complex-upgrade.sol b/core/lib/test_contracts/contracts/complex-upgrade/complex-upgrade.sol similarity index 100% rename from etc/contracts-test-data/contracts/complex-upgrade/complex-upgrade.sol rename to core/lib/test_contracts/contracts/complex-upgrade/complex-upgrade.sol diff --git a/etc/contracts-test-data/contracts/complex-upgrade/msg-sender.sol b/core/lib/test_contracts/contracts/complex-upgrade/msg-sender.sol similarity index 100% rename from etc/contracts-test-data/contracts/complex-upgrade/msg-sender.sol rename to core/lib/test_contracts/contracts/complex-upgrade/msg-sender.sol diff --git a/etc/contracts-test-data/contracts/context/context.sol b/core/lib/test_contracts/contracts/context/context.sol similarity index 100% rename from etc/contracts-test-data/contracts/context/context.sol rename to core/lib/test_contracts/contracts/context/context.sol diff --git a/etc/contracts-test-data/contracts/counter/counter.sol b/core/lib/test_contracts/contracts/counter/counter.sol similarity index 100% rename from etc/contracts-test-data/contracts/counter/counter.sol rename to core/lib/test_contracts/contracts/counter/counter.sol diff --git a/etc/contracts-test-data/contracts/counter/proxy_counter.sol b/core/lib/test_contracts/contracts/counter/proxy_counter.sol similarity index 100% rename from etc/contracts-test-data/contracts/counter/proxy_counter.sol rename to core/lib/test_contracts/contracts/counter/proxy_counter.sol diff --git a/etc/contracts-test-data/contracts/custom-account/Constants.sol b/core/lib/test_contracts/contracts/custom-account/Constants.sol similarity index 100% rename from etc/contracts-test-data/contracts/custom-account/Constants.sol rename to core/lib/test_contracts/contracts/custom-account/Constants.sol diff --git a/etc/contracts-test-data/contracts/custom-account/RLPEncoder.sol b/core/lib/test_contracts/contracts/custom-account/RLPEncoder.sol similarity index 100% rename from etc/contracts-test-data/contracts/custom-account/RLPEncoder.sol rename to core/lib/test_contracts/contracts/custom-account/RLPEncoder.sol diff --git a/etc/contracts-test-data/contracts/custom-account/SystemContext.sol b/core/lib/test_contracts/contracts/custom-account/SystemContext.sol similarity index 100% rename from etc/contracts-test-data/contracts/custom-account/SystemContext.sol rename to core/lib/test_contracts/contracts/custom-account/SystemContext.sol diff --git a/etc/contracts-test-data/contracts/custom-account/SystemContractsCaller.sol b/core/lib/test_contracts/contracts/custom-account/SystemContractsCaller.sol similarity index 100% rename from etc/contracts-test-data/contracts/custom-account/SystemContractsCaller.sol rename to core/lib/test_contracts/contracts/custom-account/SystemContractsCaller.sol diff --git a/etc/contracts-test-data/contracts/custom-account/TransactionHelper.sol b/core/lib/test_contracts/contracts/custom-account/TransactionHelper.sol similarity index 100% rename from etc/contracts-test-data/contracts/custom-account/TransactionHelper.sol rename to 
core/lib/test_contracts/contracts/custom-account/TransactionHelper.sol diff --git a/etc/contracts-test-data/contracts/custom-account/Utils.sol b/core/lib/test_contracts/contracts/custom-account/Utils.sol similarity index 100% rename from etc/contracts-test-data/contracts/custom-account/Utils.sol rename to core/lib/test_contracts/contracts/custom-account/Utils.sol diff --git a/etc/contracts-test-data/contracts/custom-account/custom-account.sol b/core/lib/test_contracts/contracts/custom-account/custom-account.sol similarity index 100% rename from etc/contracts-test-data/contracts/custom-account/custom-account.sol rename to core/lib/test_contracts/contracts/custom-account/custom-account.sol diff --git a/etc/contracts-test-data/contracts/custom-account/custom-paymaster.sol b/core/lib/test_contracts/contracts/custom-account/custom-paymaster.sol similarity index 100% rename from etc/contracts-test-data/contracts/custom-account/custom-paymaster.sol rename to core/lib/test_contracts/contracts/custom-account/custom-paymaster.sol diff --git a/etc/contracts-test-data/contracts/custom-account/interfaces/IAccount.sol b/core/lib/test_contracts/contracts/custom-account/interfaces/IAccount.sol similarity index 100% rename from etc/contracts-test-data/contracts/custom-account/interfaces/IAccount.sol rename to core/lib/test_contracts/contracts/custom-account/interfaces/IAccount.sol diff --git a/etc/contracts-test-data/contracts/custom-account/interfaces/IContractDeployer.sol b/core/lib/test_contracts/contracts/custom-account/interfaces/IContractDeployer.sol similarity index 100% rename from etc/contracts-test-data/contracts/custom-account/interfaces/IContractDeployer.sol rename to core/lib/test_contracts/contracts/custom-account/interfaces/IContractDeployer.sol diff --git a/etc/contracts-test-data/contracts/custom-account/interfaces/INonceHolder.sol b/core/lib/test_contracts/contracts/custom-account/interfaces/INonceHolder.sol similarity index 100% rename from etc/contracts-test-data/contracts/custom-account/interfaces/INonceHolder.sol rename to core/lib/test_contracts/contracts/custom-account/interfaces/INonceHolder.sol diff --git a/etc/contracts-test-data/contracts/custom-account/interfaces/IPaymaster.sol b/core/lib/test_contracts/contracts/custom-account/interfaces/IPaymaster.sol similarity index 100% rename from etc/contracts-test-data/contracts/custom-account/interfaces/IPaymaster.sol rename to core/lib/test_contracts/contracts/custom-account/interfaces/IPaymaster.sol diff --git a/etc/contracts-test-data/contracts/custom-account/interfaces/IPaymasterFlow.sol b/core/lib/test_contracts/contracts/custom-account/interfaces/IPaymasterFlow.sol similarity index 100% rename from etc/contracts-test-data/contracts/custom-account/interfaces/IPaymasterFlow.sol rename to core/lib/test_contracts/contracts/custom-account/interfaces/IPaymasterFlow.sol diff --git a/etc/contracts-test-data/contracts/custom-account/many-owners-custom-account.sol b/core/lib/test_contracts/contracts/custom-account/many-owners-custom-account.sol similarity index 100% rename from etc/contracts-test-data/contracts/custom-account/many-owners-custom-account.sol rename to core/lib/test_contracts/contracts/custom-account/many-owners-custom-account.sol diff --git a/etc/contracts-test-data/contracts/custom-account/nonce-holder-test.sol b/core/lib/test_contracts/contracts/custom-account/nonce-holder-test.sol similarity index 100% rename from etc/contracts-test-data/contracts/custom-account/nonce-holder-test.sol rename to 
core/lib/test_contracts/contracts/custom-account/nonce-holder-test.sol diff --git a/etc/contracts-test-data/contracts/error/error.sol b/core/lib/test_contracts/contracts/error/error.sol similarity index 100% rename from etc/contracts-test-data/contracts/error/error.sol rename to core/lib/test_contracts/contracts/error/error.sol diff --git a/etc/contracts-test-data/contracts/expensive/expensive.sol b/core/lib/test_contracts/contracts/expensive/expensive.sol similarity index 100% rename from etc/contracts-test-data/contracts/expensive/expensive.sol rename to core/lib/test_contracts/contracts/expensive/expensive.sol diff --git a/etc/contracts-test-data/contracts/failed-call/failed_call.sol b/core/lib/test_contracts/contracts/failed-call/failed_call.sol similarity index 100% rename from etc/contracts-test-data/contracts/failed-call/failed_call.sol rename to core/lib/test_contracts/contracts/failed-call/failed_call.sol diff --git a/etc/contracts-test-data/contracts/infinite/infinite.sol b/core/lib/test_contracts/contracts/infinite/infinite.sol similarity index 100% rename from etc/contracts-test-data/contracts/infinite/infinite.sol rename to core/lib/test_contracts/contracts/infinite/infinite.sol diff --git a/core/lib/test_contracts/contracts/loadnext/README.md b/core/lib/test_contracts/contracts/loadnext/README.md new file mode 100644 index 000000000000..5918c4f2308a --- /dev/null +++ b/core/lib/test_contracts/contracts/loadnext/README.md @@ -0,0 +1,15 @@ +# Calculating loadtest profiles + +Use the SQL scripts in this directory to calculate the characteristics of transactions within a miniblock range. + +Calculate `CONTRACT_EXECUTION_PARAMS` as follows: + +- `light`: all zeroes. +- `realistic`: median (50th percentile). +- `heavy`: generally use 2.5× the values in the 99th percentile. However, some operations are even less frequent than that (e.g. contract deployments). At the time of writing, the contract deployment count is set to 5. + +Metrics may be averaged across different block ranges to calculate a more holistic "characteristic." + +## Compensating for implicit activity + +The mere act of executing a transaction entails some ancillary activity on the network. For example, some events are emitted when tokens are transferred for gas payments. The loadtest contract does not compensate for this activity, so keep it in mind when evaluating loadtest results. diff --git a/core/lib/test_contracts/contracts/loadnext/loadnext_contract.sol b/core/lib/test_contracts/contracts/loadnext/loadnext_contract.sol new file mode 100644 index 000000000000..9186ff6180a2 --- /dev/null +++ b/core/lib/test_contracts/contracts/loadnext/loadnext_contract.sol @@ -0,0 +1,81 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; +pragma abicoder v2; + +contract LoadnextContract { + event Event(uint val); + uint[] readArray; + uint[] writeArray; + + constructor(uint reads) { + for (uint i = 0; i < reads; i++) { + readArray.push(i); + } + } + + function execute( + uint reads, + uint initialWrites, + uint repeatedWrites, + uint hashes, + uint events, + uint maxRecursion, + uint deploys + ) external returns (uint) { + if (maxRecursion > 0) { + return + this.execute( + reads, + initialWrites, + repeatedWrites, + hashes, + events, + maxRecursion - 1, + deploys + ); + } + + require(repeatedWrites <= readArray.length); + uint sum = 0; + + // Use the storage read results below so that the compiler does not optimize these loops away.
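+ // The first loop below performs read-modify-write cycles on pre-populated
+ // slots ("repeated writes"); the second performs plain reads on the rest.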
+ for (uint i = 0; i < repeatedWrites; i++) { + uint value = readArray[i]; + sum += value; + readArray[i] = value + 1; + } + for (uint i = repeatedWrites; i < reads; i++) { + sum += readArray[i]; + } + + for (uint i = 0; i < initialWrites; i++) { + writeArray.push(i); + } + + for (uint i = 0; i < events; i++) { + emit Event(i); + } + + // Use the keccak result so that the compiler does not optimize this loop away. + for (uint i = 0; i < hashes; i++) { + sum += uint8( + keccak256(abi.encodePacked("Message for encoding"))[0] + ); + } + + for (uint i = 0; i < deploys; i++) { + Foo foo = new Foo(); + } + return sum; + } + + function burnGas(uint256 gasToBurn) external { + uint256 initialGas = gasleft(); + while (initialGas - gasleft() < gasToBurn) {} + } +} + +contract Foo { + string public name = "Foo"; +} diff --git a/core/lib/test_contracts/contracts/loadnext/query_event_metrics.sql b/core/lib/test_contracts/contracts/loadnext/query_event_metrics.sql new file mode 100644 index 000000000000..1a5d87d6fcfb --- /dev/null +++ b/core/lib/test_contracts/contracts/loadnext/query_event_metrics.sql @@ -0,0 +1,19 @@ +-- calculate distribution of event emissions per transaction + +\set start_from_miniblock_number 40000000 +\set miniblock_range 10000 + +select stddev_samp(metric) as stddev, + avg(metric) as avg, + sum(metric) as sum, + min(metric) as min, + percentile_cont(0.01) within group (order by metric) as pct_01, + percentile_cont(0.50) within group (order by metric) as pct_50, + percentile_cont(0.99) within group (order by metric) as pct_99, + max(metric) as max +from (select tx.hash, count(ev.*) as metric + from transactions tx + left join events ev on ev.tx_hash = tx.hash + where ev.miniblock_number >= :start_from_miniblock_number + and ev.miniblock_number < :start_from_miniblock_number + :miniblock_range + group by tx.hash) s; diff --git a/core/lib/test_contracts/contracts/loadnext/query_execution_info_metrics.sql b/core/lib/test_contracts/contracts/loadnext/query_execution_info_metrics.sql new file mode 100644 index 000000000000..bf9faba4b6dc --- /dev/null +++ b/core/lib/test_contracts/contracts/loadnext/query_execution_info_metrics.sql @@ -0,0 +1,20 @@ +-- calculate distribution of execution_info fields per transaction + +-- execution_info fields: gas_used, vm_events, cycles_used, storage_logs, l2_to_l1_logs, contracts_used, pubdata_published, total_log_queries, contracts_deployed, l2_l1_long_messages, computational_gas_used, published_bytecode_bytes +\set execution_info_field 'storage_logs' +\set start_from_miniblock_number 40000000 +\set miniblock_range 10000 + +select stddev_samp(metric) as stddev, + avg(metric) as avg, + sum(metric) as sum, + min(metric) as min, + percentile_cont(0.01) within group (order by metric) as pct_01, + percentile_cont(0.50) within group (order by metric) as pct_50, + percentile_cont(0.99) within group (order by metric) as pct_99, + max(metric) as max +from (select tx.miniblock_number, + (execution_info ->> :execution_info_field)::bigint as metric + from transactions tx) cd +where cd.miniblock_number >= :start_from_miniblock_number + and cd.miniblock_number < :start_from_miniblock_number + :miniblock_range; diff --git a/core/lib/test_contracts/contracts/loadnext/query_max_transactions_in_window.sql b/core/lib/test_contracts/contracts/loadnext/query_max_transactions_in_window.sql new file mode 100644 index 000000000000..91dd4bd47a7c --- /dev/null +++ b/core/lib/test_contracts/contracts/loadnext/query_max_transactions_in_window.sql @@ -0,0 +1,23 @@ +-- not a
metrics-collecting query, but may be useful to find an interesting range of transactions + +\set miniblock_number_range_start 36700000 +\set miniblock_number_range_end 36850000 +\set window_size 10000 +\set maximize_column l2_tx_count + +select miniblock_number_start, + miniblock_number_start + :window_size as miniblock_number_end, + metric_total +from (select mb.number as miniblock_number_start, + sum(mb.:maximize_column) + over lookahead + as metric_total + from miniblocks mb + where mb.number >= :miniblock_number_range_start + and mb.number < :miniblock_number_range_end + window lookahead as ( + order by mb.number + rows between current row and :window_size following + )) _s +order by metric_total desc +limit 10; diff --git a/core/lib/test_contracts/contracts/loadnext/query_read_metrics_basic.sql b/core/lib/test_contracts/contracts/loadnext/query_read_metrics_basic.sql new file mode 100644 index 000000000000..62195016f10e --- /dev/null +++ b/core/lib/test_contracts/contracts/loadnext/query_read_metrics_basic.sql @@ -0,0 +1,39 @@ +-- calculate distribution of storage reads per transaction +-- does not calculate hot/cold reads + +\set start_from_miniblock_number 40000000 +\set miniblock_range 10000 + +with mb as (select * + from miniblocks mb + where mb.number >= :start_from_miniblock_number + order by mb.number + limit :miniblock_range) +select stddev_samp(metric) as stddev, + avg(metric) as avg, + sum(metric) as sum, + min(metric) as min, + percentile_cont(0.01) within group (order by metric) as pct_01, + percentile_cont(0.50) within group (order by metric) as pct_50, + percentile_cont(0.99) within group (order by metric) as pct_99, + max(metric) as max +from (select miniblock_number, + (sum(read_write_logs) - sum(write_logs)) / sum(transaction_count) as metric, + sum(transaction_count) as transaction_count + from (select mb.number as miniblock_number, + (tx.execution_info ->> 'storage_logs')::bigint as read_write_logs, + null as write_logs, + 1 as transaction_count + from transactions tx, + mb + where tx.miniblock_number = mb.number + union + select mb.number as miniblock_number, + null as read_write_logs, + count(sl.*) as write_logs, + 0 as transaction_count + from storage_logs sl, + mb + where sl.miniblock_number = mb.number + group by mb.number) s + group by s.miniblock_number) t, generate_series(1, t.transaction_count); diff --git a/core/lib/test_contracts/contracts/loadnext/query_write_metrics.sql b/core/lib/test_contracts/contracts/loadnext/query_write_metrics.sql new file mode 100644 index 000000000000..f142347f9801 --- /dev/null +++ b/core/lib/test_contracts/contracts/loadnext/query_write_metrics.sql @@ -0,0 +1,50 @@ +-- calculate distribution of initial and repeated writes per transaction + +\set start_from_miniblock_number 40000000 +\set miniblock_range 10000 + +select + -- initial writes + stddev_samp(initial_writes_per_tx) as initial_writes_stddev, + avg(initial_writes_per_tx) as initial_writes_avg, + min(initial_writes_per_tx) as initial_writes_min, + percentile_cont(0.01) within group (order by initial_writes_per_tx) as initial_writes_pct_01, + percentile_cont(0.50) within group (order by initial_writes_per_tx) as initial_writes_pct_50, + percentile_cont(0.99) within group (order by initial_writes_per_tx) as initial_writes_pct_99, + max(initial_writes_per_tx) as initial_writes_max, + + -- repeated writes + stddev_samp(repeated_writes_per_tx) as repeated_writes_stddev, + avg(repeated_writes_per_tx) as repeated_writes_avg, + min(repeated_writes_per_tx) as
repeated_writes_min, + percentile_cont(0.01) within group (order by repeated_writes_per_tx) as repeated_writes_pct_01, + percentile_cont(0.50) within group (order by repeated_writes_per_tx) as repeated_writes_pct_50, + percentile_cont(0.99) within group (order by repeated_writes_per_tx) as repeated_writes_pct_99, + max(repeated_writes_per_tx) as repeated_writes_max +from (select initial_writes::real / l2_tx_count::real as initial_writes_per_tx, + (total_writes - initial_writes)::real / l2_tx_count::real as repeated_writes_per_tx + from (select mb.number as miniblock_number, + count(sl.hashed_key) as total_writes, + count(distinct sl.hashed_key) filter ( + where + iw.hashed_key is not null + ) as initial_writes, + mb.l2_tx_count as l2_tx_count + from miniblocks mb + join l1_batches l1b on l1b.number = mb.l1_batch_number + join storage_logs sl on sl.miniblock_number = mb.number + left join initial_writes iw on iw.hashed_key = sl.hashed_key + and iw.l1_batch_number = mb.l1_batch_number + and mb.number = ( + -- initial writes are only tracked by l1 batch number, so find the first miniblock in that batch that contains a write to that key + select miniblock_number + from storage_logs + where hashed_key = sl.hashed_key + order by miniblock_number + limit 1) + where mb.l2_tx_count <> 0 -- avoid div0 + and mb.number >= :start_from_miniblock_number + group by mb.number + order by mb.number desc + limit :miniblock_range) s, generate_series(1, s.l2_tx_count) -- scale by # of tx + ) t; diff --git a/etc/contracts-test-data/contracts/mock-evm/mock-evm.sol b/core/lib/test_contracts/contracts/mock-evm/mock-evm.sol similarity index 100% rename from etc/contracts-test-data/contracts/mock-evm/mock-evm.sol rename to core/lib/test_contracts/contracts/mock-evm/mock-evm.sol diff --git a/etc/contracts-test-data/contracts/precompiles/precompiles.sol b/core/lib/test_contracts/contracts/precompiles/precompiles.sol similarity index 100% rename from etc/contracts-test-data/contracts/precompiles/precompiles.sol rename to core/lib/test_contracts/contracts/precompiles/precompiles.sol diff --git a/etc/contracts-test-data/contracts/simple-transfer/simple-transfer.sol b/core/lib/test_contracts/contracts/simple-transfer/simple-transfer.sol similarity index 90% rename from etc/contracts-test-data/contracts/simple-transfer/simple-transfer.sol rename to core/lib/test_contracts/contracts/simple-transfer/simple-transfer.sol index 591e97cc1ae9..8ab5bf330e02 100644 --- a/etc/contracts-test-data/contracts/simple-transfer/simple-transfer.sol +++ b/core/lib/test_contracts/contracts/simple-transfer/simple-transfer.sol @@ -19,7 +19,8 @@ contract SimpleTransfer { // Function to withdraw Ether to the owner's address function withdraw(uint _amount) public onlyOwner { require(address(this).balance >= _amount, "Insufficient balance in contract"); - payable(owner).transfer(_amount); + (bool success, ) = owner.call{value: _amount}(""); + require(success, "transfer reverted"); } // Function to transfer Ether from this contract to any address diff --git a/etc/contracts-test-data/contracts/storage/storage.sol b/core/lib/test_contracts/contracts/storage/storage.sol similarity index 96% rename from etc/contracts-test-data/contracts/storage/storage.sol rename to core/lib/test_contracts/contracts/storage/storage.sol index 2f386f5c732b..f1c629aeb2c6 100644 --- a/etc/contracts-test-data/contracts/storage/storage.sol +++ b/core/lib/test_contracts/contracts/storage/storage.sol @@ -37,7 +37,7 @@ contract StorageTester { } // This test aims to check that 
the tstore/sstore are writing into separate spaces. - function testTrasientAndNonTransientStore() external { + function testTransientAndNonTransientStore() external { value = 100; uint256 x; @@ -95,7 +95,7 @@ contract StorageTester { } function testTransientStore() external { - this.testTrasientAndNonTransientStore(); + this.testTransientAndNonTransientStore(); this.testTstoreRollback(); } diff --git a/etc/contracts-test-data/contracts/transfer/transfer.sol b/core/lib/test_contracts/contracts/transfer/transfer.sol similarity index 97% rename from etc/contracts-test-data/contracts/transfer/transfer.sol rename to core/lib/test_contracts/contracts/transfer/transfer.sol index 4c63a2e9c7d1..964fb3b01667 100644 --- a/etc/contracts-test-data/contracts/transfer/transfer.sol +++ b/core/lib/test_contracts/contracts/transfer/transfer.sol @@ -9,12 +9,11 @@ contract TransferTest { function send(address payable to, uint256 amount) public payable { bool success = to.send(amount); - require(success, "Transaction failed"); } receive() external payable { - + // Do nothing } } diff --git a/core/lib/test_contracts/src/contracts.rs b/core/lib/test_contracts/src/contracts.rs new file mode 100644 index 000000000000..09a0535824df --- /dev/null +++ b/core/lib/test_contracts/src/contracts.rs @@ -0,0 +1,314 @@ +//! Test contracts. + +use ethabi::Token; +use once_cell::sync::Lazy; +use serde::{Deserialize, Serialize}; +use zksync_types::{Execute, H256, U256}; + +/// The structure of produced modules is as follows: +/// +/// - Each dir in `/contracts` translates into a module with the same name (just with `-` chars replaced with `_`). +/// - Each contract in all files in this dir produces a `RawContract` constant with the same name as the contract. +mod raw { + #![allow(unused, non_upper_case_globals)] + include!(concat!(env!("OUT_DIR"), "/raw_contracts.rs")); +} + +/// Raw contracts produced by the build script. +#[derive(Debug, Clone, Copy)] +pub(crate) struct RawContract { + pub abi: &'static str, + pub bytecode: &'static [u8], +} + +/// Test contract consisting of deployable EraVM bytecode and Web3 ABI. +#[derive(Debug, Clone)] +#[non_exhaustive] +pub struct TestContract { + /// Web3 ABI of this contract. + pub abi: ethabi::Contract, + /// EraVM bytecode of this contract. + pub bytecode: &'static [u8], + /// Contract dependencies (i.e., potential factory deps to be included in the contract deployment / transactions). + pub dependencies: Vec, +} + +impl TestContract { + fn new(raw: RawContract) -> Self { + let abi = serde_json::from_str(raw.abi).expect("failed parsing contract ABI"); + Self { + abi, + bytecode: raw.bytecode, + dependencies: vec![], + } + } + + /// Returns a contract used to test complex system contract upgrades. + pub fn complex_upgrade() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::complex_upgrade::ComplexUpgrade)); + &CONTRACT + } + + /// Returns a contract used to test context methods. + pub fn context_test() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::context::Context)); + &CONTRACT + } + + /// Returns a simple counter contract. + pub fn counter() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::counter::Counter)); + &CONTRACT + } + + /// Returns a contract used in load testing that emulates various kinds of expensive operations + /// (storage reads / writes, hashing, recursion via far calls etc.). 
+ pub fn load_test() -> &'static Self { + static CONTRACT: Lazy = Lazy::new(|| { + let mut contract = TestContract::new(raw::loadnext::LoadnextContract); + contract.dependencies = vec![TestContract::new(raw::loadnext::Foo)]; + contract + }); + &CONTRACT + } + + /// Returns a contract with expensive storage operations. + pub fn expensive() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::expensive::Expensive)); + &CONTRACT + } + + pub fn failed_call() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::failed_call::FailedCall)); + &CONTRACT + } + + /// Returns a contract with an infinite loop (useful for testing out-of-gas reverts). + pub fn infinite_loop() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::infinite::InfiniteLoop)); + &CONTRACT + } + + /// Returns a custom account with multiple owners. + pub fn many_owners() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::custom_account::ManyOwnersCustomAccount)); + &CONTRACT + } + + /// Returns a contract testing `msg.sender` value. + pub fn msg_sender_test() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::complex_upgrade::MsgSenderTest)); + &CONTRACT + } + + pub fn nonce_holder() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::custom_account::NonceHolderTest)); + &CONTRACT + } + + /// Returns a contract testing precompiles. + pub fn precompiles_test() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::precompiles::Precompiles)); + &CONTRACT + } + + /// Returns a contract proxying calls to a [counter](Self::counter()). + pub fn proxy_counter() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::counter::ProxyCounter)); + &CONTRACT + } + + /// Returns a reentrant recipient for transfers. + pub fn reentrant_recipient() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::transfer::ReentrantRecipient)); + &CONTRACT + } + + /// Returns a contract testing reverts. + pub fn reverts_test() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::error::SimpleRequire)); + &CONTRACT + } + + /// Returns a simple fungible token contract. + pub fn simple_transfer() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::simple_transfer::SimpleTransfer)); + &CONTRACT + } + + /// Returns a contract testing storage operations. + pub fn storage_test() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::storage::StorageTester)); + &CONTRACT + } + + /// Returns a contract for testing base token transfers. + pub fn transfer_test() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::transfer::TransferTest)); + &CONTRACT + } + + /// Returns a test recipient for the [transfer test](Self::transfer_test()) contract. + pub fn transfer_recipient() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::transfer::Recipient)); + &CONTRACT + } + + /// Returns a mock version of `ContractDeployer`. + pub fn mock_deployer() -> &'static Self { + static CONTRACT: Lazy = + Lazy::new(|| TestContract::new(raw::mock_evm::MockContractDeployer)); + &CONTRACT + } + + /// Returns a mock version of `KnownCodeStorage`. 
+ pub fn mock_known_code_storage() -> &'static Self { + static CONTRACT: Lazy<TestContract> = + Lazy::new(|| TestContract::new(raw::mock_evm::MockKnownCodeStorage)); + &CONTRACT + } + + /// Returns a mock EVM emulator. + pub fn mock_evm_emulator() -> &'static Self { + static CONTRACT: Lazy<TestContract> = + Lazy::new(|| TestContract::new(raw::mock_evm::MockEvmEmulator)); + &CONTRACT + } + + /// Contract testing recursive calls. + pub fn recursive_test() -> &'static Self { + static CONTRACT: Lazy<TestContract> = + Lazy::new(|| TestContract::new(raw::mock_evm::NativeRecursiveContract)); + &CONTRACT + } + + /// Contract implementing incrementing operations. Used to test static / delegate calls. + pub fn increment_test() -> &'static Self { + static CONTRACT: Lazy<TestContract> = + Lazy::new(|| TestContract::new(raw::mock_evm::IncrementingContract)); + &CONTRACT + } + + /// Returns factory deps for this contract deployment (the dependencies' bytecodes; the contract's own bytecode is added separately when building the deploy payload). + pub fn factory_deps(&self) -> Vec<Vec<u8>> { + let mut deps = vec![]; + self.insert_factory_deps(&mut deps); + deps + } + + fn insert_factory_deps(&self, dest: &mut Vec<Vec<u8>>) { + for deployed in &self.dependencies { + dest.push(deployed.bytecode.to_vec()); + deployed.insert_factory_deps(dest); + } + } + + /// Generates the `Execute` payload for deploying this contract with zero salt. + pub fn deploy_payload(&self, args: &[Token]) -> Execute { + self.deploy_payload_with_salt(H256::zero(), args) + } + + /// Generates the `Execute` payload for deploying this contract with a custom salt. + pub fn deploy_payload_with_salt(&self, salt: H256, args: &[Token]) -> Execute { + let mut execute = Execute::for_deploy(salt, self.bytecode.to_vec(), args); + execute.factory_deps.extend(self.factory_deps()); + execute + }
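A hedged usage sketch tying the accessors and deploy helpers above together (the assertion about factory deps assumes `Execute::for_deploy` registers the contract's own bytecode, as suggested by the `lib.rs` changes later in this diff):

```rust
use ethabi::Token;
use zksync_test_contracts::TestContract;

fn build_counter_deploy() {
    let counter = TestContract::counter();
    // Deploy payload with zero salt and no constructor arguments.
    let execute = counter.deploy_payload(&[]);
    assert!(!execute.factory_deps.is_empty()); // assumption: includes own bytecode

    // ABI helpers work as usual; `function` panics on unknown names.
    let calldata = counter.function("get").encode_input(&[]).unwrap();
    assert_eq!(calldata.len(), 4); // just the 4-byte selector
}
```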
+ + /// Shortcut for accessing a contract function; panics if the function doesn't exist. + pub fn function(&self, name: &str) -> &ethabi::Function { + self.abi + .function(name) + .unwrap_or_else(|err| panic!("cannot access function `{name}`: {err}")) + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct LoadnextContractExecutionParams { + pub reads: usize, + pub initial_writes: usize, + pub repeated_writes: usize, + pub events: usize, + pub hashes: usize, + pub recursive_calls: usize, + pub deploys: usize, +} + +impl LoadnextContractExecutionParams { + pub fn empty() -> Self { + Self { + reads: 0, + initial_writes: 0, + repeated_writes: 0, + events: 0, + hashes: 0, + recursive_calls: 0, + deploys: 0, + } + } +} + +impl Default for LoadnextContractExecutionParams { + fn default() -> Self { + Self { + reads: 10, + initial_writes: 10, + repeated_writes: 10, + events: 10, + hashes: 10, + recursive_calls: 1, + deploys: 1, + } + } +} + +impl LoadnextContractExecutionParams { + pub fn to_bytes(&self) -> Vec<u8> { + let contract_function = TestContract::load_test().abi.function("execute").unwrap(); + + let params = vec![ + Token::Uint(U256::from(self.reads)), + Token::Uint(U256::from(self.initial_writes)), + Token::Uint(U256::from(self.repeated_writes)), + Token::Uint(U256::from(self.hashes)), + Token::Uint(U256::from(self.events)), + Token::Uint(U256::from(self.recursive_calls)), + Token::Uint(U256::from(self.deploys)), + ]; + + contract_function + .encode_input(&params) + .expect("failed to encode parameters") + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn contracts_are_initialized_correctly() { + TestContract::counter().abi.function("get").unwrap(); + TestContract::context_test() + .abi + .function("getBlockNumber") + .unwrap(); + } +} diff --git a/core/tests/test_account/src/lib.rs b/core/lib/test_contracts/src/lib.rs similarity index 81% rename from core/tests/test_account/src/lib.rs rename to core/lib/test_contracts/src/lib.rs index cfb539c0e0f7..223bd92a651a 100644 --- a/core/tests/test_account/src/lib.rs +++ b/core/lib/test_contracts/src/lib.rs @@ -1,17 +1,17 @@ use ethabi::Token; -use zksync_contracts::{ - deployer_contract, load_contract, test_contracts::LoadnextContractExecutionParams, -}; use zksync_eth_signer::{PrivateKeySigner, TransactionParameters}; use zksync_system_constants::{ - CONTRACT_DEPLOYER_ADDRESS, DEFAULT_L2_TX_GAS_PER_PUBDATA_BYTE, - REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_BYTE, + DEFAULT_L2_TX_GAS_PER_PUBDATA_BYTE, REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_BYTE, }; use zksync_types::{ - abi, fee::Fee, l2::L2Tx, utils::deployed_address_create, Address, Execute, K256PrivateKey, - L2ChainId, Nonce, Transaction, H256, PRIORITY_OPERATION_L2_TX_TYPE, U256, + abi, address_to_u256, bytecode::BytecodeHash, fee::Fee, l2::L2Tx, + utils::deployed_address_create, Address, Execute, K256PrivateKey, L2ChainId, Nonce, + Transaction, H256, PRIORITY_OPERATION_L2_TX_TYPE, U256, }; -use zksync_utils::{address_to_u256, bytecode::hash_bytecode, h256_to_u256}; + +pub use self::contracts::{LoadnextContractExecutionParams, TestContract}; + +mod contracts; pub const L1_TEST_GAS_PER_PUBDATA_BYTE: u32 = 800; const BASE_FEE: u64 = 2_000_000_000; @@ -115,31 +115,13 @@ impl Account { &mut self, code: &[u8], calldata: Option<&[Token]>, - mut factory_deps: Vec<Vec<u8>>, + factory_deps: Vec<Vec<u8>>, tx_type: TxType, ) -> DeployContractsTx { - let deployer = deployer_contract(); - - let contract_function = deployer.function("create").unwrap(); - - let calldata = calldata.map(ethabi::encode); - let code_hash = hash_bytecode(code); - let params = [ - Token::FixedBytes(vec![0u8; 32]), -
diff --git a/core/tests/test_account/src/lib.rs b/core/lib/test_contracts/src/lib.rs similarity index 81% rename from core/tests/test_account/src/lib.rs rename to core/lib/test_contracts/src/lib.rs index cfb539c0e0f7..223bd92a651a 100644 --- a/core/tests/test_account/src/lib.rs +++ b/core/lib/test_contracts/src/lib.rs @@ -1,17 +1,17 @@ use ethabi::Token; -use zksync_contracts::{ - deployer_contract, load_contract, test_contracts::LoadnextContractExecutionParams, -}; use zksync_eth_signer::{PrivateKeySigner, TransactionParameters}; use zksync_system_constants::{ - CONTRACT_DEPLOYER_ADDRESS, DEFAULT_L2_TX_GAS_PER_PUBDATA_BYTE, - REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_BYTE, + DEFAULT_L2_TX_GAS_PER_PUBDATA_BYTE, REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_BYTE, }; use zksync_types::{ - abi, fee::Fee, l2::L2Tx, utils::deployed_address_create, Address, Execute, K256PrivateKey, - L2ChainId, Nonce, Transaction, H256, PRIORITY_OPERATION_L2_TX_TYPE, U256, + abi, address_to_u256, bytecode::BytecodeHash, fee::Fee, l2::L2Tx, + utils::deployed_address_create, Address, Execute, K256PrivateKey, L2ChainId, Nonce, + Transaction, H256, PRIORITY_OPERATION_L2_TX_TYPE, U256, }; -use zksync_utils::{address_to_u256, bytecode::hash_bytecode, h256_to_u256}; + +pub use self::contracts::{LoadnextContractExecutionParams, TestContract}; + +mod contracts; pub const L1_TEST_GAS_PER_PUBDATA_BYTE: u32 = 800; const BASE_FEE: u64 = 2_000_000_000; @@ -115,31 +115,13 @@ impl Account { &mut self, code: &[u8], calldata: Option<&[Token]>, - mut factory_deps: Vec<Vec<u8>>, + factory_deps: Vec<Vec<u8>>, tx_type: TxType, ) -> DeployContractsTx { - let deployer = deployer_contract(); - - let contract_function = deployer.function("create").unwrap(); - - let calldata = calldata.map(ethabi::encode); - let code_hash = hash_bytecode(code); - let params = [ - Token::FixedBytes(vec![0u8; 32]), - Token::FixedBytes(code_hash.0.to_vec()), - Token::Bytes(calldata.unwrap_or_default().to_vec()), - ]; - factory_deps.push(code.to_vec()); - let calldata = contract_function - .encode_input(&params) - .expect("failed to encode parameters"); - - let execute = Execute { - contract_address: Some(CONTRACT_DEPLOYER_ADDRESS), - calldata, - factory_deps, - value: U256::zero(), - }; + let calldata = calldata.unwrap_or_default(); + let code_hash = BytecodeHash::for_bytecode(code).value(); + let mut execute = Execute::for_deploy(H256::zero(), code.to_vec(), calldata); + execute.factory_deps.extend(factory_deps); let tx = match tx_type { TxType::L2 => self.get_l2_tx_for_execute(execute, None), @@ -184,7 +166,7 @@ impl Account { signature: vec![], factory_deps: factory_deps .iter() - .map(|b| h256_to_u256(hash_bytecode(b))) + .map(|b| BytecodeHash::for_bytecode(b).value_u256()) .collect(), paymaster_input: vec![], reserved_dynamic: vec![], @@ -204,16 +186,15 @@ impl Account { payable: bool, tx_type: TxType, ) -> Transaction { - let test_contract = load_contract( - "etc/contracts-test-data/artifacts-zk/contracts/counter/counter.sol/Counter.json", - ); + let test_contract = TestContract::counter(); let function = if payable { test_contract + .abi .function("incrementWithRevertPayable") .unwrap() } else { - test_contract.function("incrementWithRevert").unwrap() + test_contract.abi.function("incrementWithRevert").unwrap() }; let calldata = function
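// Example (sketch): the new deployment flow that replaces the manual `create` encoding
// removed above; the helper name is illustrative.
fn deploy_execute_example(code: Vec<u8>, extra_deps: Vec<Vec<u8>>) -> Execute {
    // `for_deploy` encodes the deployer call and seeds `factory_deps` with `code`;
    // additional dependencies are appended explicitly, mirroring `deploy_contract`.
    let mut execute = Execute::for_deploy(H256::zero(), code, &[]);
    execute.factory_deps.extend(extra_deps);
    execute
}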
diff --git a/core/lib/types/Cargo.toml b/core/lib/types/Cargo.toml index ffa9d219f084..325fe22209a7 100644 --- a/core/lib/types/Cargo.toml +++ b/core/lib/types/Cargo.toml @@ -13,7 +13,6 @@ categories.workspace = true [dependencies] # **IMPORTANT.** Please do not add dependency on `zksync_config` etc. This crate has a heavy dependency graph as is. zksync_system_constants.workspace = true -zksync_utils.workspace = true zksync_basic_types.workspace = true zksync_contracts.workspace = true zksync_mini_merkle_tree.workspace = true diff --git a/core/lib/types/src/abi.rs b/core/lib/types/src/abi.rs index 84f8aba64869..92d4cb4c8612 100644 --- a/core/lib/types/src/abi.rs +++ b/core/lib/types/src/abi.rs @@ -1,7 +1,7 @@ use anyhow::Context as _; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256}; use crate::{ + bytecode::BytecodeHash, ethabi, ethabi::{ParamType, Token}, transaction_request::TransactionRequest, @@ -356,7 +356,7 @@ impl Transaction { // verify data integrity let factory_deps_hashes: Vec<_> = factory_deps .iter() - .map(|b| h256_to_u256(hash_bytecode(b))) + .map(|b| BytecodeHash::for_bytecode(b).value_u256()) .collect(); anyhow::ensure!(tx.factory_deps == factory_deps_hashes); tx.hash() diff --git a/core/lib/types/src/api/mod.rs b/core/lib/types/src/api/mod.rs index 409dc3727570..b5d2b3276527 100644 --- a/core/lib/types/src/api/mod.rs +++ b/core/lib/types/src/api/mod.rs @@ -4,7 +4,6 @@ use serde_json::Value; use serde_with::{hex::Hex, serde_as}; use strum::Display; use zksync_basic_types::{ - tee_types::TeeType, web3::{AccessList, Bytes, Index}, Bloom, L1BatchNumber, H160, H256, H64, U256, U64, }; @@ -16,6 +15,7 @@ pub use crate::transaction_request::{ use crate::{ debug_flat_call::{DebugCallFlat, ResultDebugCallFlat}, protocol_version::L1VerifierConfig, + tee_types::TeeType, Address, L2BlockNumber, ProtocolVersionId, }; @@ -515,6 +515,9 @@ pub struct Transaction { pub gas: U256, /// Input data pub input: Bytes, + /// The parity (0 for even, 1 for odd) of the y-value of the secp256k1 signature + #[serde(rename = "yParity", default, skip_serializing_if = "Option::is_none")] + pub y_parity: Option<U64>, /// ECDSA recovery id #[serde(default, skip_serializing_if = "Option::is_none")] pub v: Option<U64>, diff --git a/core/lib/types/src/api/state_override.rs b/core/lib/types/src/api/state_override.rs index f2986610840a..69025d1a1f78 100644 --- a/core/lib/types/src/api/state_override.rs +++ b/core/lib/types/src/api/state_override.rs @@ -1,10 +1,12 @@ use std::collections::HashMap; use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; -use zksync_basic_types::{web3::Bytes, H256, U256}; -use zksync_utils::bytecode::{hash_bytecode, validate_bytecode, InvalidBytecodeError}; +use zksync_basic_types::{bytecode::BytecodeHash, web3::Bytes, H256, U256}; -use crate::Address; +use crate::{ + bytecode::{validate_bytecode, InvalidBytecodeError}, + Address, +}; /// Collection of overridden accounts. #[derive(Debug, Clone, Default, Serialize, Deserialize)] @@ -44,7 +46,7 @@ impl Bytecode { /// Returns the canonical hash of this bytecode. pub fn hash(&self) -> H256 { - hash_bytecode(&self.0 .0) + BytecodeHash::for_bytecode(&self.0 .0).value() } /// Converts this bytecode into bytes.
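// Example (sketch): `BytecodeHash` bundles the hashing helpers that used to live in
// `zksync_utils`; `value()` yields the H256 form, `value_u256()` the same hash as a
// U256 word. The helper name is illustrative.
fn bytecode_hash_example(bytecode: &[u8]) -> (H256, U256) {
    let hash = BytecodeHash::for_bytecode(bytecode);
    (hash.value(), hash.value_u256())
}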
diff --git a/core/lib/types/src/block.rs b/core/lib/types/src/block.rs index 310e3a73b8e8..804da61b7295 100644 --- a/core/lib/types/src/block.rs +++ b/core/lib/types/src/block.rs @@ -4,13 +4,12 @@ use serde::{Deserialize, Serialize}; use zksync_basic_types::{commitment::PubdataParams, Address, Bloom, BloomInput, H256, U256}; use zksync_contracts::BaseSystemContractsHashes; use zksync_system_constants::SYSTEM_BLOCK_INFO_BLOCK_NUMBER_MULTIPLIER; -use zksync_utils::concat_and_hash; use crate::{ fee_model::BatchFeeInput, l2_to_l1_log::{SystemL2ToL1Log, UserL2ToL1Log}, priority_op_onchain_data::PriorityOpOnchainData, - web3::keccak256, + web3::{keccak256, keccak256_concat}, AccountTreeId, L1BatchNumber, L2BlockNumber, ProtocolVersionId, Transaction, }; @@ -253,7 +252,7 @@ impl L2BlockHasher { /// Updates this hasher with a transaction hash. This should be called for all transactions in the block /// in the order of their execution. pub fn push_tx_hash(&mut self, tx_hash: H256) { - self.txs_rolling_hash = concat_and_hash(self.txs_rolling_hash, tx_hash); + self.txs_rolling_hash = keccak256_concat(self.txs_rolling_hash, tx_hash); } /// Returns the hash of the L2 block. diff --git a/core/lib/types/src/commitment/mod.rs b/core/lib/types/src/commitment/mod.rs index 40532a1e5899..1e957a3756e4 100644 --- a/core/lib/types/src/commitment/mod.rs +++ b/core/lib/types/src/commitment/mod.rs @@ -17,7 +17,6 @@ use zksync_system_constants::{ KNOWN_CODES_STORAGE_ADDRESS, L2_TO_L1_LOGS_TREE_ROOT_KEY, STATE_DIFF_HASH_KEY_PRE_GATEWAY, ZKPORTER_IS_AVAILABLE, }; -use zksync_utils::u256_to_h256; use crate::{ blob::num_blobs_required, @@ -26,6 +25,7 @@ use crate::{ l2_to_l1_logs_tree_size, parse_system_logs_for_blob_hashes_pre_gateway, L2ToL1Log, SystemL2ToL1Log, UserL2ToL1Log, }, + u256_to_h256, web3::keccak256, writes::{ compress_state_diffs, InitialStorageWrite, RepeatedStorageWrite, StateDiffRecord, @@ -103,6 +103,8 @@ pub struct L1BatchMetadata { pub aggregation_root: Option<H256>, /// Data Availability inclusion proof, that has to be verified on the settlement layer. pub da_inclusion_data: Option<Vec<u8>>, + /// Data Availability blob id, persisted in L1 so it can be used for chain reconstruction. + pub da_blob_id: Option<Vec<u8>>, } impl L1BatchMetadata {
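// Example (sketch): `keccak256_concat` is the drop-in replacement for the removed
// `concat_and_hash` helper — keccak256 over the 64-byte concatenation of two H256
// words — shown here folding a rolling transaction hash the way `push_tx_hash` does
// (the zero starting value is illustrative).
fn rolling_tx_hash_example(tx_hashes: &[H256]) -> H256 {
    tx_hashes
        .iter()
        .fold(H256::zero(), |rolling, &tx_hash| keccak256_concat(rolling, tx_hash))
}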
diff --git a/core/lib/types/src/contract_verification_api.rs b/core/lib/types/src/contract_verification_api.rs index 21e511549beb..cca5ae5a83a0 100644 --- a/core/lib/types/src/contract_verification_api.rs +++ b/core/lib/types/src/contract_verification_api.rs @@ -137,6 +137,8 @@ pub struct VerificationIncomingRequest { #[serde(flatten)] pub compiler_versions: CompilerVersions, pub optimization_used: bool, + /// Optimization mode used for the contract. Semantics depend on the compiler used; e.g., for `vyper`, + /// allowed values are `gas` (default), `codesize` or `none`. pub optimizer_mode: Option<String>, #[serde(default)] pub constructor_arguments: Bytes, diff --git a/core/lib/types/src/debug_flat_call.rs b/core/lib/types/src/debug_flat_call.rs index 5809026e521c..3488b0e5b42c 100644 --- a/core/lib/types/src/debug_flat_call.rs +++ b/core/lib/types/src/debug_flat_call.rs @@ -16,6 +16,7 @@ pub struct DebugCallFlat { pub action: Action, pub result: Option<CallResult>, pub subtraces: usize, + pub error: Option<String>, pub trace_address: Vec<usize>, pub transaction_position: usize, pub transaction_hash: H256, diff --git a/core/lib/types/src/fee.rs b/core/lib/types/src/fee.rs index 9dc2cda9e62b..f302c51cd4a9 100644 --- a/core/lib/types/src/fee.rs +++ b/core/lib/types/src/fee.rs @@ -1,5 +1,4 @@ use serde::{Deserialize, Serialize}; -use zksync_utils::ceil_div; use crate::U256; @@ -43,10 +42,10 @@ pub fn encoding_len( // All of the fields are encoded as `bytes`, so their encoding takes ceil(len, 32) slots. // For factory deps we only provide hashes, which are encoded as an array of bytes32. - let dynamic_len = ceil_div(data_len, 32) - + ceil_div(signature_len, 32) - + ceil_div(paymaster_input_len, 32) - + ceil_div(reserved_dynamic_len, 32) + let dynamic_len = data_len.div_ceil(32) + + signature_len.div_ceil(32) + + paymaster_input_len.div_ceil(32) + + reserved_dynamic_len.div_ceil(32) + factory_deps_len; BASE_LEN + dynamic_len as usize
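// Example (sketch): the std `div_ceil` used above is equivalent to the removed
// `ceil_div` helper — each `bytes` field occupies ceil(len / 32) 32-byte ABI slots.
fn encoded_slots_example(data_len: usize) -> usize {
    // 0 bytes -> 0 slots, 1..=32 bytes -> 1 slot, 33 bytes -> 2 slots, and so on.
    data_len.div_ceil(32)
}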
diff --git a/core/lib/types/src/fee_model.rs b/core/lib/types/src/fee_model.rs index ae346656ea6f..79515e6f63a9 100644 --- a/core/lib/types/src/fee_model.rs +++ b/core/lib/types/src/fee_model.rs @@ -1,13 +1,10 @@ -// FIXME: separate crate together with node_fee_model interfaces? - use std::num::NonZeroU64; use bigdecimal::{BigDecimal, ToPrimitive}; use serde::{Deserialize, Serialize}; use zksync_system_constants::L1_GAS_PER_PUBDATA_BYTE; -use zksync_utils::ceil_div_u256; -use crate::{ProtocolVersionId, U256}; +use crate::{ceil_div_u256, ProtocolVersionId, U256}; /// Fee input to be provided into the VM. It contains two options: /// - `L1Pegged`: L1 gas price is provided to the VM, and the pubdata price is derived from it. Using this option is required for the diff --git a/core/lib/types/src/l1/mod.rs b/core/lib/types/src/l1/mod.rs index e8144c75db2e..33225dd6b0c9 100644 --- a/core/lib/types/src/l1/mod.rs +++ b/core/lib/types/src/l1/mod.rs @@ -1,22 +1,21 @@ //! Definition of ZKsync network priority operations: operations initiated from the L1. -use std::convert::TryFrom; - use serde::{Deserialize, Serialize}; -use zksync_basic_types::{web3::Log, Address, L1BlockNumber, PriorityOpId, H256, U256}; -use zksync_utils::{ - address_to_u256, bytecode::hash_bytecode, h256_to_u256, u256_to_account_address, -}; use super::Transaction; use crate::{ - abi, ethabi, + abi, address_to_u256, + bytecode::BytecodeHash, + ethabi, helpers::unix_timestamp_ms, l1::error::L1TxParseError, l2::TransactionType, priority_op_onchain_data::{PriorityOpOnchainData, PriorityOpOnchainMetadata}, tx::Execute, - ExecuteTransactionCommon, PRIORITY_OPERATION_L2_TX_TYPE, PROTOCOL_UPGRADE_TX_TYPE, + u256_to_address, + web3::Log, + Address, ExecuteTransactionCommon, L1BlockNumber, PriorityOpId, H256, + PRIORITY_OPERATION_L2_TX_TYPE, PROTOCOL_UPGRADE_TX_TYPE, U256, }; pub mod error; @@ -294,7 +293,7 @@ impl From<L1Tx> for abi::NewPriorityRequest { signature: vec![], factory_deps: factory_deps .iter() - .map(|b| h256_to_u256(hash_bytecode(b))) + .map(|b| BytecodeHash::for_bytecode(b).value_u256()) .collect(), paymaster_input: vec![], reserved_dynamic: vec![], @@ -319,7 +318,7 @@ impl TryFrom<abi::NewPriorityRequest> for L1Tx { let factory_deps_hashes: Vec<_> = req .factory_deps .iter() - .map(|b| h256_to_u256(hash_bytecode(b))) + .map(|b| BytecodeHash::for_bytecode(b).value_u256()) .collect(); anyhow::ensure!(req.transaction.factory_deps == factory_deps_hashes); for item in &req.transaction.reserved[2..] { @@ -332,10 +331,10 @@ impl TryFrom<abi::NewPriorityRequest> for L1Tx { let common_data = L1TxCommonData { serial_id: PriorityOpId(req.transaction.nonce.try_into().unwrap()), canonical_tx_hash: H256::from_slice(&req.tx_hash), - sender: u256_to_account_address(&req.transaction.from), + sender: u256_to_address(&req.transaction.from), layer_2_tip_fee: U256::zero(), to_mint: req.transaction.reserved[0], - refund_recipient: u256_to_account_address(&req.transaction.reserved[1]), + refund_recipient: u256_to_address(&req.transaction.reserved[1]), full_fee: U256::zero(), gas_limit: req.transaction.gas_limit, max_fee_per_gas: req.transaction.max_fee_per_gas, @@ -347,7 +346,7 @@ impl TryFrom<abi::NewPriorityRequest> for L1Tx { }; let execute = Execute { - contract_address: Some(u256_to_account_address(&req.transaction.to)), + contract_address: Some(u256_to_address(&req.transaction.to)), calldata: req.transaction.data, factory_deps: req.factory_deps, value: req.transaction.value,
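// Example (sketch): the renamed conversion helpers used above. Addresses are stored
// in L1 priority requests as right-aligned U256 words; truncating back to the low
// 20 bytes round-trips losslessly.
fn address_word_round_trip(addr: Address) -> Address {
    let word: U256 = address_to_u256(&addr);
    u256_to_address(&word)
}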
diff --git a/core/lib/types/src/l2/mod.rs b/core/lib/types/src/l2/mod.rs index 48e813e571d2..e7d582ab17a1 100644 --- a/core/lib/types/src/l2/mod.rs +++ b/core/lib/types/src/l2/mod.rs @@ -396,6 +396,13 @@ impl From<L2Tx> for api::Transaction { } else { (None, None, None) }; + // Legacy transactions are not supposed to have `yParity` and rely on `v` instead. + // Other transactions are required to have `yParity`, which replaces the deprecated `v` value + // (still included for backwards compatibility). + let y_parity = match tx.common_data.transaction_type { + TransactionType::LegacyTransaction => None, + _ => v, + }; Self { hash: tx.hash(), @@ -409,6 +416,7 @@ max_fee_per_gas: Some(tx.common_data.fee.max_fee_per_gas), gas: tx.common_data.fee.gas_limit, input: Bytes(tx.execute.calldata), + y_parity, v, r, s, diff --git a/core/lib/types/src/l2_to_l1_log.rs b/core/lib/types/src/l2_to_l1_log.rs index 957cfa9a1a6a..1b84a79024c7 100644 --- a/core/lib/types/src/l2_to_l1_log.rs +++ b/core/lib/types/src/l2_to_l1_log.rs @@ -117,11 +117,10 @@ pub fn parse_system_logs_for_blob_hashes_pre_gateway( #[cfg(test)] mod tests { - use zksync_basic_types::U256; use zksync_system_constants::L1_MESSENGER_ADDRESS; - use zksync_utils::u256_to_h256; - use super::L2ToL1Log; + use super::*; + use crate::{u256_to_h256, U256}; #[test] fn l2_to_l1_log_to_bytes() {
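// Example (sketch): the `yParity` rule added above, extracted as a helper for clarity;
// legacy transactions keep only `v`, while typed transactions expose the parity as
// `yParity` and mirror it in the deprecated `v` field. The helper name is illustrative.
fn y_parity_for(tx_type: TransactionType, v: Option<U64>) -> Option<U64> {
    match tx_type {
        TransactionType::LegacyTransaction => None,
        _ => v,
    }
}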
diff --git a/core/lib/types/src/lib.rs b/core/lib/types/src/lib.rs index 320264f28f0a..8ec98ec0571e 100644 --- a/core/lib/types/src/lib.rs +++ b/core/lib/types/src/lib.rs @@ -15,11 +15,9 @@ pub use protocol_upgrade::{ProtocolUpgrade, ProtocolVersion}; use serde::{Deserialize, Serialize}; pub use storage::*; pub use tx::Execute; +use zksync_basic_types::bytecode::BytecodeHash; pub use zksync_basic_types::{protocol_version::ProtocolVersionId, vm, *}; pub use zksync_crypto_primitives::*; -use zksync_utils::{ - address_to_u256, bytecode::hash_bytecode, h256_to_u256, u256_to_account_address, -}; use crate::{ l2::{L2Tx, TransactionType}, @@ -286,7 +284,7 @@ impl TryFrom<Transaction> for abi::Transaction { signature: vec![], factory_deps: factory_deps .iter() - .map(|b| h256_to_u256(hash_bytecode(b))) + .map(|b| BytecodeHash::for_bytecode(b).value_u256()) .collect(), paymaster_input: vec![], reserved_dynamic: vec![], @@ -317,7 +315,7 @@ impl TryFrom<Transaction> for abi::Transaction { signature: vec![], factory_deps: factory_deps .iter() - .map(|b| h256_to_u256(hash_bytecode(b))) + .map(|b| BytecodeHash::for_bytecode(b).value_u256()) .collect(), paymaster_input: vec![], reserved_dynamic: vec![], @@ -346,7 +344,7 @@ impl Transaction { } => { let factory_deps_hashes: Vec<_> = factory_deps .iter() - .map(|b| h256_to_u256(hash_bytecode(b))) + .map(|b| BytecodeHash::for_bytecode(b).value_u256()) .collect(); anyhow::ensure!(tx.factory_deps == factory_deps_hashes); for item in &tx.reserved[2..] { @@ -368,10 +366,10 @@ impl Transaction { .map_err(|err| anyhow::format_err!("{err}"))?, ), canonical_tx_hash: hash, - sender: u256_to_account_address(&tx.from), + sender: u256_to_address(&tx.from), layer_2_tip_fee: U256::zero(), to_mint: tx.reserved[0], - refund_recipient: u256_to_account_address(&tx.reserved[1]), + refund_recipient: u256_to_address(&tx.reserved[1]), full_fee: U256::zero(), gas_limit: tx.gas_limit, max_fee_per_gas: tx.max_fee_per_gas, @@ -385,9 +383,9 @@ impl Transaction { ExecuteTransactionCommon::ProtocolUpgrade(ProtocolUpgradeTxCommonData { upgrade_id: tx.nonce.try_into().unwrap(), canonical_tx_hash: hash, - sender: u256_to_account_address(&tx.from), + sender: u256_to_address(&tx.from), to_mint: tx.reserved[0], - refund_recipient: u256_to_account_address(&tx.reserved[1]), + refund_recipient: u256_to_address(&tx.reserved[1]), gas_limit: tx.gas_limit, max_fee_per_gas: tx.max_fee_per_gas, gas_per_pubdata_limit: tx.gas_per_pubdata_byte_limit, @@ -397,7 +395,7 @@ impl Transaction { unknown_type => anyhow::bail!("unknown tx type {unknown_type}"), }, execute: Execute { - contract_address: Some(u256_to_account_address(&tx.to)), + contract_address: Some(u256_to_address(&tx.to)), calldata: tx.data, factory_deps, value: tx.value, diff --git a/core/lib/types/src/protocol_upgrade.rs b/core/lib/types/src/protocol_upgrade.rs index 48f26dfd5c7f..7d8f678fa851 100644 --- a/core/lib/types/src/protocol_upgrade.rs +++ b/core/lib/types/src/protocol_upgrade.rs @@ -12,11 +12,10 @@ use zksync_contracts::{ BaseSystemContractsHashes, ADMIN_EXECUTE_UPGRADE_FUNCTION, ADMIN_UPGRADE_CHAIN_FROM_VERSION_FUNCTION, DIAMOND_CUT, }; -use zksync_utils::h256_to_u256; use crate::{ - abi, ethabi::ParamType, web3::Log, Address, Execute, ExecuteTransactionCommon, Transaction, - TransactionType, H256, U256, + abi, ethabi::ParamType, h256_to_u256, web3::Log, Address, Execute, ExecuteTransactionCommon, + Transaction, TransactionType, H256, U256, }; /// Represents a call to be made during governance operation. diff --git a/core/lib/types/src/snapshots.rs b/core/lib/types/src/snapshots.rs index 156d1e4723dd..b9ee62ab24ec 100644 --- a/core/lib/types/src/snapshots.rs +++ b/core/lib/types/src/snapshots.rs @@ -5,9 +5,8 @@ use num_enum::{IntoPrimitive, TryFromPrimitive}; use serde::{Deserialize, Serialize}; use zksync_basic_types::{AccountTreeId, L1BatchNumber, L2BlockNumber, H256}; use zksync_protobuf::{required, ProtoFmt}; -use zksync_utils::u256_to_h256; -use crate::{utils, web3::Bytes, ProtocolVersionId, StorageKey, StorageValue, U256}; +use crate::{u256_to_h256, utils, web3::Bytes, ProtocolVersionId, StorageKey, StorageValue, U256}; /// Information about all snapshots persisted by the node.
#[derive(Debug, Clone, Serialize, Deserialize)] @@ -331,9 +330,8 @@ pub fn uniform_hashed_keys_chunk(chunk_id: u64, chunk_count: u64) -> ops::RangeInclusive<H256> { #[cfg(test)] mod tests { - use zksync_utils::h256_to_u256; - use super::*; + use crate::h256_to_u256; #[test] fn chunking_is_correct() { diff --git a/core/lib/types/src/storage/log.rs b/core/lib/types/src/storage/log.rs index a05e25abccb5..075a05781b67 100644 --- a/core/lib/types/src/storage/log.rs +++ b/core/lib/types/src/storage/log.rs @@ -2,10 +2,10 @@ use std::mem; use serde::{Deserialize, Serialize}; use zksync_basic_types::AccountTreeId; -use zksync_utils::{h256_to_u256, u256_to_h256}; use crate::{ api::ApiStorageLog, + h256_to_u256, u256_to_h256, zk_evm_types::{self, LogQuery, Timestamp}, StorageKey, StorageValue, U256, }; diff --git a/core/lib/types/src/storage/mod.rs b/core/lib/types/src/storage/mod.rs index 9ef037dc29b2..84a29ed8c039 100644 --- a/core/lib/types/src/storage/mod.rs +++ b/core/lib/types/src/storage/mod.rs @@ -5,9 +5,8 @@ pub use log::*; use serde::{Deserialize, Serialize}; use zksync_basic_types::{web3::keccak256, L2ChainId}; pub use zksync_system_constants::*; -use zksync_utils::{address_to_h256, u256_to_h256}; -use crate::{AccountTreeId, Address, H160, H256, U256}; +use crate::{address_to_h256, u256_to_h256, AccountTreeId, Address, H160, H256, U256}; pub mod log; pub mod witness_block_state; diff --git a/core/lib/types/src/transaction_request.rs b/core/lib/types/src/transaction_request.rs index a8713f301ba6..db66c6955bda 100644 --- a/core/lib/types/src/transaction_request.rs +++ b/core/lib/types/src/transaction_request.rs @@ -3,21 +3,18 @@ use std::convert::{TryFrom, TryInto}; use rlp::{DecoderError, Rlp, RlpStream}; use serde::{Deserialize, Serialize}; use thiserror::Error; -use zksync_basic_types::H256; use zksync_system_constants::{DEFAULT_L2_TX_GAS_PER_PUBDATA_BYTE, MAX_ENCODED_TX_SIZE}; -use zksync_utils::{ - bytecode::{hash_bytecode, validate_bytecode, InvalidBytecodeError}, - concat_and_hash, u256_to_h256, -}; use super::{EIP_1559_TX_TYPE, EIP_2930_TX_TYPE, EIP_712_TX_TYPE}; use crate::{ + bytecode::{validate_bytecode, BytecodeHash, InvalidBytecodeError}, fee::Fee, l1::L1Tx, l2::{L2Tx, TransactionType}, - web3::{keccak256, AccessList, Bytes}, + u256_to_h256, + web3::{keccak256, keccak256_concat, AccessList, Bytes}, Address, EIP712TypedStructure, Eip712Domain, L1TxCommonData, L2ChainId, Nonce, - PackedEthSignature, StructBuilder, LEGACY_TX_TYPE, U256, U64, + PackedEthSignature, StructBuilder, H256, LEGACY_TX_TYPE, U256, U64, }; /// Call contract request (eth_call / eth_estimateGas) @@ -176,7 +173,7 @@ impl CallRequestBuilder { } } -#[derive(Debug, Error, PartialEq)] +#[derive(Debug, Error)] pub enum SerializationTransactionError { #[error("transaction type is not supported")] UnknownTransactionFormat, @@ -355,7 +352,7 @@ impl EIP712TypedStructure for TransactionRequest { let factory_dep_hashes: Vec<_> = self .get_factory_deps() .into_iter() - .map(|dep| hash_bytecode(&dep)) + .map(|dep| BytecodeHash::for_bytecode(&dep).value()) .collect(); builder.add_member("factoryDeps", &factory_dep_hashes.as_slice()); @@ -732,7 +729,7 @@ impl TransactionRequest { signed_message: H256, ) -> Result<Option<H256>, SerializationTransactionError> { if self.is_eip712_tx() { - return Ok(Some(concat_and_hash( + return Ok(Some(keccak256_concat( signed_message, H256(keccak256(&self.get_signature()?)), ))); } @@ -1160,9 +1157,9 @@ mod tests { let decoded_tx = TransactionRequest::from_bytes(encoded_tx.as_slice(), L2ChainId::from(272)); - assert_eq!( -
decoded_tx, - Err(SerializationTransactionError::WrongChainId(Some(270))) + assert_matches!( + decoded_tx.unwrap_err(), + SerializationTransactionError::WrongChainId(Some(270)) ); } @@ -1238,9 +1235,9 @@ mod tests { data.insert(0, EIP_1559_TX_TYPE); let decoded_tx = TransactionRequest::from_bytes(data.as_slice(), L2ChainId::from(270)); - assert_eq!( - decoded_tx, - Err(SerializationTransactionError::WrongChainId(Some(272))) + assert_matches!( + decoded_tx.unwrap_err(), + SerializationTransactionError::WrongChainId(Some(272)) ); } @@ -1278,9 +1275,9 @@ mod tests { data.insert(0, EIP_1559_TX_TYPE); let res = TransactionRequest::from_bytes(data.as_slice(), L2ChainId::from(270)); - assert_eq!( - res, - Err(SerializationTransactionError::AccessListsNotSupported) + assert_matches!( + res.unwrap_err(), + SerializationTransactionError::AccessListsNotSupported ); } @@ -1315,9 +1312,9 @@ mod tests { data.insert(0, EIP_2930_TX_TYPE); let res = TransactionRequest::from_bytes(data.as_slice(), L2ChainId::from(270)); - assert_eq!( - res, - Err(SerializationTransactionError::AccessListsNotSupported) + assert_matches!( + res.unwrap_err(), + SerializationTransactionError::AccessListsNotSupported ); } @@ -1343,7 +1340,7 @@ mod tests { }; let execute_tx2: Result<L2Tx, SerializationTransactionError> = L2Tx::from_request(tx2, usize::MAX, true); - assert_eq!( + assert_matches!( execute_tx2.unwrap_err(), SerializationTransactionError::TooBigNonce ); @@ -1360,7 +1357,7 @@ mod tests { }; let execute_tx1: Result<L2Tx, SerializationTransactionError> = L2Tx::from_request(tx1, usize::MAX, true); - assert_eq!( + assert_matches!( execute_tx1.unwrap_err(), SerializationTransactionError::MaxFeePerGasNotU64 ); @@ -1374,7 +1371,7 @@ mod tests { }; let execute_tx2: Result<L2Tx, SerializationTransactionError> = L2Tx::from_request(tx2, usize::MAX, true); - assert_eq!( + assert_matches!( execute_tx2.unwrap_err(), SerializationTransactionError::MaxPriorityFeePerGasNotU64 ); @@ -1392,7 +1389,7 @@ mod tests { let execute_tx3: Result<L2Tx, SerializationTransactionError> = L2Tx::from_request(tx3, usize::MAX, true); - assert_eq!( + assert_matches!( execute_tx3.unwrap_err(), SerializationTransactionError::MaxFeePerPubdataByteNotU64 ); diff --git a/core/lib/types/src/tx/execute.rs b/core/lib/types/src/tx/execute.rs index 0edece9e46b4..d36f4b6521ee 100644 --- a/core/lib/types/src/tx/execute.rs +++ b/core/lib/types/src/tx/execute.rs @@ -1,9 +1,12 @@ use once_cell::sync::Lazy; use serde::{Deserialize, Serialize}; +use zksync_basic_types::bytecode::BytecodeHash; use zksync_system_constants::CONTRACT_DEPLOYER_ADDRESS; -use zksync_utils::{bytecode::hash_bytecode, ZeroPrefixHexSerde}; -use crate::{ethabi, Address, EIP712TypedStructure, StructBuilder, H256, U256}; +use crate::{ + ethabi, serde_wrappers::ZeroPrefixHexSerde, Address, EIP712TypedStructure, StructBuilder, H256, + U256, +}; /// This struct is the `serde` schema for the `Execute` struct. /// It allows us to modify `Execute` struct without worrying @@ -124,7 +127,7 @@ impl Execute { contract_bytecode: Vec<u8>, constructor_input: &[ethabi::Token], ) -> Self { - let bytecode_hash = hash_bytecode(&contract_bytecode); + let bytecode_hash = BytecodeHash::for_bytecode(&contract_bytecode).value(); Self { contract_address: Some(CONTRACT_DEPLOYER_ADDRESS), calldata: Self::encode_deploy_params_create( @@ -136,4 +139,14 @@ impl Execute { factory_deps: vec![contract_bytecode], } } + + /// Creates an instance for transferring the base token to the specified recipient.
+ pub fn transfer(to: Address, value: U256) -> Self { + Self { + contract_address: Some(to), + calldata: vec![], + value, + factory_deps: vec![], + } + } } diff --git a/core/lib/types/src/utils.rs b/core/lib/types/src/utils.rs index bf086d6cdcd4..56a8ccf9fe9f 100644 --- a/core/lib/types/src/utils.rs +++ b/core/lib/types/src/utils.rs @@ -2,11 +2,10 @@ use std::fmt; use chrono::{DateTime, TimeZone, Utc}; use zksync_basic_types::{Address, H256}; -use zksync_utils::{address_to_h256, u256_to_h256}; use crate::{ - system_contracts::DEPLOYMENT_NONCE_INCREMENT, web3::keccak256, AccountTreeId, StorageKey, - L2_BASE_TOKEN_ADDRESS, U256, + address_to_h256, system_contracts::DEPLOYMENT_NONCE_INCREMENT, u256_to_h256, web3::keccak256, + AccountTreeId, StorageKey, L2_BASE_TOKEN_ADDRESS, U256, }; /// Displays a Unix timestamp (seconds since epoch) in human-readable form. Useful for logging. diff --git a/core/lib/utils/Cargo.toml b/core/lib/utils/Cargo.toml index 9b65ccdd29cb..216f3b12d426 100644 --- a/core/lib/utils/Cargo.toml +++ b/core/lib/utils/Cargo.toml @@ -11,26 +11,16 @@ keywords.workspace = true categories.workspace = true [dependencies] -zksync_basic_types.workspace = true -zk_evm.workspace = true zksync_vlog.workspace = true -bigdecimal.workspace = true -const-decoder.workspace = true -num = { workspace = true, features = ["serde"] } -serde = { workspace = true, features = ["derive"] } tokio = { workspace = true, features = ["time"] } tracing.workspace = true anyhow.workspace = true -thiserror.workspace = true futures.workspace = true -hex.workspace = true reqwest = { workspace = true, features = ["blocking"] } serde_json.workspace = true once_cell.workspace = true [dev-dependencies] -rand.workspace = true tokio = { workspace = true, features = ["macros", "rt"] } -bincode.workspace = true assert_matches.workspace = true diff --git a/core/lib/utils/src/convert.rs b/core/lib/utils/src/convert.rs deleted file mode 100644 index e086e385c8ef..000000000000 --- a/core/lib/utils/src/convert.rs +++ /dev/null @@ -1,185 +0,0 @@ -use std::convert::TryInto; - -use bigdecimal::BigDecimal; -use num::BigUint; -use zksync_basic_types::{Address, H256, U256}; - -pub fn u256_to_big_decimal(value: U256) -> BigDecimal { - let mut u32_digits = vec![0_u32; 8]; - // `u64_digit`s from `U256` are little-endian - for (i, &u64_digit) in value.0.iter().enumerate() { - u32_digits[2 * i] = u64_digit as u32; - u32_digits[2 * i + 1] = (u64_digit >> 32) as u32; - } - let value = BigUint::new(u32_digits); - BigDecimal::new(value.into(), 0) -} - -/// Converts `BigUint` value into the corresponding `U256` value. -fn biguint_to_u256(value: BigUint) -> U256 { - let bytes = value.to_bytes_le(); - U256::from_little_endian(&bytes) -} - -/// Converts `BigDecimal` value into the corresponding `U256` value. 
-pub fn bigdecimal_to_u256(value: BigDecimal) -> U256 { - let bigint = value.with_scale(0).into_bigint_and_exponent().0; - biguint_to_u256(bigint.to_biguint().unwrap()) -} - -fn ensure_chunkable(bytes: &[u8]) { - assert!( - bytes.len() % 32 == 0, - "Bytes must be divisible by 32 to split into chunks" - ); -} - -pub fn h256_to_u256(num: H256) -> U256 { - U256::from_big_endian(num.as_bytes()) -} - -pub fn address_to_h256(address: &Address) -> H256 { - let mut buffer = [0u8; 32]; - buffer[12..].copy_from_slice(address.as_bytes()); - H256(buffer) -} - -pub fn address_to_u256(address: &Address) -> U256 { - h256_to_u256(address_to_h256(address)) -} - -pub fn bytes_to_chunks(bytes: &[u8]) -> Vec<[u8; 32]> { - ensure_chunkable(bytes); - bytes - .chunks(32) - .map(|el| { - let mut chunk = [0u8; 32]; - chunk.copy_from_slice(el); - chunk - }) - .collect() -} - -pub fn be_chunks_to_h256_words(chunks: Vec<[u8; 32]>) -> Vec<H256> { - chunks.into_iter().map(|el| H256::from_slice(&el)).collect() -} - -pub fn bytes_to_be_words(vec: Vec<u8>) -> Vec<U256> { - ensure_chunkable(&vec); - vec.chunks(32).map(U256::from_big_endian).collect() -} - -pub fn be_words_to_bytes(words: &[U256]) -> Vec<u8> { - words - .iter() - .flat_map(|w| { - let mut bytes = [0u8; 32]; - w.to_big_endian(&mut bytes); - bytes - }) - .collect() -} - -pub fn u256_to_h256(num: U256) -> H256 { - let mut bytes = [0u8; 32]; - num.to_big_endian(&mut bytes); - H256::from_slice(&bytes) -} - -/// Converts `U256` value into the Address -pub fn u256_to_account_address(value: &U256) -> Address { - let mut bytes = [0u8; 32]; - value.to_big_endian(&mut bytes); - - Address::from_slice(&bytes[12..]) -} - -/// Converts `H256` value into the Address -pub fn h256_to_account_address(value: &H256) -> Address { - Address::from_slice(&value.as_bytes()[12..]) -} - -pub fn be_bytes_to_safe_address(bytes: &[u8]) -> Option<Address>
{ - if bytes.len() < 20 { - return None; - } - - let (zero_bytes, address_bytes) = bytes.split_at(bytes.len() - 20); - - if zero_bytes.iter().any(|b| *b != 0) { - None - } else { - Some(Address::from_slice(address_bytes)) - } -} - -/// Converts `h256` value as BE into the u32 -pub fn h256_to_u32(value: H256) -> u32 { - let be_u32_bytes: [u8; 4] = value[28..].try_into().unwrap(); - u32::from_be_bytes(be_u32_bytes) -} - -/// Converts u32 into the H256 as BE bytes -pub fn u32_to_h256(value: u32) -> H256 { - let mut result = [0u8; 32]; - result[28..].copy_from_slice(&value.to_be_bytes()); - H256(result) -} - -/// Converts `U256` value into bytes array -pub fn u256_to_bytes_be(value: &U256) -> Vec<u8> { - let mut bytes = vec![0u8; 32]; - value.to_big_endian(bytes.as_mut_slice()); - bytes -} - -#[cfg(test)] -mod test { - use num::BigInt; - use rand::{rngs::StdRng, Rng, SeedableRng}; - - use super::*; - - #[test] - fn test_u256_to_bigdecimal() { - const RNG_SEED: u64 = 123; - - let mut rng = StdRng::seed_from_u64(RNG_SEED); - // Small values. - for _ in 0..10_000 { - let value: u64 = rng.gen(); - let expected = BigDecimal::from(value); - assert_eq!(u256_to_big_decimal(value.into()), expected); - } - - // Arbitrary values - for _ in 0..10_000 { - let u64_digits: [u64; 4] = rng.gen(); - let value = u64_digits - .iter() - .enumerate() - .map(|(i, &digit)| U256::from(digit) << (i * 64)) - .fold(U256::zero(), |acc, x| acc + x); - let expected_value = u64_digits - .iter() - .enumerate() - .map(|(i, &digit)| BigInt::from(digit) << (i * 64)) - .fold(BigInt::from(0), |acc, x| acc + x); - assert_eq!( - u256_to_big_decimal(value), - BigDecimal::new(expected_value, 0) - ); - } - } - - #[test] - fn test_bigdecimal_to_u256() { - let value = BigDecimal::from(100u32); - let expected = U256::from(100u32); - assert_eq!(bigdecimal_to_u256(value), expected); - - let value = BigDecimal::new(BigInt::from(100), -2); - let expected = U256::from(10000u32); - assert_eq!(bigdecimal_to_u256(value), expected); - } -} diff --git a/core/lib/utils/src/format.rs b/core/lib/utils/src/format.rs deleted file mode 100644 index 9d15d4c358e7..000000000000 --- a/core/lib/utils/src/format.rs +++ /dev/null @@ -1,78 +0,0 @@ -// Built-in deps -use std::collections::VecDeque; -use std::string::ToString; -// External deps -// Workspace deps - -/// Formats amount in wei to tokens with precision. -/// Behaves just like ethers.utils.formatUnits -pub fn format_units(wei: impl ToString, units: u8) -> String { - let mut chars: VecDeque<char> = wei.to_string().chars().collect(); - - while chars.len() < units as usize { - chars.push_front('0'); - } - chars.insert(chars.len() - units as usize, '.'); - if *chars.front().unwrap() == '.' { - chars.push_front('0'); - } - while *chars.back().unwrap() == '0' { - chars.pop_back(); - } - if *chars.back().unwrap() == '.' { - chars.push_back('0'); - } - chars.iter().collect() -} - -/// Formats amount in wei to tokens.
-/// Behaves just like js ethers.utils.formatEther -pub fn format_ether(wei: impl ToString) -> String { - format_units(wei, 18) -} - -#[cfg(test)] -mod test { - use super::*; - - #[test] - fn test_format_units() { - // Test vector of (decimals, wei input, expected output) - let vals = vec![ - (0, "1000000000000000100000", "1000000000000000100000.0"), - (1, "0", "0.0"), - (1, "11000000000000000000", "1100000000000000000.0"), - (2, "0", "0.0"), - (2, "1000000000000000100000", "10000000000000001000.0"), - (4, "10001000000", "1000100.0"), - (4, "10100000000000000000000", "1010000000000000000.0"), - (4, "110", "0.011"), - (6, "1000000000000000100000", "1000000000000000.1"), - (8, "0", "0.0"), - (8, "10100000000000000000000", "101000000000000.0"), - (8, "110", "0.0000011"), - (9, "10000000000000000001", "10000000000.000000001"), - (9, "11000000", "0.011"), - (9, "11000000000000000000", "11000000000.0"), - (10, "10001000000", "1.0001"), - (10, "20000000000000000000000", "2000000000000.0"), - (11, "0", "0.0"), - (11, "10100000000000000000000", "101000000000.0"), - (12, "1000000000000000100000", "1000000000.0000001"), - (12, "10001000000", "0.010001"), - (12, "10010000000", "0.01001"), - (12, "110", "0.00000000011"), - (13, "10010000000", "0.001001"), - (14, "10010000000", "0.0001001"), - (14, "110", "0.0000000000011"), - (15, "0", "0.0"), - (17, "1000000000000000100000", "10000.000000000001"), - (17, "10001000000", "0.00000010001"), - (18, "1000000000000000100000", "1000.0000000000001"), - ]; - - for (dec, input, output) in vals { - assert_eq!(format_units(&input, dec), output); - } - } -} diff --git a/core/lib/utils/src/lib.rs b/core/lib/utils/src/lib.rs index 92a1d7a0c470..85618a2e61ef 100644 --- a/core/lib/utils/src/lib.rs +++ b/core/lib/utils/src/lib.rs @@ -1,13 +1,6 @@ //! Various helpers used in the ZKsync stack. 
-pub mod bytecode; -mod convert; pub mod env; pub mod http_with_retries; -pub mod misc; pub mod panic_extractor; -mod serde_wrappers; -pub mod time; pub mod wait_for_tasks; - -pub use self::{convert::*, misc::*, serde_wrappers::*}; diff --git a/core/lib/utils/src/misc.rs b/core/lib/utils/src/misc.rs deleted file mode 100644 index 52bd7657c4e1..000000000000 --- a/core/lib/utils/src/misc.rs +++ /dev/null @@ -1,55 +0,0 @@ -use zksync_basic_types::{web3::keccak256, H256, U256}; - -pub const fn ceil_div(a: u64, b: u64) -> u64 { - if a == 0 { - a - } else { - (a - 1) / b + 1 - } -} - -pub fn ceil_div_u256(a: U256, b: U256) -> U256 { - (a + b - U256::from(1)) / b -} - -pub fn concat_and_hash(hash1: H256, hash2: H256) -> H256 { - let mut bytes = [0_u8; 64]; - bytes[..32].copy_from_slice(&hash1.0); - bytes[32..].copy_from_slice(&hash2.0); - H256(keccak256(&bytes)) -} - -pub fn expand_memory_contents(packed: &[(usize, U256)], memory_size_bytes: usize) -> Vec<u8> { - let mut result: Vec<u8> = vec![0; memory_size_bytes]; - - for (offset, value) in packed { - value.to_big_endian(&mut result[(offset * 32)..(offset + 1) * 32]); - } - - result -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_ceil_div_u64_max() { - assert_eq!(0, ceil_div(u64::MIN, u64::MAX)); - assert_eq!(1, ceil_div(u64::MAX, u64::MAX)); - } - - #[test] - fn test_ceil_div_roundup_required() { - assert_eq!(3, ceil_div(5, 2)); - assert_eq!(4, ceil_div(10, 3)); - assert_eq!(3, ceil_div(15, 7)); - } - - #[test] - fn test_ceil_div_no_roundup_required() { - assert_eq!(2, ceil_div(4, 2)); - assert_eq!(2, ceil_div(6, 3)); - assert_eq!(2, ceil_div(14, 7)); - } -} diff --git a/core/lib/utils/src/time.rs b/core/lib/utils/src/time.rs deleted file mode 100644 index 70372db34f49..000000000000 --- a/core/lib/utils/src/time.rs +++ /dev/null @@ -1,19 +0,0 @@ -use std::time::{Duration, SystemTime, UNIX_EPOCH}; - -pub fn seconds_since_epoch() -> u64 { - duration_since_epoch().as_secs() -} - -pub fn millis_since(since: u64) -> u64 { - (millis_since_epoch() - since as u128 * 1000) as u64 -} - -pub fn millis_since_epoch() -> u128 { - duration_since_epoch().as_millis() -} - -fn duration_since_epoch() -> Duration { - SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("Incorrect system time") -} diff --git a/core/lib/vm_executor/Cargo.toml b/core/lib/vm_executor/Cargo.toml index 06a531252c54..0402b7828e58 100644 --- a/core/lib/vm_executor/Cargo.toml +++ b/core/lib/vm_executor/Cargo.toml @@ -15,7 +15,6 @@ zksync_contracts.workspace = true zksync_dal.workspace = true zksync_types.workspace = true zksync_multivm.workspace = true -zksync_utils.workspace = true async-trait.workspace = true once_cell.workspace = true diff --git a/core/lib/vm_executor/src/batch/factory.rs b/core/lib/vm_executor/src/batch/factory.rs index de0db5f0bf75..76ef244401bd 100644 --- a/core/lib/vm_executor/src/batch/factory.rs +++ b/core/lib/vm_executor/src/batch/factory.rs @@ -18,7 +18,7 @@ use zksync_multivm::{ tracers::CallTracer, vm_fast, vm_latest::HistoryEnabled, - FastVmInstance, LegacyVmInstance, MultiVMTracer, + FastVmInstance, LegacyVmInstance, MultiVmTracer, }; use zksync_types::{commitment::PubdataParams, vm::FastVmMode, Transaction}; diff --git a/core/lib/vm_executor/src/oneshot/block.rs b/core/lib/vm_executor/src/oneshot/block.rs index d6118f15b98e..66bdd30e40ea 100644 --- a/core/lib/vm_executor/src/oneshot/block.rs +++ b/core/lib/vm_executor/src/oneshot/block.rs @@ -1,3 +1,5 @@ +use std::time::SystemTime; + use anyhow::Context; use zksync_dal::{Connection,
Core, CoreDal, DalError}; use zksync_multivm::{ @@ -8,11 +10,10 @@ use zksync_types::{ api, block::{unpack_block_info, L2BlockHasher}, fee_model::BatchFeeInput, - AccountTreeId, L1BatchNumber, L2BlockNumber, ProtocolVersionId, StorageKey, H256, + h256_to_u256, AccountTreeId, L1BatchNumber, L2BlockNumber, ProtocolVersionId, StorageKey, H256, SYSTEM_CONTEXT_ADDRESS, SYSTEM_CONTEXT_CURRENT_L2_BLOCK_INFO_POSITION, SYSTEM_CONTEXT_CURRENT_TX_ROLLING_HASH_POSITION, ZKPORTER_IS_AVAILABLE, }; -use zksync_utils::{h256_to_u256, time::seconds_since_epoch}; use super::{env::OneshotEnvParameters, ContractsKind}; @@ -124,7 +125,11 @@ impl BlockInfo { state_l2_block_number = sealed_l2_block_header.number; // Timestamp of the next L1 batch must be greater than the timestamp of the last L2 block. - l1_batch_timestamp = seconds_since_epoch().max(sealed_l2_block_header.timestamp + 1); + let current_timestamp = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .context("incorrect system time")? + .as_secs(); + l1_batch_timestamp = current_timestamp.max(sealed_l2_block_header.timestamp + 1); sealed_l2_block_header }; diff --git a/core/lib/vm_executor/src/oneshot/contracts.rs b/core/lib/vm_executor/src/oneshot/contracts.rs index d4e0a94f9178..cacab36cb1c2 100644 --- a/core/lib/vm_executor/src/oneshot/contracts.rs +++ b/core/lib/vm_executor/src/oneshot/contracts.rs @@ -26,7 +26,7 @@ impl ContractsKind for CallOrExecute {} /// Provider of [`BaseSystemContracts`] for oneshot execution. /// -/// The main implementation of this trait is [`MultiVMBaseSystemContracts`], which selects contracts +/// The main implementation of this trait is [`MultiVmBaseSystemContracts`], which selects contracts /// based on [`ProtocolVersionId`]. #[async_trait] pub trait BaseSystemContractsProvider<C: ContractsKind>: fmt::Debug + Send + Sync { @@ -46,7 +46,7 @@ pub trait BaseSystemContractsProvider<C: ContractsKind>: fmt::Debug + Send + Syn /// System contracts (bootloader and default account abstraction) for all supported VM versions. #[derive(Debug)] -pub struct MultiVMBaseSystemContracts<C> { +pub struct MultiVmBaseSystemContracts<C> { /// Contracts to be used for pre-virtual-blocks protocol versions. pre_virtual_blocks: BaseSystemContracts, /// Contracts to be used for post-virtual-blocks protocol versions. @@ -69,11 +69,11 @@ pub struct MultiVMBaseSystemContracts<C> { vm_protocol_defense: BaseSystemContracts, /// Contracts to be used after the gateway upgrade gateway: BaseSystemContracts, - // We use `fn() -> C` marker so that the `MultiVMBaseSystemContracts` unconditionally implements `Send + Sync`. + // We use `fn() -> C` marker so that the `MultiVmBaseSystemContracts` unconditionally implements `Send + Sync`. _contracts_kind: PhantomData<fn() -> C>, } -impl<C: ContractsKind> MultiVMBaseSystemContracts<C> { +impl<C: ContractsKind> MultiVmBaseSystemContracts<C> { fn get_by_protocol_version( &self, version: ProtocolVersionId, @@ -120,7 +120,7 @@ impl<C: ContractsKind> MultiVMBaseSystemContracts<C> { } } -impl MultiVMBaseSystemContracts<EstimateGas> { +impl MultiVmBaseSystemContracts<EstimateGas> { /// Returned system contracts (mainly the bootloader) are tuned to provide accurate execution metrics. pub fn load_estimate_gas_blocking() -> Self { Self {
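// Example (sketch): why the `PhantomData<fn() -> C>` marker above keeps the struct
// `Send + Sync` for any `C` — function pointers are unconditionally `Send + Sync`,
// so the auto traits no longer depend on `C` itself.
fn assert_send_sync<T: Send + Sync>() {}

fn marker_is_always_send_sync<C>() {
    struct Marker<T>(std::marker::PhantomData<fn() -> T>);
    // Compiles even if `C` is neither `Send` nor `Sync`.
    assert_send_sync::<Marker<C>>();
}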
@@ -142,7 +142,7 @@ impl MultiVMBaseSystemContracts<EstimateGas> { } } -impl MultiVMBaseSystemContracts<CallOrExecute> { +impl MultiVmBaseSystemContracts<CallOrExecute> { /// Returned system contracts (mainly the bootloader) are tuned to provide better UX (e.g. revert messages). pub fn load_eth_call_blocking() -> Self { Self { @@ -165,7 +165,7 @@ impl MultiVMBaseSystemContracts<CallOrExecute> { } #[async_trait] -impl<C: ContractsKind> BaseSystemContractsProvider<C> for MultiVMBaseSystemContracts<C> { +impl<C: ContractsKind> BaseSystemContractsProvider<C> for MultiVmBaseSystemContracts<C> { async fn base_system_contracts( &self, block_info: &ResolvedBlockInfo, diff --git a/core/lib/vm_executor/src/oneshot/mod.rs b/core/lib/vm_executor/src/oneshot/mod.rs index 7d45dcca8cd3..0dfdb67bff52 100644 --- a/core/lib/vm_executor/src/oneshot/mod.rs +++ b/core/lib/vm_executor/src/oneshot/mod.rs @@ -29,24 +29,24 @@ use zksync_multivm::{ utils::adjust_pubdata_price_for_tx, vm_latest::{HistoryDisabled, HistoryEnabled}, zk_evm_latest::ethereum_types::U256, - FastVmInstance, HistoryMode, LegacyVmInstance, MultiVMTracer, + FastVmInstance, HistoryMode, LegacyVmInstance, MultiVmTracer, }; use zksync_types::{ block::pack_block_info, - get_nonce_key, + get_nonce_key, h256_to_u256, l2::L2Tx, + u256_to_h256, utils::{decompose_full_nonce, nonces_to_full_nonce, storage_key_for_eth_balance}, vm::FastVmMode, AccountTreeId, Nonce, StorageKey, Transaction, SYSTEM_CONTEXT_ADDRESS, SYSTEM_CONTEXT_CURRENT_L2_BLOCK_INFO_POSITION, SYSTEM_CONTEXT_CURRENT_TX_ROLLING_HASH_POSITION, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; pub use self::{ block::{BlockInfo, ResolvedBlockInfo}, contracts::{ BaseSystemContractsProvider, CallOrExecute, ContractsKind, EstimateGas, - MultiVMBaseSystemContracts, + MultiVmBaseSystemContracts, }, env::OneshotEnvParameters, mock::MockOneshotExecutor, diff --git a/core/lib/vm_executor/src/oneshot/tests.rs b/core/lib/vm_executor/src/oneshot/tests.rs index 65d2ff3727c0..9649f5b49905 100644 --- a/core/lib/vm_executor/src/oneshot/tests.rs +++ b/core/lib/vm_executor/src/oneshot/tests.rs @@ -4,7 +4,6 @@ use assert_matches::assert_matches; use test_casing::{test_casing, Product}; use zksync_multivm::interface::storage::InMemoryStorage; use zksync_types::{ProtocolVersionId, H256}; -use zksync_utils::bytecode::hash_bytecode; use super::*; use crate::testonly::{ @@ -75,7 +74,7 @@ fn setting_up_nonce_and_balance_in_storage() { #[tokio::test] async fn inspecting_transfer(exec_mode: TxExecutionMode, fast_vm_mode: FastVmMode) { let tx = create_l2_transaction(1_000_000_000.into(), Nonce(0)); - let mut storage = InMemoryStorage::with_system_contracts(hash_bytecode); + let mut storage = InMemoryStorage::with_system_contracts(); storage.set_value( storage_key_for_eth_balance(&tx.initiator_account()), u256_to_h256(u64::MAX.into()),
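// Example (sketch): the simplified constructor used in the test above — bytecodes are
// now hashed internally via `BytecodeHash`, so callers no longer pass a hasher.
fn in_memory_storage_example() -> InMemoryStorage {
    InMemoryStorage::with_system_contracts()
}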
diff --git a/core/lib/vm_interface/src/storage/in_memory.rs b/core/lib/vm_interface/src/storage/in_memory.rs index d83f675cd54e..f756e7a6d76f 100644 --- a/core/lib/vm_interface/src/storage/in_memory.rs +++ b/core/lib/vm_interface/src/storage/in_memory.rs @@ -1,9 +1,9 @@ use std::collections::{hash_map::Entry, BTreeMap, HashMap}; use zksync_types::{ - block::DeployedContract, get_code_key, get_known_code_key, get_system_context_init_logs, - system_contracts::get_system_smart_contracts, L2ChainId, StorageKey, StorageLog, StorageValue, - H256, + block::DeployedContract, bytecode::BytecodeHash, get_code_key, get_known_code_key, + get_system_context_init_logs, system_contracts::get_system_smart_contracts, L2ChainId, + StorageKey, StorageLog, StorageValue, H256, }; use super::ReadStorage; @@ -21,29 +21,20 @@ pub struct InMemoryStorage { impl InMemoryStorage { /// Constructs a storage that contains system smart contracts. - pub fn with_system_contracts(bytecode_hasher: impl Fn(&[u8]) -> H256) -> Self { - Self::with_system_contracts_and_chain_id( - L2ChainId::from(IN_MEMORY_STORAGE_DEFAULT_NETWORK_ID), - bytecode_hasher, - ) + pub fn with_system_contracts() -> Self { + Self::with_system_contracts_and_chain_id(L2ChainId::from( + IN_MEMORY_STORAGE_DEFAULT_NETWORK_ID, + )) } /// Constructs a storage that contains system smart contracts (with a given chain id). - pub fn with_system_contracts_and_chain_id( - chain_id: L2ChainId, - bytecode_hasher: impl Fn(&[u8]) -> H256, - ) -> Self { - Self::with_custom_system_contracts_and_chain_id( - chain_id, - bytecode_hasher, - get_system_smart_contracts(false), - ) + pub fn with_system_contracts_and_chain_id(chain_id: L2ChainId) -> Self { + Self::with_custom_system_contracts_and_chain_id(chain_id, get_system_smart_contracts(false)) } /// Constructs a storage that contains custom system contracts (provided in a vector). pub fn with_custom_system_contracts_and_chain_id( chain_id: L2ChainId, - bytecode_hasher: impl Fn(&[u8]) -> H256, contracts: Vec<DeployedContract>, ) -> Self { let system_context_init_log = get_system_context_init_logs(chain_id); @@ -51,7 +42,7 @@ impl InMemoryStorage { let state_without_indices: BTreeMap<_, _> = contracts .iter() .flat_map(|contract| { - let bytecode_hash = bytecode_hasher(&contract.bytecode); + let bytecode_hash = BytecodeHash::for_bytecode(&contract.bytecode).value(); let deployer_code_key = get_code_key(contract.account_id.address()); let is_known_code_key = get_known_code_key(&bytecode_hash); @@ -72,7 +63,12 @@ impl InMemoryStorage { let factory_deps = contracts .into_iter() - .map(|contract| (bytecode_hasher(&contract.bytecode), contract.bytecode)) + .map(|contract| { + ( + BytecodeHash::for_bytecode(&contract.bytecode).value(), + contract.bytecode, + ) + }) .collect(); let last_enum_index_set = state.len() as u64; diff --git a/core/node/api_server/Cargo.toml b/core/node/api_server/Cargo.toml index d0723a9d23e7..debabb8d3666 100644 --- a/core/node/api_server/Cargo.toml +++ b/core/node/api_server/Cargo.toml @@ -25,7 +25,6 @@ zksync_state.workspace = true zksync_system_constants.workspace = true zksync_metadata_calculator.workspace = true zksync_web3_decl = { workspace = true, features = ["server"] } -zksync_utils.workspace = true zksync_protobuf.workspace = true zksync_mini_merkle_tree.workspace = true zksync_multivm.workspace = true @@ -59,6 +58,7 @@ lru.workspace = true zk_evm_1_5_0.workspace = true zksync_node_genesis.workspace = true zksync_node_test_utils.workspace = true +zksync_test_contracts.workspace = true assert_matches.workspace = true test-casing.workspace = true diff --git a/core/node/api_server/src/execution_sandbox/storage.rs b/core/node/api_server/src/execution_sandbox/storage.rs index c80356f6e36e..026ac58733a4 100644 --- a/core/node/api_server/src/execution_sandbox/storage.rs +++ b/core/node/api_server/src/execution_sandbox/storage.rs @@ -3,11 +3,10 @@ use zksync_multivm::interface::storage::{ReadStorage, StorageWithOverrides}; use zksync_types::{ api::state_override::{OverrideState, StateOverride}, - get_code_key, get_known_code_key, get_nonce_key, + get_code_key, get_known_code_key, get_nonce_key, h256_to_u256, u256_to_h256, utils::{decompose_full_nonce, nonces_to_full_nonce, storage_key_for_eth_balance}, AccountTreeId, StorageKey, H256, }; -use zksync_utils::{h256_to_u256, u256_to_h256}; /// This method is blocking.
pub(super) fn apply_state_override( diff --git a/core/node/api_server/src/execution_sandbox/vm_metrics.rs b/core/node/api_server/src/execution_sandbox/vm_metrics.rs index 613475b6ef92..282d9bdf1b77 100644 --- a/core/node/api_server/src/execution_sandbox/vm_metrics.rs +++ b/core/node/api_server/src/execution_sandbox/vm_metrics.rs @@ -7,8 +7,7 @@ use zksync_multivm::{ interface::{TransactionExecutionMetrics, VmEvent, VmExecutionResultAndLogs}, utils::StorageWritesDeduplicator, }; -use zksync_types::H256; -use zksync_utils::bytecode::bytecode_len_in_bytes; +use zksync_types::{bytecode::BytecodeHash, H256}; use crate::utils::ReportFilter; @@ -149,7 +148,11 @@ pub(super) fn collect_tx_execution_metrics( .sum(); let published_bytecode_bytes = VmEvent::extract_published_bytecodes(&result.logs.events) .iter() - .map(|bytecode_hash| bytecode_len_in_bytes(*bytecode_hash)) + .map(|&bytecode_hash| { + BytecodeHash::try_from(bytecode_hash) + .expect("published unparseable bytecode hash") + .len_in_bytes() + }) .sum(); TransactionExecutionMetrics { diff --git a/core/node/api_server/src/testonly.rs b/core/node/api_server/src/testonly.rs index de6501716125..06b31427ed61 100644 --- a/core/node/api_server/src/testonly.rs +++ b/core/node/api_server/src/testonly.rs @@ -3,14 +3,13 @@ use std::{collections::HashMap, iter}; use zk_evm_1_5_0::zkevm_opcode_defs::decoding::{EncodingModeProduction, VmEncodingMode}; -use zksync_contracts::{ - eth_contract, get_loadnext_contract, load_contract, read_bytecode, - test_contracts::LoadnextContractExecutionParams, -}; +use zksync_contracts::{eth_contract, load_contract, read_bytecode}; use zksync_dal::{Connection, Core, CoreDal}; use zksync_multivm::utils::derive_base_fee_and_gas_per_pubdata; use zksync_system_constants::{L2_BASE_TOKEN_ADDRESS, REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_BYTE}; +use zksync_test_contracts::{LoadnextContractExecutionParams, TestContract}; use zksync_types::{ + address_to_u256, api::state_override::{Bytecode, OverrideAccount, OverrideState, StateOverride}, ethabi, ethabi::Token, @@ -20,20 +19,12 @@ use zksync_types::{ l1::L1Tx, l2::L2Tx, transaction_request::{CallRequest, Eip712Meta, PaymasterParams}, + u256_to_h256, utils::storage_key_for_eth_balance, AccountTreeId, Address, K256PrivateKey, L2BlockNumber, L2ChainId, Nonce, ProtocolVersionId, StorageKey, StorageLog, EIP_712_TX_TYPE, H256, U256, }; -use zksync_utils::{address_to_u256, u256_to_h256}; - -const EXPENSIVE_CONTRACT_PATH: &str = - "etc/contracts-test-data/artifacts-zk/contracts/expensive/expensive.sol/Expensive.json"; -const PRECOMPILES_CONTRACT_PATH: &str = - "etc/contracts-test-data/artifacts-zk/contracts/precompiles/precompiles.sol/Precompiles.json"; -const COUNTER_CONTRACT_PATH: &str = - "etc/contracts-test-data/artifacts-zk/contracts/counter/counter.sol/Counter.json"; -const INFINITE_LOOP_CONTRACT_PATH: &str = - "etc/contracts-test-data/artifacts-zk/contracts/infinite/infinite.sol/InfiniteLoop.json"; + const MULTICALL3_CONTRACT_PATH: &str = "contracts/l2-contracts/zkout/Multicall3.sol/Multicall3.json"; @@ -97,7 +88,7 @@ impl StateBuilder { self.inner.insert( Self::LOAD_TEST_ADDRESS, OverrideAccount { - code: Some(Bytecode::new(get_loadnext_contract().bytecode).unwrap()), + code: Some(Bytecode::new(TestContract::load_test().bytecode.to_vec()).unwrap()), state: Some(OverrideState::State(state)), ..OverrideAccount::default() }, @@ -113,21 +104,21 @@ impl StateBuilder { pub fn with_expensive_contract(self) -> Self { self.with_contract( Self::EXPENSIVE_CONTRACT_ADDRESS, - 
read_bytecode(EXPENSIVE_CONTRACT_PATH), + TestContract::expensive().bytecode.to_vec(), ) } pub fn with_precompiles_contract(self) -> Self { self.with_contract( Self::PRECOMPILES_CONTRACT_ADDRESS, - read_bytecode(PRECOMPILES_CONTRACT_PATH), + TestContract::precompiles_test().bytecode.to_vec(), ) } pub fn with_counter_contract(self, initial_value: u64) -> Self { let mut this = self.with_contract( Self::COUNTER_CONTRACT_ADDRESS, - read_bytecode(COUNTER_CONTRACT_PATH), + TestContract::counter().bytecode.to_vec(), ); if initial_value != 0 { let state = HashMap::from([(H256::zero(), H256::from_low_u64_be(initial_value))]); @@ -142,7 +133,7 @@ impl StateBuilder { pub fn with_infinite_loop_contract(self) -> Self { self.with_contract( Self::INFINITE_LOOP_CONTRACT_ADDRESS, - read_bytecode(INFINITE_LOOP_CONTRACT_PATH), + TestContract::infinite_loop().bytecode.to_vec(), ) } @@ -368,7 +359,7 @@ impl TestAccount for K256PrivateKey { L2ChainId::default(), self, if params.deploys > 0 { - get_loadnext_contract().factory_deps + TestContract::load_test().factory_deps() } else { vec![] }, @@ -378,9 +369,8 @@ impl TestAccount for K256PrivateKey { } fn create_expensive_tx(&self, write_count: usize) -> L2Tx { - let calldata = load_contract(EXPENSIVE_CONTRACT_PATH) + let calldata = TestContract::expensive() .function("expensive") - .expect("no `expensive` function in contract") .encode_input(&[Token::Uint(write_count.into())]) .expect("failed encoding `expensive` function"); L2Tx::new_signed( @@ -398,9 +388,8 @@ impl TestAccount for K256PrivateKey { } fn create_expensive_cleanup_tx(&self) -> L2Tx { - let calldata = load_contract(EXPENSIVE_CONTRACT_PATH) + let calldata = TestContract::expensive() .function("cleanUp") - .expect("no `cleanUp` function in contract") .encode_input(&[]) .expect("failed encoding `cleanUp` input"); L2Tx::new_signed( @@ -418,9 +407,8 @@ impl TestAccount for K256PrivateKey { } fn create_code_oracle_tx(&self, bytecode_hash: H256, expected_keccak_hash: H256) -> L2Tx { - let calldata = load_contract(PRECOMPILES_CONTRACT_PATH) + let calldata = TestContract::precompiles_test() .function("callCodeOracle") - .expect("no `callCodeOracle` function") .encode_input(&[ Token::FixedBytes(bytecode_hash.0.to_vec()), Token::FixedBytes(expected_keccak_hash.0.to_vec()), @@ -441,9 +429,8 @@ impl TestAccount for K256PrivateKey { } fn create_counter_tx(&self, increment: U256, revert: bool) -> L2Tx { - let calldata = load_contract(COUNTER_CONTRACT_PATH) + let calldata = TestContract::counter() .function("incrementWithRevert") - .expect("no `incrementWithRevert` function") .encode_input(&[Token::Uint(increment), Token::Bool(revert)]) .expect("failed encoding `incrementWithRevert` input"); L2Tx::new_signed( @@ -461,9 +448,8 @@ impl TestAccount for K256PrivateKey { } fn create_l1_counter_tx(&self, increment: U256, revert: bool) -> L1Tx { - let calldata = load_contract(COUNTER_CONTRACT_PATH) + let calldata = TestContract::counter() .function("incrementWithRevert") - .expect("no `incrementWithRevert` function") .encode_input(&[Token::Uint(increment), Token::Bool(revert)]) .expect("failed encoding `incrementWithRevert` input"); let request = CallRequest { @@ -481,9 +467,8 @@ impl TestAccount for K256PrivateKey { } fn query_counter_value(&self) -> CallRequest { - let calldata = load_contract(COUNTER_CONTRACT_PATH) + let calldata = TestContract::counter() .function("get") - .expect("no `get` function") .encode_input(&[]) .expect("failed encoding `get` input"); CallRequest { @@ -495,9 +480,8 @@ impl TestAccount for 
K256PrivateKey { } fn create_infinite_loop_tx(&self) -> L2Tx { - let calldata = load_contract(INFINITE_LOOP_CONTRACT_PATH) + let calldata = TestContract::infinite_loop() .function("infiniteLoop") - .expect("no `infiniteLoop` function") .encode_input(&[]) .expect("failed encoding `infiniteLoop` input"); L2Tx::new_signed( diff --git a/core/node/api_server/src/tx_sender/mod.rs b/core/node/api_server/src/tx_sender/mod.rs index 011d9e4e2b2f..91fb84ab8f17 100644 --- a/core/node/api_server/src/tx_sender/mod.rs +++ b/core/node/api_server/src/tx_sender/mod.rs @@ -24,7 +24,7 @@ use zksync_state_keeper::{ use zksync_types::{ api::state_override::StateOverride, fee_model::BatchFeeInput, - get_intrinsic_constants, + get_intrinsic_constants, h256_to_u256, l2::{error::TxCheckError::TxDuplication, L2Tx}, transaction_request::CallOverrides, utils::storage_key_for_eth_balance, @@ -32,9 +32,8 @@ use zksync_types::{ AccountTreeId, Address, L2ChainId, Nonce, ProtocolVersionId, Transaction, H160, H256, MAX_NEW_FACTORY_DEPS, U256, }; -use zksync_utils::h256_to_u256; use zksync_vm_executor::oneshot::{ - CallOrExecute, EstimateGas, MultiVMBaseSystemContracts, OneshotEnvParameters, + CallOrExecute, EstimateGas, MultiVmBaseSystemContracts, OneshotEnvParameters, }; pub(super) use self::{gas_estimation::BinarySearchKind, result::SubmitTxError}; @@ -110,11 +109,11 @@ impl SandboxExecutorOptions { validation_computational_gas_limit: u32, ) -> anyhow::Result<Self> { let estimate_gas_contracts = - tokio::task::spawn_blocking(MultiVMBaseSystemContracts::load_estimate_gas_blocking) + tokio::task::spawn_blocking(MultiVmBaseSystemContracts::load_estimate_gas_blocking) .await .context("failed loading base contracts for gas estimation")?; let call_contracts = - tokio::task::spawn_blocking(MultiVMBaseSystemContracts::load_eth_call_blocking) + tokio::task::spawn_blocking(MultiVmBaseSystemContracts::load_eth_call_blocking) .await .context("failed loading base contracts for calls / tx execution")?; diff --git a/core/node/api_server/src/tx_sender/tests/call.rs b/core/node/api_server/src/tx_sender/tests/call.rs index e43f55b2b9af..08571790e8eb 100644 --- a/core/node/api_server/src/tx_sender/tests/call.rs +++ b/core/node/api_server/src/tx_sender/tests/call.rs @@ -238,7 +238,8 @@ async fn eth_call_with_load_test_transactions() { }, LoadnextContractExecutionParams { reads: 100, - writes: 100, + initial_writes: 100, + repeated_writes: 100, ..LoadnextContractExecutionParams::empty() }, ]; diff --git a/core/node/api_server/src/tx_sender/tests/gas_estimation.rs b/core/node/api_server/src/tx_sender/tests/gas_estimation.rs index 7db1b8339314..954792f915cc 100644 --- a/core/node/api_server/src/tx_sender/tests/gas_estimation.rs +++ b/core/node/api_server/src/tx_sender/tests/gas_estimation.rs @@ -7,10 +7,10 @@ use test_casing::{test_casing, Product}; use zksync_system_constants::CODE_ORACLE_ADDRESS; use zksync_types::{ api::state_override::{OverrideAccount, OverrideState}, + bytecode::BytecodeHash, web3::keccak256, K256PrivateKey, }; -use zksync_utils::bytecode::hash_bytecode; use super::*; use crate::{ @@ -116,7 +116,7 @@ async fn initial_estimate_for_deep_recursion(with_reads: bool) { (75, 1.2), (100, 1.4), (125, 1.7), - (150, 2.1), + (150, 2.2), ] }; for &(recursion_depth, multiplier) in depths_and_multipliers { @@ -216,7 +216,7 @@ async fn initial_estimate_for_code_oracle_tx() { // Add another contract that is never executed, but has a large bytecode.
let huge_contact_address = Address::repeat_byte(23); let huge_contract_bytecode = vec![0_u8; 10_001 * 32]; - let huge_contract_bytecode_hash = hash_bytecode(&huge_contract_bytecode); + let huge_contract_bytecode_hash = BytecodeHash::for_bytecode(&huge_contract_bytecode).value(); let huge_contract_keccak_hash = H256(keccak256(&huge_contract_bytecode)); let state_override = StateBuilder::default() @@ -240,7 +240,7 @@ async fn initial_estimate_for_code_oracle_tx() { (*contract.account_id.address() == CODE_ORACLE_ADDRESS).then_some(&contract.bytecode) }) .expect("no code oracle"); - let code_oracle_bytecode_hash = hash_bytecode(code_oracle_bytecode); + let code_oracle_bytecode_hash = BytecodeHash::for_bytecode(code_oracle_bytecode).value(); let code_oracle_keccak_hash = H256(keccak256(code_oracle_bytecode)); let warm_bytecode_hashes = [ @@ -444,7 +444,7 @@ async fn estimating_gas_for_code_oracle_tx() { // Add another contract that is never executed, but has a large bytecode. let huge_contact_address = Address::repeat_byte(23); let huge_contract_bytecode = vec![0_u8; 10_001 * 32]; - let huge_contract_bytecode_hash = hash_bytecode(&huge_contract_bytecode); + let huge_contract_bytecode_hash = BytecodeHash::for_bytecode(&huge_contract_bytecode).value(); let huge_contract_keccak_hash = H256(keccak256(&huge_contract_bytecode)); let state_override = StateBuilder::default() diff --git a/core/node/api_server/src/tx_sender/tests/mod.rs b/core/node/api_server/src/tx_sender/tests/mod.rs index cbe405b2aa63..014bc5636c2d 100644 --- a/core/node/api_server/src/tx_sender/tests/mod.rs +++ b/core/node/api_server/src/tx_sender/tests/mod.rs @@ -1,9 +1,9 @@ //! Tests for the transaction sender. use test_casing::TestCases; -use zksync_contracts::test_contracts::LoadnextContractExecutionParams; use zksync_node_genesis::{insert_genesis_batch, GenesisParams}; use zksync_node_test_utils::{create_l2_block, prepare_recovery_snapshot}; +use zksync_test_contracts::LoadnextContractExecutionParams; use zksync_types::{get_nonce_key, L1BatchNumber, L2BlockNumber, StorageLog}; use zksync_vm_executor::oneshot::MockOneshotExecutor; @@ -18,7 +18,8 @@ const LOAD_TEST_CASES: TestCases = test_casing: LoadnextContractExecutionParams::default(), // No storage modification LoadnextContractExecutionParams { - writes: 0, + initial_writes: 0, + repeated_writes: 0, events: 0, ..LoadnextContractExecutionParams::default() }, diff --git a/core/node/api_server/src/web3/namespaces/debug.rs b/core/node/api_server/src/web3/namespaces/debug.rs index 726f35ac29a9..4fd32c1b5223 100644 --- a/core/node/api_server/src/web3/namespaces/debug.rs +++ b/core/node/api_server/src/web3/namespaces/debug.rs @@ -96,13 +96,16 @@ impl DebugNamespace { CallType::NearCall => unreachable!("We have to filter our near calls before"), }; - let result = if call.error.is_none() { - Some(CallResult { - output: web3::Bytes::from(call.output), - gas_used: U256::from(call.gas_used), - }) + let (result, error) = if let Some(error) = call.revert_reason { + (None, Some(error)) } else { - None + ( + Some(CallResult { + output: web3::Bytes::from(call.output), + gas_used: U256::from(call.gas_used), + }), + None, + ) }; calls.push(DebugCallFlat { @@ -116,6 +119,7 @@ impl DebugNamespace { }, result, subtraces, + error, trace_address: trace_address.clone(), // Clone the current trace address transaction_position: meta.index_in_block, transaction_hash: meta.tx_hash, diff --git a/core/node/api_server/src/web3/namespaces/eth.rs b/core/node/api_server/src/web3/namespaces/eth.rs index 
e594af20d183..2765de2c2892 100644 --- a/core/node/api_server/src/web3/namespaces/eth.rs +++ b/core/node/api_server/src/web3/namespaces/eth.rs @@ -6,16 +6,14 @@ use zksync_types::{ state_override::StateOverride, BlockId, BlockNumber, FeeHistory, GetLogsFilter, Transaction, TransactionId, TransactionReceipt, TransactionVariant, }, + bytecode::{trim_padded_evm_bytecode, BytecodeMarker}, l2::{L2Tx, TransactionType}, transaction_request::CallRequest, + u256_to_h256, utils::decompose_full_nonce, web3::{self, Bytes, SyncInfo, SyncState}, AccountTreeId, L2BlockNumber, StorageKey, H256, L2_BASE_TOKEN_ADDRESS, U256, }; -use zksync_utils::{ - bytecode::{prepare_evm_bytecode, BytecodeMarker}, - u256_to_h256, -}; use zksync_web3_decl::{ error::Web3Error, types::{Address, Block, Filter, FilterChanges, Log, U64}, @@ -406,7 +404,7 @@ impl EthNamespace { // Check if the bytecode is an EVM bytecode, and if so, pre-process it correspondingly. let marker = BytecodeMarker::new(contract_code.bytecode_hash); let prepared_bytecode = if marker == Some(BytecodeMarker::Evm) { - prepare_evm_bytecode(&contract_code.bytecode) + trim_padded_evm_bytecode(&contract_code.bytecode) .with_context(|| { format!( "malformed EVM bytecode at address {address:?}, hash = {:?}", diff --git a/core/node/api_server/src/web3/namespaces/zks.rs b/core/node/api_server/src/web3/namespaces/zks.rs index 1a4114bd2c6a..05c90f0b0140 100644 --- a/core/node/api_server/src/web3/namespaces/zks.rs +++ b/core/node/api_server/src/web3/namespaces/zks.rs @@ -7,12 +7,14 @@ use zksync_mini_merkle_tree::MiniMerkleTree; use zksync_multivm::interface::VmExecutionResultAndLogs; use zksync_system_constants::DEFAULT_L2_TX_GAS_PER_PUBDATA_BYTE; use zksync_types::{ + address_to_h256, api::{ state_override::StateOverride, BlockDetails, BridgeAddresses, GetLogsFilter, L1BatchDetails, L2ToL1LogProof, Proof, ProtocolVersion, StorageProof, TransactionDetails, }, fee::Fee, fee_model::{FeeParams, PubdataIndependentBatchFeeModelInput}, + h256_to_u256, l1::L1Tx, l2::L2Tx, l2_to_l1_log::{l2_to_l1_logs_tree_size, L2ToL1Log}, @@ -23,7 +25,6 @@ use zksync_types::{ AccountTreeId, L1BatchNumber, L2BlockNumber, ProtocolVersionId, StorageKey, Transaction, L1_MESSENGER_ADDRESS, L2_BASE_TOKEN_ADDRESS, REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_BYTE, U256, U64, }; -use zksync_utils::{address_to_h256, h256_to_u256}; use zksync_web3_decl::{ error::Web3Error, types::{Address, Token, H256}, diff --git a/core/node/api_server/src/web3/tests/mod.rs b/core/node/api_server/src/web3/tests/mod.rs index b35bb9f5fad7..feac8eb8d17f 100644 --- a/core/node/api_server/src/web3/tests/mod.rs +++ b/core/node/api_server/src/web3/tests/mod.rs @@ -33,6 +33,10 @@ use zksync_system_constants::{ use zksync_types::{ api, block::{pack_block_info, L2BlockHasher, L2BlockHeader}, + bytecode::{ + testonly::{PROCESSED_EVM_BYTECODE, RAW_EVM_BYTECODE}, + BytecodeHash, + }, fee_model::{BatchFeeInput, FeeParams}, get_nonce_key, l2::L2Tx, @@ -40,17 +44,11 @@ use zksync_types::{ system_contracts::get_system_smart_contracts, tokens::{TokenInfo, TokenMetadata}, tx::IncludedTxLocation, + u256_to_h256, utils::{storage_key_for_eth_balance, storage_key_for_standard_token_balance}, AccountTreeId, Address, L1BatchNumber, Nonce, ProtocolVersionId, StorageKey, StorageLog, H256, U256, U64, }; -use zksync_utils::{ - bytecode::{ - hash_bytecode, hash_evm_bytecode, - testonly::{PROCESSED_EVM_BYTECODE, RAW_EVM_BYTECODE}, - }, - u256_to_h256, -}; use zksync_vm_executor::oneshot::MockOneshotExecutor; use zksync_web3_decl::{ client::{Client, 
DynClient, L2}, @@ -678,7 +676,7 @@ impl HttpTest for StorageAccessWithSnapshotRecovery { fn storage_initialization(&self) -> StorageInitialization { let address = Address::repeat_byte(1); let code_key = get_code_key(&address); - let code_hash = hash_bytecode(&[0; 32]); + let code_hash = BytecodeHash::for_bytecode(&[0; 32]).value(); let balance_key = storage_key_for_eth_balance(&address); let logs = vec![ StorageLog::new_write_log(code_key, code_hash), @@ -1173,7 +1171,7 @@ impl GetBytecodeTest { at_block: L2BlockNumber, address: Address, ) -> anyhow::Result<()> { - let evm_bytecode_hash = hash_evm_bytecode(RAW_EVM_BYTECODE); + let evm_bytecode_hash = BytecodeHash::for_evm_bytecode(RAW_EVM_BYTECODE).value(); let code_log = StorageLog::new_write_log(get_code_key(&address), evm_bytecode_hash); connection .storage_logs_dal() diff --git a/core/node/api_server/src/web3/tests/vm.rs b/core/node/api_server/src/web3/tests/vm.rs index 4e0426de7bfa..a82ca3b9e347 100644 --- a/core/node/api_server/src/web3/tests/vm.rs +++ b/core/node/api_server/src/web3/tests/vm.rs @@ -16,10 +16,9 @@ use zksync_multivm::interface::{ }; use zksync_types::{ api::ApiStorageLog, fee_model::BatchFeeInput, get_intrinsic_constants, - transaction_request::CallRequest, vm::FastVmMode, K256PrivateKey, L2ChainId, + transaction_request::CallRequest, u256_to_h256, vm::FastVmMode, K256PrivateKey, L2ChainId, PackedEthSignature, StorageLogKind, StorageLogWithPreviousValue, Transaction, U256, }; -use zksync_utils::u256_to_h256; use zksync_vm_executor::oneshot::{ BaseSystemContractsProvider, ContractsKind, MockOneshotExecutor, OneshotEnvParameters, ResolvedBlockInfo, diff --git a/core/node/base_token_adjuster/Cargo.toml b/core/node/base_token_adjuster/Cargo.toml index 9dcf5d796530..b326e7a6b42d 100644 --- a/core/node/base_token_adjuster/Cargo.toml +++ b/core/node/base_token_adjuster/Cargo.toml @@ -19,7 +19,6 @@ zksync_external_price_api.workspace = true zksync_contracts.workspace = true zksync_eth_client.workspace = true zksync_node_fee_model.workspace = true -zksync_utils.workspace = true vise.workspace = true bigdecimal.workspace = true diff --git a/core/node/commitment_generator/Cargo.toml b/core/node/commitment_generator/Cargo.toml index 1f4645414cbd..f0b4046bab42 100644 --- a/core/node/commitment_generator/Cargo.toml +++ b/core/node/commitment_generator/Cargo.toml @@ -16,7 +16,6 @@ zksync_types.workspace = true zksync_dal.workspace = true zksync_health_check.workspace = true zksync_l1_contract_interface.workspace = true -zksync_utils.workspace = true zksync_eth_client.workspace = true zksync_contracts.workspace = true zksync_multivm.workspace = true diff --git a/core/node/commitment_generator/src/lib.rs b/core/node/commitment_generator/src/lib.rs index 9a33d4766f6e..2ce0152abab6 100644 --- a/core/node/commitment_generator/src/lib.rs +++ b/core/node/commitment_generator/src/lib.rs @@ -12,10 +12,10 @@ use zksync_types::{ AuxCommitments, BlobHash, CommitmentCommonInput, CommitmentInput, L1BatchAuxiliaryOutput, L1BatchCommitment, L1BatchCommitmentArtifacts, L1BatchCommitmentMode, }, + h256_to_u256, writes::{InitialStorageWrite, RepeatedStorageWrite, StateDiffRecord}, L1BatchNumber, ProtocolVersionId, StorageKey, H256, U256, }; -use zksync_utils::h256_to_u256; use crate::{ metrics::{CommitmentStage, METRICS}, diff --git a/core/node/commitment_generator/src/utils.rs b/core/node/commitment_generator/src/utils.rs index d405a1256a29..cc44d7a03c71 100644 --- a/core/node/commitment_generator/src/utils.rs +++ 
b/core/node/commitment_generator/src/utils.rs @@ -21,13 +21,13 @@ use zksync_l1_contract_interface::i_executor::commit::kzg::ZK_SYNC_BYTES_PER_BLO use zksync_multivm::{interface::VmEvent, utils::get_used_bootloader_memory_bytes}; use zksync_system_constants::message_root::{AGG_TREE_HEIGHT_KEY, AGG_TREE_NODES_KEY}; use zksync_types::{ + address_to_u256, h256_to_u256, u256_to_h256, vm::VmVersion, web3::keccak256, zk_evm_types::{LogQuery, Timestamp}, AccountTreeId, L1BatchNumber, ProtocolVersionId, StorageKey, EVENT_WRITER_ADDRESS, H256, L2_MESSAGE_ROOT_ADDRESS, U256, }; -use zksync_utils::{address_to_u256, expand_memory_contents, h256_to_u256, u256_to_h256}; /// Encapsulates computations of commitment components. /// @@ -124,6 +124,15 @@ impl CommitmentComputer for RealCommitmentComputer { } } +fn expand_memory_contents(packed: &[(usize, U256)], memory_size_bytes: usize) -> Vec<u8> { + let mut result: Vec<u8> = vec![0; memory_size_bytes]; + + // Each `(offset, value)` pair writes one 32-byte big-endian word starting at byte `offset * 32`. + for (offset, value) in packed { + value.to_big_endian(&mut result[(offset * 32)..(offset + 1) * 32]); + } + + result +} fn to_log_query_1_3_3(log_query: LogQuery) -> LogQuery_1_3_3 { LogQuery_1_3_3 { timestamp: Timestamp_1_3_3(log_query.timestamp.0), diff --git a/core/node/consensus/Cargo.toml b/core/node/consensus/Cargo.toml index fdcc9089e339..427454221c84 100644 --- a/core/node/consensus/Cargo.toml +++ b/core/node/consensus/Cargo.toml @@ -30,7 +30,6 @@ zksync_state_keeper.workspace = true zksync_node_sync.workspace = true zksync_system_constants.workspace = true zksync_types.workspace = true -zksync_utils.workspace = true zksync_web3_decl.workspace = true zksync_state.workspace = true zksync_vm_executor.workspace = true @@ -43,12 +42,13 @@ thiserror.workspace = true tracing.workspace = true tokio.workspace = true semver.workspace = true +vise.workspace = true [dev-dependencies] zksync_node_genesis.workspace = true zksync_node_test_utils.workspace = true zksync_node_api_server.workspace = true -zksync_test_account.workspace = true +zksync_test_contracts.workspace = true test-casing.workspace = true rand.workspace = true diff --git a/core/node/consensus/src/en.rs b/core/node/consensus/src/en.rs index ec8d3c19b54a..e417b68cf2cb 100644 --- a/core/node/consensus/src/en.rs +++ b/core/node/consensus/src/en.rs @@ -17,13 +17,14 @@ use zksync_web3_decl::{ use super::{config, storage::Store, ConsensusConfig, ConsensusSecrets}; use crate::{ + metrics::METRICS, registry, storage::{self, ConnectionPool}, }; -/// If less than TEMPORARY_FETCHER_THRESHOLD certificates are missing, -/// the temporary fetcher will stop fetching blocks. -pub(crate) const TEMPORARY_FETCHER_THRESHOLD: u64 = 10; +/// Whenever more than FALLBACK_FETCHER_THRESHOLD certificates are missing, +/// the fallback fetcher is active. +pub(crate) const FALLBACK_FETCHER_THRESHOLD: u64 = 10; /// External node. pub(super) struct EN { @@ -115,11 +116,9 @@ impl EN { let store = store.clone(); async { let store = store; - self.temporary_block_fetcher(ctx, &store).await?; - tracing::info!( - "temporary block fetcher finished, switching to p2p fetching only" - ); - Ok(()) + self.fallback_block_fetcher(ctx, &store) + .await + .wrap("fallback_block_fetcher()") } }); @@ -179,7 +178,7 @@ impl EN { tracing::warn!("\ WARNING: this node is using ZKsync API synchronization, which will be deprecated soon. 
\ Please follow this instruction to switch to p2p synchronization: \ - https://github.com/matter-labs/zksync-era/blob/main/docs/guides/external-node/09_decentralization.md"); + https://github.com/matter-labs/zksync-era/blob/main/docs/guides/external-node/10_decentralization.md"); let res: ctx::Result<()> = scope::run!(ctx, |ctx, s| async { // Update sync state in the background. s.spawn_bg(self.fetch_state_loop(ctx)); @@ -191,7 +190,7 @@ impl EN { .new_payload_queue(ctx, actions, self.sync_state.clone()) .await .wrap("new_fetcher_cursor()")?; - self.fetch_blocks(ctx, &mut payload_queue, None).await + self.fetch_blocks(ctx, &mut payload_queue).await }) .await; match res { @@ -362,9 +361,14 @@ impl EN { } /// Fetches (with retries) the given block from the main node. - async fn fetch_block(&self, ctx: &ctx::Ctx, n: L2BlockNumber) -> ctx::Result { + async fn fetch_block( + &self, + ctx: &ctx::Ctx, + n: validator::BlockNumber, + ) -> ctx::Result { const RETRY_INTERVAL: time::Duration = time::Duration::seconds(5); - + let n = L2BlockNumber(n.0.try_into().context("overflow")?); + METRICS.fetch_block.inc(); loop { match ctx.wait(self.client.sync_l2_block(n, true)).await? { Ok(Some(block)) => return Ok(block.try_into()?), @@ -376,9 +380,8 @@ impl EN { } } - /// Fetches blocks from the main node directly, until the certificates - /// are backfilled. This allows for smooth transition from json RPC to p2p block syncing. - pub(crate) async fn temporary_block_fetcher( + /// Fetches blocks from the main node directly whenever the EN is lagging behind too much. + pub(crate) async fn fallback_block_fetcher( &self, ctx: &ctx::Ctx, store: &Store, @@ -386,66 +389,63 @@ impl EN { const MAX_CONCURRENT_REQUESTS: usize = 30; scope::run!(ctx, |ctx, s| async { let (send, mut recv) = ctx::channel::bounded(MAX_CONCURRENT_REQUESTS); - s.spawn(async { - let Some(mut next) = store.next_block(ctx).await? else { - return Ok(()); - }; - while store.persisted().borrow().next().0 + TEMPORARY_FETCHER_THRESHOLD < next.0 { - let n = L2BlockNumber(next.0.try_into().context("overflow")?); - self.sync_state.wait_for_main_node_block(ctx, n).await?; - send.send(ctx, s.spawn(self.fetch_block(ctx, n))).await?; + // TODO: metrics. + s.spawn::<()>(async { + let send = send; + let is_lagging = + |main| main >= store.persisted().borrow().next() + FALLBACK_FETCHER_THRESHOLD; + let mut next = store.next_block(ctx).await.wrap("next_block()")?; + loop { + // Wait until p2p syncing is lagging. + self.sync_state + .wait_for_main_node_block(ctx, is_lagging) + .await?; + // Determine the next block to fetch and wait for it to be available. + next = next.max(store.next_block(ctx).await.wrap("next_block()")?); + self.sync_state + .wait_for_main_node_block(ctx, |main| main >= next) + .await?; + // Fetch the block asynchronously. + send.send(ctx, s.spawn(self.fetch_block(ctx, next))).await?; next = next.next(); } - drop(send); - Ok(()) }); - while let Ok(block) = recv.recv_or_disconnected(ctx).await? { + loop { + let block = recv.recv(ctx).await?; store .queue_next_fetched_block(ctx, block.join(ctx).await?) .await .wrap("queue_next_fetched_block()")?; } - Ok(()) }) .await } - /// Fetches blocks from the main node in range `[cursor.next()..end)`. + /// Fetches blocks starting with `queue.next()`. 
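The fallback fetcher above and the p2p fetcher below share the same pipelining shape: a producer task spawns up to `MAX_CONCURRENT_REQUESTS` block fetches and pushes their join handles into a bounded channel, while the consumer joins the handles in send order, so blocks are still applied sequentially. A simplified sketch of the pattern with plain tokio primitives instead of `zksync_concurrency` scopes (an illustrative assumption, not the PR's code):

```rs
use tokio::{sync::mpsc, task::JoinHandle};

const MAX_CONCURRENT_REQUESTS: usize = 30;

async fn fetch_block(n: u64) -> u64 {
    // A JSON-RPC call to the main node would go here.
    n
}

async fn fetch_pipeline(mut next: u64) {
    let (send, mut recv) = mpsc::channel::<JoinHandle<u64>>(MAX_CONCURRENT_REQUESTS);
    tokio::spawn(async move {
        loop {
            // Blocks once MAX_CONCURRENT_REQUESTS fetches are in flight,
            // because the consumer hasn't drained their handles yet.
            if send.send(tokio::spawn(fetch_block(next))).await.is_err() {
                break; // consumer is gone
            }
            next += 1;
        }
    });
    while let Some(handle) = recv.recv().await {
        let _block = handle.await.expect("fetch task panicked");
        // Handles are joined in send order, so blocks arrive sequentially
        // and can be pushed straight into the payload queue here.
    }
}
```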
async fn fetch_blocks( &self, ctx: &ctx::Ctx, queue: &mut storage::PayloadQueue, - end: Option<validator::BlockNumber>, ) -> ctx::Result<()> { const MAX_CONCURRENT_REQUESTS: usize = 30; - let first = queue.next(); - let mut next = first; + let mut next = queue.next(); scope::run!(ctx, |ctx, s| async { let (send, mut recv) = ctx::channel::bounded(MAX_CONCURRENT_REQUESTS); - s.spawn(async { + s.spawn::<()>(async { let send = send; - while end.map_or(true, |end| next < end) { - let n = L2BlockNumber(next.0.try_into().context("overflow")?); - self.sync_state.wait_for_main_node_block(ctx, n).await?; - send.send(ctx, s.spawn(self.fetch_block(ctx, n))).await?; + loop { + self.sync_state + .wait_for_main_node_block(ctx, |main| main >= next) + .await?; + send.send(ctx, s.spawn(self.fetch_block(ctx, next))).await?; next = next.next(); } - Ok(()) }); - while end.map_or(true, |end| queue.next() < end) { + loop { let block = recv.recv(ctx).await?.join(ctx).await?; queue.send(block).await.context("queue.send()")?; } - Ok(()) }) - .await?; - // If fetched anything, wait for the last block to be stored persistently. - if first < queue.next() { - self.pool - .wait_for_payload(ctx, queue.next().prev().unwrap()) - .await - .wrap("wait_for_payload()")?; - } - Ok(()) + .await } } diff --git a/core/node/consensus/src/lib.rs b/core/node/consensus/src/lib.rs index 8bf078120aa9..d89aa5f5e829 100644 --- a/core/node/consensus/src/lib.rs +++ b/core/node/consensus/src/lib.rs @@ -9,6 +9,7 @@ mod abi; mod config; mod en; pub mod era; +mod metrics; mod mn; mod registry; mod storage; diff --git a/core/node/consensus/src/metrics.rs b/core/node/consensus/src/metrics.rs new file mode 100644 index 000000000000..f53bb9320917 --- /dev/null +++ b/core/node/consensus/src/metrics.rs @@ -0,0 +1,13 @@ +//! Consensus-related metrics. + +#[derive(Debug, vise::Metrics)] +#[metrics(prefix = "zksync_node_consensus")] +pub(crate) struct Metrics { + /// Number of blocks that have been fetched via JSON-RPC. + /// It is used only as a fallback when p2p syncing is disabled or falling behind, + /// so it shouldn't be increasing under normal circumstances if p2p syncing is enabled. 
+ pub fetch_block: vise::Counter, +} + +#[vise::register] +pub(super) static METRICS: vise::Global<Metrics> = vise::Global::new(); diff --git a/core/node/consensus/src/registry/testonly.rs b/core/node/consensus/src/registry/testonly.rs index 07a87e3b676e..8742d9e52c63 100644 --- a/core/node/consensus/src/registry/testonly.rs +++ b/core/node/consensus/src/registry/testonly.rs @@ -1,7 +1,7 @@ use rand::Rng; use zksync_consensus_crypto::ByteFmt; use zksync_consensus_roles::{attester, validator}; -use zksync_test_account::Account; +use zksync_test_contracts::Account; use zksync_types::{ethabi, Execute, Transaction, U256}; use super::*; @@ -74,7 +74,7 @@ impl Registry { let tx = account.get_deploy_tx( &abi::ConsensusRegistry::bytecode(), None, - zksync_test_account::TxType::L2, + zksync_test_contracts::TxType::L2, ); (Address::new(tx.address), tx.tx) } diff --git a/core/node/consensus/src/registry/tests.rs b/core/node/consensus/src/registry/tests.rs index 89afc20e1d57..15329077a651 100644 --- a/core/node/consensus/src/registry/tests.rs +++ b/core/node/consensus/src/registry/tests.rs @@ -1,7 +1,7 @@ use rand::Rng as _; use zksync_concurrency::{ctx, scope, time}; use zksync_consensus_roles::{attester, validator::testonly::Setup}; -use zksync_test_account::Account; +use zksync_test_contracts::Account; use zksync_types::ProtocolVersionId; use super::*; diff --git a/core/node/consensus/src/storage/store.rs b/core/node/consensus/src/storage/store.rs index 154509e97b14..c42e78658dc2 100644 --- a/core/node/consensus/src/storage/store.rs +++ b/core/node/consensus/src/storage/store.rs @@ -114,14 +114,12 @@ impl Store { } /// Number of the next block to queue. - pub(crate) async fn next_block( - &self, - ctx: &ctx::Ctx, - ) -> ctx::OrCanceled<Option<validator::BlockNumber>> { + pub(crate) async fn next_block(&self, ctx: &ctx::Ctx) -> ctx::Result<validator::BlockNumber> { Ok(sync::lock(ctx, &self.block_payloads) .await? .as_ref() - .map(|p| p.next())) + .context("payload_queue not set")? + .next()) } /// Queues the next block. diff --git a/core/node/consensus/src/testonly.rs b/core/node/consensus/src/testonly.rs index ef4226c915f0..225a38aee760 100644 --- a/core/node/consensus/src/testonly.rs +++ b/core/node/consensus/src/testonly.rs @@ -30,25 +30,20 @@ use zksync_state_keeper::{ executor::MainBatchExecutorFactory, io::{IoCursor, L1BatchParams, L2BlockParams}, seal_criteria::NoopSealer, - testonly::{ - fund, l1_transaction, l2_transaction, test_batch_executor::MockReadStorageFactory, - MockBatchExecutor, - }, + testonly::{fee, fund, test_batch_executor::MockReadStorageFactory, MockBatchExecutor}, AsyncRocksdbCache, OutputHandler, StateKeeperPersistence, TreeWritesPersistence, ZkSyncStateKeeper, }; -use zksync_test_account::Account; +use zksync_test_contracts::Account; use zksync_types::{ ethabi, fee_model::{BatchFeeInput, L1PeggedBatchFeeModelInput}, - L1BatchNumber, L2BlockNumber, L2ChainId, PriorityOpId, ProtocolVersionId, Transaction, + Address, Execute, L1BatchNumber, L2BlockNumber, L2ChainId, PriorityOpId, ProtocolVersionId, + Transaction, }; use zksync_web3_decl::client::{Client, DynClient, L2}; -use crate::{ - en, - storage::{ConnectionPool, Store}, -}; +use crate::{en, storage::ConnectionPool}; /// Fake StateKeeper for tests. #[derive(Debug)] @@ -318,12 +313,15 @@ impl StateKeeper { /// Pushes a new L2 block with `transactions` transactions to the `StateKeeper`. 
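The new `fetch_block` counter follows the `vise` pattern visible in this diff: a derived `Metrics` struct with a prefix, registered once through a `Global` static, then bumped from the hot path. A compact sketch of that full lifecycle with the field set trimmed down (struct name and helper are illustrative):

```rs
use vise::{Counter, Global, Metrics};

#[derive(Debug, Metrics)]
#[metrics(prefix = "zksync_node_consensus")]
struct ConsensusMetrics {
    /// Blocks fetched via the JSON-RPC fallback.
    fetch_block: Counter,
}

#[vise::register]
static METRICS: Global<ConsensusMetrics> = Global::new();

fn on_fallback_fetch() {
    // Bump on every fallback fetch; the exporter publishes the metric
    // under the `zksync_node_consensus` prefix.
    METRICS.fetch_block.inc();
}
```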
pub async fn push_random_block(&mut self, rng: &mut impl Rng, account: &mut Account) { let txs: Vec<_> = (0..rng.gen_range(3..8)) - .map(|_| match rng.gen() { - true => l2_transaction(account, 1_000_000), - false => { - let tx = l1_transaction(account, self.next_priority_op); - self.next_priority_op += 1; - tx + .map(|_| { + let execute = Execute::transfer(Address::random(), 0.into()); + match rng.gen() { + true => account.get_l2_tx_for_execute(execute, Some(fee(1_000_000))), + false => { + let tx = account.get_l1_tx(execute, self.next_priority_op.0); + self.next_priority_op += 1; + tx + } } }) .collect(); @@ -413,40 +411,6 @@ impl StateKeeper { .await } - pub async fn run_temporary_fetcher( - self, - ctx: &ctx::Ctx, - client: Box>, - ) -> ctx::Result<()> { - scope::run!(ctx, |ctx, s| async { - let payload_queue = self - .pool - .connection(ctx) - .await - .wrap("connection()")? - .new_payload_queue(ctx, self.actions_sender, self.sync_state.clone()) - .await - .wrap("new_payload_queue()")?; - let (store, runner) = Store::new( - ctx, - self.pool.clone(), - Some(payload_queue), - Some(client.clone()), - ) - .await - .wrap("Store::new()")?; - s.spawn_bg(async { Ok(runner.run(ctx).await?) }); - en::EN { - pool: self.pool.clone(), - client, - sync_state: self.sync_state.clone(), - } - .temporary_block_fetcher(ctx, &store) - .await - }) - .await - } - /// Runs consensus node for the external node. pub async fn run_consensus( self, diff --git a/core/node/consensus/src/tests/attestation.rs b/core/node/consensus/src/tests/attestation.rs index 5ee17d5e2eda..6f24fbe65b4c 100644 --- a/core/node/consensus/src/tests/attestation.rs +++ b/core/node/consensus/src/tests/attestation.rs @@ -8,7 +8,7 @@ use zksync_consensus_roles::{ validator::testonly::{Setup, SetupSpec}, }; use zksync_dal::consensus_dal; -use zksync_test_account::Account; +use zksync_test_contracts::Account; use zksync_types::ProtocolVersionId; use zksync_web3_decl::namespaces::EnNamespaceClient as _; diff --git a/core/node/consensus/src/tests/mod.rs b/core/node/consensus/src/tests/mod.rs index 663ccab49904..c7697ba8480e 100644 --- a/core/node/consensus/src/tests/mod.rs +++ b/core/node/consensus/src/tests/mod.rs @@ -11,12 +11,12 @@ use zksync_consensus_roles::{ }; use zksync_consensus_storage::{BlockStore, PersistentBlockStore}; use zksync_dal::consensus_dal; -use zksync_test_account::Account; +use zksync_test_contracts::Account; use zksync_types::ProtocolVersionId; use zksync_web3_decl::namespaces::EnNamespaceClient as _; use crate::{ - en::TEMPORARY_FETCHER_THRESHOLD, + en::FALLBACK_FETCHER_THRESHOLD, mn::run_main_node, storage::{ConnectionPool, Store}, testonly, @@ -665,7 +665,7 @@ async fn test_p2p_fetcher_backfill_certs(from_snapshot: bool, version: ProtocolV // Test temporary fetcher fetching blocks if a lot of certs are missing. 
#[test_casing(4, Product((FROM_SNAPSHOT,VERSIONS)))] #[tokio::test] -async fn test_temporary_fetcher(from_snapshot: bool, version: ProtocolVersionId) { +async fn test_fallback_fetcher(from_snapshot: bool, version: ProtocolVersionId) { zksync_concurrency::testonly::abort_on_panic(); let ctx = &ctx::test_root(&ctx::AffineClock::new(10.)); let rng = &mut ctx.rng(); @@ -705,7 +705,7 @@ async fn test_temporary_fetcher(from_snapshot: bool, version: ProtocolVersionId) s.spawn_bg(runner.run(ctx)); s.spawn_bg(node.run_fetcher(ctx, client.clone())); validator - .push_random_blocks(rng, account, TEMPORARY_FETCHER_THRESHOLD as usize + 1) + .push_random_blocks(rng, account, FALLBACK_FETCHER_THRESHOLD as usize + 1) .await; node_pool .wait_for_payload(ctx, validator.last_block()) @@ -715,58 +715,7 @@ async fn test_temporary_fetcher(from_snapshot: bool, version: ProtocolVersionId) .await .unwrap(); - tracing::info!( - "Run p2p fetcher. Blocks should be fetched by the temporary fetcher anyway." - ); - scope::run!(ctx, |ctx, s| async { - let (node, runner) = testonly::StateKeeper::new(ctx, node_pool.clone()).await?; - s.spawn_bg(runner.run(ctx)); - s.spawn_bg(node.run_consensus(ctx, client.clone(), node_cfg.clone())); - validator.push_random_blocks(rng, account, 5).await; - node_pool - .wait_for_payload(ctx, validator.last_block()) - .await?; - Ok(()) - }) - .await - .unwrap(); - Ok(()) - }) - .await - .unwrap(); -} - -// Test that temporary fetcher terminates once enough blocks have certs. -#[test_casing(4, Product((FROM_SNAPSHOT,VERSIONS)))] -#[tokio::test] -async fn test_temporary_fetcher_termination(from_snapshot: bool, version: ProtocolVersionId) { - zksync_concurrency::testonly::abort_on_panic(); - let ctx = &ctx::test_root(&ctx::AffineClock::new(10.)); - let rng = &mut ctx.rng(); - let setup = Setup::new(rng, 1); - let validator_cfg = testonly::new_configs(rng, &setup, 0)[0].clone(); - let node_cfg = validator_cfg.new_fullnode(rng); - let account = &mut Account::random(); - - scope::run!(ctx, |ctx, s| async { - tracing::info!("Spawn validator."); - let validator_pool = ConnectionPool::test(from_snapshot, version).await; - let (mut validator, runner) = - testonly::StateKeeper::new(ctx, validator_pool.clone()).await?; - s.spawn_bg(runner.run(ctx)); - s.spawn_bg(run_main_node( - ctx, - validator_cfg.config.clone(), - validator_cfg.secrets.clone(), - validator_pool.clone(), - )); - // API server needs at least 1 L1 batch to start. - validator.seal_batch().await; - let client = validator.connect(ctx).await?; - - let node_pool = ConnectionPool::test(from_snapshot, version).await; - - // Run the EN so the consensus is initialized on EN and wait for it to sync. + tracing::info!("Run p2p fetcher. Blocks should be fetched by the fallback fetcher anyway."); scope::run!(ctx, |ctx, s| async { let (node, runner) = testonly::StateKeeper::new(ctx, node_pool.clone()).await?; s.spawn_bg(runner.run(ctx)); @@ -779,12 +728,6 @@ async fn test_temporary_fetcher_termination(from_snapshot: bool, version: Protoc }) .await .unwrap(); - - // Run the temporary fetcher. It should terminate immediately, since EN is synced. 
- let (node, runner) = testonly::StateKeeper::new(ctx, node_pool.clone()).await?; - s.spawn_bg(runner.run(ctx)); - node.run_temporary_fetcher(ctx, client).await?; - Ok(()) }) .await diff --git a/core/node/consensus/src/vm.rs b/core/node/consensus/src/vm.rs index cbd4918dcee1..81d26ebc3758 100644 --- a/core/node/consensus/src/vm.rs +++ b/core/node/consensus/src/vm.rs @@ -8,7 +8,7 @@ use zksync_state::PostgresStorage; use zksync_system_constants::DEFAULT_L2_TX_GAS_PER_PUBDATA_BYTE; use zksync_types::{ethabi, fee::Fee, l2::L2Tx, AccountTreeId, L2ChainId, Nonce, U256}; use zksync_vm_executor::oneshot::{ - CallOrExecute, MainOneshotExecutor, MultiVMBaseSystemContracts, OneshotEnvParameters, + CallOrExecute, MainOneshotExecutor, MultiVmBaseSystemContracts, OneshotEnvParameters, }; use zksync_vm_interface::{ executor::OneshotExecutor, storage::StorageWithOverrides, ExecutionResult, @@ -29,7 +29,7 @@ impl VM { /// Constructs a new `VM` instance. pub async fn new(pool: ConnectionPool) -> Self { let base_system_contracts = - scope::wait_blocking(MultiVMBaseSystemContracts::load_eth_call_blocking).await; + scope::wait_blocking(MultiVmBaseSystemContracts::load_eth_call_blocking).await; Self { pool, // L2 chain ID and fee account don't seem to matter for calls, hence the use of default values. diff --git a/core/node/contract_verification_server/Cargo.toml b/core/node/contract_verification_server/Cargo.toml index 038347debc64..e6a81fe6026a 100644 --- a/core/node/contract_verification_server/Cargo.toml +++ b/core/node/contract_verification_server/Cargo.toml @@ -13,7 +13,6 @@ categories.workspace = true [dependencies] zksync_dal.workspace = true zksync_types.workspace = true -zksync_utils.workspace = true vise.workspace = true anyhow.workspace = true diff --git a/core/node/contract_verification_server/src/api_impl.rs b/core/node/contract_verification_server/src/api_impl.rs index 94be65673bad..b0336fd284b6 100644 --- a/core/node/contract_verification_server/src/api_impl.rs +++ b/core/node/contract_verification_server/src/api_impl.rs @@ -9,12 +9,12 @@ use axum::{ }; use zksync_dal::{CoreDal, DalError}; use zksync_types::{ + bytecode::BytecodeMarker, contract_verification_api::{ CompilerVersions, VerificationIncomingRequest, VerificationInfo, VerificationRequestStatus, }, Address, }; -use zksync_utils::bytecode::BytecodeMarker; use super::{api_decl::RestApi, metrics::METRICS}; diff --git a/core/node/contract_verification_server/src/tests.rs b/core/node/contract_verification_server/src/tests.rs index b7b0d3e8efb4..c5c1d88b3d0c 100644 --- a/core/node/contract_verification_server/src/tests.rs +++ b/core/node/contract_verification_server/src/tests.rs @@ -12,10 +12,10 @@ use tower::ServiceExt; use zksync_dal::{Connection, Core, CoreDal}; use zksync_node_test_utils::create_l2_block; use zksync_types::{ - contract_verification_api::CompilerVersions, get_code_key, Address, L2BlockNumber, - ProtocolVersion, StorageLog, + bytecode::{BytecodeHash, BytecodeMarker}, + contract_verification_api::CompilerVersions, + get_code_key, Address, L2BlockNumber, ProtocolVersion, StorageLog, }; -use zksync_utils::bytecode::{hash_bytecode, hash_evm_bytecode, BytecodeMarker}; use super::*; use crate::api_impl::ApiError; @@ -53,8 +53,8 @@ async fn mock_deploy_contract( kind: BytecodeMarker, ) { let bytecode_hash = match kind { - BytecodeMarker::EraVm => hash_bytecode(&[0; 32]), - BytecodeMarker::Evm => hash_evm_bytecode(&[0; 96]), + BytecodeMarker::EraVm => BytecodeHash::for_bytecode(&[0; 32]).value(), + BytecodeMarker::Evm => 
BytecodeHash::for_evm_bytecode(&[0; 96]).value(), }; let deploy_log = StorageLog::new_write_log(get_code_key(&address), bytecode_hash); storage diff --git a/core/node/da_clients/src/eigen/client.rs b/core/node/da_clients/src/eigen/client.rs index aa7b701998c6..c8fd45a7dd8b 100644 --- a/core/node/da_clients/src/eigen/client.rs +++ b/core/node/da_clients/src/eigen/client.rs @@ -11,7 +11,7 @@ use zksync_da_client::{ }; use super::{blob_info::BlobInfo, sdk::RawEigenClient}; -use crate::utils::to_non_retriable_da_error; +use crate::utils::to_retriable_da_error; /// EigenClient is a client for the Eigen DA service. /// It can be configured to use one of two dispersal methods: @@ -54,7 +54,7 @@ impl DataAvailabilityClient for EigenClient { .client .dispatch_blob(data) .await - .map_err(to_non_retriable_da_error)?; + .map_err(to_retriable_da_error)?; Ok(DispatchResponse::from(blob_id)) } diff --git a/core/node/da_clients/src/eigen/eigenda-integration.md b/core/node/da_clients/src/eigen/eigenda-integration.md index fa31ef596d96..ce9544e01025 100644 --- a/core/node/da_clients/src/eigen/eigenda-integration.md +++ b/core/node/da_clients/src/eigen/eigenda-integration.md @@ -23,7 +23,7 @@ da_client: disperser_rpc: eth_confirmation_depth: -1 eigenda_eth_rpc: - eigenda_svc_manager_address: '0xD4A7E1Bd8015057293f0D0A557088c286942e84b' + eigenda_svc_manager_address: blob_size_limit: 2097152 status_query_timeout: 1800000 # ms status_query_interval: 5 # ms @@ -34,6 +34,9 @@ da_client: chain_id: ``` +You can find the variables needed for Holesky dispersal +[here](https://github.com/Layr-Labs/eigenda-proxy/blob/main/.env.example.holesky). + Also set the private key in `etc/env/file_based/secrets.yaml`: ```yaml @@ -62,7 +65,27 @@ cargo install --path zkstack_cli/crates/zkstack --force --locked zkstack containers --observability true ``` -3. Create `eigen_da` chain +3. Temporary metrics setup (until `era-observability` changes are also merged) + +a. Set up the observability container at least once so the `era-observability` directory is cloned. + +```bash +zkstack containers --observability true +``` + +b. Add `lambda` remote to the `era-observability` project: + +```bash +cd era-observability && git remote add lambda https://github.com/lambdaclass/era-observability.git +``` + +c. Fetch and check out the `eigenda` branch: + +```bash +git fetch lambda && git checkout eigenda +``` + +4. Create `eigen_da` chain ```bash zkstack chain create \ --chain-name eigen_da \ --chain-id sequential \ --prover-mode no-proofs \ --wallet-creation localhost \ --l1-batch-commit-data-generator-mode validium \ --base-token-address 0x0000000000000000000000000000000000000001 \ --base-token-price-nominator 1 \ --base-token-price-denominator 1 \ --set-as-default false ``` -4. Initialize created ecosystem +5. Initialize created ecosystem ```bash zkstack ecosystem init \ --deploy-paymaster true \ --deploy-erc20 true \ --deploy-ecosystem true \ --l1-rpc-url http://127.0.0.1:8545 \ --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ --server-db-name=zksync_server_localhost_eigen_da \ --chain eigen_da \ --verbose ``` You may enable observability here if you want to. -5. Start the server +6. Set up the Grafana dashboard for Data Availability + +a. Get the running port of the eigen_da chain in the `chains/eigen_da/configs/general.yaml` file: + +```yaml +prometheus: + listener_port: 3414 # <- this is the port +``` + +(around line 108) + +Then modify the `era-observability/etc/prometheus/prometheus.yml` with the retrieved port: + +```yaml +- job_name: 'zksync' + scrape_interval: 5s + honor_labels: true + static_configs: + - targets: ['host.docker.internal:3312'] # <- change this to the port +``` + +b. Enable the Data Availability Grafana dashboard + +```bash +mv era-observability/additional_dashboards/EigenDA.json era-observability/dashboards/EigenDA.json +``` + +c. 
Restart the era-observability container + +```bash +docker ps --filter "label=com.docker.compose.project=era-observability" -q | xargs docker restart +``` + +(This can also be done through the Docker dashboard.) + +7. Start the server ```bash zkstack server --chain eigen_da ``` -### Testing +### Get Blobs from L1 -Modify the following flag in `core/lib/config/src/configs/da_dispatcher.rs` (then restart the server) +To rebuild the chain from the blobs sent to EigenDA, retrieve the blobs whose commitments are stored on L1 by +running: -```rs -pub const DEFAULT_USE_DUMMY_INCLUSION_DATA: bool = true; +```bash +cd get_all_blobs +cargo run ``` -And with the server running on one terminal, you can run the server integration tests on a separate terminal with the +### Testing + +With the server running on one terminal, you can run the server integration tests on a separate terminal with the following command: ```bash @@ -164,8 +226,6 @@ zkstack ecosystem init \ --l1-rpc-url $HOLESKY_RPC_URL \ --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ --server-db-name=zksync_server_holesky_eigen_da \ - --prover-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --prover-db-name=zksync_prover_holesky_eigen_da \ --chain holesky_eigen_da \ --verbose ``` diff --git a/core/node/da_clients/src/eigen/sdk.rs b/core/node/da_clients/src/eigen/sdk.rs index 6f250ba35862..0e50b73206dd 100644 --- a/core/node/da_clients/src/eigen/sdk.rs +++ b/core/node/da_clients/src/eigen/sdk.rs @@ -1,9 +1,12 @@ -use std::{str::FromStr, time::Duration}; +use std::{str::FromStr, sync::Arc, time::Duration}; use backon::{ConstantBuilder, Retryable}; use secp256k1::{ecdsa::RecoverableSignature, SecretKey}; -use tokio::{sync::mpsc, time::Instant}; -use tokio_stream::{wrappers::ReceiverStream, StreamExt}; +use tokio::{ + sync::{mpsc, Mutex}, + time::Instant, +}; +use tokio_stream::{wrappers::UnboundedReceiverStream, StreamExt}; use tonic::{ transport::{Channel, ClientTlsConfig, Endpoint}, Streaming, @@ -29,7 +32,7 @@ use crate::eigen::{ #[derive(Debug, Clone)] pub(crate) struct RawEigenClient { - client: DisperserClient<Channel>, + client: Arc<Mutex<DisperserClient<Channel>>>, private_key: SecretKey, pub config: EigenConfig, verifier: Verifier, @@ -39,14 +42,14 @@ pub(crate) const DATA_CHUNK_SIZE: usize = 32; pub(crate) const AVG_BLOCK_TIME: u64 = 12; impl RawEigenClient { - pub(crate) const BUFFER_SIZE: usize = 1000; - pub async fn new(private_key: SecretKey, config: EigenConfig) -> anyhow::Result<Self> { let endpoint = Endpoint::from_str(config.disperser_rpc.as_str())?.tls_config(ClientTlsConfig::new())?; - let client = DisperserClient::connect(endpoint) - .await - .map_err(|e| anyhow::anyhow!("Failed to connect to Disperser server: {}", e))?; + let client = Arc::new(Mutex::new( + DisperserClient::connect(endpoint) + .await + .map_err(|e| anyhow::anyhow!("Failed to connect to Disperser server: {}", e))?, + )); let verifier_config = VerifierConfig { verify_certs: true, @@ -76,13 +79,16 @@ impl RawEigenClient { account_id: String::default(), // Account Id is not used in non-authenticated mode }; - let mut client_clone = self.client.clone(); - let disperse_reply = client_clone.disperse_blob(request).await?.into_inner(); + let disperse_reply = self + .client + .lock() + .await + .disperse_blob(request) + .await? 
+ .into_inner(); let disperse_time = Instant::now(); - let blob_info = self - .await_for_inclusion(client_clone, disperse_reply) - .await?; + let blob_info = self.await_for_inclusion(disperse_reply).await?; let disperse_elapsed = Instant::now() - disperse_time; let blob_info = blob_info::BlobInfo::try_from(blob_info) @@ -123,25 +129,29 @@ impl RawEigenClient { } async fn dispatch_blob_authenticated(&self, data: Vec) -> anyhow::Result { - let mut client_clone = self.client.clone(); - let (tx, rx) = mpsc::channel(Self::BUFFER_SIZE); + let (tx, rx) = mpsc::unbounded_channel(); let disperse_time = Instant::now(); - let response_stream = client_clone.disperse_blob_authenticated(ReceiverStream::new(rx)); - let padded_data = convert_by_padding_empty_byte(&data); // 1. send DisperseBlobRequest - self.disperse_data(padded_data, &tx).await?; + let padded_data = convert_by_padding_empty_byte(&data); + self.disperse_data(padded_data, &tx)?; // this await is blocked until the first response on the stream, so we only await after sending the `DisperseBlobRequest` - let mut response_stream = response_stream.await?.into_inner(); + let mut response_stream = self + .client + .clone() + .lock() + .await + .disperse_blob_authenticated(UnboundedReceiverStream::new(rx)) + .await?; + let response_stream = response_stream.get_mut(); // 2. receive BlobAuthHeader - let blob_auth_header = self.receive_blob_auth_header(&mut response_stream).await?; + let blob_auth_header = self.receive_blob_auth_header(response_stream).await?; // 3. sign and send BlobAuthHeader - self.submit_authentication_data(blob_auth_header.clone(), &tx) - .await?; + self.submit_authentication_data(blob_auth_header.clone(), &tx)?; // 4. receive DisperseBlobReply let reply = response_stream @@ -157,9 +167,7 @@ impl RawEigenClient { }; // 5. poll for blob status until it reaches the Confirmed state - let blob_info = self - .await_for_inclusion(client_clone, disperse_reply) - .await?; + let blob_info = self.await_for_inclusion(disperse_reply).await?; let blob_info = blob_info::BlobInfo::try_from(blob_info) .map_err(|e| anyhow::anyhow!("Failed to convert blob info: {}", e))?; @@ -189,10 +197,10 @@ impl RawEigenClient { } } - async fn disperse_data( + fn disperse_data( &self, data: Vec, - tx: &mpsc::Sender, + tx: &mpsc::UnboundedSender, ) -> anyhow::Result<()> { let req = disperser::AuthenticatedRequest { payload: Some(DisperseRequest(disperser::DisperseBlobRequest { @@ -203,14 +211,13 @@ impl RawEigenClient { }; tx.send(req) - .await .map_err(|e| anyhow::anyhow!("Failed to send DisperseBlobRequest: {}", e)) } - async fn submit_authentication_data( + fn submit_authentication_data( &self, blob_auth_header: BlobAuthHeader, - tx: &mpsc::Sender, + tx: &mpsc::UnboundedSender, ) -> anyhow::Result<()> { // TODO: replace challenge_parameter with actual auth header when it is available let digest = zksync_basic_types::web3::keccak256( @@ -234,7 +241,6 @@ impl RawEigenClient { }; tx.send(req) - .await .map_err(|e| anyhow::anyhow!("Failed to send AuthenticationData: {}", e)) } @@ -264,7 +270,6 @@ impl RawEigenClient { async fn await_for_inclusion( &self, - client: DisperserClient, disperse_blob_reply: DisperseBlobReply, ) -> anyhow::Result { let polling_request = disperser::BlobStatusRequest { @@ -272,8 +277,10 @@ impl RawEigenClient { }; let blob_info = (|| async { - let mut client_clone = client.clone(); - let resp = client_clone + let resp = self + .client + .lock() + .await .get_blob_status(polling_request.clone()) .await? 
.into_inner(); @@ -340,7 +347,8 @@ impl RawEigenClient { .batch_header_hash; let get_response = self .client - .clone() + .lock() + .await .retrieve_blob(disperser::RetrieveBlobRequest { batch_header_hash, blob_index, diff --git a/core/node/da_dispatcher/Cargo.toml b/core/node/da_dispatcher/Cargo.toml index 8a10d6813a5a..57d00cabaaa8 100644 --- a/core/node/da_dispatcher/Cargo.toml +++ b/core/node/da_dispatcher/Cargo.toml @@ -14,7 +14,6 @@ categories.workspace = true [dependencies] vise.workspace = true zksync_dal.workspace = true -zksync_utils.workspace = true zksync_config.workspace = true zksync_types.workspace = true zksync_da_client.workspace = true diff --git a/core/node/da_dispatcher/src/da_dispatcher.rs b/core/node/da_dispatcher/src/da_dispatcher.rs index f8e6f6b31723..e37d9ac60929 100644 --- a/core/node/da_dispatcher/src/da_dispatcher.rs +++ b/core/node/da_dispatcher/src/da_dispatcher.rs @@ -1,10 +1,19 @@ -use std::{future::Future, time::Duration}; +use std::{collections::HashSet, future::Future, sync::Arc, time::Duration}; use anyhow::Context; use chrono::Utc; use rand::Rng; -use tokio::sync::watch::Receiver; -use zksync_config::DADispatcherConfig; +use tokio::{ + sync::{ + watch::{self, Receiver}, + Mutex, Notify, + }, + task::JoinSet, +}; +use zksync_config::{ + configs::da_dispatcher::{DEFAULT_MAX_CONCURRENT_REQUESTS, DEFAULT_POLLING_INTERVAL_MS}, + DADispatcherConfig, +}; use zksync_da_client::{ types::{DAError, InclusionData}, DataAvailabilityClient, @@ -19,6 +28,7 @@ pub struct DataAvailabilityDispatcher { client: Box, pool: ConnectionPool, config: DADispatcherConfig, + request_semaphore: Arc, } impl DataAvailabilityDispatcher { @@ -27,157 +37,284 @@ impl DataAvailabilityDispatcher { config: DADispatcherConfig, client: Box, ) -> Self { + let request_semaphore = Arc::new(tokio::sync::Semaphore::new( + config + .max_concurrent_requests + .unwrap_or(DEFAULT_MAX_CONCURRENT_REQUESTS) as usize, + )); Self { pool, config, client, + request_semaphore, } } - pub async fn run(self, mut stop_receiver: Receiver) -> anyhow::Result<()> { + pub async fn run(self, stop_receiver: Receiver) -> anyhow::Result<()> { + let subtasks = futures::future::join( + async { + if let Err(err) = self.dispatch_batches(stop_receiver.clone()).await { + tracing::error!("dispatch error {err:?}"); + } + }, + async { + if let Err(err) = self.inclusion_poller(stop_receiver.clone()).await { + tracing::error!("poll_for_inclusion error {err:?}"); + } + }, + ); + + tokio::select! 
{ + _ = subtasks => {}, + } + Ok(()) + } + + async fn dispatch_batches(&self, stop_receiver: Receiver) -> anyhow::Result<()> { + let next_expected_batch = Arc::new(Mutex::new(None)); + let mut pending_batches = HashSet::new(); + let mut dispatcher_tasks: JoinSet> = JoinSet::new(); + + let (shutdown_tx, shutdown_rx) = watch::channel(false); + let notifier = Arc::new(Notify::new()); loop { - if *stop_receiver.borrow() { + if *stop_receiver.clone().borrow() { + tracing::info!("Stop signal received, da_dispatcher is shutting down"); + break; + } + if *shutdown_rx.borrow() { + tracing::error!("A blob dispatch failed, da_dispatcher is shutting down"); break; } - let subtasks = futures::future::join( - async { - if let Err(err) = self.dispatch().await { - tracing::error!("dispatch error {err:?}"); - } - }, - async { - if let Err(err) = self.poll_for_inclusion().await { - tracing::error!("poll_for_inclusion error {err:?}"); - } - }, - ); + let mut conn = self.pool.connection_tagged("da_dispatcher").await?; + let batches = conn + .data_availability_dal() + .get_ready_for_da_dispatch_l1_batches(self.config.max_rows_to_dispatch() as usize) + .await?; + drop(conn); + let shutdown_tx = shutdown_tx.clone(); + for batch in batches { + if pending_batches.contains(&batch.l1_batch_number.0) { + continue; + } - tokio::select! { - _ = subtasks => {}, - _ = stop_receiver.changed() => { - break; + // This should only happen once. + // We can't assume that the first batch is always 1 because the dispatcher can be restarted + // and resume from a different batch. + let mut next_expected_batch_lock = next_expected_batch.lock().await; + if next_expected_batch_lock.is_none() { + next_expected_batch_lock.replace(batch.l1_batch_number); } + drop(next_expected_batch_lock); + + pending_batches.insert(batch.l1_batch_number.0); + METRICS.blobs_pending_dispatch.inc_by(1); + + let request_semaphore = self.request_semaphore.clone(); + let client = self.client.clone(); + let config = self.config.clone(); + let notifier = notifier.clone(); + let shutdown_rx = shutdown_rx.clone(); + let shutdown_tx = shutdown_tx.clone(); + let next_expected_batch = next_expected_batch.clone(); + let pool = self.pool.clone(); + dispatcher_tasks.spawn(async move { + let permit = request_semaphore.clone().acquire_owned().await?; + let dispatch_latency = METRICS.blob_dispatch_latency.start(); + + let result = retry(config.max_retries(), batch.l1_batch_number, || { + client.dispatch_blob(batch.l1_batch_number.0, batch.pubdata.clone()) + }) + .await; + drop(permit); + if result.is_err() { + shutdown_tx.clone().send(true)?; + notifier.notify_waiters(); + }; + + let dispatch_response = result.with_context(|| { + format!( + "failed to dispatch a blob with batch_number: {}, pubdata_len: {}", + batch.l1_batch_number, + batch.pubdata.len() + ) + })?; + let dispatch_latency_duration = dispatch_latency.observe(); + + let sent_at = Utc::now().naive_utc(); + + // Before saving the blob in the database, we need to be sure that we are doing it + // in the correct order. 
+ while next_expected_batch + .lock() + .await + .map_or(true, |next_expected_batch| { + batch.l1_batch_number > next_expected_batch + }) + { + if *shutdown_rx.clone().borrow() { + return Err(anyhow::anyhow!( + "Batch {} failed to disperse: Shutdown signal received", + batch.l1_batch_number + )); + } + notifier.clone().notified().await; + } + + let mut conn = pool.connection_tagged("da_dispatcher").await?; + conn.data_availability_dal() + .insert_l1_batch_da( + batch.l1_batch_number, + dispatch_response.blob_id.as_str(), + sent_at, + ) + .await?; + drop(conn); + + // Update the next expected batch number + next_expected_batch + .lock() + .await + .replace(batch.l1_batch_number + 1); + notifier.notify_waiters(); + + METRICS + .last_dispatched_l1_batch + .set(batch.l1_batch_number.0 as usize); + METRICS.blob_size.observe(batch.pubdata.len()); + METRICS.blobs_dispatched.inc_by(1); + METRICS.blobs_pending_dispatch.dec_by(1); + tracing::info!( + "Dispatched a DA for batch_number: {}, pubdata_size: {}, dispatch_latency: {dispatch_latency_duration:?}", + batch.l1_batch_number, + batch.pubdata.len(), + ); + Ok(()) + }); } - if tokio::time::timeout(self.config.polling_interval(), stop_receiver.changed()) - .await - .is_ok() - { - break; + // Sleep so we prevent hammering the database + tokio::time::sleep(Duration::from_millis( + self.config + .polling_interval_ms + .unwrap_or(DEFAULT_POLLING_INTERVAL_MS) as u64, + )) + .await; + } + + while let Some(next) = dispatcher_tasks.join_next().await { + match next { + Ok(value) => match value { + Ok(_) => (), + Err(err) => { + dispatcher_tasks.shutdown().await; + return Err(err); + } + }, + Err(err) => { + dispatcher_tasks.shutdown().await; + return Err(err.into()); + } } } - tracing::info!("Stop signal received, da_dispatcher is shutting down"); Ok(()) } - /// Dispatches the blobs to the data availability layer, and saves the blob_id in the database. 
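The core trick in the rewritten `dispatch_batches` is that dispatch runs concurrently while database writes stay strictly ordered: each task spins on the shared `next_expected_batch` cursor and a `Notify` until its batch is the next one due. A stripped-down sketch of that gate, using plain `u32` batch numbers and omitting the seeding of the first expected batch:

```rs
use std::sync::Arc;
use tokio::sync::{Mutex, Notify};

async fn commit_in_order(
    batch_number: u32,
    next_expected: Arc<Mutex<Option<u32>>>,
    notifier: Arc<Notify>,
) {
    // Park until this batch is the next expected one; every completed task
    // wakes all waiters so they can re-check the condition.
    while next_expected
        .lock()
        .await
        .map_or(true, |next| batch_number > next)
    {
        notifier.notified().await;
    }

    // ... persist the dispatch result for `batch_number` here ...

    // Advance the cursor and wake the other waiting tasks.
    next_expected.lock().await.replace(batch_number + 1);
    notifier.notify_waiters();
}
```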
- async fn dispatch(&self) -> anyhow::Result<()> { - let mut conn = self.pool.connection_tagged("da_dispatcher").await?; - let batches = conn - .data_availability_dal() - .get_ready_for_da_dispatch_l1_batches(self.config.max_rows_to_dispatch() as usize) - .await?; - drop(conn); - - for batch in batches { - let dispatch_latency = METRICS.blob_dispatch_latency.start(); - let dispatch_response = retry(self.config.max_retries(), batch.l1_batch_number, || { - self.client - .dispatch_blob(batch.l1_batch_number.0, batch.pubdata.clone()) - }) - .await - .with_context(|| { - format!( - "failed to dispatch a blob with batch_number: {}, pubdata_len: {}", - batch.l1_batch_number, - batch.pubdata.len() - ) - })?; - let dispatch_latency_duration = dispatch_latency.observe(); - - let sent_at = Utc::now().naive_utc(); + async fn inclusion_poller(&self, stop_receiver: Receiver) -> anyhow::Result<()> { + let mut pending_inclusions = HashSet::new(); + let mut inclusion_tasks: JoinSet> = JoinSet::new(); + + loop { + if *stop_receiver.borrow() { + break; + } let mut conn = self.pool.connection_tagged("da_dispatcher").await?; - conn.data_availability_dal() - .insert_l1_batch_da( - batch.l1_batch_number, - dispatch_response.blob_id.as_str(), - sent_at, - ) + let pending_blobs = conn + .data_availability_dal() + .get_da_blob_ids_awaiting_inclusion() .await?; drop(conn); - METRICS - .last_dispatched_l1_batch - .set(batch.l1_batch_number.0 as usize); - METRICS.blob_size.observe(batch.pubdata.len()); - tracing::info!( - "Dispatched a DA for batch_number: {}, pubdata_size: {}, dispatch_latency: {dispatch_latency_duration:?}", - batch.l1_batch_number, - batch.pubdata.len(), - ); - } + for blob_info in pending_blobs.into_iter().flatten() { + if pending_inclusions.contains(&blob_info.blob_id) { + continue; + } + pending_inclusions.insert(blob_info.blob_id.clone()); - Ok(()) - } + let client = self.client.clone(); + let config = self.config.clone(); + let pool = self.pool.clone(); + let request_semaphore = self.request_semaphore.clone(); + inclusion_tasks.spawn(async move { + let inclusion_data = if config.use_dummy_inclusion_data() { + // if the inclusion verification is disabled, we don't need to wait for the inclusion + // data before committing the batch, so simply return an empty vector + Some(InclusionData { data: vec![] }) + } else { + let _permit = request_semaphore.acquire_owned().await?; + client + .get_inclusion_data(blob_info.blob_id.as_str()) + .await + .with_context(|| { + format!( + "failed to get inclusion data for blob_id: {}, batch_number: {}", + blob_info.blob_id, blob_info.l1_batch_number + ) + })? + }; + + let Some(inclusion_data) = inclusion_data else { + return Ok(()); + }; + + let mut conn = pool.connection_tagged("da_dispatcher").await?; + conn.data_availability_dal() + .save_l1_batch_inclusion_data( + L1BatchNumber(blob_info.l1_batch_number.0), + inclusion_data.data.as_slice(), + ) + .await?; + drop(conn); + + let inclusion_latency = Utc::now().signed_duration_since(blob_info.sent_at); + if let Ok(latency) = inclusion_latency.to_std() { + METRICS.inclusion_latency.observe(latency); + } + METRICS + .last_included_l1_batch + .set(blob_info.l1_batch_number.0 as usize); + METRICS.blobs_included.inc_by(1); - /// Polls the data availability layer for inclusion data, and saves it in the database. 
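Both loops share one `Semaphore` so that blob dispatch and inclusion polling together never exceed the configured `max_concurrent_requests` in-flight DA calls (falling back to `DEFAULT_MAX_CONCURRENT_REQUESTS` when unset). A sketch of the capping pattern; the limit of 100 and the request body are illustrative assumptions, not values from this PR:

```rs
use std::sync::Arc;
use tokio::sync::Semaphore;

async fn capped_request(semaphore: Arc<Semaphore>, blob_id: String) -> anyhow::Result<String> {
    // Acquire a permit before issuing the request; it is released when
    // `_permit` is dropped at the end of the call.
    let _permit = semaphore.acquire_owned().await?;
    // A real DA call (e.g. fetching inclusion data for `blob_id`) would go here.
    Ok(blob_id)
}

fn make_limiter() -> Arc<Semaphore> {
    Arc::new(Semaphore::new(100)) // assumed cap, not taken from the PR
}
```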
- async fn poll_for_inclusion(&self) -> anyhow::Result<()> { - let mut conn = self.pool.connection_tagged("da_dispatcher").await?; - let blob_info = conn - .data_availability_dal() - .get_first_da_blob_awaiting_inclusion() - .await?; - drop(conn); - - let Some(blob_info) = blob_info else { - return Ok(()); - }; - - let inclusion_data = if self.config.use_dummy_inclusion_data() { - self.client - .get_inclusion_data(blob_info.blob_id.as_str()) - .await - .with_context(|| { - format!( - "failed to get inclusion data for blob_id: {}, batch_number: {}", - blob_info.blob_id, blob_info.l1_batch_number - ) - })? - } else { - // if the inclusion verification is disabled, we don't need to wait for the inclusion - // data before committing the batch, so simply return an empty vector - Some(InclusionData { data: vec![] }) - }; - - let Some(inclusion_data) = inclusion_data else { - return Ok(()); - }; - - let mut conn = self.pool.connection_tagged("da_dispatcher").await?; - conn.data_availability_dal() - .save_l1_batch_inclusion_data( - L1BatchNumber(blob_info.l1_batch_number.0), - inclusion_data.data.as_slice(), - ) - .await?; - drop(conn); - - let inclusion_latency = Utc::now().signed_duration_since(blob_info.sent_at); - if let Ok(latency) = inclusion_latency.to_std() { - METRICS.inclusion_latency.observe(latency); + tracing::info!( + "Received an inclusion data for a batch_number: {}, inclusion_latency_seconds: {}", + blob_info.l1_batch_number, + inclusion_latency.num_seconds() + ); + Ok(()) + }); + } + + // Sleep so we prevent hammering the database + tokio::time::sleep(Duration::from_millis( + self.config + .polling_interval_ms + .unwrap_or(DEFAULT_POLLING_INTERVAL_MS) as u64, + )) + .await; + } + + while let Some(next) = inclusion_tasks.join_next().await { + match next { + Ok(_) => (), + Err(e) => { + inclusion_tasks.shutdown().await; + return Err(e.into()); + } + } } - METRICS - .last_included_l1_batch - .set(blob_info.l1_batch_number.0 as usize); - - tracing::info!( - "Received an inclusion data for a batch_number: {}, inclusion_latency_seconds: {}", - blob_info.l1_batch_number, - inclusion_latency.num_seconds() - ); Ok(()) } diff --git a/core/node/da_dispatcher/src/metrics.rs b/core/node/da_dispatcher/src/metrics.rs index 67ac5ed68222..4c21e556abe1 100644 --- a/core/node/da_dispatcher/src/metrics.rs +++ b/core/node/da_dispatcher/src/metrics.rs @@ -19,6 +19,12 @@ pub(super) struct DataAvailabilityDispatcherMetrics { /// Buckets are bytes ranging from 1 KB to 16 MB, which has to satisfy all blob size values. #[metrics(buckets = Buckets::exponential(1_024.0..=16.0 * 1_024.0 * 1_024.0, 2.0), unit = Unit::Bytes)] pub blob_size: Histogram, + /// Amount of pending blobs to be dispatched. + pub blobs_pending_dispatch: Gauge, + /// Total number of blobs dispatched. + pub blobs_dispatched: Gauge, + /// Total number of blobs included. + pub blobs_included: Gauge, /// Number of transactions resent by the DA dispatcher. 
#[metrics(buckets = Buckets::linear(0.0..=10.0, 1.0))] diff --git a/core/node/eth_sender/Cargo.toml b/core/node/eth_sender/Cargo.toml index a7aa88c3550e..a33536baa986 100644 --- a/core/node/eth_sender/Cargo.toml +++ b/core/node/eth_sender/Cargo.toml @@ -17,7 +17,6 @@ zksync_dal.workspace = true zksync_config.workspace = true zksync_contracts.workspace = true zksync_eth_client.workspace = true -zksync_utils.workspace = true zksync_l1_contract_interface.workspace = true zksync_object_store.workspace = true zksync_prover_interface.workspace = true diff --git a/core/node/eth_sender/src/eth_tx_manager.rs b/core/node/eth_sender/src/eth_tx_manager.rs index 7de91a3b7736..6992bea1007c 100644 --- a/core/node/eth_sender/src/eth_tx_manager.rs +++ b/core/node/eth_sender/src/eth_tx_manager.rs @@ -1,4 +1,7 @@ -use std::{sync::Arc, time::Duration}; +use std::{ + sync::Arc, + time::{Duration, SystemTime}, +}; use tokio::sync::watch; use zksync_config::configs::eth_sender::SenderConfig; @@ -9,7 +12,6 @@ use zksync_eth_client::{ use zksync_node_fee_model::l1_gas_price::TxParamsProvider; use zksync_shared_metrics::BlockL1Stage; use zksync_types::{eth_sender::EthTx, Address, L1BlockNumber, H256, U256}; -use zksync_utils::time::seconds_since_epoch; use super::{metrics::METRICS, EthSenderError}; use crate::{ @@ -501,9 +503,13 @@ impl EthTxManager { ); let tx_type_label = tx.tx_type.into(); METRICS.l1_gas_used[&tx_type_label].observe(gas_used.low_u128() as f64); - METRICS.l1_tx_mined_latency[&tx_type_label].observe(Duration::from_secs( - seconds_since_epoch() - tx.created_at_timestamp, - )); + + let duration_since_epoch = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .expect("incorrect system time"); + let tx_latency = + duration_since_epoch.saturating_sub(Duration::from_secs(tx.created_at_timestamp)); + METRICS.l1_tx_mined_latency[&tx_type_label].observe(tx_latency); let sent_at_block = storage .eth_sender_dal() diff --git a/core/node/eth_sender/src/metrics.rs b/core/node/eth_sender/src/metrics.rs index 462fe3ed6e59..571837036045 100644 --- a/core/node/eth_sender/src/metrics.rs +++ b/core/node/eth_sender/src/metrics.rs @@ -1,12 +1,14 @@ //! Metrics for the Ethereum sender component. 
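The eth-sender change above drops `zksync_utils::time::seconds_since_epoch()` in favor of computing latency directly from `SystemTime`, with `saturating_sub` guarding against stored timestamps that are ahead of the local clock. The pattern, isolated:

```rs
use std::time::{Duration, SystemTime};

/// Time elapsed since a stored UNIX timestamp (in seconds), clamped to zero
/// if the timestamp is in the future relative to the local clock.
fn latency_since(created_at_timestamp: u64) -> Duration {
    let since_epoch = SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .expect("incorrect system time");
    since_epoch.saturating_sub(Duration::from_secs(created_at_timestamp))
}
```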
-use std::{fmt, time::Duration}; +use std::{ + fmt, + time::{Duration, SystemTime}, +}; use vise::{Buckets, Counter, EncodeLabelSet, EncodeLabelValue, Family, Gauge, Histogram, Metrics}; use zksync_dal::{Connection, Core, CoreDal}; use zksync_shared_metrics::{BlockL1Stage, BlockStage, APP_METRICS}; use zksync_types::{aggregated_operations::AggregatedActionType, eth_sender::EthTx}; -use zksync_utils::time::seconds_since_epoch; use crate::abstract_l1_interface::{L1BlockNumbers, OperatorType}; @@ -143,10 +145,13 @@ impl EthSenderMetrics { return; } + let duration_since_epoch = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .expect("incorrect system time"); for statistics in l1_batches_statistics { - APP_METRICS.block_latency[&stage].observe(Duration::from_secs( - seconds_since_epoch() - statistics.timestamp, - )); + let block_latency = + duration_since_epoch.saturating_sub(Duration::from_secs(statistics.timestamp)); + APP_METRICS.block_latency[&stage].observe(block_latency); APP_METRICS.processed_txs[&stage.into()] .inc_by(statistics.l2_tx_count as u64 + statistics.l1_tx_count as u64); APP_METRICS.processed_l1_txs[&stage.into()].inc_by(statistics.l1_tx_count as u64); diff --git a/core/node/eth_sender/src/tests.rs b/core/node/eth_sender/src/tests.rs index 8e5032a69cfc..6a21767f4ea3 100644 --- a/core/node/eth_sender/src/tests.rs +++ b/core/node/eth_sender/src/tests.rs @@ -130,6 +130,7 @@ pub(crate) fn default_l1_batch_metadata() -> L1BatchMetadata { local_root: Some(H256::default()), aggregation_root: Some(H256::default()), da_inclusion_data: Some(vec![]), + da_blob_id: Some(vec![]), } } diff --git a/core/node/genesis/Cargo.toml b/core/node/genesis/Cargo.toml index 71c4c45e9e38..d625d7186bdf 100644 --- a/core/node/genesis/Cargo.toml +++ b/core/node/genesis/Cargo.toml @@ -20,7 +20,6 @@ zksync_contracts.workspace = true zksync_eth_client.workspace = true zksync_merkle_tree.workspace = true zksync_system_constants.workspace = true -zksync_utils.workspace = true tokio = { workspace = true, features = ["time"] } anyhow.workspace = true diff --git a/core/node/genesis/src/lib.rs b/core/node/genesis/src/lib.rs index 82732342b407..0a0e77d97f95 100644 --- a/core/node/genesis/src/lib.rs +++ b/core/node/genesis/src/lib.rs @@ -17,16 +17,17 @@ use zksync_multivm::utils::get_max_gas_per_pubdata_byte; use zksync_system_constants::PRIORITY_EXPIRATION; use zksync_types::{ block::{BlockGasCount, DeployedContract, L1BatchHeader, L2BlockHasher, L2BlockHeader}, + bytecode::BytecodeHash, commitment::{CommitmentInput, L1BatchCommitment}, fee_model::BatchFeeInput, protocol_upgrade::decode_set_chain_id_event, protocol_version::{L1VerifierConfig, ProtocolSemanticVersion}, system_contracts::get_system_smart_contracts, + u256_to_h256, web3::{BlockNumber, FilterBuilder}, AccountTreeId, Address, Bloom, L1BatchNumber, L1ChainId, L2BlockNumber, L2ChainId, ProtocolVersion, ProtocolVersionId, StorageKey, H256, U256, }; -use zksync_utils::{bytecode::hash_bytecode, u256_to_h256}; use crate::utils::{ add_eth_token, get_deduped_log_queries, get_storage_logs, @@ -446,7 +447,12 @@ pub async fn create_genesis_l1_batch( let factory_deps = system_contracts .iter() - .map(|c| (hash_bytecode(&c.bytecode), c.bytecode.clone())) + .map(|c| { + ( + BytecodeHash::for_bytecode(&c.bytecode).value(), + c.bytecode.clone(), + ) + }) .collect(); insert_base_system_contracts_to_factory_deps(&mut transaction, base_system_contracts).await?; diff --git a/core/node/genesis/src/utils.rs b/core/node/genesis/src/utils.rs index 
6042513537cd..a51f49a166a2 100644 --- a/core/node/genesis/src/utils.rs +++ b/core/node/genesis/src/utils.rs @@ -5,18 +5,19 @@ use zksync_contracts::BaseSystemContracts; use zksync_dal::{Connection, Core, CoreDal}; use zksync_multivm::{ circuit_sequencer_api_latest::sort_storage_access::sort_storage_access_queries, - zk_evm_latest::aux_structures::{LogQuery as MultiVmLogQuery, Timestamp as MultiVMTimestamp}, + zk_evm_latest::aux_structures::{LogQuery as MultiVmLogQuery, Timestamp as MultiVmTimestamp}, }; use zksync_system_constants::{DEFAULT_ERA_CHAIN_ID, ETHEREUM_ADDRESS}; use zksync_types::{ block::{DeployedContract, L1BatchTreeData}, + bytecode::BytecodeHash, commitment::L1BatchCommitment, - get_code_key, get_known_code_key, get_system_context_init_logs, + get_code_key, get_known_code_key, get_system_context_init_logs, h256_to_u256, tokens::{TokenInfo, TokenMetadata}, + u256_to_h256, zk_evm_types::{LogQuery, Timestamp}, AccountTreeId, L1BatchNumber, L2BlockNumber, L2ChainId, StorageKey, StorageLog, H256, }; -use zksync_utils::{be_words_to_bytes, bytecode::hash_bytecode, h256_to_u256, u256_to_h256}; use crate::GenesisError; @@ -50,7 +51,7 @@ pub(super) fn get_storage_logs(system_contracts: &[DeployedContract]) -> Vec = system_contracts .iter() .map(|contract| { - let hash = hash_bytecode(&contract.bytecode); + let hash = BytecodeHash::for_bytecode(&contract.bytecode).value(); let known_code_key = get_known_code_key(&hash); let marked_known_value = H256::from_low_u64_be(1u64); @@ -62,7 +63,7 @@ pub(super) fn get_storage_logs(system_contracts: &[DeployedContract]) -> Vec = system_contracts .iter() .map(|contract| { - let hash = hash_bytecode(&contract.bytecode); + let hash = BytecodeHash::for_bytecode(&contract.bytecode).value(); let code_key = get_code_key(contract.account_id.address()); StorageLog::new_write_log(code_key, hash) }) @@ -83,7 +84,7 @@ pub(super) fn get_deduped_log_queries(storage_logs: &[StorageLog]) -> Vec, + Json(request): Json, + ) -> Json { + let latency = API_METRICS.latency[&MerkleTreeApiMethod::GetBogusStaleKeys].start(); + let stale_keys = this.clone().bogus_stale_keys(request.l1_batch_number).await; + let stale_keys = stale_keys.into_iter().map(HexNodeKey).collect(); + latency.observe(); + Json(StaleKeysResponse { stale_keys }) + } + async fn create_api_server( self, bind_address: &SocketAddr, @@ -502,6 +512,10 @@ impl AsyncTreeReader { "/debug/stale-keys", routing::post(Self::get_stale_keys_handler), ) + .route( + "/debug/stale-keys/bogus", + routing::post(Self::bogus_stale_keys_handler), + ) .with_state(self); let listener = tokio::net::TcpListener::bind(bind_address) diff --git a/core/node/metadata_calculator/src/api_server/tests.rs b/core/node/metadata_calculator/src/api_server/tests.rs index 815522a4cd8e..9bb994cb4163 100644 --- a/core/node/metadata_calculator/src/api_server/tests.rs +++ b/core/node/metadata_calculator/src/api_server/tests.rs @@ -96,6 +96,21 @@ async fn merkle_tree_api() { let raw_stale_keys_response: serde_json::Value = raw_stale_keys_response.json().await.unwrap(); assert_raw_stale_keys_response(&raw_stale_keys_response); + let raw_stale_keys_response = api_client + .inner + .post(format!("http://{local_addr}/debug/stale-keys/bogus")) + .json(&serde_json::json!({ "l1_batch_number": 1 })) + .send() + .await + .unwrap() + .error_for_status() + .unwrap(); + let raw_stale_keys_response: serde_json::Value = raw_stale_keys_response.json().await.unwrap(); + assert_eq!( + raw_stale_keys_response, + serde_json::json!({ "stale_keys": [] }) + ); + // 
Stop the calculator and the tree API server. stop_sender.send_replace(true); api_server_task.await.unwrap().unwrap(); diff --git a/core/node/metadata_calculator/src/helpers.rs b/core/node/metadata_calculator/src/helpers.rs index 3f370afaf77e..b8d02067f8ea 100644 --- a/core/node/metadata_calculator/src/helpers.rs +++ b/core/node/metadata_calculator/src/helpers.rs @@ -22,6 +22,7 @@ use zksync_health_check::{CheckHealth, Health, HealthStatus, ReactiveHealthCheck use zksync_merkle_tree::{ domain::{TreeMetadata, ZkSyncTree, ZkSyncTreeReader}, recovery::{MerkleTreeRecovery, PersistenceThreadHandle}, + repair::StaleKeysRepairTask, unstable::{NodeKey, RawNode}, Database, Key, MerkleTreeColumnFamily, NoVersionError, RocksDBWrapper, TreeEntry, TreeEntryWithProof, TreeInstruction, @@ -420,6 +421,19 @@ impl AsyncTreeReader { .await .unwrap() } + + pub(crate) async fn bogus_stale_keys(self, l1_batch_number: L1BatchNumber) -> Vec { + let version = l1_batch_number.0.into(); + tokio::task::spawn_blocking(move || { + StaleKeysRepairTask::bogus_stale_keys(self.inner.db(), version) + }) + .await + .unwrap() + } + + pub(crate) fn into_db(self) -> RocksDBWrapper { + self.inner.into_db() + } } /// Version of async tree reader that holds a weak reference to RocksDB. Used in [`MerkleTreeHealthCheck`]. diff --git a/core/node/metadata_calculator/src/lib.rs b/core/node/metadata_calculator/src/lib.rs index 5c64330a0e7d..dddb53b4c52f 100644 --- a/core/node/metadata_calculator/src/lib.rs +++ b/core/node/metadata_calculator/src/lib.rs @@ -26,6 +26,7 @@ use self::{ pub use self::{ helpers::{AsyncTreeReader, LazyAsyncTreeReader, MerkleTreeInfo}, pruning::MerkleTreePruningTask, + repair::StaleKeysRepairTask, }; use crate::helpers::create_readonly_db; @@ -34,6 +35,7 @@ mod helpers; mod metrics; mod pruning; mod recovery; +mod repair; #[cfg(test)] pub(crate) mod tests; mod updater; @@ -203,6 +205,11 @@ impl MetadataCalculator { MerkleTreePruningTask::new(pruning_handles, self.pool.clone(), poll_interval) } + /// This method should be called once. + pub fn stale_keys_repair_task(&self) -> StaleKeysRepairTask { + StaleKeysRepairTask::new(self.tree_reader()) + } + async fn create_tree(&self) -> anyhow::Result { self.health_updater .update(MerkleTreeHealth::Initialization.into()); diff --git a/core/node/metadata_calculator/src/metrics.rs b/core/node/metadata_calculator/src/metrics.rs index 7eb49b95afd4..c6d7094ef839 100644 --- a/core/node/metadata_calculator/src/metrics.rs +++ b/core/node/metadata_calculator/src/metrics.rs @@ -1,6 +1,6 @@ //! Metrics for `MetadataCalculator`. 
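The new `/debug/stale-keys/bogus` route added in the hunks above accepts the same request body as the existing `/debug/stale-keys` endpoint. A hedged client-side sketch, assuming a tree API server is already listening on `addr` (the port is configurable; `3072` in the usage comment is an assumption):

```rust
// Assumes the Merkle tree API server is reachable at `addr`, e.g. "127.0.0.1:3072".
async fn fetch_bogus_stale_keys(addr: &str) -> anyhow::Result<serde_json::Value> {
    let response = reqwest::Client::new()
        .post(format!("http://{addr}/debug/stale-keys/bogus"))
        .json(&serde_json::json!({ "l1_batch_number": 1 }))
        .send()
        .await?
        .error_for_status()?;
    Ok(response.json().await?)
}
```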
-use std::time::{Duration, Instant}; +use std::time::{Duration, Instant, SystemTime}; use vise::{ Buckets, DurationAsSecs, EncodeLabelSet, EncodeLabelValue, Family, Gauge, Histogram, Info, @@ -9,7 +9,6 @@ use vise::{ use zksync_config::configs::database::MerkleTreeMode; use zksync_shared_metrics::{BlockStage, APP_METRICS}; use zksync_types::block::L1BatchHeader; -use zksync_utils::time::seconds_since_epoch; use super::{MetadataCalculator, MetadataCalculatorConfig}; @@ -187,6 +186,11 @@ impl MetadataCalculator { total_logs: usize, start: Instant, ) { + let (Some(first_header), Some(last_header)) = (batch_headers.first(), batch_headers.last()) + else { + return; + }; + let elapsed = start.elapsed(); METRICS.update_tree_latency.observe(elapsed); if total_logs > 0 { @@ -205,17 +209,20 @@ impl MetadataCalculator { METRICS.log_batch.observe(total_logs); METRICS.blocks_batch.observe(batch_headers.len()); - let first_batch_number = batch_headers.first().unwrap().number.0; - let last_batch_number = batch_headers.last().unwrap().number.0; + let first_batch_number = first_header.number.0; + let last_batch_number = last_header.number.0; tracing::info!( "L1 batches #{:?} processed in tree", first_batch_number..=last_batch_number ); APP_METRICS.block_number[&BlockStage::Tree].set(last_batch_number.into()); + let duration_since_epoch = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .expect("incorrect system time"); let latency = - seconds_since_epoch().saturating_sub(batch_headers.first().unwrap().timestamp); - APP_METRICS.block_latency[&BlockStage::Tree].observe(Duration::from_secs(latency)); + duration_since_epoch.saturating_sub(Duration::from_secs(first_header.timestamp)); + APP_METRICS.block_latency[&BlockStage::Tree].observe(latency); } } diff --git a/core/node/metadata_calculator/src/recovery/tests.rs b/core/node/metadata_calculator/src/recovery/tests.rs index 1d83c2f06031..4b2ba578a5b6 100644 --- a/core/node/metadata_calculator/src/recovery/tests.rs +++ b/core/node/metadata_calculator/src/recovery/tests.rs @@ -15,6 +15,7 @@ use zksync_health_check::{CheckHealth, HealthStatus, ReactiveHealthCheck}; use zksync_merkle_tree::{domain::ZkSyncTree, recovery::PersistenceThreadHandle, TreeInstruction}; use zksync_node_genesis::{insert_genesis_batch, GenesisParams}; use zksync_node_test_utils::prepare_recovery_snapshot; +use zksync_storage::RocksDB; use zksync_types::{L1BatchNumber, U256}; use super::*; @@ -543,4 +544,9 @@ async fn pruning_during_recovery_is_detected() { .unwrap_err(); let err = format!("{err:#}").to_lowercase(); assert!(err.contains("continuing recovery is impossible"), "{err}"); + + // Because of an abrupt error, terminating a RocksDB instance needs to be handled explicitly. + tokio::task::spawn_blocking(RocksDB::await_rocksdb_termination) + .await + .unwrap(); } diff --git a/core/node/metadata_calculator/src/repair.rs b/core/node/metadata_calculator/src/repair.rs new file mode 100644 index 000000000000..9dfec4348ed6 --- /dev/null +++ b/core/node/metadata_calculator/src/repair.rs @@ -0,0 +1,258 @@ +//! High-level wrapper for the stale keys repair task. 
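The `repair` module introduced below reports health through a `Weak` handle stored in a `OnceCell`, so the check can distinguish "not started", "running", and "shut down" without keeping the repair task alive. A toy sketch of that mapping (plain strings in place of the real `HealthStatus`):

```rust
use std::sync::{Arc, Weak};

use once_cell::sync::OnceCell;

// `None` means the task was never wired up; a dead weak reference means the
// task (and its handle) already shut down.
fn repair_status(handle: &OnceCell<Weak<()>>) -> &'static str {
    match handle.get() {
        None => "affected",
        Some(weak) if weak.upgrade().is_none() => "shut_down",
        Some(_) => "ready",
    }
}

fn main() {
    let cell = OnceCell::new();
    assert_eq!(repair_status(&cell), "affected");
    let strong = Arc::new(());
    cell.set(Arc::downgrade(&strong)).unwrap();
    assert_eq!(repair_status(&cell), "ready");
    drop(strong);
    assert_eq!(repair_status(&cell), "shut_down");
}
```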
+ +use std::{ + sync::{Arc, Weak}, + time::Duration, +}; + +use anyhow::Context as _; +use async_trait::async_trait; +use once_cell::sync::OnceCell; +use serde::Serialize; +use tokio::sync::watch; +use zksync_health_check::{CheckHealth, Health, HealthStatus}; +use zksync_merkle_tree::repair; + +use crate::LazyAsyncTreeReader; + +#[derive(Debug, Serialize)] +struct RepairHealthDetails { + #[serde(skip_serializing_if = "Option::is_none")] + earliest_checked_version: Option, + #[serde(skip_serializing_if = "Option::is_none")] + latest_checked_version: Option, + repaired_key_count: usize, +} + +impl From for RepairHealthDetails { + fn from(stats: repair::StaleKeysRepairStats) -> Self { + let versions = stats.checked_versions.as_ref(); + Self { + earliest_checked_version: versions.map(|versions| *versions.start()), + latest_checked_version: versions.map(|versions| *versions.end()), + repaired_key_count: stats.repaired_key_count, + } + } +} + +#[derive(Debug, Default)] +struct RepairHealthCheck { + handle: OnceCell>, +} + +#[async_trait] +impl CheckHealth for RepairHealthCheck { + fn name(&self) -> &'static str { + "tree_stale_keys_repair" + } + + async fn check_health(&self) -> Health { + let Some(weak_handle) = self.handle.get() else { + return HealthStatus::Affected.into(); + }; + let Some(handle) = weak_handle.upgrade() else { + return HealthStatus::ShutDown.into(); + }; + Health::from(HealthStatus::Ready).with_details(RepairHealthDetails::from(handle.stats())) + } +} + +/// Stale keys repair task. +#[derive(Debug)] +#[must_use = "Task should `run()` in a managed Tokio task"] +pub struct StaleKeysRepairTask { + tree_reader: LazyAsyncTreeReader, + health_check: Arc, + poll_interval: Duration, +} + +impl StaleKeysRepairTask { + pub(super) fn new(tree_reader: LazyAsyncTreeReader) -> Self { + Self { + tree_reader, + health_check: Arc::default(), + poll_interval: Duration::from_secs(60), + } + } + + pub fn health_check(&self) -> Arc { + self.health_check.clone() + } + + /// Runs this task indefinitely. + #[tracing::instrument(skip_all)] + pub async fn run(self, mut stop_receiver: watch::Receiver) -> anyhow::Result<()> { + let db = tokio::select! { + res = self.tree_reader.wait() => { + match res { + Some(reader) => reader.into_db(), + None => { + tracing::info!("Merkle tree dropped; shutting down stale keys repair"); + return Ok(()); + } + } + } + _ = stop_receiver.changed() => { + tracing::info!("Stop signal received before Merkle tree is initialized; shutting down stale keys repair"); + return Ok(()); + } + }; + + let (mut task, handle) = repair::StaleKeysRepairTask::new(db); + task.set_poll_interval(self.poll_interval); + let handle = Arc::new(handle); + self.health_check + .handle + .set(Arc::downgrade(&handle)) + .map_err(|_| anyhow::anyhow!("failed setting health check handle"))?; + + let mut task = tokio::task::spawn_blocking(|| task.run()); + tokio::select! { + res = &mut task => { + tracing::error!("Stale keys repair spontaneously stopped"); + res.context("repair task panicked")? + }, + _ = stop_receiver.changed() => { + tracing::info!("Stop signal received, stale keys repair is shutting down"); + // This is the only strong reference to the handle, so dropping it should signal the task to stop. + drop(handle); + task.await.context("stale keys repair task panicked")? 
+ } + } + } +} + +#[cfg(test)] +mod tests { + use std::time::Duration; + + use tempfile::TempDir; + use zksync_dal::{ConnectionPool, Core}; + use zksync_node_genesis::{insert_genesis_batch, GenesisParams}; + use zksync_types::L1BatchNumber; + + use super::*; + use crate::{ + tests::{extend_db_state, gen_storage_logs, mock_config, reset_db_state}, + MetadataCalculator, + }; + + const POLL_INTERVAL: Duration = Duration::from_millis(50); + + async fn wait_for_health( + check: &dyn CheckHealth, + mut condition: impl FnMut(&Health) -> bool, + ) -> Health { + loop { + let health = check.check_health().await; + if condition(&health) { + return health; + } else if matches!( + health.status(), + HealthStatus::ShutDown | HealthStatus::Panicked + ) { + panic!("reached terminal health: {health:?}"); + } + tokio::time::sleep(POLL_INTERVAL).await; + } + } + + #[tokio::test] + async fn repair_task_basics() { + let pool = ConnectionPool::::test_pool().await; + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let config = mock_config(temp_dir.path()); + let mut storage = pool.connection().await.unwrap(); + insert_genesis_batch(&mut storage, &GenesisParams::mock()) + .await + .unwrap(); + reset_db_state(&pool, 5).await; + + let calculator = MetadataCalculator::new(config, None, pool.clone()) + .await + .unwrap(); + let reader = calculator.tree_reader(); + let mut repair_task = calculator.stale_keys_repair_task(); + repair_task.poll_interval = POLL_INTERVAL; + let health_check = repair_task.health_check(); + + let (stop_sender, stop_receiver) = watch::channel(false); + let calculator_handle = tokio::spawn(calculator.run(stop_receiver.clone())); + let repair_task_handle = tokio::spawn(repair_task.run(stop_receiver)); + wait_for_health(&health_check, |health| { + matches!(health.status(), HealthStatus::Ready) + }) + .await; + + // Wait until the calculator is initialized and then drop the reader so that it doesn't lock RocksDB. + { + let reader = reader.wait().await.unwrap(); + while reader.clone().info().await.next_l1_batch_number < L1BatchNumber(6) { + tokio::time::sleep(POLL_INTERVAL).await; + } + } + + // Wait until all tree versions have been checked. 
+ let health = wait_for_health(&health_check, |health| { + if !matches!(health.status(), HealthStatus::Ready) { + return false; + } + let details = health.details().unwrap(); + details.get("latest_checked_version") == Some(&5.into()) + }) + .await; + let details = health.details().unwrap(); + assert_eq!(details["earliest_checked_version"], 1); + assert_eq!(details["repaired_key_count"], 0); + + stop_sender.send_replace(true); + calculator_handle.await.unwrap().unwrap(); + repair_task_handle.await.unwrap().unwrap(); + wait_for_health(&health_check, |health| { + matches!(health.status(), HealthStatus::ShutDown) + }) + .await; + + test_repair_persistence(temp_dir, pool).await; + } + + async fn test_repair_persistence(temp_dir: TempDir, pool: ConnectionPool) { + let config = mock_config(temp_dir.path()); + let calculator = MetadataCalculator::new(config, None, pool.clone()) + .await + .unwrap(); + let mut repair_task = calculator.stale_keys_repair_task(); + repair_task.poll_interval = POLL_INTERVAL; + let health_check = repair_task.health_check(); + + let (stop_sender, stop_receiver) = watch::channel(false); + let calculator_handle = tokio::spawn(calculator.run(stop_receiver.clone())); + let repair_task_handle = tokio::spawn(repair_task.run(stop_receiver)); + wait_for_health(&health_check, |health| { + matches!(health.status(), HealthStatus::Ready) + }) + .await; + + // Add more batches to the storage. + let mut storage = pool.connection().await.unwrap(); + let logs = gen_storage_logs(200..300, 5); + extend_db_state(&mut storage, logs).await; + + // Wait until new tree versions have been checked. + let health = wait_for_health(&health_check, |health| { + if !matches!(health.status(), HealthStatus::Ready) { + return false; + } + let details = health.details().unwrap(); + details.get("latest_checked_version") == Some(&10.into()) + }) + .await; + let details = health.details().unwrap(); + assert_eq!(details["earliest_checked_version"], 6); + assert_eq!(details["repaired_key_count"], 0); + + stop_sender.send_replace(true); + calculator_handle.await.unwrap().unwrap(); + repair_task_handle.await.unwrap().unwrap(); + } +} diff --git a/core/node/metadata_calculator/src/tests.rs b/core/node/metadata_calculator/src/tests.rs index 1c003c4ecf78..9717ce5682ce 100644 --- a/core/node/metadata_calculator/src/tests.rs +++ b/core/node/metadata_calculator/src/tests.rs @@ -23,7 +23,6 @@ use zksync_types::{ block::{L1BatchHeader, L1BatchTreeData}, AccountTreeId, Address, L1BatchNumber, L2BlockNumber, StorageKey, StorageLog, H256, }; -use zksync_utils::u32_to_h256; use super::{ helpers::L1BatchWithLogs, GenericAsyncTree, MetadataCalculator, MetadataCalculatorConfig, @@ -904,9 +903,9 @@ pub(crate) fn gen_storage_logs( let proof_keys = accounts.iter().flat_map(|&account| { account_keys .clone() - .map(move |i| StorageKey::new(account, u32_to_h256(i))) + .map(move |i| StorageKey::new(account, H256::from_low_u64_be(i.into()))) }); - let proof_values = indices.map(u32_to_h256); + let proof_values = indices.map(|i| H256::from_low_u64_be(i.into())); let logs: Vec<_> = proof_keys .zip(proof_values) diff --git a/core/node/node_framework/src/implementations/layers/metadata_calculator.rs b/core/node/node_framework/src/implementations/layers/metadata_calculator.rs index 4092ee6dcd56..45aa320786ef 100644 --- a/core/node/node_framework/src/implementations/layers/metadata_calculator.rs +++ b/core/node/node_framework/src/implementations/layers/metadata_calculator.rs @@ -8,7 +8,7 @@ use anyhow::Context as _; use 
zksync_config::configs::{api::MerkleTreeApiConfig, database::MerkleTreeMode}; use zksync_metadata_calculator::{ LazyAsyncTreeReader, MerkleTreePruningTask, MerkleTreeReaderConfig, MetadataCalculator, - MetadataCalculatorConfig, TreeReaderTask, + MetadataCalculatorConfig, StaleKeysRepairTask, TreeReaderTask, }; use zksync_storage::RocksDB; @@ -31,6 +31,7 @@ pub struct MetadataCalculatorLayer { config: MetadataCalculatorConfig, tree_api_config: Option, pruning_config: Option, + stale_keys_repair_enabled: bool, } #[derive(Debug, FromContext)] @@ -56,6 +57,9 @@ pub struct Output { /// Only provided if configuration is provided. #[context(task)] pub pruning_task: Option, + /// Only provided if enabled in the config. + #[context(task)] + pub stale_keys_repair_task: Option, pub rocksdb_shutdown_hook: ShutdownHook, } @@ -65,6 +69,7 @@ impl MetadataCalculatorLayer { config, tree_api_config: None, pruning_config: None, + stale_keys_repair_enabled: false, } } @@ -77,6 +82,11 @@ impl MetadataCalculatorLayer { self.pruning_config = Some(pruning_config); self } + + pub fn with_stale_keys_repair(mut self) -> Self { + self.stale_keys_repair_enabled = true; + self + } } #[async_trait::async_trait] @@ -141,6 +151,12 @@ impl WiringLayer for MetadataCalculatorLayer { ) .transpose()?; + let stale_keys_repair_task = if self.stale_keys_repair_enabled { + Some(metadata_calculator.stale_keys_repair_task()) + } else { + None + }; + let tree_api_client = TreeApiClientResource(Arc::new(metadata_calculator.tree_reader())); let rocksdb_shutdown_hook = ShutdownHook::new("rocksdb_terminaton", async { @@ -155,6 +171,7 @@ impl WiringLayer for MetadataCalculatorLayer { tree_api_client, tree_api_task, pruning_task, + stale_keys_repair_task, rocksdb_shutdown_hook, }) } @@ -196,6 +213,17 @@ impl Task for TreeApiTask { } } +#[async_trait::async_trait] +impl Task for StaleKeysRepairTask { + fn id(&self) -> TaskId { + "merkle_tree_stale_keys_repair_task".into() + } + + async fn run(self: Box, stop_receiver: StopReceiver) -> anyhow::Result<()> { + (*self).run(stop_receiver.0).await + } +} + #[async_trait::async_trait] impl Task for MerkleTreePruningTask { fn id(&self) -> TaskId { diff --git a/core/node/node_sync/Cargo.toml b/core/node/node_sync/Cargo.toml index 9c5b0c000700..28dbf6b3150e 100644 --- a/core/node/node_sync/Cargo.toml +++ b/core/node/node_sync/Cargo.toml @@ -21,9 +21,9 @@ zksync_state_keeper.workspace = true zksync_shared_metrics.workspace = true zksync_web3_decl.workspace = true zksync_health_check.workspace = true -zksync_utils.workspace = true zksync_eth_client.workspace = true zksync_concurrency.workspace = true +zksync_consensus_roles.workspace = true vise.workspace = true zksync_vm_executor.workspace = true diff --git a/core/node/node_sync/src/client.rs b/core/node/node_sync/src/client.rs index ee89db10ddd1..ce6e08e29227 100644 --- a/core/node/node_sync/src/client.rs +++ b/core/node/node_sync/src/client.rs @@ -8,7 +8,8 @@ use zksync_health_check::{CheckHealth, Health, HealthStatus}; use zksync_system_constants::ACCOUNT_CODE_STORAGE_ADDRESS; use zksync_types::{ api::{self, en}, - get_code_key, Address, L2BlockNumber, ProtocolVersionId, H256, U64, + bytecode::BytecodeHash, + get_code_key, h256_to_u256, Address, L2BlockNumber, ProtocolVersionId, H256, U64, }; use zksync_web3_decl::{ client::{DynClient, L2}, @@ -57,7 +58,7 @@ impl MainNodeClient for Box> { .with_arg("hash", &hash) .await?; if let Some(bytecode) = &bytecode { - let actual_bytecode_hash = zksync_utils::bytecode::hash_bytecode(bytecode); + let 
actual_bytecode_hash = BytecodeHash::for_bytecode(bytecode).value();
             if actual_bytecode_hash != hash {
                 return Err(EnrichedClientError::custom(
                     "Got invalid base system contract bytecode from main node",
@@ -81,7 +82,7 @@ impl MainNodeClient for Box<DynClient<L2>> {
         let code_hash = self
             .get_storage_at(
                 ACCOUNT_CODE_STORAGE_ADDRESS,
-                zksync_utils::h256_to_u256(*code_key.key()),
+                h256_to_u256(*code_key.key()),
                 Some(GENESIS_BLOCK),
             )
             .rpc_context("get_storage_at")
diff --git a/core/node/node_sync/src/external_io.rs b/core/node/node_sync/src/external_io.rs
index 0f5f4d6253fa..d3d908cfc169 100644
--- a/core/node/node_sync/src/external_io.rs
+++ b/core/node/node_sync/src/external_io.rs
@@ -20,7 +20,6 @@ use zksync_types::{
     protocol_version::{ProtocolSemanticVersion, VersionPatch},
     L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, Transaction, H256,
 };
-use zksync_utils::bytes_to_be_words;
 use zksync_vm_executor::storage::L1BatchParamsProvider;
 
 use super::{
@@ -75,7 +74,7 @@ impl ExternalIO {
 
         Ok(match bytecode {
             Some(bytecode) => SystemContractCode {
-                code: bytes_to_be_words(bytecode),
+                code: bytecode,
                 hash,
             },
             None => {
@@ -98,7 +97,7 @@ impl ExternalIO {
                 )
                 .await?;
                 SystemContractCode {
-                    code: bytes_to_be_words(contract_bytecode),
+                    code: contract_bytecode,
                     hash,
                 }
             }
diff --git a/core/node/node_sync/src/genesis.rs b/core/node/node_sync/src/genesis.rs
index c5d4869175df..7401bdd9c9d4 100644
--- a/core/node/node_sync/src/genesis.rs
+++ b/core/node/node_sync/src/genesis.rs
@@ -110,20 +110,17 @@ async fn fetch_base_system_contracts(
             .fetch_system_contract_by_hash(hash)
             .await?
             .context("EVM emulator bytecode is missing on main node")?;
-        Some(SystemContractCode {
-            code: zksync_utils::bytes_to_be_words(bytes),
-            hash,
-        })
+        Some(SystemContractCode { code: bytes, hash })
     } else {
         None
     };
     Ok(BaseSystemContracts {
         bootloader: SystemContractCode {
-            code: zksync_utils::bytes_to_be_words(bootloader_bytecode),
+            code: bootloader_bytecode,
             hash: contract_hashes.bootloader,
         },
         default_aa: SystemContractCode {
-            code: zksync_utils::bytes_to_be_words(default_aa_bytecode),
+            code: default_aa_bytecode,
             hash: contract_hashes.default_aa,
         },
         evm_emulator,
diff --git a/core/node/node_sync/src/sync_state.rs b/core/node/node_sync/src/sync_state.rs
index f8a2fe00ec09..1ffec757c9b1 100644
--- a/core/node/node_sync/src/sync_state.rs
+++ b/core/node/node_sync/src/sync_state.rs
@@ -4,6 +4,7 @@ use async_trait::async_trait;
 use serde::Serialize;
 use tokio::sync::watch;
 use zksync_concurrency::{ctx, sync};
+use zksync_consensus_roles::validator;
 use zksync_dal::{ConnectionPool, Core, CoreDal};
 use zksync_health_check::{CheckHealth, Health, HealthStatus};
 use zksync_shared_metrics::EN_METRICS;
@@ -50,18 +51,20 @@ impl SyncState {
             .unwrap();
     }
 
+    /// Waits until the main node block number satisfies the given predicate.
+    /// Returns the first main node block number for which the predicate holds.
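The reworked `wait_for_main_node_block` below takes a predicate instead of a fixed target block. A call-site sketch inside this module, recovering the old "greater or equal" behavior (`SyncState` is the type defined here; `wait_until_at_least` is a hypothetical wrapper):

```rust
use zksync_concurrency::ctx;
use zksync_consensus_roles::validator;

// Blocks until the main node reports at least block `want`.
async fn wait_until_at_least(
    sync_state: &SyncState,
    ctx: &ctx::Ctx,
    want: validator::BlockNumber,
) -> ctx::OrCanceled<validator::BlockNumber> {
    sync_state
        .wait_for_main_node_block(ctx, |n| n >= want)
        .await
}
```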
pub async fn wait_for_main_node_block( &self, ctx: &ctx::Ctx, - want: L2BlockNumber, - ) -> ctx::OrCanceled<()> { - sync::wait_for( - ctx, - &mut self.0.subscribe(), - |inner| matches!(inner.main_node_block, Some(got) if got >= want), - ) - .await?; - Ok(()) + pred: impl Fn(validator::BlockNumber) -> bool, + ) -> ctx::OrCanceled { + sync::wait_for_some(ctx, &mut self.0.subscribe(), |inner| { + inner + .main_node_block + .map(|n| validator::BlockNumber(n.0.into())) + .filter(|n| pred(*n)) + }) + .await } pub fn set_main_node_block(&self, block: L2BlockNumber) { diff --git a/core/node/proof_data_handler/Cargo.toml b/core/node/proof_data_handler/Cargo.toml index e2ddc972a2f5..0bd1501277b7 100644 --- a/core/node/proof_data_handler/Cargo.toml +++ b/core/node/proof_data_handler/Cargo.toml @@ -11,6 +11,7 @@ keywords.workspace = true categories.workspace = true [dependencies] +chrono.workspace = true vise.workspace = true zksync_config.workspace = true zksync_dal.workspace = true @@ -18,7 +19,6 @@ zksync_object_store.workspace = true zksync_prover_interface.workspace = true zksync_types.workspace = true zksync_vm_executor.workspace = true -zksync_utils.workspace = true anyhow.workspace = true axum.workspace = true tokio.workspace = true @@ -27,9 +27,7 @@ tracing.workspace = true [dev-dependencies] hyper.workspace = true -chrono.workspace = true zksync_multivm.workspace = true serde_json.workspace = true tower.workspace = true -zksync_basic_types.workspace = true zksync_contracts.workspace = true diff --git a/core/node/proof_data_handler/src/metrics.rs b/core/node/proof_data_handler/src/metrics.rs index edccda90dc24..233db15aa0dd 100644 --- a/core/node/proof_data_handler/src/metrics.rs +++ b/core/node/proof_data_handler/src/metrics.rs @@ -1,6 +1,9 @@ -use vise::{Histogram, Metrics}; +use std::{fmt, time::Duration}; + +use vise::{EncodeLabelSet, EncodeLabelValue, Family, Histogram, Metrics, Unit}; use zksync_object_store::bincode; use zksync_prover_interface::inputs::WitnessInputData; +use zksync_types::tee_types::TeeType; const BYTES_IN_MEGABYTE: u64 = 1024 * 1024; @@ -14,6 +17,24 @@ pub(super) struct ProofDataHandlerMetrics { pub eip_4844_blob_size_in_mb: Histogram, #[metrics(buckets = vise::Buckets::exponential(1.0..=2_048.0, 2.0))] pub total_blob_size_in_mb: Histogram, + #[metrics(buckets = vise::Buckets::LATENCIES, unit = Unit::Seconds)] + pub tee_proof_roundtrip_time: Family>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, EncodeLabelSet, EncodeLabelValue)] +#[metrics(label = "tee_type")] +pub(crate) struct MetricsTeeType(pub TeeType); + +impl fmt::Display for MetricsTeeType { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(formatter) + } +} + +impl From for MetricsTeeType { + fn from(value: TeeType) -> Self { + Self(value) + } } impl ProofDataHandlerMetrics { diff --git a/core/node/proof_data_handler/src/tee_request_processor.rs b/core/node/proof_data_handler/src/tee_request_processor.rs index b265b94d4d74..971b94fe315f 100644 --- a/core/node/proof_data_handler/src/tee_request_processor.rs +++ b/core/node/proof_data_handler/src/tee_request_processor.rs @@ -1,8 +1,12 @@ use std::sync::Arc; use axum::{extract::Path, Json}; +use chrono::{Duration as ChronoDuration, Utc}; use zksync_config::configs::ProofDataHandlerConfig; -use zksync_dal::{ConnectionPool, Core, CoreDal}; +use zksync_dal::{ + tee_proof_generation_dal::{LockedBatch, TeeProofGenerationJobStatus}, + ConnectionPool, Core, CoreDal, +}; use zksync_object_store::{ObjectStore, ObjectStoreError}; use 
zksync_prover_interface::{ api::{ @@ -16,7 +20,7 @@ use zksync_prover_interface::{ use zksync_types::{tee_types::TeeType, L1BatchNumber, L2ChainId}; use zksync_vm_executor::storage::L1BatchParamsProvider; -use crate::errors::RequestProcessorError; +use crate::{errors::RequestProcessorError, metrics::METRICS}; #[derive(Clone)] pub(crate) struct TeeRequestProcessor { @@ -47,49 +51,62 @@ impl TeeRequestProcessor { ) -> Result>, RequestProcessorError> { tracing::info!("Received request for proof generation data: {:?}", request); - let mut min_batch_number = self.config.tee_config.first_tee_processed_batch; - let mut missing_range: Option<(L1BatchNumber, L1BatchNumber)> = None; + let batch_ignored_timeout = ChronoDuration::from_std( + self.config + .tee_config + .tee_batch_permanently_ignored_timeout(), + ) + .map_err(|err| { + RequestProcessorError::GeneralError(format!( + "Failed to convert batch_ignored_timeout: {}", + err + )) + })?; + let min_batch_number = self.config.tee_config.first_tee_processed_batch; - let result = loop { - let Some(l1_batch_number) = self + loop { + let Some(locked_batch) = self .lock_batch_for_proving(request.tee_type, min_batch_number) .await? else { - // No job available - return Ok(None); + break Ok(None); // no job available }; + let batch_number = locked_batch.l1_batch_number; match self - .tee_verifier_input_for_existing_batch(l1_batch_number) + .tee_verifier_input_for_existing_batch(batch_number) .await { Ok(input) => { break Ok(Some(Json(TeeProofGenerationDataResponse(Box::new(input))))); } Err(RequestProcessorError::ObjectStore(ObjectStoreError::KeyNotFound(_))) => { - missing_range = match missing_range { - Some((start, _)) => Some((start, l1_batch_number)), - None => Some((l1_batch_number, l1_batch_number)), + let duration = Utc::now().signed_duration_since(locked_batch.created_at); + let status = if duration > batch_ignored_timeout { + TeeProofGenerationJobStatus::PermanentlyIgnored + } else { + TeeProofGenerationJobStatus::Failed }; - self.unlock_batch(l1_batch_number, request.tee_type).await?; - min_batch_number = l1_batch_number + 1; + self.unlock_batch(batch_number, request.tee_type, status) + .await?; + tracing::warn!( + "Assigned status {} to batch {} created at {}", + status, + batch_number, + locked_batch.created_at + ); } Err(err) => { - self.unlock_batch(l1_batch_number, request.tee_type).await?; + self.unlock_batch( + batch_number, + request.tee_type, + TeeProofGenerationJobStatus::Failed, + ) + .await?; break Err(err); } } - }; - - if let Some((start, end)) = missing_range { - tracing::warn!( - "Blobs for batch numbers {} to {} not found in the object store. Marked as unpicked.", - start, - end - ); } - - result } #[tracing::instrument(skip(self))] @@ -157,7 +174,7 @@ impl TeeRequestProcessor { &self, tee_type: TeeType, min_batch_number: L1BatchNumber, - ) -> Result, RequestProcessorError> { + ) -> Result, RequestProcessorError> { self.pool .connection_tagged("tee_request_processor") .await? @@ -175,12 +192,13 @@ impl TeeRequestProcessor { &self, l1_batch_number: L1BatchNumber, tee_type: TeeType, + status: TeeProofGenerationJobStatus, ) -> Result<(), RequestProcessorError> { self.pool .connection_tagged("tee_request_processor") .await? 
.tee_proof_generation_dal() - .unlock_batch(l1_batch_number, tee_type) + .unlock_batch(l1_batch_number, tee_type, status) .await?; Ok(()) } @@ -194,11 +212,6 @@ impl TeeRequestProcessor { let mut connection = self.pool.connection_tagged("tee_request_processor").await?; let mut dal = connection.tee_proof_generation_dal(); - tracing::info!( - "Received proof {:?} for batch number: {:?}", - proof, - l1_batch_number - ); dal.save_proof_artifacts_metadata( l1_batch_number, proof.0.tee_type, @@ -208,6 +221,27 @@ impl TeeRequestProcessor { ) .await?; + let sealed_at = connection + .blocks_dal() + .get_batch_sealed_at(l1_batch_number) + .await?; + + let duration = sealed_at.and_then(|sealed_at| (Utc::now() - sealed_at).to_std().ok()); + + let duration_secs_f64 = if let Some(duration) = duration { + METRICS.tee_proof_roundtrip_time[&proof.0.tee_type.into()].observe(duration); + duration.as_secs_f64() + } else { + f64::NAN + }; + + tracing::info!( + l1_batch_number = %l1_batch_number, + sealed_to_proven_in_secs = duration_secs_f64, + "Received proof {:?}", + proof + ); + Ok(Json(SubmitProofResponse::Success)) } diff --git a/core/node/proof_data_handler/src/tests.rs b/core/node/proof_data_handler/src/tests.rs index 87c6bff8a1f4..dae2ef8cd0c0 100644 --- a/core/node/proof_data_handler/src/tests.rs +++ b/core/node/proof_data_handler/src/tests.rs @@ -6,12 +6,13 @@ use axum::{ }; use serde_json::json; use tower::ServiceExt; -use zksync_basic_types::L2ChainId; use zksync_config::configs::{ProofDataHandlerConfig, TeeConfig}; use zksync_dal::{ConnectionPool, CoreDal}; use zksync_object_store::MockObjectStore; use zksync_prover_interface::api::SubmitTeeProofRequest; -use zksync_types::{commitment::L1BatchCommitmentMode, tee_types::TeeType, L1BatchNumber}; +use zksync_types::{ + commitment::L1BatchCommitmentMode, tee_types::TeeType, L1BatchNumber, L2ChainId, +}; use crate::create_proof_processing_router; @@ -29,6 +30,7 @@ async fn request_tee_proof_inputs() { tee_support: true, first_tee_processed_batch: L1BatchNumber(0), tee_proof_generation_timeout_in_secs: 600, + tee_batch_permanently_ignored_timeout_in_hours: 10 * 24, }, }, L1BatchCommitmentMode::Rollup, @@ -88,6 +90,7 @@ async fn submit_tee_proof() { tee_support: true, first_tee_processed_batch: L1BatchNumber(0), tee_proof_generation_timeout_in_secs: 600, + tee_batch_permanently_ignored_timeout_in_hours: 10 * 24, }, }, L1BatchCommitmentMode::Rollup, @@ -119,7 +122,7 @@ async fn submit_tee_proof() { let mut proof_db_conn = db_conn_pool.connection().await.unwrap(); let oldest_batch_number = proof_db_conn .tee_proof_generation_dal() - .get_oldest_unpicked_batch() + .get_oldest_picked_by_prover_batch() .await .unwrap(); @@ -156,7 +159,7 @@ async fn mock_tee_batch_status( // there should not be any batches awaiting proof in the db yet - let oldest_batch_number = proof_dal.get_oldest_unpicked_batch().await.unwrap(); + let oldest_batch_number = proof_dal.get_oldest_picked_by_prover_batch().await.unwrap(); assert!(oldest_batch_number.is_none()); // mock SQL table with relevant information about the status of TEE proof generation @@ -169,7 +172,7 @@ async fn mock_tee_batch_status( // now, there should be one batch in the db awaiting proof let oldest_batch_number = proof_dal - .get_oldest_unpicked_batch() + .get_oldest_picked_by_prover_batch() .await .unwrap() .unwrap(); diff --git a/core/node/state_keeper/Cargo.toml b/core/node/state_keeper/Cargo.toml index 75d7c9f1e943..49d4209a4c4f 100644 --- a/core/node/state_keeper/Cargo.toml +++ 
b/core/node/state_keeper/Cargo.toml @@ -22,17 +22,14 @@ zksync_mempool.workspace = true zksync_shared_metrics.workspace = true zksync_config.workspace = true zksync_node_fee_model.workspace = true -zksync_utils.workspace = true zksync_contracts.workspace = true zksync_protobuf.workspace = true -zksync_test_account.workspace = true zksync_node_genesis.workspace = true zksync_node_test_utils.workspace = true zksync_vm_executor.workspace = true zksync_system_constants.workspace = true zksync_base_token_adjuster.workspace = true - anyhow.workspace = true async-trait.workspace = true tokio = { workspace = true, features = ["time"] } @@ -48,7 +45,6 @@ assert_matches.workspace = true rand.workspace = true tempfile.workspace = true test-casing.workspace = true -futures.workspace = true zksync_eth_client.workspace = true -zksync_system_constants.workspace = true +zksync_test_contracts.workspace = true diff --git a/core/node/state_keeper/src/executor/tests/mod.rs b/core/node/state_keeper/src/executor/tests/mod.rs index 04fb016ab639..eade0233d0e0 100644 --- a/core/node/state_keeper/src/executor/tests/mod.rs +++ b/core/node/state_keeper/src/executor/tests/mod.rs @@ -5,14 +5,12 @@ use rand::{thread_rng, Rng}; use test_casing::{test_casing, Product}; use zksync_dal::{ConnectionPool, Core}; use zksync_multivm::interface::{BatchTransactionExecutionResult, ExecutionResult, Halt}; -use zksync_test_account::Account; +use zksync_test_contracts::{Account, TestContract}; use zksync_types::{ - get_nonce_key, utils::storage_key_for_eth_balance, vm::FastVmMode, PriorityOpId, + get_nonce_key, utils::storage_key_for_eth_balance, vm::FastVmMode, web3, PriorityOpId, H256, }; -use self::tester::{ - AccountFailedCall, AccountLoadNextExecutable, StorageSnapshot, TestConfig, Tester, -}; +use self::tester::{AccountExt, StorageSnapshot, TestConfig, Tester}; mod read_storage_factory; mod tester; @@ -26,6 +24,11 @@ fn assert_executed(execution_result: &BatchTransactionExecutionResult) { ); } +fn assert_succeeded(execution_result: &BatchTransactionExecutionResult) { + let result = &execution_result.tx_result.result; + assert_matches!(result, ExecutionResult::Success { .. }) +} + /// Ensures that the transaction was rejected by the VM. 
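Returning to the TEE request processor change above: when the input blob for a locked batch is missing from the object store, the batch is now either retried or permanently parked depending on its age. A sketch of that decision, with plain strings standing in for `TeeProofGenerationJobStatus`:

```rust
use chrono::{DateTime, Duration, Utc};

// A batch whose blob is missing is retried as `Failed` until
// `batch_ignored_timeout` has elapsed since it was created, then parked.
fn status_for_missing_blob(
    created_at: DateTime<Utc>,
    batch_ignored_timeout: Duration,
) -> &'static str {
    if Utc::now().signed_duration_since(created_at) > batch_ignored_timeout {
        "PermanentlyIgnored" // too old: stop retrying this batch
    } else {
        "Failed" // young enough: unlock so it can be picked up again
    }
}
```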
fn assert_rejected(execution_result: &BatchTransactionExecutionResult) { let result = &execution_result.tx_result.result; @@ -173,6 +176,62 @@ async fn execute_l2_and_l1_txs(vm_mode: FastVmMode) { executor.finish_batch().await.unwrap(); } +#[tokio::test] +async fn working_with_transient_storage() { + let connection_pool = ConnectionPool::::constrained_test_pool(1).await; + let mut alice = Account::random(); + + let mut tester = Tester::new(connection_pool, FastVmMode::Shadow); + tester.genesis().await; + tester.fund(&[alice.address()]).await; + let mut executor = tester + .create_batch_executor(StorageType::AsyncRocksdbCache) + .await; + + let deploy_tx = alice.deploy_storage_tester(); + let res = executor.execute_tx(deploy_tx.tx).await.unwrap(); + assert_succeeded(&res); + + let storage_test_address = deploy_tx.address; + let test_tx = alice.test_transient_store(storage_test_address); + let res = executor.execute_tx(test_tx).await.unwrap(); + assert_succeeded(&res); + + let test_tx = alice.assert_transient_value(storage_test_address, 0.into()); + let res = executor.execute_tx(test_tx).await.unwrap(); + assert_succeeded(&res); + + executor.finish_batch().await.unwrap(); +} + +#[tokio::test] +async fn decommitting_contract() { + let connection_pool = ConnectionPool::::constrained_test_pool(1).await; + let mut alice = Account::random(); + + let mut tester = Tester::new(connection_pool, FastVmMode::Shadow); + tester.genesis().await; + tester.fund(&[alice.address()]).await; + let mut executor = tester + .create_batch_executor(StorageType::AsyncRocksdbCache) + .await; + + let deploy_tx = alice.deploy_precompiles_test(); + let res = executor.execute_tx(deploy_tx.tx).await.unwrap(); + assert_succeeded(&res); + + let keccak_bytecode_hash = web3::keccak256(TestContract::precompiles_test().bytecode); + let test_tx = alice.test_decommit( + deploy_tx.address, + deploy_tx.bytecode_hash, + H256(keccak_bytecode_hash), + ); + let res = executor.execute_tx(test_tx).await.unwrap(); + assert_succeeded(&res); + + executor.finish_batch().await.unwrap(); +} + /// Checks that we can successfully rollback the transaction and execute it once again. #[test_casing(3, FAST_VM_MODES)] #[tokio::test] @@ -296,7 +355,7 @@ async fn deploy_and_call_loadtest(vm_mode: FastVmMode) { ); assert_executed( &executor - .execute_tx(alice.loadnext_custom_writes_call(tx.address, 1, 500_000_000)) + .execute_tx(alice.loadnext_custom_initial_writes_call(tx.address, 1, 500_000_000)) .await .unwrap(), ); @@ -316,7 +375,7 @@ async fn deploy_failedcall(vm_mode: FastVmMode) { .create_batch_executor(StorageType::AsyncRocksdbCache) .await; - let tx = alice.deploy_failedcall_tx(); + let tx = alice.deploy_failed_call_tx(); let execute_tx = executor.execute_tx(tx.tx).await.unwrap(); assert_executed(&execute_tx); @@ -344,7 +403,7 @@ async fn execute_reverted_tx(vm_mode: FastVmMode) { assert_reverted( &executor - .execute_tx(alice.loadnext_custom_writes_call( + .execute_tx(alice.loadnext_custom_initial_writes_call( tx.address, 1, 1_000_000, // We provide enough gas for tx to be executed, but not enough for the call to be successful. 
)) diff --git a/core/node/state_keeper/src/executor/tests/tester.rs b/core/node/state_keeper/src/executor/tests/tester.rs index 800bf398938d..3727d9c16bfb 100644 --- a/core/node/state_keeper/src/executor/tests/tester.rs +++ b/core/node/state_keeper/src/executor/tests/tester.rs @@ -6,10 +6,6 @@ use std::{collections::HashMap, fmt::Debug, sync::Arc}; use tempfile::TempDir; use tokio::{sync::watch, task::JoinHandle}; use zksync_config::configs::chain::StateKeeperConfig; -use zksync_contracts::{ - get_loadnext_contract, load_contract, read_bytecode, - test_contracts::LoadnextContractExecutionParams, TestContract, -}; use zksync_dal::{ConnectionPool, Core, CoreDal}; use zksync_multivm::{ interface::{ @@ -22,7 +18,9 @@ use zksync_multivm::{ use zksync_node_genesis::create_genesis_l1_batch; use zksync_node_test_utils::{recover, Snapshot}; use zksync_state::{OwnedStorage, ReadStorageFactory, RocksdbStorageOptions}; -use zksync_test_account::{Account, DeployContractsTx, TxType}; +use zksync_test_contracts::{ + Account, DeployContractsTx, LoadnextContractExecutionParams, TestContract, TxType, +}; use zksync_types::{ block::L2BlockHasher, commitment::PubdataParams, @@ -30,12 +28,12 @@ use zksync_types::{ protocol_version::ProtocolSemanticVersion, snapshots::{SnapshotRecoveryStatus, SnapshotStorageLog}, system_contracts::get_system_smart_contracts, + u256_to_h256, utils::storage_key_for_standard_token_balance, vm::FastVmMode, AccountTreeId, Address, Execute, L1BatchNumber, L2BlockNumber, PriorityOpId, ProtocolVersionId, StorageLog, Transaction, H256, L2_BASE_TOKEN_ADDRESS, U256, }; -use zksync_utils::u256_to_h256; use zksync_vm_executor::batch::{MainBatchExecutorFactory, TraceCalls}; use super::{read_storage_factory::RocksdbStorageFactory, StorageType}; @@ -325,7 +323,7 @@ impl Tester { } } -pub trait AccountLoadNextExecutable { +pub(super) trait AccountExt { fn deploy_loadnext_tx(&mut self) -> DeployContractsTx; fn l1_execute(&mut self, serial_id: PriorityOpId) -> Transaction; @@ -335,7 +333,7 @@ pub trait AccountLoadNextExecutable { /// Returns an `execute` transaction with custom factory deps (which aren't used in a transaction, /// so they are mostly useful to test bytecode compression). 
fn execute_with_factory_deps(&mut self, factory_deps: Vec>) -> Transaction; - fn loadnext_custom_writes_call( + fn loadnext_custom_initial_writes_call( &mut self, address: Address, writes: u32, @@ -352,39 +350,38 @@ pub trait AccountLoadNextExecutable { gas_to_burn: u32, gas_limit: u32, ) -> Transaction; -} -pub trait AccountFailedCall { - fn deploy_failedcall_tx(&mut self) -> DeployContractsTx; -} + fn deploy_failed_call_tx(&mut self) -> DeployContractsTx; -impl AccountFailedCall for Account { - fn deploy_failedcall_tx(&mut self) -> DeployContractsTx { - let bytecode = read_bytecode( - "etc/contracts-test-data/artifacts-zk/contracts/failed-call/failed_call.sol/FailedCall.json"); - let failedcall_contract = TestContract { - bytecode, - contract: load_contract("etc/contracts-test-data/artifacts-zk/contracts/failed-call/failed_call.sol/FailedCall.json"), - factory_deps: vec![], - }; + fn deploy_storage_tester(&mut self) -> DeployContractsTx; - self.get_deploy_tx(&failedcall_contract.bytecode, None, TxType::L2) - } + fn test_transient_store(&mut self, address: Address) -> Transaction; + + fn assert_transient_value(&mut self, address: Address, expected: U256) -> Transaction; + + fn deploy_precompiles_test(&mut self) -> DeployContractsTx; + + fn test_decommit( + &mut self, + address: Address, + bytecode_hash: H256, + expected_keccak_hash: H256, + ) -> Transaction; } -impl AccountLoadNextExecutable for Account { +impl AccountExt for Account { fn deploy_loadnext_tx(&mut self) -> DeployContractsTx { - let loadnext_contract = get_loadnext_contract(); + let loadnext_contract = TestContract::load_test(); let loadnext_constructor_data = &[Token::Uint(U256::from(100))]; self.get_deploy_tx_with_factory_deps( - &loadnext_contract.bytecode, + loadnext_contract.bytecode, Some(loadnext_constructor_data), - loadnext_contract.factory_deps.clone(), + loadnext_contract.factory_deps(), TxType::L2, ) } fn l1_execute(&mut self, serial_id: PriorityOpId) -> Transaction { - testonly::l1_transaction(self, serial_id) + self.get_l1_tx(Execute::transfer(Address::random(), 0.into()), serial_id.0) } /// Returns a valid `execute` transaction. @@ -407,17 +404,17 @@ impl AccountLoadNextExecutable for Account { /// Returns a transaction to the loadnext contract with custom amount of write requests. /// Increments the account nonce. - fn loadnext_custom_writes_call( + fn loadnext_custom_initial_writes_call( &mut self, address: Address, - writes: u32, + initial_writes: u32, gas_limit: u32, ) -> Transaction { // For each iteration of the expensive contract, there are two slots that are updated: // the length of the vector and the new slot with the element itself. let minimal_fee = 2 * testonly::DEFAULT_GAS_PER_PUBDATA - * writes + * initial_writes * INITIAL_STORAGE_WRITE_PUBDATA_BYTES as u32; let fee = testonly::fee(minimal_fee + gas_limit); @@ -427,7 +424,8 @@ impl AccountLoadNextExecutable for Account { contract_address: Some(address), calldata: LoadnextContractExecutionParams { reads: 100, - writes: writes as usize, + initial_writes: initial_writes as usize, + repeated_writes: 100, events: 100, hashes: 100, recursive_calls: 0, @@ -444,7 +442,10 @@ impl AccountLoadNextExecutable for Account { /// Returns a valid `execute` transaction. /// Automatically increments nonce of the account. 
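The `AccountExt` trait above replaces the old free helpers (`testonly::l1_transaction` / `l2_transaction`) with methods on `Account`. The same extension-trait pattern in miniature, using only calls that appear in this diff (`AccountTestExt` and `transfer_nothing` are hypothetical names):

```rust
use zksync_test_contracts::Account;
use zksync_types::{Address, Execute, Transaction};

// Bolt a test-only constructor onto `Account` without touching its definition.
trait AccountTestExt {
    fn transfer_nothing(&mut self) -> Transaction;
}

impl AccountTestExt for Account {
    fn transfer_nothing(&mut self) -> Transaction {
        // Zero-value transfer to a random address with default fee parameters.
        self.get_l2_tx_for_execute(Execute::transfer(Address::random(), 0.into()), None)
    }
}
```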
fn execute_with_gas_limit(&mut self, gas_limit: u32) -> Transaction { - testonly::l2_transaction(self, gas_limit) + self.get_l2_tx_for_execute( + Execute::transfer(Address::random(), 0.into()), + Some(testonly::fee(gas_limit)), + ) } /// Returns a transaction to the loadnext contract with custom gas limit and expected burned gas amount. @@ -462,17 +463,78 @@ impl AccountLoadNextExecutable for Account { Execute { contract_address: Some(address), calldata, - value: Default::default(), + value: 0.into(), factory_deps: vec![], }, Some(fee), ) } + + fn deploy_failed_call_tx(&mut self) -> DeployContractsTx { + self.get_deploy_tx(TestContract::failed_call().bytecode, None, TxType::L2) + } + + fn deploy_storage_tester(&mut self) -> DeployContractsTx { + self.get_deploy_tx(TestContract::storage_test().bytecode, None, TxType::L2) + } + + fn test_transient_store(&mut self, address: Address) -> Transaction { + let test_fn = TestContract::storage_test().function("testTransientStore"); + let calldata = test_fn.encode_input(&[]).unwrap(); + self.get_l2_tx_for_execute( + Execute { + contract_address: Some(address), + calldata, + value: 0.into(), + factory_deps: vec![], + }, + None, + ) + } + + fn assert_transient_value(&mut self, address: Address, expected: U256) -> Transaction { + let assert_fn = TestContract::storage_test().function("assertTValue"); + let calldata = assert_fn.encode_input(&[Token::Uint(expected)]).unwrap(); + self.get_l2_tx_for_execute( + Execute { + contract_address: Some(address), + calldata, + value: 0.into(), + factory_deps: vec![], + }, + None, + ) + } + + fn deploy_precompiles_test(&mut self) -> DeployContractsTx { + self.get_deploy_tx(TestContract::precompiles_test().bytecode, None, TxType::L2) + } + + fn test_decommit( + &mut self, + address: Address, + bytecode_hash: H256, + expected_keccak_hash: H256, + ) -> Transaction { + let assert_fn = TestContract::precompiles_test().function("callCodeOracle"); + let calldata = assert_fn.encode_input(&[ + Token::FixedBytes(bytecode_hash.0.to_vec()), + Token::FixedBytes(expected_keccak_hash.0.to_vec()), + ]); + self.get_l2_tx_for_execute( + Execute { + contract_address: Some(address), + calldata: calldata.unwrap(), + value: 0.into(), + factory_deps: vec![], + }, + None, + ) + } } pub fn mock_loadnext_gas_burn_calldata(gas: u32) -> Vec { - let loadnext_contract = get_loadnext_contract(); - let contract_function = loadnext_contract.contract.function("burnGas").unwrap(); + let contract_function = TestContract::load_test().function("burnGas"); let params = vec![Token::Uint(U256::from(gas))]; contract_function .encode_input(¶ms) diff --git a/core/node/state_keeper/src/io/mempool.rs b/core/node/state_keeper/src/io/mempool.rs index 370d46fd544c..991ecee699c3 100644 --- a/core/node/state_keeper/src/io/mempool.rs +++ b/core/node/state_keeper/src/io/mempool.rs @@ -20,8 +20,6 @@ use zksync_types::{ utils::display_timestamp, Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, Transaction, H256, U256, }; -// TODO (SMA-1206): use seconds instead of milliseconds. 
-use zksync_utils::time::millis_since_epoch; use zksync_vm_executor::storage::L1BatchParamsProvider; use crate::{ @@ -36,6 +34,7 @@ use crate::{ IoSealCriteria, L2BlockMaxPayloadSizeSealer, TimeoutSealer, UnexecutableReason, }, updates::UpdatesManager, + utils::millis_since_epoch, MempoolGuard, }; @@ -531,9 +530,9 @@ impl MempoolIO { #[cfg(test)] mod tests { use tokio::time::timeout_at; - use zksync_utils::time::seconds_since_epoch; use super::*; + use crate::tests::seconds_since_epoch; // This test defensively uses large deadlines in order to account for tests running in parallel etc. #[tokio::test] diff --git a/core/node/state_keeper/src/io/persistence.rs b/core/node/state_keeper/src/io/persistence.rs index 8bfd812c8a1f..d8fd99bfc95d 100644 --- a/core/node/state_keeper/src/io/persistence.rs +++ b/core/node/state_keeper/src/io/persistence.rs @@ -7,8 +7,7 @@ use async_trait::async_trait; use tokio::sync::{mpsc, oneshot}; use zksync_dal::{ConnectionPool, Core, CoreDal}; use zksync_shared_metrics::{BlockStage, APP_METRICS}; -use zksync_types::{writes::TreeWrite, Address, ProtocolVersionId}; -use zksync_utils::u256_to_h256; +use zksync_types::{u256_to_h256, writes::TreeWrite, Address, ProtocolVersionId}; use crate::{ io::{ @@ -387,10 +386,9 @@ mod tests { use zksync_multivm::interface::{FinishedL1Batch, VmExecutionMetrics}; use zksync_node_genesis::{insert_genesis_batch, GenesisParams}; use zksync_types::{ - api::TransactionStatus, block::BlockGasCount, writes::StateDiffRecord, L1BatchNumber, - L2BlockNumber, StorageLogKind, H256, U256, + api::TransactionStatus, block::BlockGasCount, h256_to_u256, writes::StateDiffRecord, + L1BatchNumber, L2BlockNumber, StorageLogKind, H256, U256, }; - use zksync_utils::h256_to_u256; use super::*; use crate::{ diff --git a/core/node/state_keeper/src/io/seal_logic/l2_block_seal_subtasks.rs b/core/node/state_keeper/src/io/seal_logic/l2_block_seal_subtasks.rs index 53871c54a19f..a6356a838602 100644 --- a/core/node/state_keeper/src/io/seal_logic/l2_block_seal_subtasks.rs +++ b/core/node/state_keeper/src/io/seal_logic/l2_block_seal_subtasks.rs @@ -5,11 +5,10 @@ use zksync_dal::{Connection, Core, CoreDal}; use zksync_multivm::interface::VmEvent; use zksync_system_constants::{CONTRACT_DEPLOYER_ADDRESS, L2_NATIVE_TOKEN_VAULT_ADDRESS}; use zksync_types::{ - ethabi, + ethabi, h256_to_address, tokens::{TokenInfo, TokenMetadata}, Address, L2BlockNumber, H256, }; -use zksync_utils::h256_to_account_address; use crate::{ io::seal_logic::SealStrategy, @@ -28,9 +27,9 @@ fn extract_added_tokens( event.address == CONTRACT_DEPLOYER_ADDRESS && event.indexed_topics.len() == 4 && event.indexed_topics[0] == VmEvent::DEPLOY_EVENT_SIGNATURE - && h256_to_account_address(&event.indexed_topics[1]) == l2_token_deployer_addr + && h256_to_address(&event.indexed_topics[1]) == l2_token_deployer_addr }) - .map(|event| h256_to_account_address(&event.indexed_topics[3])); + .map(|event| h256_to_address(&event.indexed_topics[3])); extract_added_token_info_from_addresses(all_generated_events, deployed_tokens) } @@ -73,7 +72,7 @@ fn extract_added_token_info_from_addresses( || event.indexed_topics[0] == *BRIDGE_INITIALIZATION_SIGNATURE_OLD) }) .map(|event| { - let l1_token_address = h256_to_account_address(&event.indexed_topics[1]); + let l1_token_address = h256_to_address(&event.indexed_topics[1]); let mut dec_ev = ethabi::decode( &[ ethabi::ParamType::String, @@ -467,11 +466,11 @@ mod tests { use zksync_types::{ block::L2BlockHeader, commitment::PubdataParams, + h256_to_u256, 
l2_to_l1_log::{L2ToL1Log, UserL2ToL1Log}, AccountTreeId, Address, L1BatchNumber, ProtocolVersionId, StorageKey, StorageLog, StorageLogKind, StorageLogWithPreviousValue, }; - use zksync_utils::h256_to_u256; use super::*; use crate::updates::L2BlockUpdates; diff --git a/core/node/state_keeper/src/io/seal_logic/mod.rs b/core/node/state_keeper/src/io/seal_logic/mod.rs index 7f05bda7a6f5..419413e127d3 100644 --- a/core/node/state_keeper/src/io/seal_logic/mod.rs +++ b/core/node/state_keeper/src/io/seal_logic/mod.rs @@ -22,11 +22,11 @@ use zksync_types::{ helpers::unix_timestamp_ms, l2_to_l1_log::UserL2ToL1Log, tx::IncludedTxLocation, + u256_to_h256, utils::display_timestamp, Address, BloomInput, ExecuteTransactionCommon, ProtocolVersionId, StorageKey, StorageLog, Transaction, H256, }; -use zksync_utils::u256_to_h256; use crate::{ io::seal_logic::l2_block_seal_subtasks::L2BlockSealProcess, diff --git a/core/node/state_keeper/src/io/tests/mod.rs b/core/node/state_keeper/src/io/tests/mod.rs index 7196236475df..5a44bf71ad39 100644 --- a/core/node/state_keeper/src/io/tests/mod.rs +++ b/core/node/state_keeper/src/io/tests/mod.rs @@ -14,23 +14,20 @@ use zksync_node_test_utils::prepare_recovery_snapshot; use zksync_system_constants::KNOWN_CODES_STORAGE_ADDRESS; use zksync_types::{ block::{BlockGasCount, L2BlockHasher}, + bytecode::BytecodeHash, commitment::{L1BatchCommitmentMode, PubdataParams}, fee_model::{BatchFeeInput, PubdataIndependentBatchFeeModelInput}, l2::L2Tx, AccountTreeId, Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersion, ProtocolVersionId, StorageKey, TransactionTimeRangeConstraint, H256, U256, }; -use zksync_utils::{ - bytecode::{hash_bytecode, hash_evm_bytecode}, - time::seconds_since_epoch, -}; use self::tester::Tester; use crate::{ io::{seal_logic::l2_block_seal_subtasks::L2BlockSealProcess, StateKeeperIO}, mempool_actor::l2_tx_filter, testonly::BASE_SYSTEM_CONTRACTS, - tests::{create_execution_result, create_transaction, Query}, + tests::{create_execution_result, create_transaction, seconds_since_epoch, Query}, updates::{L2BlockSealCommand, L2BlockUpdates, UpdatesManager}, StateKeeperOutputHandler, StateKeeperPersistence, }; @@ -441,13 +438,19 @@ async fn processing_dynamic_factory_deps_when_sealing_l2_block() { let static_factory_deps: Vec<_> = (0_u8..10) .map(|byte| { let era_bytecode = vec![byte; 32]; - (hash_bytecode(&era_bytecode), era_bytecode) + ( + BytecodeHash::for_bytecode(&era_bytecode).value(), + era_bytecode, + ) }) .collect(); let dynamic_factory_deps: Vec<_> = (0_u8..10) .map(|byte| { let evm_bytecode = vec![byte; 96]; - (hash_evm_bytecode(&evm_bytecode), evm_bytecode) + ( + BytecodeHash::for_evm_bytecode(&evm_bytecode).value(), + evm_bytecode, + ) }) .collect(); let mut all_factory_deps = static_factory_deps.clone(); diff --git a/core/node/state_keeper/src/mempool_actor.rs b/core/node/state_keeper/src/mempool_actor.rs index 8e9d674f8787..fea1fcf89291 100644 --- a/core/node/state_keeper/src/mempool_actor.rs +++ b/core/node/state_keeper/src/mempool_actor.rs @@ -171,7 +171,9 @@ async fn get_transaction_nonces( Ok(nonce_values .into_iter() .map(|(nonce_key, nonce_value)| { - let nonce = Nonce(zksync_utils::h256_to_u32(nonce_value)); + // `unwrap()` is safe by construction. 
+ let be_u32_bytes: [u8; 4] = nonce_value[28..].try_into().unwrap(); + let nonce = Nonce(u32::from_be_bytes(be_u32_bytes)); (address_by_nonce_key[&nonce_key], nonce) }) .collect()) @@ -183,8 +185,9 @@ mod tests { use zksync_node_fee_model::MockBatchFeeParamsProvider; use zksync_node_genesis::{insert_genesis_batch, GenesisParams}; use zksync_node_test_utils::create_l2_transaction; - use zksync_types::{L2BlockNumber, PriorityOpId, ProtocolVersionId, StorageLog, H256}; - use zksync_utils::u256_to_h256; + use zksync_types::{ + u256_to_h256, L2BlockNumber, PriorityOpId, ProtocolVersionId, StorageLog, H256, + }; use super::*; diff --git a/core/node/state_keeper/src/seal_criteria/mod.rs b/core/node/state_keeper/src/seal_criteria/mod.rs index c10b01e7e73d..4c6f56a6f5b7 100644 --- a/core/node/state_keeper/src/seal_criteria/mod.rs +++ b/core/node/state_keeper/src/seal_criteria/mod.rs @@ -20,18 +20,17 @@ use zksync_multivm::{ use zksync_types::{ block::BlockGasCount, utils::display_timestamp, ProtocolVersionId, Transaction, }; -use zksync_utils::time::millis_since; - -mod conditional_sealer; -pub(super) mod criteria; pub use self::conditional_sealer::{ConditionalSealer, NoopSealer, SequencerSealer}; -use super::{ +use crate::{ metrics::AGGREGATION_METRICS, updates::UpdatesManager, - utils::{gas_count_from_tx_and_metrics, gas_count_from_writes}, + utils::{gas_count_from_tx_and_metrics, gas_count_from_writes, millis_since}, }; +mod conditional_sealer; +pub(super) mod criteria; + fn halt_as_metric_label(halt: &Halt) -> &'static str { match halt { Halt::ValidationFailed(_) => "ValidationFailed", @@ -278,10 +277,10 @@ impl L2BlockMaxPayloadSizeSealer { #[cfg(test)] mod tests { - use zksync_utils::time::seconds_since_epoch; - use super::*; - use crate::tests::{create_execution_result, create_transaction, create_updates_manager}; + use crate::tests::{ + create_execution_result, create_transaction, create_updates_manager, seconds_since_epoch, + }; fn apply_tx_to_manager(tx: Transaction, manager: &mut UpdatesManager) { manager.extend_from_executed_transaction( diff --git a/core/node/state_keeper/src/testonly/mod.rs b/core/node/state_keeper/src/testonly/mod.rs index b0f641ccbc1a..3da666628b1b 100644 --- a/core/node/state_keeper/src/testonly/mod.rs +++ b/core/node/state_keeper/src/testonly/mod.rs @@ -12,13 +12,12 @@ use zksync_multivm::interface::{ VmExecutionResultAndLogs, }; use zksync_state::OwnedStorage; -use zksync_test_account::Account; use zksync_types::{ - commitment::PubdataParams, fee::Fee, utils::storage_key_for_standard_token_balance, - AccountTreeId, Address, Execute, L1BatchNumber, L2BlockNumber, PriorityOpId, StorageLog, - Transaction, L2_BASE_TOKEN_ADDRESS, SYSTEM_CONTEXT_MINIMAL_BASE_FEE, U256, + commitment::PubdataParams, fee::Fee, u256_to_h256, + utils::storage_key_for_standard_token_balance, AccountTreeId, Address, L1BatchNumber, + L2BlockNumber, StorageLog, Transaction, L2_BASE_TOKEN_ADDRESS, SYSTEM_CONTEXT_MINIMAL_BASE_FEE, + U256, }; -use zksync_utils::u256_to_h256; pub mod test_batch_executor; @@ -121,29 +120,3 @@ pub fn fee(gas_limit: u32) -> Fee { gas_per_pubdata_limit: U256::from(DEFAULT_GAS_PER_PUBDATA), } } - -/// Returns a valid L2 transaction. -/// Automatically increments nonce of the account. 
-pub fn l2_transaction(account: &mut Account, gas_limit: u32) -> Transaction {
-    account.get_l2_tx_for_execute(
-        Execute {
-            contract_address: Some(Address::random()),
-            calldata: vec![],
-            value: Default::default(),
-            factory_deps: vec![],
-        },
-        Some(fee(gas_limit)),
-    )
-}
-
-pub fn l1_transaction(account: &mut Account, serial_id: PriorityOpId) -> Transaction {
-    account.get_l1_tx(
-        Execute {
-            contract_address: Some(Address::random()),
-            value: Default::default(),
-            calldata: vec![],
-            factory_deps: vec![],
-        },
-        serial_id.0,
-    )
-}
diff --git a/core/node/state_keeper/src/tests/mod.rs b/core/node/state_keeper/src/tests/mod.rs
index 28e2f9886b49..ca078354c896 100644
--- a/core/node/state_keeper/src/tests/mod.rs
+++ b/core/node/state_keeper/src/tests/mod.rs
@@ -3,7 +3,7 @@ use std::{
         atomic::{AtomicBool, AtomicU64, Ordering},
         Arc,
     },
-    time::Instant,
+    time::{Instant, SystemTime, UNIX_EPOCH},
 };
 
 use tokio::sync::watch;
@@ -20,11 +20,10 @@ use zksync_types::{
     aggregated_operations::AggregatedActionType,
     block::{BlockGasCount, L2BlockExecutionData, L2BlockHasher},
     fee_model::{BatchFeeInput, PubdataIndependentBatchFeeModelInput},
-    AccountTreeId, Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, StorageKey,
-    StorageLog, StorageLogKind, StorageLogWithPreviousValue, Transaction, H256, U256,
-    ZKPORTER_IS_AVAILABLE,
+    u256_to_h256, AccountTreeId, Address, L1BatchNumber, L2BlockNumber, L2ChainId,
+    ProtocolVersionId, StorageKey, StorageLog, StorageLogKind, StorageLogWithPreviousValue,
+    Transaction, H256, U256, ZKPORTER_IS_AVAILABLE,
 };
-use zksync_utils::u256_to_h256;
 
 use crate::{
     io::PendingBatchData,
@@ -46,6 +45,13 @@ use crate::{
     ZkSyncStateKeeper,
 };
 
+pub(crate) fn seconds_since_epoch() -> u64 {
+    SystemTime::now()
+        .duration_since(UNIX_EPOCH)
+        .expect("Incorrect system time")
+        .as_secs()
+}
+
 /// Creates a mock `PendingBatchData` object containing the provided sequence of L2 blocks.
 pub(crate) fn pending_batch_data(pending_l2_blocks: Vec<L2BlockExecutionData>) -> PendingBatchData {
     PendingBatchData {
diff --git a/core/node/state_keeper/src/updates/l2_block_updates.rs b/core/node/state_keeper/src/updates/l2_block_updates.rs
index 6faa098d40a2..d258f8eeac0b 100644
--- a/core/node/state_keeper/src/updates/l2_block_updates.rs
+++ b/core/node/state_keeper/src/updates/l2_block_updates.rs
@@ -9,10 +9,10 @@ use zksync_multivm::{
 };
 use zksync_types::{
     block::{BlockGasCount, L2BlockHasher},
+    bytecode::BytecodeHash,
     l2_to_l1_log::{SystemL2ToL1Log, UserL2ToL1Log},
     L2BlockNumber, ProtocolVersionId, StorageLogWithPreviousValue, Transaction, H256,
 };
-use zksync_utils::bytecode::hash_bytecode;
 
 use crate::metrics::KEEPER_METRICS;
 
@@ -119,7 +119,12 @@ impl L2BlockUpdates {
         let factory_deps = &tx.execute.factory_deps;
         let mut tx_factory_deps: HashMap<_, _> = factory_deps
             .iter()
-            .map(|bytecode| (hash_bytecode(bytecode), bytecode.clone()))
+            .map(|bytecode| {
+                (
+                    BytecodeHash::for_bytecode(bytecode).value(),
+                    bytecode.clone(),
+                )
+            })
             .collect();
         // Ensure that *dynamic* factory deps (ones that may be created when executing EVM contracts)
         // are added into the lookup map as well.
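The `hash_bytecode`/`hash_evm_bytecode` helpers from `zksync_utils` are replaced throughout this patch by the `BytecodeHash` wrapper from `zksync_types::bytecode`. A minimal sketch of the new call shape, assuming the constructors and the `value()` accessor behave exactly as they are used in the hunks above:

```rust
use zksync_types::{bytecode::BytecodeHash, H256};

/// Builds a `(hash, bytecode)` factory-dep entry the way the hunks above do.
/// EraVM and EVM bytecodes go through different constructors, but both expose
/// the resulting `H256` via `value()`.
fn factory_dep_entry(bytecode: Vec<u8>, is_evm: bool) -> (H256, Vec<u8>) {
    let hash = if is_evm {
        BytecodeHash::for_evm_bytecode(&bytecode)
    } else {
        BytecodeHash::for_bytecode(&bytecode)
    };
    (hash.value(), bytecode)
}
```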
diff --git a/core/node/state_keeper/src/utils.rs b/core/node/state_keeper/src/utils.rs index 4240ad306251..320dd49583ed 100644 --- a/core/node/state_keeper/src/utils.rs +++ b/core/node/state_keeper/src/utils.rs @@ -1,3 +1,5 @@ +use std::time::{SystemTime, UNIX_EPOCH}; + use zksync_multivm::interface::{DeduplicatedWritesMetrics, VmExecutionMetrics}; use zksync_types::{ aggregated_operations::AggregatedActionType, block::BlockGasCount, ExecuteTransactionCommon, @@ -86,3 +88,15 @@ pub(super) fn gas_count_from_writes( execute: 0, } } + +// TODO (SMA-1206): use seconds instead of milliseconds. +pub(super) fn millis_since_epoch() -> u128 { + SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("Incorrect system time") + .as_millis() +} + +pub(super) fn millis_since(since: u64) -> u64 { + (millis_since_epoch() - since as u128 * 1000) as u64 +} diff --git a/core/node/test_utils/Cargo.toml b/core/node/test_utils/Cargo.toml index 6df100c51a7d..fd657c7d82c0 100644 --- a/core/node/test_utils/Cargo.toml +++ b/core/node/test_utils/Cargo.toml @@ -17,4 +17,3 @@ zksync_contracts.workspace = true zksync_merkle_tree.workspace = true zksync_system_constants.workspace = true zksync_vm_interface.workspace = true -zksync_utils.workspace = true diff --git a/core/node/test_utils/src/lib.rs b/core/node/test_utils/src/lib.rs index 2b446fff12c5..8b65be95b4fc 100644 --- a/core/node/test_utils/src/lib.rs +++ b/core/node/test_utils/src/lib.rs @@ -102,6 +102,7 @@ pub fn create_l1_batch_metadata(number: u32) -> L1BatchMetadata { local_root: Some(H256::zero()), aggregation_root: Some(H256::zero()), da_inclusion_data: Some(vec![]), + da_blob_id: Some(vec![]), } } @@ -226,7 +227,7 @@ impl Snapshot { factory_deps: [&contracts.bootloader, &contracts.default_aa] .into_iter() .chain(contracts.evm_emulator.as_ref()) - .map(|c| (c.hash, zksync_utils::be_words_to_bytes(&c.code))) + .map(|c| (c.hash, c.code.clone())) .collect(), storage_logs, } diff --git a/core/node/vm_runner/Cargo.toml b/core/node/vm_runner/Cargo.toml index 9c235ad6b291..333647b64367 100644 --- a/core/node/vm_runner/Cargo.toml +++ b/core/node/vm_runner/Cargo.toml @@ -17,7 +17,6 @@ zksync_dal.workspace = true zksync_contracts.workspace = true zksync_state.workspace = true zksync_storage.workspace = true -zksync_utils.workspace = true zksync_prover_interface.workspace = true zksync_object_store.workspace = true zksync_vm_executor.workspace = true @@ -36,7 +35,7 @@ vise.workspace = true [dev-dependencies] zksync_node_test_utils.workspace = true zksync_node_genesis.workspace = true -zksync_test_account.workspace = true +zksync_test_contracts.workspace = true assert_matches.workspace = true backon.workspace = true futures = { workspace = true, features = ["compat"] } diff --git a/core/node/vm_runner/src/impls/bwip.rs b/core/node/vm_runner/src/impls/bwip.rs index a2cf126f5499..5d63d09b5caf 100644 --- a/core/node/vm_runner/src/impls/bwip.rs +++ b/core/node/vm_runner/src/impls/bwip.rs @@ -1,4 +1,7 @@ -use std::{collections::HashSet, sync::Arc}; +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, +}; use anyhow::anyhow; use async_trait::async_trait; @@ -8,10 +11,9 @@ use zksync_object_store::ObjectStore; use zksync_prover_interface::inputs::VMRunWitnessInputData; use zksync_state::OwnedStorage; use zksync_types::{ - block::StorageOracleInfo, witness_block_state::WitnessStorageState, L1BatchNumber, L2ChainId, - H256, + block::StorageOracleInfo, h256_to_u256, u256_to_h256, witness_block_state::WitnessStorageState, + L1BatchNumber, L2ChainId, H256, }; -use 
zksync_utils::{bytes_to_chunks, h256_to_u256, u256_to_h256};
 use zksync_vm_interface::{executor::BatchExecutorFactory, L1BatchEnv, L2BlockEnv, SystemEnv};
 
 use crate::{
@@ -224,7 +226,6 @@ async fn get_updates_manager_witness_input_data(
         .get_sealed_factory_dep(default_aa)
         .await?
         .ok_or_else(|| anyhow!("Default account bytecode should exist"))?;
-    let account_bytecode = bytes_to_chunks(&account_bytecode_bytes);
 
     let used_contract_hashes = &output.batch.final_execution_state.used_contract_hashes;
     let hashes: HashSet<H256> = used_contract_hashes
@@ -238,7 +239,7 @@ async fn get_updates_manager_witness_input_data(
         .get_factory_deps(&hashes)
         .await;
     if used_contract_hashes.contains(&account_code_hash) {
-        used_bytecodes.insert(account_code_hash, account_bytecode);
+        used_bytecodes.insert(account_code_hash, account_bytecode_bytes);
     }
 
     let evm_emulator_code_hash = if let Some(evm_emulator) = evm_emulator {
@@ -249,7 +250,6 @@ async fn get_updates_manager_witness_input_data(
             .get_sealed_factory_dep(evm_emulator)
             .await?
             .ok_or_else(|| anyhow!("EVM emulator bytecode should exist"))?;
-        let evm_emulator_bytecode = bytes_to_chunks(&evm_emulator_bytecode);
         used_bytecodes.insert(evm_emulator_code_hash, evm_emulator_bytecode);
     }
     Some(evm_emulator_code_hash)
@@ -266,7 +266,10 @@ async fn get_updates_manager_witness_input_data(
 
     Ok(VMRunWitnessInputData {
         l1_batch_number,
-        used_bytecodes,
+        used_bytecodes: used_bytecodes
+            .into_iter()
+            .map(|(hash, code)| (hash, bytes_to_chunks(&code)))
+            .collect(),
         initial_heap_content,
         protocol_version: system_env.version,
         bootloader_code,
@@ -278,6 +281,13 @@ async fn get_updates_manager_witness_input_data(
     })
 }
 
+fn bytes_to_chunks(bytes: &[u8]) -> Vec<[u8; 32]> {
+    bytes
+        .chunks(32)
+        .map(|chunk| chunk.try_into().unwrap())
+        .collect()
+}
+
 #[tracing::instrument(skip_all)]
 async fn assert_database_witness_input_data(
     connection: &mut Connection<'_, Core>,
@@ -305,7 +315,6 @@ async fn assert_database_witness_input_data(
         .await
         .expect("Failed fetching default account bytecode from DB")
         .expect("Default account bytecode should exist");
-    let account_bytecode = bytes_to_chunks(&account_bytecode_bytes);
 
     let hashes: HashSet<H256> = block_header
         .used_contract_hashes
@@ -322,7 +331,7 @@ async fn assert_database_witness_input_data(
         .used_contract_hashes
         .contains(&account_code_hash)
     {
-        used_bytecodes.insert(account_code_hash, account_bytecode);
+        used_bytecodes.insert(account_code_hash, account_bytecode_bytes);
     }
 
     assert_eq!(
@@ -331,6 +340,10 @@ async fn assert_database_witness_input_data(
         "{} factory deps are not found in DB",
         hashes.len() - used_bytecodes.len()
     );
+    let used_bytecodes: HashMap<_, _> = used_bytecodes
+        .into_iter()
+        .map(|(hash, code)| (hash, bytes_to_chunks(&code)))
+        .collect();
 
     let StorageOracleInfo {
         storage_refunds,
diff --git a/core/node/vm_runner/src/tests/mod.rs b/core/node/vm_runner/src/tests/mod.rs
index a3438d5a4e11..6bd6d662cfa9 100644
--- a/core/node/vm_runner/src/tests/mod.rs
+++ b/core/node/vm_runner/src/tests/mod.rs
@@ -9,17 +9,18 @@ use zksync_node_test_utils::{
     create_l1_batch_metadata, create_l2_block, execute_l2_transaction,
     l1_batch_metadata_to_commitment_artifacts,
 };
-use zksync_test_account::Account;
+use zksync_test_contracts::Account;
 use zksync_types::{
     block::{L1BatchHeader, L2BlockHasher},
+    bytecode::BytecodeHash,
     fee::Fee,
-    get_intrinsic_constants,
+    get_intrinsic_constants, h256_to_u256,
     l2::L2Tx,
+    u256_to_h256,
     utils::storage_key_for_standard_token_balance,
     AccountTreeId, Address, Execute, L1BatchNumber, L2BlockNumber,
ProtocolVersionId, StorageKey, StorageLog, StorageLogKind, StorageValue, H160, H256, L2_BASE_TOKEN_ADDRESS, U256, }; -use zksync_utils::{bytecode::hash_bytecode, h256_to_u256, u256_to_h256}; use zksync_vm_interface::{ tracer::ValidationTraces, L1BatchEnv, L2BlockEnv, SystemEnv, TransactionExecutionMetrics, }; @@ -326,7 +327,7 @@ async fn store_l1_batches( header.used_contract_hashes = genesis_params .system_contracts() .iter() - .map(|contract| hash_bytecode(&contract.bytecode)) + .map(|contract| BytecodeHash::for_bytecode(&contract.bytecode).value()) .chain([genesis_params.base_system_contracts().hashes().default_aa]) .chain(genesis_params.base_system_contracts().hashes().evm_emulator) .map(h256_to_u256) diff --git a/core/node/vm_runner/src/tests/process.rs b/core/node/vm_runner/src/tests/process.rs index 8e9bd66f3c91..cd77bca79c1a 100644 --- a/core/node/vm_runner/src/tests/process.rs +++ b/core/node/vm_runner/src/tests/process.rs @@ -5,7 +5,7 @@ use test_casing::test_casing; use tokio::sync::{watch, RwLock}; use zksync_dal::{ConnectionPool, Core}; use zksync_node_genesis::{insert_genesis_batch, GenesisParams}; -use zksync_test_account::Account; +use zksync_test_contracts::Account; use zksync_types::{L1BatchNumber, L2ChainId}; use zksync_vm_executor::batch::MainBatchExecutorFactory; diff --git a/core/node/vm_runner/src/tests/storage.rs b/core/node/vm_runner/src/tests/storage.rs index 838b469f0ef3..8727eecbcd0a 100644 --- a/core/node/vm_runner/src/tests/storage.rs +++ b/core/node/vm_runner/src/tests/storage.rs @@ -10,7 +10,7 @@ use tokio::{ use zksync_dal::{ConnectionPool, Core, CoreDal}; use zksync_node_genesis::{insert_genesis_batch, GenesisParams}; use zksync_state::{interface::ReadStorage, OwnedStorage, PostgresStorage}; -use zksync_test_account::Account; +use zksync_test_contracts::Account; use zksync_types::{AccountTreeId, L1BatchNumber, L2ChainId, StorageKey}; use crate::{ diff --git a/core/tests/loadnext/Cargo.toml b/core/tests/loadnext/Cargo.toml index adb5c9eca429..91f987035acf 100644 --- a/core/tests/loadnext/Cargo.toml +++ b/core/tests/loadnext/Cargo.toml @@ -9,6 +9,7 @@ license.workspace = true keywords.workspace = true categories.workspace = true publish = false +exclude = ["./dump"] [dependencies] zksync_types.workspace = true @@ -17,7 +18,7 @@ zksync_eth_signer.workspace = true zksync_web3_decl.workspace = true zksync_eth_client.workspace = true zksync_config.workspace = true -zksync_contracts.workspace = true +zksync_test_contracts.workspace = true zksync_system_constants.workspace = true zksync_vlog.workspace = true diff --git a/core/tests/loadnext/README.md b/core/tests/loadnext/README.md index 59288a7160ec..cc873c598c18 100644 --- a/core/tests/loadnext/README.md +++ b/core/tests/loadnext/README.md @@ -1,4 +1,4 @@ -# Loadnext: loadtest for ZKsync +# Loadnext: load test for ZKsync Loadnext is a utility for random stress-testing the ZKsync server. It is capable of simulating the behavior of many independent users of ZKsync network, who are sending quasi-random requests to the server. @@ -27,21 +27,21 @@ It: ## Transactions Parameters -The smart contract that is used for every l2 transaction can be found here: -`etc/contracts-test-data/contracts/loadnext/loadnext_contract.sol`. +The smart contract that is used for every l2 transaction can be found in the [`zksync_test_contracts`] crate. 
The `execute` function of the contract has the following parameters: -``` - function execute(uint reads, uint writes, uint hashes, uint events, uint max_recursion, uint deploys) external returns(uint) { +```solidity +function execute(uint reads, uint initialWrites, uint repeatedWrites, uint hashes, uint events, uint maxRecursion, uint deploys) external returns(uint) { ``` which correspond to the following configuration options: -``` +```rust pub struct LoadnextContractExecutionParams { pub reads: usize, - pub writes: usize, + pub initial_writes: usize, + pub repeated_writes: usize, pub events: usize, pub hashes: usize, pub recursive_calls: usize, @@ -51,8 +51,9 @@ pub struct LoadnextContractExecutionParams { For example, to simulate an average transaction on mainnet, one could do: -``` -CONTRACT_EXECUTION_PARAMS_WRITES=2 +```env +CONTRACT_EXECUTION_PARAMS_INITIAL_WRITES=2 +CONTRACT_EXECUTION_PARAMS_REPEATED_WRITES=2 CONTRACT_EXECUTION_PARAMS_READS=6 CONTRACT_EXECUTION_PARAMS_EVENTS=2 CONTRACT_EXECUTION_PARAMS_HASHES=10 @@ -62,8 +63,9 @@ CONTRACT_EXECUTION_PARAMS_DEPLOYS=0 Similarly, to simulate a lightweight transaction: -``` -CONTRACT_EXECUTION_PARAMS_WRITES=0 +```env +CONTRACT_EXECUTION_PARAMS_INITIAL_WRITES=0 +CONTRACT_EXECUTION_PARAMS_REPEATED_WRITES=0 CONTRACT_EXECUTION_PARAMS_READS=0 CONTRACT_EXECUTION_PARAMS_EVENTS=0 CONTRACT_EXECUTION_PARAMS_HASHES=0 @@ -86,10 +88,11 @@ Example invocation: - `MASTER_WALLET_PK` needs to be set to the private key of the master account. - `MAIN_TOKEN` needs to be set to the address of the token to be used for the loadtest. -``` +```shell cargo build -CONTRACT_EXECUTION_PARAMS_WRITES=2 \ +CONTRACT_EXECUTION_PARAMS_INITIAL_WRITES=2 \ +CONTRACT_EXECUTION_PARAMS_REPEATED_WRITES=2 \ CONTRACT_EXECUTION_PARAMS_READS=6 \ CONTRACT_EXECUTION_PARAMS_EVENTS=2 \ CONTRACT_EXECUTION_PARAMS_HASHES=10 \ @@ -110,3 +113,5 @@ MASTER_WALLET_PK="..." \ MAIN_TOKEN="..." \ cargo run --bin loadnext ``` + +[`zksync_test_contracts`]: ../../lib/test_contracts diff --git a/core/tests/loadnext/src/account/api_request_executor.rs b/core/tests/loadnext/src/account/api_request_executor.rs index 20c4bc2f5970..4733b4c09206 100644 --- a/core/tests/loadnext/src/account/api_request_executor.rs +++ b/core/tests/loadnext/src/account/api_request_executor.rs @@ -52,8 +52,7 @@ impl AccountLifespan { err => RpcError::Custom(err.to_string()), }), ApiRequestType::GetLogs => { - let topics = - random_topics(&self.wallet.test_contract.contract, &mut self.wallet.rng); + let topics = random_topics(&self.wallet.test_contract.abi, &mut self.wallet.rng); // `run_api_requests_task` checks whether the cell is initialized // at every loop iteration and skips logs action if it's not. Thus, // it's safe to unwrap it. 
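The `CONTRACT_EXECUTION_PARAMS_*` variables from the README hunk above are deserialized with `envy` (see the `config.rs` hunk later in this patch). A self-contained sketch of that mapping, with a locally declared stand-in for `LoadnextContractExecutionParams` that is assumed to derive `serde::Deserialize`:

```rust
use serde::Deserialize;

// Stand-in for `zksync_test_contracts::LoadnextContractExecutionParams`.
#[derive(Debug, Deserialize)]
struct ExecutionParams {
    reads: usize,
    initial_writes: usize,
    repeated_writes: usize,
    events: usize,
    hashes: usize,
    recursive_calls: usize,
    deploys: usize,
}

fn main() {
    // `envy` strips the prefix and matches the lowercased remainder against the
    // field names, so CONTRACT_EXECUTION_PARAMS_INITIAL_WRITES=2 fills `initial_writes`.
    std::env::set_var("CONTRACT_EXECUTION_PARAMS_READS", "6");
    std::env::set_var("CONTRACT_EXECUTION_PARAMS_INITIAL_WRITES", "2");
    std::env::set_var("CONTRACT_EXECUTION_PARAMS_REPEATED_WRITES", "2");
    std::env::set_var("CONTRACT_EXECUTION_PARAMS_EVENTS", "2");
    std::env::set_var("CONTRACT_EXECUTION_PARAMS_HASHES", "10");
    std::env::set_var("CONTRACT_EXECUTION_PARAMS_RECURSIVE_CALLS", "0");
    std::env::set_var("CONTRACT_EXECUTION_PARAMS_DEPLOYS", "0");

    let params: ExecutionParams = envy::prefixed("CONTRACT_EXECUTION_PARAMS_")
        .from_env()
        .expect("invalid CONTRACT_EXECUTION_PARAMS_* configuration");
    println!("{params:?}");
}
```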
diff --git a/core/tests/loadnext/src/account/mod.rs b/core/tests/loadnext/src/account/mod.rs
index 0f418bf12676..967970f96fb9 100644
--- a/core/tests/loadnext/src/account/mod.rs
+++ b/core/tests/loadnext/src/account/mod.rs
@@ -7,7 +7,7 @@ use std::{
 use futures::{channel::mpsc, SinkExt};
 use rand::Rng;
 use tokio::sync::RwLock;
-use zksync_contracts::test_contracts::LoadnextContractExecutionParams;
+use zksync_test_contracts::LoadnextContractExecutionParams;
 use zksync_types::{api::TransactionReceipt, Address, Nonce, H256, U256, U64};
 use zksync_web3_decl::{
     client::{Client, L2},
diff --git a/core/tests/loadnext/src/account/pubsub_executor.rs b/core/tests/loadnext/src/account/pubsub_executor.rs
index 07f45b4ae972..1b31207aab87 100644
--- a/core/tests/loadnext/src/account/pubsub_executor.rs
+++ b/core/tests/loadnext/src/account/pubsub_executor.rs
@@ -67,7 +67,7 @@ impl AccountLifespan {
         let params = match subscription_type {
             SubscriptionType::Logs => {
                 let topics = super::api_request_executor::random_topics(
-                    &self.wallet.test_contract.contract,
+                    &self.wallet.test_contract.abi,
                     &mut self.wallet.rng,
                 );
                 let contract_address = self.wallet.deployed_contract_address.get().unwrap();
diff --git a/core/tests/loadnext/src/account/tx_command_executor.rs b/core/tests/loadnext/src/account/tx_command_executor.rs
index 2a916564fd61..55e5a6b2a2d9 100644
--- a/core/tests/loadnext/src/account/tx_command_executor.rs
+++ b/core/tests/loadnext/src/account/tx_command_executor.rs
@@ -272,7 +272,7 @@ impl AccountLifespan {
 
         let mut builder = wallet
             .start_deploy_contract()
-            .bytecode(self.wallet.test_contract.bytecode.clone())
+            .bytecode(self.wallet.test_contract.bytecode.to_vec())
             .constructor_calldata(constructor_calldata);
 
         let fee = builder
@@ -329,7 +329,7 @@ impl AccountLifespan {
             U256::zero(),
             calldata,
             L1_TRANSACTION_GAS_LIMIT.into(),
-            Some(self.wallet.test_contract.factory_deps.clone()),
+            Some(self.wallet.test_contract.factory_deps()),
             None,
             None,
             Default::default(),
@@ -375,12 +375,13 @@ impl AccountLifespan {
     }
 
     fn prepare_calldata_for_loadnext_contract(&self) -> Vec<u8> {
-        let contract = &self.wallet.test_contract.contract;
+        let contract = &self.wallet.test_contract.abi;
         let function = contract.function("execute").unwrap();
         function
             .encode_input(&vec![
                 ethabi::Token::Uint(U256::from(self.contract_execution_params.reads)),
-                ethabi::Token::Uint(U256::from(self.contract_execution_params.writes)),
+                ethabi::Token::Uint(U256::from(self.contract_execution_params.initial_writes)),
+                ethabi::Token::Uint(U256::from(self.contract_execution_params.repeated_writes)),
                 ethabi::Token::Uint(U256::from(self.contract_execution_params.hashes)),
                 ethabi::Token::Uint(U256::from(self.contract_execution_params.events)),
                 ethabi::Token::Uint(U256::from(self.contract_execution_params.recursive_calls)),
@@ -401,7 +402,7 @@ impl AccountLifespan {
             .start_execute_contract()
             .calldata(calldata)
             .contract_address(contract_address)
-            .factory_deps(self.wallet.test_contract.factory_deps.clone());
+            .factory_deps(self.wallet.test_contract.factory_deps());
 
         let fee = builder
             .estimate_fee(Some(get_approval_based_paymaster_input_for_estimation(
diff --git a/core/tests/loadnext/src/account_pool.rs b/core/tests/loadnext/src/account_pool.rs
index 3fa3141553cd..6cc8d7f6949c 100644
--- a/core/tests/loadnext/src/account_pool.rs
+++ b/core/tests/loadnext/src/account_pool.rs
@@ -5,13 +5,13 @@ use once_cell::sync::OnceCell;
 use rand::Rng;
 use tokio::time::timeout;
 use zksync_eth_signer::PrivateKeySigner;
+use zksync_test_contracts::TestContract;
 use zksync_types::{Address, K256PrivateKey, L2ChainId, H256};
 use zksync_web3_decl::client::{Client, L2};
 
 use crate::{
     config::LoadtestConfig,
     corrupted_tx::CorruptedSigner,
-    fs_utils::{loadnext_contract, TestContract},
     rng::{LoadtestRng, Random},
     sdk::{signer::Signer, Wallet, ZksNamespaceClient},
 };
@@ -68,7 +68,7 @@ pub struct TestWallet {
     /// Wallet with corrupted signer.
     pub corrupted_wallet: CorruptedSyncWallet,
     /// Contract bytecode and calldata to be used for sending `Execute` transactions.
-    pub test_contract: TestContract,
+    pub test_contract: &'static TestContract,
     /// Address of the deployed contract to be used for sending
     /// `Execute` transaction.
     pub deployed_contract_address: Arc<OnceCell<Address>>,
@@ -116,7 +116,7 @@ impl AccountPool {
             anyhow::bail!("ZKsync server does not respond. Please check RPC address and whether server is launched");
         }
 
-        let test_contract = loadnext_contract(&config.test_contracts_path)?;
+        let test_contract = TestContract::load_test();
 
         let master_wallet = {
             let eth_private_key: H256 = config
@@ -166,7 +166,7 @@ impl AccountPool {
             let account = TestWallet {
                 wallet: Arc::new(wallet),
                 corrupted_wallet: Arc::new(corrupted_wallet),
-                test_contract: test_contract.clone(),
+                test_contract,
                 deployed_contract_address: deployed_contract_address.clone(),
                 rng: rng.derive(private_key_bytes),
             };
diff --git a/core/tests/loadnext/src/config.rs b/core/tests/loadnext/src/config.rs
index ab578ecfdc6b..c05bf94df04a 100644
--- a/core/tests/loadnext/src/config.rs
+++ b/core/tests/loadnext/src/config.rs
@@ -1,10 +1,9 @@
-use std::{path::PathBuf, time::Duration};
+use std::time::Duration;
 
 use serde::Deserialize;
 use tokio::sync::Semaphore;
-use zksync_contracts::test_contracts::LoadnextContractExecutionParams;
+use zksync_test_contracts::LoadnextContractExecutionParams;
 use zksync_types::{network::Network, Address, L2ChainId, H160};
-use zksync_utils::env::Workspace;
 
 use crate::fs_utils::read_tokens;
 
@@ -49,28 +48,6 @@ pub struct LoadtestConfig {
     #[serde(default = "default_main_token")]
     pub main_token: Address,
 
-    /// Path to test contracts bytecode and ABI required for sending
-    /// deploy and execute L2 transactions. Each folder in the path is expected
-    /// to have the following structure:
-    ///```ignore
-    /// .
-    /// ├── bytecode
-    /// └── abi.json
-    ///```
-    /// Contract folder names names are not restricted.
-    ///
-    /// An example:
-    ///```ignore
-    /// .
-    /// ├── erc-20
-    /// │   ├── bytecode
-    /// │   └── abi.json
-    /// └── simple-contract
-    ///     ├── bytecode
-    ///     └── abi.json
-    ///```
-    #[serde(default = "default_test_contracts_path")]
-    pub test_contracts_path: PathBuf,
     /// Limits the number of simultaneous API requests being performed at any moment of time.
     ///
     /// Setting it to:
@@ -189,12 +166,6 @@ fn default_main_token() -> H160 {
     main_token.address
 }
 
-fn default_test_contracts_path() -> PathBuf {
-    let test_contracts_path = Workspace::locate().core().join("etc/contracts-test-data");
-    tracing::info!("Test contracts path: {}", test_contracts_path.display());
-    test_contracts_path
-}
-
 fn default_sync_api_requests_limit() -> usize {
     let result = 20;
     tracing::info!("Using default SYNC_API_REQUESTS_LIMIT: {result}");
@@ -281,8 +252,9 @@ impl ExecutionConfig {
     pub fn from_env() -> Self {
         let transaction_weights =
             TransactionWeights::from_env().unwrap_or_else(default_transaction_weights);
-        let contract_execution_params = LoadnextContractExecutionParams::from_env()
-            .unwrap_or_else(default_contract_execution_params);
+        let contract_execution_params = envy::prefixed("CONTRACT_EXECUTION_PARAMS_")
+            .from_env()
+            .unwrap_or_else(|_| default_contract_execution_params());
         Self {
             transaction_weights,
             contract_execution_params,
@@ -341,16 +313,3 @@ impl RequestLimiters {
         }
     }
 }
-
-#[cfg(test)]
-mod tests {
-
-    use super::*;
-    use crate::fs_utils::loadnext_contract;
-
-    #[test]
-    fn check_read_test_contract() {
-        let test_contracts_path = default_test_contracts_path();
-        loadnext_contract(&test_contracts_path).unwrap();
-    }
-}
diff --git a/core/tests/loadnext/src/fs_utils.rs b/core/tests/loadnext/src/fs_utils.rs
index c4472a00531c..0e5107f40861 100644
--- a/core/tests/loadnext/src/fs_utils.rs
+++ b/core/tests/loadnext/src/fs_utils.rs
@@ -1,10 +1,10 @@
 //! Utilities used for reading tokens, contracts bytecode and ABI from the
 //! filesystem.
 
-use std::{fs::File, io::BufReader, path::Path};
+use std::{fs::File, io::BufReader};
 
 use serde::Deserialize;
-use zksync_types::{ethabi::Contract, network::Network, Address};
+use zksync_types::{network::Network, Address};
 use zksync_utils::env::Workspace;
 
 /// A token stored in `etc/tokens/{network}.json` files.
@@ -16,16 +16,6 @@ pub struct Token {
     pub address: Address,
 }
 
-#[derive(Debug, Clone)]
-pub struct TestContract {
-    /// Contract bytecode to be used for sending deploy transaction.
-    pub bytecode: Vec<u8>,
-    /// Contract ABI.
-    pub contract: Contract,
-
-    pub factory_deps: Vec<Vec<u8>>,
-}
-
 pub fn read_tokens(network: Network) -> anyhow::Result<Vec<Token>> {
     let home = Workspace::locate().core();
     let path = home.join(format!("etc/tokens/{network}.json"));
@@ -34,54 +24,3 @@ pub fn read_tokens(network: Network) -> anyhow::Result<Vec<Token>> {
 
     Ok(serde_json::from_reader(reader)?)
 }
-
-fn extract_bytecode(artifact: &serde_json::Value) -> anyhow::Result<Vec<u8>> {
-    let bytecode = artifact["bytecode"]
-        .as_str()
-        .ok_or_else(|| anyhow::anyhow!("Failed to parse contract bytecode from artifact",))?;
-
-    if let Some(stripped) = bytecode.strip_prefix("0x") {
-        hex::decode(stripped)
-    } else {
-        hex::decode(bytecode)
-    }
-    .map_err(|e| e.into())
-}
-
-/// Reads test contract bytecode and its ABI.
-fn read_contract_dir(path: &Path) -> anyhow::Result<TestContract> {
-    use serde_json::Value;
-
-    let mut artifact: Value =
-        serde_json::from_reader(File::open(path.join("LoadnextContract.json"))?)?;
-
-    let bytecode = extract_bytecode(&artifact)?;
-
-    let abi = artifact["abi"].take();
-    let contract: Contract = serde_json::from_value(abi)?;
-
-    let factory_dep: Value = serde_json::from_reader(File::open(path.join("Foo.json"))?)?;
-    let factory_dep_bytecode = extract_bytecode(&factory_dep)?;
-
-    anyhow::ensure!(
-        contract.functions().count() > 0,
-        "Invalid contract: no methods defined: {:?}",
-        path
-    );
-    anyhow::ensure!(
-        contract.events().count() > 0,
-        "Invalid contract: no events defined: {:?}",
-        path
-    );
-
-    Ok(TestContract {
-        bytecode,
-        contract,
-        factory_deps: vec![factory_dep_bytecode],
-    })
-}
-
-pub fn loadnext_contract(path: &Path) -> anyhow::Result<TestContract> {
-    let path = path.join("artifacts-zk/contracts/loadnext/loadnext_contract.sol");
-    read_contract_dir(&path)
-}
diff --git a/core/tests/loadnext/src/sdk/operations/deploy_contract.rs b/core/tests/loadnext/src/sdk/operations/deploy_contract.rs
index 67e877ae8efb..cac49559c468 100644
--- a/core/tests/loadnext/src/sdk/operations/deploy_contract.rs
+++ b/core/tests/loadnext/src/sdk/operations/deploy_contract.rs
@@ -1,8 +1,8 @@
 use zksync_eth_signer::EthereumSigner;
 use zksync_types::{
-    l2::L2Tx, transaction_request::PaymasterParams, Execute, Nonce, CONTRACT_DEPLOYER_ADDRESS, U256,
+    bytecode::BytecodeHash, l2::L2Tx, transaction_request::PaymasterParams, Execute, Nonce,
+    CONTRACT_DEPLOYER_ADDRESS, U256,
 };
-use zksync_utils::bytecode::hash_bytecode;
 use zksync_web3_decl::namespaces::EthNamespaceClient;
 
 use crate::sdk::{
@@ -60,7 +60,7 @@ where
             None => Nonce(self.wallet.get_nonce().await?),
         };
 
-        let main_contract_hash = hash_bytecode(&bytecode);
+        let main_contract_hash = BytecodeHash::for_bytecode(&bytecode).value();
         let execute_calldata =
             Execute::encode_deploy_params_create(Default::default(), main_contract_hash, calldata);
 
@@ -141,7 +141,7 @@ where
             .unwrap_or_default();
         let calldata = self.calldata.clone().unwrap_or_default();
 
-        let main_contract_hash = hash_bytecode(&bytecode);
+        let main_contract_hash = BytecodeHash::for_bytecode(&bytecode).value();
         let mut factory_deps = self.factory_deps.clone().unwrap_or_default();
         factory_deps.push(bytecode);
         let l2_tx = L2Tx::new(
diff --git a/core/tests/ts-integration/package.json b/core/tests/ts-integration/package.json
index 8e5c0cf7470e..ee0fa9c99848 100644
--- a/core/tests/ts-integration/package.json
+++ b/core/tests/ts-integration/package.json
@@ -17,6 +17,7 @@
         "@matterlabs/hardhat-zksync-solc": "^1.2.4",
         "@matterlabs/hardhat-zksync-vyper": "^1.1.0",
         "@nomiclabs/hardhat-vyper": "^3.0.6",
+        "@openzeppelin/contracts": "^4.8.0",
         "@types/jest": "^29.0.3",
        "@types/node": "^18.19.15",
         "@types/node-fetch": "^2.5.7",
diff --git a/core/tests/ts-integration/tests/contracts.test.ts b/core/tests/ts-integration/tests/contracts.test.ts
index b17c2b335989..de1c632ab9cc 100644
--- a/core/tests/ts-integration/tests/contracts.test.ts
+++ b/core/tests/ts-integration/tests/contracts.test.ts
@@ -423,35 +423,6 @@ describe('Smart contract behavior checks', () => {
         expect(receipt.status).toEqual(1);
     });
 
-    test('Should check transient storage', async () => {
-        const artifact = require(`${
-            testMaster.environment().pathToHome
-        }/etc/contracts-test-data/artifacts-zk/contracts/storage/storage.sol/StorageTester.json`);
-        const contractFactory = new zksync.ContractFactory(artifact.abi, artifact.bytecode, alice);
-
const storageContract = (await contractFactory.deploy()) as zksync.Contract; - await storageContract.waitForDeployment(); - // Tests transient storage, see contract code for details. - await expect(storageContract.testTransientStore()).toBeAccepted([]); - // Checks that transient storage is cleaned up after each tx. - await expect(storageContract.assertTValue(0)).toBeAccepted([]); - }); - - test('Should check code oracle works', async () => { - // Deploy contract that calls CodeOracle. - const artifact = require(`${ - testMaster.environment().pathToHome - }/etc/contracts-test-data/artifacts-zk/contracts/precompiles/precompiles.sol/Precompiles.json`); - const contractFactory = new zksync.ContractFactory(artifact.abi, artifact.bytecode, alice); - const contract = (await contractFactory.deploy()) as zksync.Contract; - await contract.waitForDeployment(); - - // Check that CodeOracle can decommit code of just deployed contract. - const versionedHash = zksync.utils.hashBytecode(artifact.bytecode); - const expectedBytecodeHash = ethers.keccak256(artifact.bytecode); - - await expect(contract.callCodeOracle(versionedHash, expectedBytecodeHash)).toBeAccepted([]); - }); - afterAll(async () => { await testMaster.deinitialize(); }); diff --git a/core/tests/vm-benchmark/Cargo.toml b/core/tests/vm-benchmark/Cargo.toml index 892bcf1c1051..eb4a5a239252 100644 --- a/core/tests/vm-benchmark/Cargo.toml +++ b/core/tests/vm-benchmark/Cargo.toml @@ -7,9 +7,9 @@ publish = false [dependencies] zksync_contracts.workspace = true +zksync_test_contracts.workspace = true zksync_multivm.workspace = true zksync_types.workspace = true -zksync_utils.workspace = true zksync_vlog.workspace = true zksync_vm2.workspace = true diff --git a/core/tests/vm-benchmark/src/lib.rs b/core/tests/vm-benchmark/src/lib.rs index 9c4f547c1de2..dbe2fdb808db 100644 --- a/core/tests/vm-benchmark/src/lib.rs +++ b/core/tests/vm-benchmark/src/lib.rs @@ -70,3 +70,33 @@ pub const BYTECODES: &[Bytecode] = &[ include_bytecode!(slot_hash_collision), include_bytecode!(write_and_decode), ]; + +#[cfg(test)] +mod tests { + use zksync_multivm::interface::{ExecutionResult, VmRevertReason}; + + use super::*; + + #[test] + fn deploy_transactions_are_valid() { + for bytecode in BYTECODES { + println!("Testing bytecode {}", bytecode.name); + + let mut vm = BenchmarkingVm::new(); + let res = vm.run_transaction(&bytecode.deploy_tx()); + match &res.result { + ExecutionResult::Success { .. 
} => { /* OK */ }
+                ExecutionResult::Revert {
+                    output:
+                        VmRevertReason::Unknown {
+                            function_selector,
+                            data,
+                        },
+                } if function_selector.is_empty() && data.is_empty() => {
+                    // out of gas; this is expected for most fuzzed bytecodes
+                }
+                _ => panic!("Unexpected execution result: {:?}", res.result),
+            }
+        }
+    }
+}
diff --git a/core/tests/vm-benchmark/src/transaction.rs b/core/tests/vm-benchmark/src/transaction.rs
index d5fedfa4df94..5c1824e6ffa2 100644
--- a/core/tests/vm-benchmark/src/transaction.rs
+++ b/core/tests/vm-benchmark/src/transaction.rs
@@ -1,66 +1,30 @@
 use once_cell::sync::Lazy;
-pub use zksync_contracts::test_contracts::LoadnextContractExecutionParams as LoadTestParams;
-use zksync_contracts::{deployer_contract, TestContract};
 use zksync_multivm::utils::get_max_gas_per_pubdata_byte;
+pub use zksync_test_contracts::LoadnextContractExecutionParams as LoadTestParams;
+use zksync_test_contracts::{Account, TestContract};
 use zksync_types::{
-    ethabi::{encode, Token},
-    fee::Fee,
-    l2::L2Tx,
-    utils::deployed_address_create,
-    Address, K256PrivateKey, L2ChainId, Nonce, ProtocolVersionId, Transaction,
-    CONTRACT_DEPLOYER_ADDRESS, H256, U256,
+    ethabi::Token, fee::Fee, l2::L2Tx, utils::deployed_address_create, Address, Execute,
+    K256PrivateKey, L2ChainId, Nonce, ProtocolVersionId, Transaction, H256, U256,
 };
-use zksync_utils::bytecode::hash_bytecode;
 
-const LOAD_TEST_MAX_READS: usize = 100;
+const LOAD_TEST_MAX_READS: usize = 3000;
 
 pub(crate) static PRIVATE_KEY: Lazy<K256PrivateKey> =
     Lazy::new(|| K256PrivateKey::from_bytes(H256([42; 32])).expect("invalid key bytes"));
 static LOAD_TEST_CONTRACT_ADDRESS: Lazy<Address>
= Lazy::new(|| deployed_address_create(PRIVATE_KEY.address(), 0.into())); -static LOAD_TEST_CONTRACT: Lazy = Lazy::new(zksync_contracts::get_loadnext_contract); - -static CREATE_FUNCTION_SIGNATURE: Lazy<[u8; 4]> = Lazy::new(|| { - deployer_contract() - .function("create") - .unwrap() - .short_signature() -}); - pub fn get_deploy_tx(code: &[u8]) -> Transaction { get_deploy_tx_with_gas_limit(code, 30_000_000, 0) } pub fn get_deploy_tx_with_gas_limit(code: &[u8], gas_limit: u32, nonce: u32) -> Transaction { - let mut salt = vec![0_u8; 32]; - salt[28..32].copy_from_slice(&nonce.to_be_bytes()); - let params = [ - Token::FixedBytes(salt), - Token::FixedBytes(hash_bytecode(code).0.to_vec()), - Token::Bytes([].to_vec()), - ]; - let calldata = CREATE_FUNCTION_SIGNATURE - .iter() - .cloned() - .chain(encode(¶ms)) - .collect(); - - let mut signed = L2Tx::new_signed( - Some(CONTRACT_DEPLOYER_ADDRESS), - calldata, - Nonce(nonce), - tx_fee(gas_limit), - U256::zero(), - L2ChainId::from(270), - &PRIVATE_KEY, - vec![code.to_vec()], // maybe not needed? - Default::default(), - ) - .expect("should create a signed execute transaction"); - - signed.set_input(H256::random().as_bytes().to_vec(), H256::random()); - signed.into() + let mut salt = H256::zero(); + salt.0[28..32].copy_from_slice(&nonce.to_be_bytes()); + let execute = Execute::for_deploy(salt, code.to_vec(), &[]); + let mut account = Account::new(PRIVATE_KEY.clone()); + account.nonce = Nonce(nonce); + account.get_l2_tx_for_execute(execute, Some(tx_fee(gas_limit))) } fn tx_fee(gas_limit: u32) -> Fee { @@ -94,35 +58,8 @@ pub fn get_transfer_tx(nonce: u32) -> Transaction { pub fn get_load_test_deploy_tx() -> Transaction { let calldata = [Token::Uint(LOAD_TEST_MAX_READS.into())]; - let params = [ - Token::FixedBytes(vec![0_u8; 32]), - Token::FixedBytes(hash_bytecode(&LOAD_TEST_CONTRACT.bytecode).0.to_vec()), - Token::Bytes(encode(&calldata)), - ]; - let create_calldata = CREATE_FUNCTION_SIGNATURE - .iter() - .cloned() - .chain(encode(¶ms)) - .collect(); - - let mut factory_deps = LOAD_TEST_CONTRACT.factory_deps.clone(); - factory_deps.push(LOAD_TEST_CONTRACT.bytecode.clone()); - - let mut signed = L2Tx::new_signed( - Some(CONTRACT_DEPLOYER_ADDRESS), - create_calldata, - Nonce(0), - tx_fee(100_000_000), - U256::zero(), - L2ChainId::from(270), - &PRIVATE_KEY, - factory_deps, - Default::default(), - ) - .expect("should create a signed execute transaction"); - - signed.set_input(H256::random().as_bytes().to_vec(), H256::random()); - signed.into() + let execute = TestContract::load_test().deploy_payload(&calldata); + Account::new(PRIVATE_KEY.clone()).get_l2_tx_for_execute(execute, Some(tx_fee(500_000_000))) } pub fn get_load_test_tx(nonce: u32, gas_limit: u32, params: LoadTestParams) -> Transaction { @@ -131,14 +68,15 @@ pub fn get_load_test_tx(nonce: u32, gas_limit: u32, params: LoadTestParams) -> T "Too many reads: {params:?}, should be <={LOAD_TEST_MAX_READS}" ); - let execute_function = LOAD_TEST_CONTRACT - .contract + let execute_function = TestContract::load_test() + .abi .function("execute") .expect("no `execute` function in load test contract"); let calldata = execute_function .encode_input(&vec![ Token::Uint(U256::from(params.reads)), - Token::Uint(U256::from(params.writes)), + Token::Uint(U256::from(params.initial_writes)), + Token::Uint(U256::from(params.repeated_writes)), Token::Uint(U256::from(params.hashes)), Token::Uint(U256::from(params.events)), Token::Uint(U256::from(params.recursive_calls)), @@ -154,7 +92,7 @@ pub fn get_load_test_tx(nonce: 
u32, gas_limit: u32, params: LoadTestParams) -> T
             U256::zero(),
             L2ChainId::from(270),
             &PRIVATE_KEY,
-            LOAD_TEST_CONTRACT.factory_deps.clone(),
+            TestContract::load_test().factory_deps(),
             Default::default(),
         )
         .expect("should create a signed execute transaction");
@@ -168,9 +106,10 @@ pub fn get_realistic_load_test_tx(nonce: u32) -> Transaction {
         nonce,
         10_000_000,
         LoadTestParams {
-            reads: 30,
-            writes: 2,
-            events: 5,
+            reads: 243,
+            initial_writes: 1,
+            repeated_writes: 11,
+            events: 6,
             hashes: 10,
             recursive_calls: 0,
             deploys: 0,
@@ -183,9 +122,10 @@ pub fn get_heavy_load_test_tx(nonce: u32) -> Transaction {
         nonce,
         10_000_000,
         LoadTestParams {
-            reads: 100,
-            writes: 5,
-            events: 20,
+            reads: 296,
+            initial_writes: 13,
+            repeated_writes: 92,
+            events: 140,
             hashes: 100,
             recursive_calls: 20,
             deploys: 5,
diff --git a/core/tests/vm-benchmark/src/vm.rs b/core/tests/vm-benchmark/src/vm.rs
index bf969e0de5c0..4bd7d7eb1aa6 100644
--- a/core/tests/vm-benchmark/src/vm.rs
+++ b/core/tests/vm-benchmark/src/vm.rs
@@ -14,22 +14,21 @@ use zksync_multivm::{
     zk_evm_latest::ethereum_types::{Address, U256},
 };
 use zksync_types::{
-    block::L2BlockHasher, fee_model::BatchFeeInput, helpers::unix_timestamp_ms,
+    block::L2BlockHasher, fee_model::BatchFeeInput, helpers::unix_timestamp_ms, u256_to_h256,
     utils::storage_key_for_eth_balance, L1BatchNumber, L2BlockNumber, L2ChainId,
     ProtocolVersionId, Transaction,
 };
-use zksync_utils::bytecode::hash_bytecode;
 
 use crate::{instruction_counter::InstructionCounter, transaction::PRIVATE_KEY};
 
 static SYSTEM_CONTRACTS: Lazy<BaseSystemContracts> = Lazy::new(BaseSystemContracts::load_from_disk);
 
 static STORAGE: Lazy<InMemoryStorage> = Lazy::new(|| {
-    let mut storage = InMemoryStorage::with_system_contracts(hash_bytecode);
+    let mut storage = InMemoryStorage::with_system_contracts();
     // Give `PRIVATE_KEY` some money
     let balance = U256::from(10u32).pow(U256::from(32)); //10^32 wei
     let key = storage_key_for_eth_balance(&PRIVATE_KEY.address());
-    storage.set_value(key, zksync_utils::u256_to_h256(balance));
+    storage.set_value(key, u256_to_h256(balance));
     storage
 });
 
@@ -235,8 +234,8 @@ impl BenchmarkingVm {
 #[cfg(test)]
 mod tests {
     use assert_matches::assert_matches;
-    use zksync_contracts::read_bytecode;
     use zksync_multivm::interface::ExecutionResult;
+    use zksync_test_contracts::TestContract;
 
     use super::*;
     use crate::{
@@ -246,11 +245,9 @@ mod tests {
     #[test]
     fn can_deploy_contract() {
-        let test_contract = read_bytecode(
-            "etc/contracts-test-data/artifacts-zk/contracts/counter/counter.sol/Counter.json",
-        );
+        let test_contract = &TestContract::counter().bytecode;
         let mut vm = BenchmarkingVm::new();
-        let res = vm.run_transaction(&get_deploy_tx(&test_contract));
+        let res = vm.run_transaction(&get_deploy_tx(test_contract));
 
         assert_matches!(res.result, ExecutionResult::Success { ..
}); } diff --git a/deny.toml b/deny.toml index dc5a32c2c070..13ce6504107f 100644 --- a/deny.toml +++ b/deny.toml @@ -9,14 +9,13 @@ feature-depth = 1 [advisories] ignore = [ "RUSTSEC-2024-0375", # atty dependency being unmaintained, dependency of clap and criterion, we would need to update to newer major of dependencies - "RUSTSEC-2024-0320", # yaml_rust dependency being unmaintained, dependency in core, we should consider moving to yaml_rust2 fork "RUSTSEC-2020-0168", # mach dependency being unmaintained, dependency in consensus, we should consider moving to mach2 fork "RUSTSEC-2024-0370", # `cs_derive` needs to be updated to not rely on `proc-macro-error` - # all below caused by StructOpt which we still use and we should move to clap v3 instead + # all below caused by StructOpt which we still use and we should move to clap v4 instead "RUSTSEC-2024-0375", "RUSTSEC-2021-0145", "RUSTSEC-2021-0139", - "RUSTSEC-2024-0375", + "RUSTSEC-2024-0388", # `derivative` is unmaintained, crypto dependenicies (boojum, circuit_encodings and others) rely on it ] [licenses] @@ -34,6 +33,7 @@ allow = [ "OpenSSL", "Apache-2.0 WITH LLVM-exception", "0BSD", + "BSL-1.0", ] confidence-threshold = 0.8 diff --git a/docker/Makefile b/docker/Makefile index 4e0ca51f904e..19d5fee0907f 100644 --- a/docker/Makefile +++ b/docker/Makefile @@ -7,7 +7,7 @@ NODE_VERSION_MIN=20.17.0 YARN_VERSION_MIN=1.22.19 RUST_VERSION=nightly-2024-08-01 SQLX_CLI_VERSION=0.8.1 -FORGE_MIN_VERSION=0.2.0 +FORGE_MIN_VERSION=0.0.2 # Versions and packages checks check-nodejs: diff --git a/docker/contract-verifier/Dockerfile b/docker/contract-verifier/Dockerfile index b1b63429a637..d5f3c53db99f 100644 --- a/docker/contract-verifier/Dockerfile +++ b/docker/contract-verifier/Dockerfile @@ -17,7 +17,7 @@ ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} WORKDIR /usr/src/zksync COPY . . -RUN cargo build --release +RUN cargo build --release --bin zksync_contract_verifier FROM ghcr.io/matter-labs/zksync-runtime-base:latest diff --git a/docker/external-node/Dockerfile b/docker/external-node/Dockerfile index f5c558607400..2effe1051b4a 100644 --- a/docker/external-node/Dockerfile +++ b/docker/external-node/Dockerfile @@ -15,7 +15,7 @@ ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} WORKDIR /usr/src/zksync COPY . . -RUN cargo build --release +RUN cargo build --release --bin zksync_external_node --bin block_reverter FROM ghcr.io/matter-labs/zksync-runtime-base:latest diff --git a/docker/server-v2/Dockerfile b/docker/server-v2/Dockerfile index 319d0cefbe34..9557156fa7c4 100644 --- a/docker/server-v2/Dockerfile +++ b/docker/server-v2/Dockerfile @@ -17,7 +17,7 @@ WORKDIR /usr/src/zksync COPY . . 
-RUN cargo build --release --features=rocksdb/io-uring +RUN cargo build --release --features=rocksdb/io-uring --bin zksync_server --bin block_reverter --bin merkle_tree_consistency_checker FROM ghcr.io/matter-labs/zksync-runtime-base:latest diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 000000000000..7585238efedf --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1 @@ +book diff --git a/docs/book.toml b/docs/book.toml new file mode 100644 index 000000000000..89420a95ba38 --- /dev/null +++ b/docs/book.toml @@ -0,0 +1,32 @@ +[book] +authors = ["ZKsync team"] +language = "en" +multilingual = false +src = "src" +title = "ZKsync Era Documentation" + +[output.html] +smart-punctuation = true +mathjax-support = true +git-repository-url = "https://github.com/matter-labs/zksync-era/tree/main/docs" +edit-url-template = "https://github.com/matter-labs/zksync-era/tree/main/docs/{path}" +additional-js = ["js/version-box.js", "js/mermaid-init.js"] +additional-css = ["css/version-box.css"] + +[output.html.playground] +editable = true +line-numbers = true + +[output.html.search] +limit-results = 20 +use-boolean-and = true +boost-title = 2 +boost-hierarchy = 2 +boost-paragraph = 1 +expand = true +heading-split-level = 2 + +[preprocessor] + +[preprocessor.mermaid] +command = "mdbook-mermaid" diff --git a/docs/css/version-box.css b/docs/css/version-box.css new file mode 100644 index 000000000000..4006ac7804b3 --- /dev/null +++ b/docs/css/version-box.css @@ -0,0 +1,46 @@ +#version-box { + display: flex; + align-items: center; + margin-right: 15px; /* Space from the right side */ + background-color: transparent; /* Make the box background transparent */ +} + +/* Base styles for the version selector */ +#version-selector { + background-color: transparent; /* Remove background color */ + border: 1px solid #4a5568; /* Subtle border */ + border-radius: 4px; /* Rounded edges */ + padding: 5px 10px; /* Padding inside dropdown */ + font-size: 0.9em; + font-weight: normal; + outline: none; /* Removes default focus outline */ + cursor: pointer; +} + +/* Text color for dark themes */ +.theme-navy #version-selector, +.theme-coal #version-selector { + color: #f7fafc; /* Light text color for dark backgrounds */ +} + +/* Text color for light theme */ +.theme-light #version-selector { + color: #333333; /* Dark text color for light background */ +} + +/* Hover effect for better user feedback */ +#version-selector:hover { + background-color: rgba(255, 255, 255, 0.1); /* Light hover effect */ +} + +/* Optional: Style for when the selector is focused */ +#version-selector:focus { + border-color: #63b3ed; /* Accent color for focused state */ +} + +.right-buttons { + display: flex; + flex-direction: row; /* Aligns items in a row, left to right */ + align-items: center; /* Centers items vertically */ + gap: 10px; /* Adds space between items */ +} diff --git a/docs/guides/development.md b/docs/guides/development.md deleted file mode 100644 index c859017848b5..000000000000 --- a/docs/guides/development.md +++ /dev/null @@ -1,148 +0,0 @@ -# Development guide - -This document covers development-related actions in ZKsync. 
- -## Initializing the project - -To setup the main toolkit, `zk`, simply run: - -``` -zk -``` - -You may also configure autocompletion for your shell via: - -``` -zk completion install -``` - -Once all the dependencies were installed, project can be initialized: - -``` -zk init -``` - -This command will do the following: - -- Generate `$ZKSYNC_HOME/etc/env/target/dev.env` file with settings for the applications. -- Initialize docker containers with `reth` Ethereum node for local development. -- Download and unpack files for cryptographical backend. -- Generate required smart contracts. -- Compile all the smart contracts. -- Deploy smart contracts to the local Ethereum network. -- Create “genesis block” for server. - -Initializing may take pretty long, but many steps (such as downloading & unpacking keys and initializing containers) are -required to be done only once. - -Usually, it is a good idea to do `zk init` once after each merge to the `main` branch (as application setup may change). - -Additionally, there is a subcommand `zk clean` to remove previously generated data. Examples: - -``` -zk clean --all # Remove generated configs, database and backups. -zk clean --config # Remove configs only. -zk clean --database # Remove database. -zk clean --backups # Remove backups. -zk clean --database --backups # Remove database *and* backups, but not configs. -``` - -**When do you need it?** - -1. If you have an initialized database and want to run `zk init`, you have to remove the database first. -2. If after getting new functionality from the `main` branch your code stopped working and `zk init` doesn't help, you - may try removing `$ZKSYNC_HOME/etc/env/target/dev.env` and running `zk init` once again. This may help if the - application configuration has changed. - -If you don’t need all of the `zk init` functionality, but just need to start/stop containers, use the following -commands: - -``` -zk up # Set up `reth` and `postgres` containers -zk down # Shut down `reth` and `postgres` containers -``` - -## Reinitializing - -When actively changing something that affects infrastructure (for example, contracts code), you normally don't need the -whole `init` functionality, as it contains many external steps (e.g. deploying ERC20 tokens) which don't have to be -redone. - -For this case, there is an additional command: - -``` -zk reinit -``` - -This command does the minimal subset of `zk init` actions required to "reinitialize" the network. It assumes that -`zk init` was called in the current environment before. If `zk reinit` doesn't work for you, you may want to run -`zk init` instead. - -## Committing changes - -`zksync` uses pre-commit and pre-push git hooks for basic code integrity checks. Hooks are set up automatically within -the workspace initialization process. These hooks will not allow to commit the code which does not pass several checks. - -Currently the following criteria are checked: - -- Rust code should always be formatted via `cargo fmt`. -- Other code should always be formatted via `zk fmt`. -- Dummy Prover should not be staged for commit (see below for the explanation). - -## Using Dummy Prover - -By default, the chosen prover is a "dummy" one, meaning that it doesn't actually compute proofs but rather uses mocks to -avoid expensive computations in the development environment. 
- -To switch dummy prover to real prover, one must change `dummy_verifier` to `false` in `contracts.toml` for your env -(most likely, `etc/env/base/contracts.toml`) and run `zk init` to redeploy smart contracts. - -## Testing - -- Running the `rust` unit-tests: - - ``` - zk test rust - ``` - -- Running a specific `rust` unit-test: - - ``` - zk test rust --package --lib ::tests:: - # e.g. zk test rust --package zksync_core --lib eth_sender::tests::resend_each_block - ``` - -- Running the integration test: - - ``` - zk server # Has to be run in the 1st terminal - zk test i server # Has to be run in the 2nd terminal - ``` - -- Running the benchmarks: - - ``` - zk f cargo bench - ``` - -- Running the loadtest: - - ``` - zk server # Has to be run in the 1st terminal - zk prover # Has to be run in the 2nd terminal if you want to use real prover, otherwise it's not required. - zk run loadtest # Has to be run in the 3rd terminal - ``` - -## Contracts - -### Re-build contracts - -``` -zk contract build -``` - -### Publish source code on etherscan - -``` -zk contract publish -``` diff --git a/docs/guides/external-node/docker-compose-examples/configs/mainnet_consensus_config.yaml b/docs/guides/external-node/docker-compose-examples/configs/mainnet_consensus_config.yaml deleted file mode 100644 index f2a0ce318757..000000000000 --- a/docs/guides/external-node/docker-compose-examples/configs/mainnet_consensus_config.yaml +++ /dev/null @@ -1,11 +0,0 @@ -server_addr: '0.0.0.0:3054' -public_addr: '127.0.0.1:3054' -debug_page_addr: '0.0.0.0:5000' -max_payload_size: 5000000 -gossip_dynamic_inbound_limit: 100 -gossip_static_outbound: - # preconfigured ENs owned by Matterlabs that you can connect to - - key: 'node:public:ed25519:68d29127ab03408bf5c838553b19c32bdb3aaaae9bf293e5e078c3a0d265822a' - addr: 'external-node-consensus-mainnet.zksync.dev:3054' - - key: 'node:public:ed25519:b521e1bb173d04bc83d46b859d1296378e94a40427a6beb9e7fdd17cbd934c11' - addr: 'external-node-moby-consensus-mainnet.zksync.dev:3054' diff --git a/docs/guides/external-node/docker-compose-examples/configs/testnet_consensus_config.yaml b/docs/guides/external-node/docker-compose-examples/configs/testnet_consensus_config.yaml deleted file mode 100644 index a5f752fe405a..000000000000 --- a/docs/guides/external-node/docker-compose-examples/configs/testnet_consensus_config.yaml +++ /dev/null @@ -1,11 +0,0 @@ -server_addr: '0.0.0.0:3054' -public_addr: '127.0.0.1:3054' -debug_page_addr: '0.0.0.0:5000' -max_payload_size: 5000000 -gossip_dynamic_inbound_limit: 100 -gossip_static_outbound: - # preconfigured ENs owned by Matterlabs that you can connect to - - key: 'node:public:ed25519:4a94067664e7b8d0927ab1443491dab71a1d0c63f861099e1852f2b6d0831c3e' - addr: 'external-node-consensus-sepolia.zksync.dev:3054' - - key: 'node:public:ed25519:cfbbebc74127099680584f07a051a2573e2dd7463abdd000d31aaa44a7985045' - addr: 'external-node-moby-consensus-sepolia.zksync.dev:3054' diff --git a/docs/guides/external-node/prepared_configs/mainnet_consensus_config.yaml b/docs/guides/external-node/prepared_configs/mainnet_consensus_config.yaml deleted file mode 100644 index be37aaf29329..000000000000 --- a/docs/guides/external-node/prepared_configs/mainnet_consensus_config.yaml +++ /dev/null @@ -1,14 +0,0 @@ -server_addr: '0.0.0.0:3054' -public_addr: ':3054' -max_payload_size: 5000000 -gossip_dynamic_inbound_limit: 100 -gossip_static_outbound: - # preconfigured ENs owned by Matterlabs that you can connect to - - key: 
'node:public:ed25519:68d29127ab03408bf5c838553b19c32bdb3aaaae9bf293e5e078c3a0d265822a' - addr: 'external-node-consensus-mainnet.zksync.dev:3054' - - key: 'node:public:ed25519:b521e1bb173d04bc83d46b859d1296378e94a40427a6beb9e7fdd17cbd934c11' - addr: 'external-node-moby-consensus-mainnet.zksync.dev:3054' - - key: 'node:public:ed25519:45d23515008b5121484eb774507df63ff4ce9f4b65e6a03b7c9ec4e0474d3044' - addr: 'consensus-mainnet-1.zksync-nodes.com:3054' - - key: 'node:public:ed25519:c278bb0831e8d0dcd3aaf0b7af7c3dca048d50b28c578ceecce61a412986b883' - addr: 'consensus-mainnet-2.zksync-nodes.com:3054' diff --git a/docs/guides/external-node/prepared_configs/testnet_consensus_config.yaml b/docs/guides/external-node/prepared_configs/testnet_consensus_config.yaml deleted file mode 100644 index 8d2551c07087..000000000000 --- a/docs/guides/external-node/prepared_configs/testnet_consensus_config.yaml +++ /dev/null @@ -1,14 +0,0 @@ -server_addr: '0.0.0.0:3054' -public_addr: ':3054' -max_payload_size: 5000000 -gossip_dynamic_inbound_limit: 100 -gossip_static_outbound: - # preconfigured ENs owned by Matterlabs that you can connect to - - key: 'node:public:ed25519:4a94067664e7b8d0927ab1443491dab71a1d0c63f861099e1852f2b6d0831c3e' - addr: 'external-node-consensus-sepolia.zksync.dev:3054' - - key: 'node:public:ed25519:cfbbebc74127099680584f07a051a2573e2dd7463abdd000d31aaa44a7985045' - addr: 'external-node-moby-consensus-sepolia.zksync.dev:3054' - - key: 'node:public:ed25519:f48616db5965ada49dcbd51b1de11068a27c9886c900d3522607f16dff2e66fc' - addr: 'consensus-sepolia-1.zksync-nodes.com:3054' - - key: 'node:public:ed25519:3789d49293792755a9c1c2a7ed9b0e210e92994606dcf76388b5635d7ed676cb' - addr: 'consensus-sepolia-2.zksync-nodes.com:3054' diff --git a/docs/guides/launch.md b/docs/guides/launch.md deleted file mode 100644 index 10c0b10f5d84..000000000000 --- a/docs/guides/launch.md +++ /dev/null @@ -1,343 +0,0 @@ -# Running the application - -This document covers common scenarios for launching ZKsync applications set locally. - -## Prerequisites - -Prepare dev environment prerequisites: see - -[Installing dependencies](./setup-dev.md) - -## Setup local dev environment - -Setup: - -``` -zk # installs and builds zk itself -zk init -``` - -If you face any other problems with the `zk init` command, go to the -[Troubleshooting](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/launch.md#troubleshooting) section at -the end of this file. There are solutions for some common error cases. - -To completely reset the dev environment: - -- Stop services: - - ``` - zk down - ``` - -- Repeat the setup procedure above - -If `zk init` has already been executed, and now you only need to start docker containers (e.g. after reboot), simply -launch: - -``` -zk up -``` - -### Run observability stack - -If you want to run [Dockprom](https://github.com/stefanprodan/dockprom/) stack (Prometheus, Grafana) alongside other -containers - add `--run-observability` parameter during initialisation. - -``` -zk init --run-observability -``` - -That will also provision Grafana with -[era-observability](https://github.com/matter-labs/era-observability/tree/main/dashboards) dashboards. You can then -access it at `http://127.0.0.1:3000/` under credentials `admin/admin`. - -> If you don't see any data displayed on the Grafana dashboards - try setting the timeframe to "Last 30 minutes". You -> will also have to have `jq` installed on your system. 
- -## (Re)deploy db and contracts - -``` -zk contract redeploy -``` - -## Environment configurations - -Env config files are held in `etc/env/target/` - -List configurations: - -``` -zk env -``` - -Switch between configurations: - -``` -zk env -``` - -Default configuration is `dev.env`, which is generated automatically from `dev.env.example` during `zk init` command -execution. - -## Build and run server - -Run server: - -``` -zk server -``` - -Server is configured using env files in `./etc/env` directory. After the first initialization, file -`./etc/env/target/dev.env`will be created. By default, this file is copied from the `./etc/env/target/dev.env.example` -template. - -Make sure you have environment variables set right, you can check it by running: `zk env`. You should see `* dev` in -output. - -## Running server using Google cloud storage object store instead of default In memory store - -Get the service_account.json file containing the GCP credentials from kubernetes secret for relevant environment(stage2/ -testnet2) add that file to the default location ~/gcloud/service_account.json or update object_store.toml with the file -location - -``` -zk server -``` - -## Running prover server - -Running on machine without GPU - -```shell -zk f cargo +nightly run --release --bin zksync_prover -``` - -Running on machine with GPU - -```shell -zk f cargo +nightly run --features gpu --release --bin zksync_prover -``` - -## Running the verification key generator - -```shell -# ensure that the setup_2^26.key in the current directory, the file can be download from https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key - -# To generate all verification keys -cargo run --release --bin zksync_verification_key_generator - - -``` - -## Generating binary verification keys for existing json verification keys - -```shell -cargo run --release --bin zksync_json_to_binary_vk_converter -- -o /path/to/output-binary-vk -``` - -## Generating commitment for existing verification keys - -```shell -cargo run --release --bin zksync_commitment_generator -``` - -## Running the contract verifier - -```shell -# To process fixed number of jobs -cargo run --release --bin zksync_contract_verifier -- --jobs-number X - -# To run until manual exit -zk contract_verifier -``` - -## Troubleshooting - -### SSL error: certificate verify failed - -**Problem**. `zk init` fails with the following error: - -``` -Initializing download: https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2%5E20.key -SSL error: certificate verify failed -``` - -**Solution**. Make sure that the version of `axel` on your computer is `2.17.10` or higher. - -### rmSync is not a function - -**Problem**. `zk init` fails with the following error: - -``` -fs_1.default.rmSync is not a function -``` - -**Solution**. Make sure that the version of `node.js` installed on your computer is `14.14.0` or higher. - -### Invalid bytecode: () - -**Problem**. 
`zk init` fails with an error similar to: - -``` -Running `target/release/zksync_server --genesis` -2023-04-05T14:23:40.291277Z INFO zksync_core::genesis: running regenesis -thread 'main' panicked at 'Invalid bytecode: ()', core/lib/utils/src/bytecode.rs:159:10 -stack backtrace: - 0: 0x104551410 - std::backtrace_rs::backtrace::libunwind::trace::hf9c5171f212b04e2 - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/../../backtrace/src/backtrace/libunwind.rs:93:5 - 1: 0x104551410 - std::backtrace_rs::backtrace::trace_unsynchronized::h179003f6ec753118 - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/../../backtrace/src/backtrace/mod.rs:66:5 - 2: 0x104551410 - std::sys_common::backtrace::_print_fmt::h92d38f701cf42b17 - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/sys_common/backtrace.rs:65:5 - 3: 0x104551410 - ::fmt::hb33e6e8152f78c95 - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/sys_common/backtrace.rs:44:22 - 4: 0x10456cdb0 - core::fmt::write::hd33da007f7a27e39 - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/core/src/fmt/mod.rs:1208:17 - 5: 0x10454b41c - std::io::Write::write_fmt::h7edc10723862001e - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/io/mod.rs:1682:15 - 6: 0x104551224 - std::sys_common::backtrace::_print::h5e00f05f436af01f - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/sys_common/backtrace.rs:47:5 - 7: 0x104551224 - std::sys_common::backtrace::print::h895ee35b3f17b334 - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/sys_common/backtrace.rs:34:9 - 8: 0x104552d84 - std::panicking::default_hook::{{closure}}::h3b7ee083edc2ea3e - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/panicking.rs:267:22 - 9: 0x104552adc - std::panicking::default_hook::h4e7c2c28eba716f5 - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/panicking.rs:286:9 - 10: 0x1045533a8 - std::panicking::rust_panic_with_hook::h1672176227032c45 - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/panicking.rs:688:13 - 11: 0x1045531c8 - std::panicking::begin_panic_handler::{{closure}}::h0b2d072f9624d32e - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/panicking.rs:579:13 - 12: 0x104551878 - std::sys_common::backtrace::__rust_end_short_backtrace::he9abda779115b93c - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/sys_common/backtrace.rs:137:18 - 13: 0x104552f24 - rust_begin_unwind - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/panicking.rs:575:5 - 14: 0x1045f89c0 - core::panicking::panic_fmt::h23ae44661fec0889 - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/core/src/panicking.rs:64:14 - 15: 0x1045f8ce0 - core::result::unwrap_failed::h414a6cbb12b1e143 - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/core/src/result.rs:1791:5 - 16: 0x103f79a30 - zksync_utils::bytecode::hash_bytecode::h397dd7c5b6202bf4 - 17: 0x103e47e78 - zksync_contracts::BaseSystemContracts::load_from_disk::h0e2da8f63292ac46 - 18: 0x102d885a0 - zksync_core::genesis::ensure_genesis_state::{{closure}}::h5143873f2c337e11 - 19: 0x102d7dee0 - zksync_core::genesis_init::{{closure}}::h4e94f3d4ad984788 - 20: 0x102d9c048 - zksync_server::main::{{closure}}::h3fe943a3627d31e1 - 21: 0x102d966f8 - tokio::runtime::park::CachedParkThread::block_on::h2f2fdf7edaf08470 - 22: 0x102df0dd4 - tokio::runtime::runtime::Runtime::block_on::h1fd1d83272a23194 - 23: 0x102e21470 - 
zksync_server::main::h500621fd4d160768 - 24: 0x102d328f0 - std::sys_common::backtrace::__rust_begin_short_backtrace::h52973e519e2e8a0d - 25: 0x102e08ea8 - std::rt::lang_start::{{closure}}::hbd395afe0ab3b799 - 26: 0x10454508c - core::ops::function::impls:: for &F>::call_once::ha1c2447b9b665e13 - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/core/src/ops/function.rs:606:13 - 27: 0x10454508c - std::panicking::try::do_call::ha57d6d1e9532dc1f - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/panicking.rs:483:40 - 28: 0x10454508c - std::panicking::try::hca0526f287961ecd - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/panicking.rs:447:19 - 29: 0x10454508c - std::panic::catch_unwind::hdcaa7fa896e0496a - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/panic.rs:137:14 - 30: 0x10454508c - std::rt::lang_start_internal::{{closure}}::h142ec071d3766871 - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/rt.rs:148:48 - 31: 0x10454508c - std::panicking::try::do_call::h95f5e55d6f048978 - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/panicking.rs:483:40 - 32: 0x10454508c - std::panicking::try::h0fa00e2f7b4a5c64 - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/panicking.rs:447:19 - 33: 0x10454508c - std::panic::catch_unwind::h1765f149814d4d3e - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/panic.rs:137:14 - 34: 0x10454508c - std::rt::lang_start_internal::h00a235e820a7f01c - at /rustc/d5a82bbd26e1ad8b7401f6a718a9c57c96905483/library/std/src/rt.rs:148:20 - 35: 0x102e21578 - _main -Error: Genesis is not needed (either Postgres DB or tree's Rocks DB is not empty) -``` - -**Description**. This means that your bytecode config file has an empty entry: `"bytecode": "0x"`. This happens because -your `zksync-2-dev/etc/system-contracts/package.json`'s dependency on `"@matterlabs/hardhat-zksync-solc"` is outdated. -We don't expect this error to happen as we've updated to latest version which fixes the problem. - -**Solution**. Update your dependency and reinit: - -``` -yarn add -D @matterlabs/hardhat-zksync-solc # in the system-contracts folder -zk clean --all && zk init -``` - -On the run, it moved from: - -``` - "@matterlabs/hardhat-zksync-solc": "^0.3.14-beta.3", -``` - -to: - -``` - "@matterlabs/hardhat-zksync-solc": "^0.3.15", -``` - -### Error: Bytecode length in 32-byte words must be odd - -**Problem**. `zk init` fails with an error similar to: - -``` -Successfully generated Typechain artifacts! -Error: Error: Bytecode length in 32-byte words must be odd - at hashL2Bytecode (/Users/emilluta/code/zksync-2-dev/contracts/zksync/src/utils.ts:29:15) - at computeL2Create2Address (/Users/emilluta/code/zksync-2-dev/contracts/zksync/src/utils.ts:53:26) - at /Users/emilluta/code/zksync-2-dev/contracts/zksync/src/compileAndDeployLibs.ts:50:63 - at step (/Users/emilluta/code/zksync-2-dev/contracts/zksync/src/compileAndDeployLibs.ts:33:23) - at Object.next (/Users/emilluta/code/zksync-2-dev/contracts/zksync/src/compileAndDeployLibs.ts:14:53) - at fulfilled (/Users/emilluta/code/zksync-2-dev/contracts/zksync/src/compileAndDeployLibs.ts:5:58) -error Command failed with exit code 1. -info Visit https://yarnpkg.com/en/docs/cli/run for documentation about this command. -error Command failed. 
-Exit code: 1 -Command: /Users/emilluta/.nvm/versions/node/v16.19.1/bin/node -Arguments: /opt/homebrew/Cellar/yarn/1.22.19/libexec/lib/cli.js compile-and-deploy-libs -Directory: /Users/emilluta/code/zksync-2-dev/contracts/zksync -Output: - -info Visit https://yarnpkg.com/en/docs/cli/workspace for documentation about this command. -error Command failed with exit code 1. -info Visit https://yarnpkg.com/en/docs/cli/run for documentation about this command. -Error: Child process exited with code 1 -``` - -**Description**. This means that your bytecode config file has an empty entry: `"bytecode": "0x"`. This happens because -your `zksync-2-dev/contracts/zksync/package.json`'s dependency on `"@matterlabs/hardhat-zksync-solc"` is outdated. We -don't expect this error to happen as we've updated to latest version which fixes the problem. - -**Solution**. Update your dependency and reinit: - -``` -yarn add -D @matterlabs/hardhat-zksync-solc # in the system-contracts folder -zk clean --all && zk init -``` - -On the run, it moved from: - -``` - "@matterlabs/hardhat-zksync-solc": "^0.3.14-beta.3", -``` - -to: - -``` - "@matterlabs/hardhat-zksync-solc": "^0.3.15", -``` - -### Error: Cannot read properties of undefined (reading 'compilerPath') - -**Problem**. `zk init` fails with an error similar to the following: - -```text -Yarn project directory: /Users//Projects/zksync-era/contracts/system-contracts -Error: Cannot read properties of undefined (reading 'compilerPath') -error Command failed with exit code 1. -``` - -**Description**. The compiler downloader -[could not verify](https://github.com/NomicFoundation/hardhat/blob/0d850d021f3ab33b59b1ea2ae70d1e659e579e40/packages/hardhat-core/src/internal/solidity/compiler/downloader.ts#L336-L383) -that the Solidity compiler it downloaded actually works. - -**Solution**. Delete the cached `*.does.not.work` file to run the check again: - -```sh -# NOTE: Compiler version, commit hash may differ. -rm $HOME/Library/Caches/hardhat-nodejs/compilers-v2/macosx-amd64/solc-macosx-amd64-v0.8.20+commit.a1b79de6.does.not.work -``` diff --git a/docs/js/mermaid-init.js b/docs/js/mermaid-init.js new file mode 100644 index 000000000000..15a7f4e57c60 --- /dev/null +++ b/docs/js/mermaid-init.js @@ -0,0 +1,35 @@ +(() => { + const darkThemes = ['ayu', 'navy', 'coal']; + const lightThemes = ['light', 'rust']; + + const classList = document.getElementsByTagName('html')[0].classList; + + let lastThemeWasLight = true; + for (const cssClass of classList) { + if (darkThemes.includes(cssClass)) { + lastThemeWasLight = false; + break; + } + } + + const theme = lastThemeWasLight ? 
'default' : 'dark'; + mermaid.initialize({ startOnLoad: true, theme }); + + // Simplest way to make mermaid re-render the diagrams in the new theme is via refreshing the page + + for (const darkTheme of darkThemes) { + document.getElementById(darkTheme).addEventListener('click', () => { + if (lastThemeWasLight) { + window.location.reload(); + } + }); + } + + for (const lightTheme of lightThemes) { + document.getElementById(lightTheme).addEventListener('click', () => { + if (!lastThemeWasLight) { + window.location.reload(); + } + }); + } +})(); diff --git a/docs/js/version-box.js b/docs/js/version-box.js new file mode 100644 index 000000000000..932a75a5e3bb --- /dev/null +++ b/docs/js/version-box.js @@ -0,0 +1,61 @@ +document.addEventListener('DOMContentLoaded', function () { + // Get the base URL from the mdBook configuration + const baseUrl = document.location.origin + '/zksync-era/core'; + + // Function to create version selector + function createVersionSelector(versions) { + const versionSelector = document.createElement('select'); + versionSelector.id = 'version-selector'; + + // Get the current path + const currentPath = window.location.pathname; + + // Iterate over the versions object + for (const [versionName, versionUrl] of Object.entries(versions)) { + const option = document.createElement('option'); + option.value = versionUrl + '/'; + option.textContent = versionName; + + // Check if the current URL matches this option's value + if (currentPath.includes(option.value)) { + option.selected = true; // Set this option as selected + } + + versionSelector.appendChild(option); + } + + // Event listener to handle version change + versionSelector.addEventListener('change', function () { + const selectedVersion = versionSelector.value; + // Redirect to the selected version URL + window.location.href = '/zksync-era/core' + selectedVersion; + }); + + return versionSelector; + } + + // Fetch versions from JSON file + fetch(baseUrl + '/versions.json') + .then((response) => { + if (!response.ok) { + throw new Error('Network response was not ok ' + response.statusText); + } + return response.json(); + }) + .then((data) => { + const versionSelector = createVersionSelector(data); + const nav = document.querySelector('.right-buttons'); + + if (nav) { + const versionBox = document.createElement('div'); + versionBox.id = 'version-box'; + versionBox.appendChild(versionSelector); + nav.appendChild(versionBox); // Append to the .right-buttons container + } else { + console.error('.right-buttons element not found.'); + } + }) + .catch((error) => { + console.error('There has been a problem with your fetch operation:', error); + }); +}); diff --git a/docs/src/README.md b/docs/src/README.md new file mode 100644 index 000000000000..ab6a417877b5 --- /dev/null +++ b/docs/src/README.md @@ -0,0 +1,26 @@ +# Introduction + +Welcome to the documentation! This guide provides comprehensive insights into the architecture, setup, usage, and +advanced features of ZKsync. + +## Documentation Structure + +- **Guides**: The Guides section is designed to help users at every level, from setup and development to advanced + configuration and debugging techniques. It covers essential topics, including Docker setup, repository management, and + architecture. + +- **Specs**: This section dives into the technical specifications of our system. Here, you’ll find detailed + documentation on data availability, L1 and L2 communication, smart contract interactions, Zero-Knowledge proofs, and + more. 
Each topic includes an in-depth explanation to support advanced users and developers. + +- **Announcements**: This section highlights important updates, announcements, and committee details, providing + essential information to keep users informed on the latest changes. + +## Getting Started + +Feel free to explore each section according to your needs. This documentation is designed to be modular, so you can jump +to specific topics or follow through step-by-step. + +--- + +Thank you for using our documentation! diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md new file mode 100644 index 000000000000..c0dd8638c8d9 --- /dev/null +++ b/docs/src/SUMMARY.md @@ -0,0 +1,86 @@ + + +# Summary + +[Introduction](README.md) + +# Guides + +- [Basic](guides/README.md) + + - [Setup Dev](guides/setup-dev.md) + - [Development](guides/development.md) + - [Launch](guides/launch.md) + - [Architecture](guides/architecture.md) + - [Build Docker](guides/build-docker.md) + - [Repositories](guides/repositories.md) + +- [Advanced](guides/advanced/README.md) + - [Local initialization](guides/advanced/01_initialization.md) + - [Deposits](guides/advanced/02_deposits.md) + - [Withdrawals](guides/advanced/03_withdrawals.md) + - [Contracts](guides/advanced/04_contracts.md) + - [Calls](guides/advanced/05_how_call_works.md) + - [Transactions](guides/advanced/06_how_transaction_works.md) + - [Fee Model](guides/advanced/07_fee_model.md) + - [L2 Messaging](guides/advanced/08_how_l2_messaging_works.md) + - [Pubdata](guides/advanced/09_pubdata.md) + - [Pubdata with Blobs](guides/advanced/10_pubdata_with_blobs.md) + - [Bytecode compression](guides/advanced/11_compression.md) + - [EraVM intro](guides/advanced/12_alternative_vm_intro.md) + - [ZK Intuition](guides/advanced/13_zk_intuition.md) + - [ZK Deeper Dive](guides/advanced/14_zk_deeper_overview.md) + - [Prover Keys](guides/advanced/15_prover_keys.md) + - [Advanced Debugging](guides/advanced/90_advanced_debugging.md) + - [Docker and CI](guides/advanced/91_docker_and_ci.md) + +# External Node + +- [External node](guides/external-node/01_intro.md) + - [Quick Start](guides/external-node/00_quick_start.md) + - [Configuration](guides/external-node/02_configuration.md) + - [Running](guides/external-node/03_running.md) + - [Observability](guides/external-node/04_observability.md) + - [Troubleshooting](guides/external-node/05_troubleshooting.md) + - [Components](guides/external-node/06_components.md) + - [Snapshots Recovery](guides/external-node/07_snapshots_recovery.md) + - [Pruning](guides/external-node/08_pruning.md) + - [Treeless Mode](guides/external-node/09_treeless_mode.md) + - [Decentralization](guides/external-node/10_decentralization.md) + +# Specs + +- [Introduction](specs/introduction.md) + - [Overview](specs/overview.md) + - [Blocks and Batches](specs/blocks_batches.md) + - [L1 Smart Contracts](specs/l1_smart_contracts.md) +- [Data Availability](specs/data_availability/overview.md) + - [Pubdata](specs/data_availability/pubdata.md) + - [Compression](specs/data_availability/compression.md) + - [Reconstruction](specs/data_availability/reconstruction.md) + - [Validium ZK Porter](specs/data_availability/validium_zk_porter.md) +- [L1 L2 Communication](specs/l1_l2_communication/overview_deposits_withdrawals.md) + - [L1 to L2](specs/l1_l2_communication/l1_to_l2.md) + - [L2 to L1](specs/l1_l2_communication/l2_to_l1.md) +- [Prover](specs/prover/overview.md) + - [Getting Started](specs/prover/getting_started.md) + - [ZK Terminology](specs/prover/zk_terminology.md) + - 
[Function Check if Satisfied](specs/prover/boojum_function_check_if_satisfied.md) + - [Gadgets](specs/prover/boojum_gadgets.md) + - [Circuit Testing](specs/prover/circuit_testing.md) + - [Circuits Overview](specs/prover/circuits/overview.md) +- [ZK Chains](specs/zk_chains/overview.md) + - [Gateway](specs/zk_chains/gateway.md) + - [Interop](specs/zk_chains/interop.md) + - [Shared Bridge](specs/zk_chains/shared_bridge.md) +- [ZK EVM](specs/zk_evm/vm_overview.md) + - [Account Abstraction](specs/zk_evm/account_abstraction.md) + - [Bootloader](specs/zk_evm/bootloader.md) + - [Fee Model](specs/zk_evm/fee_model.md) + - [Precompiles](specs/zk_evm/precompiles.md) + - [System Contracts](specs/zk_evm/system_contracts.md) + +# Announcements + +- [Announcements](announcements/README.md) + - [Attester Committee](announcements/attester_commitee.md) diff --git a/docs/announcements/README.md b/docs/src/announcements/README.md similarity index 100% rename from docs/announcements/README.md rename to docs/src/announcements/README.md diff --git a/docs/announcements/attester_commitee.md b/docs/src/announcements/attester_commitee.md similarity index 97% rename from docs/announcements/attester_commitee.md rename to docs/src/announcements/attester_commitee.md index 84ff8aa5be6d..148e51a4f976 100644 --- a/docs/announcements/attester_commitee.md +++ b/docs/src/announcements/attester_commitee.md @@ -36,7 +36,7 @@ Participants can leave the committee at any time. The only action that is required to participate is to share your attester public key with the Main Node operator (by opening an issue in this repo or using any other communication channel). You can find it in the comment in the `consensus_secrets.yaml` file (that was - in most cases - generated by the tool described -[here](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/external-node/09_decentralization.md#generating-secrets)) +[here](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/external-node/10_decentralization.md#generating-secrets)) > [!WARNING] > diff --git a/docs/src/guides/README.md b/docs/src/guides/README.md new file mode 100644 index 000000000000..f9d5bc852a26 --- /dev/null +++ b/docs/src/guides/README.md @@ -0,0 +1,12 @@ +# ZKsync basic guides + +This section contains basic guides that aim to explain the ZKsync ecosystem in an easy to grasp way. + +## Table of Contents + +- [Architecture](./architecture.md) +- [Build Docker](./build-docker.md) +- [Development](./development.md) +- [Launch](./launch.md) +- [Repositories](./repositories.md) +- [Setup Dev](./setup-dev.md) diff --git a/docs/guides/advanced/01_initialization.md b/docs/src/guides/advanced/01_initialization.md similarity index 75% rename from docs/guides/advanced/01_initialization.md rename to docs/src/guides/advanced/01_initialization.md index 79c33434d3b5..2bc4a9c3a459 100644 --- a/docs/guides/advanced/01_initialization.md +++ b/docs/src/guides/advanced/01_initialization.md @@ -1,4 +1,4 @@ -# ZKsync deeper dive +# ZKsync Deeper Dive The goal of this doc is to show you some more details on how ZKsync works internally. @@ -7,18 +7,22 @@ system). Now let's take a look at what's inside: -### Initialization (zk init) +### Initialization -Let's take a deeper look into what `zk init` does. +Let's take a deeper look into what `zkstack ecosystem init` does. -#### zk tool +#### ZK Stack CLI -`zk` itself is implemented in typescript (you can see the code in `infrastructure` directory). 
If you change anything
-there, make sure to run `zk` (that compiles this code), before re-running `zk init`.
+`zkstack` itself is implemented in Rust (you can see the code in `/zkstack_cli` directory). If you change anything
+there, make sure to run `zkstackup --local` from the root folder (that compiles and installs this code), before
+re-running any `zkstack` command.

-#### zk init
+#### Containers

-As first step, it gets the docker images for postgres and reth.
+The first step to initialize a ZK Stack ecosystem is to run the command `zkstack containers`. This command gets the
+docker images for `postgres` and `reth`. If the `--observability` option is passed to the command, or the corresponding
+option is selected in the interactive prompt, then Prometheus, Grafana and other observability-related images are
+downloaded and run.

Reth (one of the Ethereum clients) will be used to setup our own copy of L1 chain (that our local ZKsync would use).

@@ -26,11 +30,19 @@ Postgres is one of the two databases, that is used by ZKsync (the other one is R
stored in postgres (blocks, transactions etc) - while RocksDB is only storing the state (Tree & Map) - and it used by VM.

-Then we compile JS packages (these include our web3 sdk, tools and testing infrastructure).
+#### Ecosystem

-Then L1 & L2 contracts.
+The next step is to run the command `zkstack ecosystem init`.

-And now we're ready to start setting up the system.
+This command:
+
+- Collects and finalizes the ecosystem configuration.
+- Builds and deploys L1 & L2 contracts.
+- Initializes each chain defined in the `/chains` folder. (Currently, a single chain `era` is defined there, but you can
+  create your own chains by running `zkstack chain create`).
+- Sets up observability.
+- Runs the genesis process.
+- Initializes the database.

#### Postgres

@@ -83,8 +95,8 @@ If everything goes well, you should see that L1 blocks are being produced.

Now we can start the main server:

-```shell
-zk server
+```bash
+zkstack server
```

This will actually run a cargo binary (`zksync_server`).

@@ -96,7 +108,7 @@ Currently we don't send any transactions there (so the logs might be empty).

But you should see some initial blocks in postgres:

-```
+```sql
select * from miniblocks;
```

@@ -107,7 +119,7 @@ Let's finish this article, by taking a look at our L1:

We will use the `web3` tool to communicate with the L1, have a look at [02_deposits.md](02_deposits.md) for installation
instructions. You can check that you're a (localnet) crypto trillionaire, by running:

-```shell
+```bash
./web3 --rpc-url http://localhost:8545 balance 0x36615Cf349d7F6344891B1e7CA7C72883F5dc049
```

@@ -120,14 +132,14 @@ In order to communicate with L2 (our ZKsync) - we have to deploy multiple contra
Ethereum). You can look on the `deployL1.log` file - to see the list of contracts that were deployed and their
accounts.

First thing in the file, is the deployer/governor wallet - this is the account that can change, freeze and unfreeze the
-contracts (basically the owner). You can also verify (using the getBalance method above), that is has a lot of tokens.
+contracts (basically the owner). You can verify the token balance using the `getBalance` method above.

Then, there are a bunch of contracts (CRATE2_FACTOR, DIAMOND_PROXY, L1_ALLOW_LIST etc etc) - for each one, the file
contains the address.
You can quickly verify that they were really deployed, by calling: -```shell +```bash ./web3 --rpc-url http://localhost:8545 address XXX ``` diff --git a/docs/guides/advanced/02_deposits.md b/docs/src/guides/advanced/02_deposits.md similarity index 100% rename from docs/guides/advanced/02_deposits.md rename to docs/src/guides/advanced/02_deposits.md diff --git a/docs/guides/advanced/03_withdrawals.md b/docs/src/guides/advanced/03_withdrawals.md similarity index 100% rename from docs/guides/advanced/03_withdrawals.md rename to docs/src/guides/advanced/03_withdrawals.md diff --git a/docs/guides/advanced/04_contracts.md b/docs/src/guides/advanced/04_contracts.md similarity index 100% rename from docs/guides/advanced/04_contracts.md rename to docs/src/guides/advanced/04_contracts.md diff --git a/docs/guides/advanced/05_how_call_works.md b/docs/src/guides/advanced/05_how_call_works.md similarity index 98% rename from docs/guides/advanced/05_how_call_works.md rename to docs/src/guides/advanced/05_how_call_works.md index 5b9458ddce8e..0126c5349e90 100644 --- a/docs/guides/advanced/05_how_call_works.md +++ b/docs/src/guides/advanced/05_how_call_works.md @@ -12,7 +12,7 @@ Since the 'call' method is only for reading data, all the calculations will happ ### Calling the 'call' method If you need to make calls quickly, you can use the 'cast' binary from the -[foundry](https://github.com/foundry-rs/foundry) suite: +[Foundry ZKsync](https://foundry-book.zksync.io/getting-started/installation) suite: ```shell= cast call 0x23DF7589897C2C9cBa1C3282be2ee6a938138f10 "myfunction()()" --rpc-url http://localhost:3050 diff --git a/docs/guides/advanced/06_how_transaction_works.md b/docs/src/guides/advanced/06_how_transaction_works.md similarity index 100% rename from docs/guides/advanced/06_how_transaction_works.md rename to docs/src/guides/advanced/06_how_transaction_works.md diff --git a/docs/guides/advanced/07_fee_model.md b/docs/src/guides/advanced/07_fee_model.md similarity index 100% rename from docs/guides/advanced/07_fee_model.md rename to docs/src/guides/advanced/07_fee_model.md diff --git a/docs/guides/advanced/08_how_l2_messaging_works.md b/docs/src/guides/advanced/08_how_l2_messaging_works.md similarity index 100% rename from docs/guides/advanced/08_how_l2_messaging_works.md rename to docs/src/guides/advanced/08_how_l2_messaging_works.md diff --git a/docs/guides/advanced/09_pubdata.md b/docs/src/guides/advanced/09_pubdata.md similarity index 100% rename from docs/guides/advanced/09_pubdata.md rename to docs/src/guides/advanced/09_pubdata.md diff --git a/docs/guides/advanced/10_pubdata_with_blobs.md b/docs/src/guides/advanced/10_pubdata_with_blobs.md similarity index 100% rename from docs/guides/advanced/10_pubdata_with_blobs.md rename to docs/src/guides/advanced/10_pubdata_with_blobs.md diff --git a/docs/guides/advanced/11_compression.md b/docs/src/guides/advanced/11_compression.md similarity index 100% rename from docs/guides/advanced/11_compression.md rename to docs/src/guides/advanced/11_compression.md diff --git a/docs/guides/advanced/12_alternative_vm_intro.md b/docs/src/guides/advanced/12_alternative_vm_intro.md similarity index 100% rename from docs/guides/advanced/12_alternative_vm_intro.md rename to docs/src/guides/advanced/12_alternative_vm_intro.md diff --git a/docs/guides/advanced/13_zk_intuition.md b/docs/src/guides/advanced/13_zk_intuition.md similarity index 100% rename from docs/guides/advanced/13_zk_intuition.md rename to docs/src/guides/advanced/13_zk_intuition.md diff --git 
a/docs/guides/advanced/14_zk_deeper_overview.md b/docs/src/guides/advanced/14_zk_deeper_overview.md
similarity index 100%
rename from docs/guides/advanced/14_zk_deeper_overview.md
rename to docs/src/guides/advanced/14_zk_deeper_overview.md
diff --git a/docs/guides/advanced/15_prover_keys.md b/docs/src/guides/advanced/15_prover_keys.md
similarity index 100%
rename from docs/guides/advanced/15_prover_keys.md
rename to docs/src/guides/advanced/15_prover_keys.md
diff --git a/docs/src/guides/advanced/16_decentralization.md b/docs/src/guides/advanced/16_decentralization.md
new file mode 100644
index 000000000000..a5f889a813d0
--- /dev/null
+++ b/docs/src/guides/advanced/16_decentralization.md
@@ -0,0 +1,104 @@
+# Decentralization
+
+To enable support for synchronization over the p2p network, the main node needs to have the "consensus" component
+configured and enabled as follows:
+
+## Generating the consensus secrets
+
+Run the following to generate consensus secrets:
+
+```
+docker run --entrypoint /usr/bin/zksync_external_node "matterlabs/external-node:2.0-v25.0.0" generate-secrets > consensus_secrets.yaml
+chmod 600 consensus_secrets.yaml
+```
+
+## Preparing the consensus config
+
+Create a `consensus_config.yaml` file with the following content (remember to replace the placeholders):
+
+```yaml
+server_addr: '0.0.0.0:3054'
+public_addr:
+  # Address under which the node is accessible to the other nodes.
+  # It can be a public domain, like `example.com:3054`, in case the main node is accessible from the internet,
+  # or it can be a kubernetes cluster domain, like `server-v2-core..svc.cluster.local:3054` in
+  # case the main node should be only accessible within the cluster.
+debug_page_addr: '0.0.0.0:5000'
+max_payload_size: 3200000
+gossip_dynamic_inbound_limit: 10
+genesis_spec:
+  chain_id: # chain id
+  protocol_version: 1 # consensus protocol version
+  validators:
+    - key: validator:public:??? # public key of the main node (copy this PUBLIC key from consensus_secrets.yaml)
+      weight: 1
+  leader: validator:public:??? # same as above - main node will be the only validator and the only leader.
+```
+
+## Providing the configuration to the `zksync_server`
+
+To enable the consensus component for the main node, you need to append
+`--components=,consensus` to the `zksync_server` command line arguments.
+In addition to that, you need to provide the configuration (from the files `consensus_config.yaml` and
+`consensus_secrets.yaml` that we have just prepared) to the `zksync_server` binary. There are two ways (hopefully not
+for long) to achieve that:
+
+- In the file-based configuration system, the consensus config is embedded in the
+  [general config](https://github.com/matter-labs/zksync-era/blob/1edcabe0c6a02d5b6700c29c0d9f6220ec6fb03c/core/lib/config/src/configs/general.rs#L58),
+  and the consensus secrets are embedded in the
+  [secrets config](https://github.com/matter-labs/zksync-era/blob/main/core/bin/zksync_server/src/main.rs). Paste the
+  content of the generated `consensus_secrets.yaml` file into the `secrets` config, and the prepared config into the
+  `general` config.
+
+- In the env-var-based configuration system, the consensus config and consensus secrets files are passed as standalone
+  files. The paths to these files need to be passed as env vars `CONSENSUS_CONFIG_PATH` and `CONSENSUS_SECRETS_PATH`.
+
+## Gitops repo config
+
+If you are using the Matter Labs gitops repo to configure the main node, it is even more complicated because the
+`consensus_config.yaml` file is rendered from a helm chart.
See the +[example](https://github.com/matter-labs/gitops-kubernetes/blob/main/apps/environments/mainnet2/server-v2/server-v2-core.yaml), +to see where you have to paste the content of the `consensus_config.yaml` file. + +You need to embed the `consensus_secrets.yaml` file into a kubernetes config: + +```yaml +apiVersion: v1 +kind: Secret +metadata: + name: consensus-secrets +type: Opaque +stringData: + .consensus_secrets.yaml: +``` + +You need to add the following sections to your kubernetes config for the core server: + +```yaml +spec: + values: + persistence: + consensus-secrets-volume: + name: consensus-secrets # this is the name of the secret kubernetes object we defined above + enabled: true + type: secret + mountPath: '/etc/consensus_secrets/' + args: + - --components=state_keeper,consensus + service: + main: + ports: + consensus: + enabled: true + port: 3054 + configMap: + consensus: + enabled: true + data: + consensus_config.yaml: + env: + - name: CONSENSUS_CONFIG_PATH + value: /etc/consensus_config.yaml # this is the location rendered by the helm chart, you can't change it + - name: CONSENSUS_SECRETS_PATH + value: /etc/consensus_secrets/.consensus_secrets.yaml +``` diff --git a/docs/guides/advanced/90_advanced_debugging.md b/docs/src/guides/advanced/90_advanced_debugging.md similarity index 100% rename from docs/guides/advanced/90_advanced_debugging.md rename to docs/src/guides/advanced/90_advanced_debugging.md diff --git a/docs/guides/advanced/91_docker_and_ci.md b/docs/src/guides/advanced/91_docker_and_ci.md similarity index 93% rename from docs/guides/advanced/91_docker_and_ci.md rename to docs/src/guides/advanced/91_docker_and_ci.md index ff1c7843b8b1..885d3155dd6c 100644 --- a/docs/guides/advanced/91_docker_and_ci.md +++ b/docs/src/guides/advanced/91_docker_and_ci.md @@ -64,8 +64,8 @@ zk After this, you can run any commands you need. -When you see a command like `ci_run zk contract build` in the CI - this simply means that it executed -`zk contract build` inside that docker container. +When you see a command like `ci_run zkstack dev contracts` in the CI - this simply means that it executed +`zkstack dev contracts` inside that docker container. **IMPORTANT** - by default, docker is running in the mode, where it does NOT persist the changes. So if you exit that shell, all the changes will be removed (so when you restart, you'll end up in the same pristine condition). You can diff --git a/docs/guides/advanced/README.md b/docs/src/guides/advanced/README.md similarity index 93% rename from docs/guides/advanced/README.md rename to docs/src/guides/advanced/README.md index 5a3673b558ad..e0f8a82f2fb5 100644 --- a/docs/guides/advanced/README.md +++ b/docs/src/guides/advanced/README.md @@ -20,9 +20,10 @@ way. - [ZK intuition](./13_zk_intuition.md) - [ZK deeper overview](./14_zk_deeper_overview.md) - [Prover keys](./15_prover_keys.md) +- [Decentralization](./16_decentralization.md) Additionally, there are a few articles that cover specific topics that may be useful for developers actively working on -`zksync-era` repo. 
+`zksync-era` repo: - [Advanced debugging](./90_advanced_debugging.md) - [Docker and CI](./91_docker_and_ci.md) diff --git a/docs/guides/architecture.md b/docs/src/guides/architecture.md similarity index 61% rename from docs/guides/architecture.md rename to docs/src/guides/architecture.md index 25676ad74aa7..6af177ab8b69 100644 --- a/docs/guides/architecture.md +++ b/docs/src/guides/architecture.md @@ -5,7 +5,7 @@ structure of the physical architecture of the ZKsync Era project. ## High-Level Overview -The zksync-2-dev repository has the following main units: +The zksync-era repository has the following main units: **Smart Contracts:** All the smart contracts in charge of the protocols on the L1 & L2. Some main contracts: @@ -31,29 +31,53 @@ APIs, rather via the single source of truth -- the db storage layer. ## Low-Level Overview -This section provides a physical map of folders & files in this repository. +This section provides a physical map of folders & files in this repository. It doesn't aim to be complete, it only shows +the most important parts. -- `/contracts` - - - `/ethereum`: Smart contracts deployed on the Ethereum L1. - - `/zksync`: Smart contracts deployed on the ZKsync L2. +- `/contracts`: A submodule with L1, L2, and system contracts. See + [repository](https://github.com/matter-labs/era-contracts/). - `/core` - `/bin`: Executables for the microservices components comprising ZKsync Core Node. - - `/admin-tools`: CLI tools for admin operations (e.g. restarting prover jobs). + - `/zksync_server`: Main sequencer implementation. - `/external_node`: A read replica that can sync from the main node. + - `/tee_prover`: Implementation of the TEE prover. + + - `/node`: Composable node parts. + + - `/node_framework`: Framework used to compose parts of the node. + - `/api_server`: Implementation of Web3 JSON RPC server. + - `/base_token_adjuster`: Adaptor to support custom (non-ETH) base tokens. + - `/block_reverter`: Component for reverting L2 blocks and L1 batches. + - `/commitment_generator`: Component for calculation of commitments required for ZKP generation. + - `/consensus`: p2p utilities. + - `/consistency_checker`: Security component for the external node. + - `/da_clients`: Clients for different data availability solutions. + - `/da_dispatcher`: Adaptor for alternative DA solutions. + - `/eth_sender`: Component responsible for submitting batches to L1 contract. + - `/eth_watch`: Component responsible for retrieving data from the L1 contract. + - `/fee_model`: Fee logic implementation. + - `/genesis`: Logic for performing chain genesis. + - `/metadata_calculator`: Component responsible for Merkle tree maintenance. + - `/node_storage_init`: Strategies for the node initialization. + - `/node_sync`: Node synchronization for the external node. + - `/proof_data_handler`: Gateway API for interaction with the prover subsystem. + - `/reorg_detector`: Component responsible for detecting reorgs on the external node. + - `/state_keeper`: Main part of the sequencer, responsible for forming blocks and L1 batches. + - `/vm_runner`: Set of components generating various data by re-running sealed L1 batches. - `/lib`: All the library crates used as dependencies of the binary crates above. - `/basic_types`: Crate with essential ZKsync primitive types. - - `/config`: All the configured values used by the different ZKsync apps. + - `/config`: All the configuration values used by the different ZKsync apps. - `/contracts`: Contains definitions of commonly used smart contracts. 
- - `/crypto`: Cryptographical primitives used by the different ZKsync crates. + - `/crypto_primitives`: Cryptographical primitives used by the different ZKsync crates. - `/dal`: Data availability layer - `/migrations`: All the db migrations applied to create the storage layer. - `/src`: Functionality to interact with the different db tables. + - `/db_connection`: Generic DB interface. - `/eth_client`: Module providing an interface to interact with an Ethereum node. - `/eth_signer`: Module to sign messages and txs. - `/mempool`: Implementation of the ZKsync transaction pool. @@ -61,37 +85,17 @@ This section provides a physical map of folders & files in this repository. - `/mini_merkle_tree`: In-memory implementation of a sparse Merkle tree. - `/multivm`: A wrapper over several versions of VM that have been used by the main node. - `/object_store`: Abstraction for storing blobs outside the main data store. - - `/prometheus_exporter`: Prometheus data exporter. - `/queued_job_processor`: An abstraction for async job processing - `/state`: A state keeper responsible for handling transaction execution and creating miniblocks and L1 batches. - `/storage`: An encapsulated database interface. - `/test_account`: A representation of ZKsync account. - `/types`: ZKsync network operations, transactions, and common types. - `/utils`: Miscellaneous helpers for ZKsync crates. - - `/vlog`: ZKsync logging utility. - - `/vm`: ULightweight out-of-circuit VM interface. + - `/vlog`: ZKsync observability stack. + - `/vm_interface`: Generic interface for ZKsync virtual machine. - `/web3_decl`: Declaration of the Web3 API. - - `zksync_core/src` - - `/api_server` Externally facing APIs. - - `/web3`: ZKsync implementation of the Web3 API. - - `/tx_sender`: Helper module encapsulating the transaction processing logic. - - `/bin`: The executable main starting point for the ZKsync server. - - `/consistency_checker`: ZKsync watchdog. - - `/eth_sender`: Submits transactions to the ZKsync smart contract. - - `/eth_watch`: Fetches data from the L1. for L2 censorship resistance. - - `/fee_monitor`: Monitors the ratio of fees collected by executing txs over the costs of interacting with - Ethereum. - - `/fee_ticker`: Module to define the price components of L2 transactions. - - `/gas_adjuster`: Module to determine the fees to pay in txs containing blocks submitted to the L1. - - `/gas_tracker`: Module for predicting L1 gas cost for the Commit/PublishProof/Execute operations. - - `/metadata_calculator`: Module to maintain the ZKsync state tree. - - `/state_keeper`: The sequencer. In charge of collecting the pending txs from the mempool, executing them in the - VM, and sealing them in blocks. - - `/witness_generator`: Takes the sealed blocks and generates a _Witness_, the input for the prover containing the - circuits to be proved. - `/tests`: Testing infrastructure for ZKsync network. - - `/cross_external_nodes_checker`: A tool for checking external nodes consistency against the main node. - `/loadnext`: An app for load testing the ZKsync server. - `/ts-integration`: Integration tests set implemented in TypeScript. @@ -106,6 +110,3 @@ This section provides a physical map of folders & files in this repository. - `/env`:`.env` files that contain environment variables for different configurations of ZKsync Server / Prover. - `/keys`: Verification keys for `circuit` module. - -- `/sdk`: Implementation of client libraries for the ZKsync network in different programming languages. - - `/zksync-rs`: Rust client library for ZKsync. 
diff --git a/docs/guides/build-docker.md b/docs/src/guides/build-docker.md
similarity index 100%
rename from docs/guides/build-docker.md
rename to docs/src/guides/build-docker.md
diff --git a/docs/src/guides/development.md b/docs/src/guides/development.md
new file mode 100644
index 000000000000..fb8dd44a6c7a
--- /dev/null
+++ b/docs/src/guides/development.md
@@ -0,0 +1,197 @@
+# Development guide
+
+This document outlines the steps for setting up and working with ZKsync.
+
+## Prerequisites
+
+If you haven't already, install the prerequisites as described in [Install Dependencies](./setup-dev.md).
+
+## Installing the local ZK Stack CLI
+
+To set up local development, begin by installing
+[ZK Stack CLI](https://github.com/matter-labs/zksync-era/blob/main/zkstack_cli/README.md). From the project's root
+directory, run the following commands:
+
+```bash
+cd ./zkstack_cli/zkstackup
+./install --local
+```
+
+This installs `zkstackup` in your user binaries directory (e.g., `$HOME/.local/bin/`) and adds it to your `PATH`.
+
+After installation, open a new terminal or reload your shell profile. From the project's root directory, you can now
+run:
+
+```bash
+zkstackup --local
+```
+
+This command installs `zkstack` from the current source directory.
+
+You can proceed to verify the installation and start familiarizing yourself with the CLI by running:
+
+```bash
+zkstack --help
+```
+
+> NOTE: Whenever you want to update your local installation with your changes, just rerun:
+>
+> ```bash
+> zkstackup --local
+> ```
+>
+> You might find it convenient to add this alias to your shell profile:
+>
+> `alias zkstackup='zkstackup --path /path/to/zksync-era'`
+
+## Configure Ecosystem
+
+The project root directory includes configuration files for an ecosystem with a single chain, `era`. To initialize the
+ecosystem, first start the required containers:
+
+```bash
+zkstack containers
+```
+
+Next, run:
+
+```bash
+zkstack ecosystem init
+```
+
+These commands will guide you through the configuration options for setting up the ecosystem.
+
+> NOTE: For local development only, you can also use the development defaults by supplying the `--dev` flag.
+
+Initialization may take some time, but key steps (such as downloading and unpacking keys or setting up containers) only
+need to be completed once.
+
+To see more detailed output, you can run commands with the `--verbose` flag.
+
+## Cleanup
+
+To clean up the local ecosystem (e.g., removing containers and clearing the contract cache), run:
+
+```bash
+zkstack dev clean all
+```
+
+You can then reinitialize the ecosystem as described in the [Configure Ecosystem](#configure-ecosystem) section.
+
+```bash
+zkstack containers
+zkstack ecosystem init
+```
+
+## Committing changes
+
+`zksync` uses pre-commit and pre-push git hooks for basic code integrity checks. Hooks are set up automatically within
+the workspace initialization process. These hooks will not allow you to commit code that does not pass several checks.
+
+Currently, the following criteria are checked (a sketch of running them manually follows this list):
+
+- Code must be formatted via `zkstack dev fmt`.
+- Code must be linted via `zkstack dev lint`.
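+
+As a quick sketch (assuming the hooks are installed), you can run the same checks manually before committing:
+
+```bash
+# Run the checks enforced by the pre-commit hooks (a minimal sketch).
+zkstack dev fmt   # format the workspace
+zkstack dev lint  # run the linters
+```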
+
+## Testing
+
+ZK Stack CLI offers multiple subcommands to run specific integration and unit tests:
+
+```bash
+zkstack dev test --help
+```
+
+```bash
+Usage: zkstack dev test [OPTIONS]
+
+Commands:
+  integration   Run integration tests
+  fees          Run fees test
+  revert        Run revert tests
+  recovery      Run recovery tests
+  upgrade       Run upgrade tests
+  build         Build all test dependencies
+  rust          Run unit-tests, accepts optional cargo test flags
+  l1-contracts  Run L1 contracts tests
+  prover        Run prover tests
+  wallet        Print test wallets information
+  loadtest      Run loadtest
+  help          Print this message or the help of the given subcommand(s)
+```
+
+### Running unit tests
+
+You can run unit tests for the Rust crates in the project by running:
+
+```bash
+zkstack dev test rust
+```
+
+### Running integration tests
+
+Running integration tests is more complex. Some tests require a running server, while others need the system to be in a
+specific state. Please refer to our CI scripts
+[ci-core-reusable.yml](https://github.com/matter-labs/zksync-era/blob/main/.github/workflows/ci-core-reusable.yml) to
+get a better understanding of the process.
+
+### Running load tests
+
+The current load test implementation only supports the legacy bridge. To use it, you need to create a new chain with
+legacy bridge support:
+
+```bash
+zkstack chain create --legacy-bridge
+zkstack chain init
+```
+
+After initializing the chain with a legacy bridge, you can run the load test against it:
+
+```bash
+zkstack dev test loadtest
+```
+
+> WARNING: Never use legacy bridges in non-testing environments.
+
+## Contracts
+
+### Build contracts
+
+Run:
+
+```bash
+zkstack dev contracts --help
+```
+
+to see all the options.
+
+### Publish source code on Etherscan
+
+#### Verifier Options
+
+Most commands interacting with smart contracts support the same verification options as Foundry's `forge` command. Just
+double-check that the following options are available in the subcommand:
+
+```bash
+--verifier          -- Verifier to use
+--verifier-api-key  -- Verifier API key
+--verifier-url      -- Verifier URL, if using a custom provider
+```
+
+#### Using Foundry
+
+You can use `foundry` to verify the source code of the contracts:
+
+```bash
+forge verify-contract
+```
+
+Verifies a smart contract on a chosen verification provider.
+
+You must provide:
+
+- The contract address.
+- The contract name or the path to the contract.
+- In the case of Etherscan verification, you must also provide:
+  - Your Etherscan API key, either by passing it as an argument or setting `ETHERSCAN_API_KEY`.
+
+For more information, check [Foundry's documentation](https://book.getfoundry.sh/reference/forge/forge-verify-contract).
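+
+As a concrete sketch of such a verification call - the address and contract path below are placeholders, not values
+from this repo, and only the `--verifier` and `ETHERSCAN_API_KEY` conventions mentioned above are assumed:
+
+```bash
+# A minimal sketch; replace the address and contract path with your own.
+export ETHERSCAN_API_KEY=<your key>  # or pass the key as an argument instead
+forge verify-contract \
+  0x1234567890AbCdEf1234567890aBcDeF12345678 \
+  contracts/MyContract.sol:MyContract \
+  --verifier etherscan
+```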
diff --git a/docs/guides/external-node/00_quick_start.md b/docs/src/guides/external-node/00_quick_start.md similarity index 100% rename from docs/guides/external-node/00_quick_start.md rename to docs/src/guides/external-node/00_quick_start.md diff --git a/docs/guides/external-node/01_intro.md b/docs/src/guides/external-node/01_intro.md similarity index 100% rename from docs/guides/external-node/01_intro.md rename to docs/src/guides/external-node/01_intro.md diff --git a/docs/guides/external-node/02_configuration.md b/docs/src/guides/external-node/02_configuration.md similarity index 100% rename from docs/guides/external-node/02_configuration.md rename to docs/src/guides/external-node/02_configuration.md diff --git a/docs/guides/external-node/03_running.md b/docs/src/guides/external-node/03_running.md similarity index 100% rename from docs/guides/external-node/03_running.md rename to docs/src/guides/external-node/03_running.md diff --git a/docs/guides/external-node/04_observability.md b/docs/src/guides/external-node/04_observability.md similarity index 100% rename from docs/guides/external-node/04_observability.md rename to docs/src/guides/external-node/04_observability.md diff --git a/docs/guides/external-node/05_troubleshooting.md b/docs/src/guides/external-node/05_troubleshooting.md similarity index 100% rename from docs/guides/external-node/05_troubleshooting.md rename to docs/src/guides/external-node/05_troubleshooting.md diff --git a/docs/guides/external-node/06_components.md b/docs/src/guides/external-node/06_components.md similarity index 100% rename from docs/guides/external-node/06_components.md rename to docs/src/guides/external-node/06_components.md diff --git a/docs/guides/external-node/07_snapshots_recovery.md b/docs/src/guides/external-node/07_snapshots_recovery.md similarity index 100% rename from docs/guides/external-node/07_snapshots_recovery.md rename to docs/src/guides/external-node/07_snapshots_recovery.md diff --git a/docs/guides/external-node/08_pruning.md b/docs/src/guides/external-node/08_pruning.md similarity index 100% rename from docs/guides/external-node/08_pruning.md rename to docs/src/guides/external-node/08_pruning.md diff --git a/docs/guides/external-node/09_treeless_mode.md b/docs/src/guides/external-node/09_treeless_mode.md similarity index 100% rename from docs/guides/external-node/09_treeless_mode.md rename to docs/src/guides/external-node/09_treeless_mode.md diff --git a/docs/src/guides/external-node/10_decentralization.md b/docs/src/guides/external-node/10_decentralization.md new file mode 100644 index 000000000000..f2b1782c2d72 --- /dev/null +++ b/docs/src/guides/external-node/10_decentralization.md @@ -0,0 +1,91 @@ +# Decentralization + +In the default setup, the ZKsync node will fetch data from the ZKsync API endpoint maintained by Matter Labs. To reduce +the reliance on this centralized endpoint we have developed a decentralized p2p networking stack (aka gossipnet) which +will eventually be used instead of ZKsync API for synchronizing data. + +On the gossipnet, the data integrity will be protected by the BFT (byzantine fault-tolerant) consensus algorithm +(currently data is signed just by the main node though). 
+
+## Enabling gossipnet on your node
+
+> [!NOTE]
+>
+> Because the data transmitted over the gossipnet is signed by the main node (and eventually by the consensus quorum),
+> the signatures need to be backfilled to the node's local storage the first time you switch from centralized (ZKsync
+> API based) synchronization to the decentralized (gossipnet based) synchronization (this is a one-time thing). With the
+> current implementation it may take a couple of hours and gets faster the more nodes you add to the
+> `gossip_static_outbound` list (see below). We are working to remove this inconvenience.
+
+> [!NOTE]
+>
+> The minimum supported server version for this is
+> [24.11.0](https://github.com/matter-labs/zksync-era/releases/tag/core-v24.11.0)
+
+### Generating secrets
+
+Each participant node of the gossipnet has to have an identity (a public/secret key pair). When running your node for
+the first time, generate the secrets by running:
+
+```
+docker run --entrypoint /usr/bin/zksync_external_node "matterlabs/external-node:2.0-v25.1.0" generate-secrets > consensus_secrets.yaml
+chmod 600 consensus_secrets.yaml
+```
+
+> [!NOTE]
+>
+> NEVER reveal the secret keys used by your node. Otherwise, someone can impersonate your node on the gossipnet. If you
+> suspect that your secret key has been leaked, you can generate fresh keys using the same tool.
+>
+> If you want someone else to connect to your node, give them your PUBLIC key instead. Both public and secret keys are
+> present in the `consensus_secrets.yaml` (public keys are in comments).
+
+### Preparing configuration file
+
+Copy the template of the consensus configuration file (for
+[mainnet](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/external-node/prepared_configs/mainnet_consensus_config.yaml)
+or
+[testnet](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/external-node/prepared_configs/testnet_consensus_config.yaml)
+).
+
+> [!NOTE]
+>
+> You need to fill in the `public_addr` field. This is the address that will (not implemented yet) be advertised over
+> gossipnet to other nodes, so that they can establish connections to your node. If you don't want to expose your node
+> to the public internet, you can use an IP address from your local network.
+
+Currently, the config contains the following fields (refer to the config
+[schema](https://github.com/matter-labs/zksync-era/blob/990676c5f84afd2ff8cd337f495c82e8d1f305a4/core/lib/protobuf_config/src/proto/core/consensus.proto#L66)
+for more details; a filled-in sketch follows this list):
+
+- `server_addr` - local TCP socket address that the node should listen on for incoming connections. Note that this is an
+  additional TCP port that will be opened by the node.
+- `public_addr` - the public address of your node that will be advertised over the gossipnet.
+- `max_payload_size` - limit (in bytes) on the size of a ZKsync Era block received from the gossipnet. This protects
+  your node from getting DoSed by oversized network messages. Use the value from the template.
+- `gossip_dynamic_inbound_limit` - maximum number of unauthenticated concurrent inbound connections that can be
+  established to your node. This is a DDoS protection measure.
+- `gossip_static_outbound` - list of trusted peers that your node should always try to connect to. The template contains
+  the nodes maintained by Matterlabs, but you can add more if you know any. Note that the list contains both the network
+  address AND the public key of the node - this prevents spoofing attacks.
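+
+As an illustration, a filled-in testnet config might look as follows. This is a minimal sketch: the `public_addr`
+value is a placeholder, and the peer entry is copied from the previously published testnet template in this repo -
+verify the current peer list before relying on it.
+
+```bash
+# Write a sketch of a testnet consensus config (peer data is an example).
+cat > testnet_consensus_config.yaml <<'EOF'
+server_addr: '0.0.0.0:3054'
+public_addr: '203.0.113.10:3054' # placeholder: your node's advertised address
+max_payload_size: 5000000
+gossip_dynamic_inbound_limit: 100
+gossip_static_outbound:
+  - key: 'node:public:ed25519:4a94067664e7b8d0927ab1443491dab71a1d0c63f861099e1852f2b6d0831c3e'
+    addr: 'external-node-consensus-sepolia.zksync.dev:3054'
+EOF
+```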
+ +### Setting environment variables + +Uncomment (or add) the following lines in your `.env` config: + +``` +EN_CONSENSUS_CONFIG_PATH=... +EN_CONSENSUS_SECRETS_PATH=... +``` + +These variables should point to your consensus config and secrets files that we have just created. Tweak the paths to +the files if you have placed them differently. + +### Add `--enable-consensus` flag to your entry point command + +For the consensus configuration to take effect you have to add `--enable-consensus` flag to the command line when +running the node, for example: + +``` +docker run "matterlabs/external-node:2.0-v24.12.0" --enable-consensus +``` diff --git a/docs/src/guides/external-node/README.md b/docs/src/guides/external-node/README.md new file mode 100644 index 000000000000..becd9846d4f2 --- /dev/null +++ b/docs/src/guides/external-node/README.md @@ -0,0 +1 @@ +# External node diff --git a/docs/guides/external-node/docker-compose-examples/configs/generate_secrets.sh b/docs/src/guides/external-node/docker-compose-examples/configs/generate_secrets.sh similarity index 100% rename from docs/guides/external-node/docker-compose-examples/configs/generate_secrets.sh rename to docs/src/guides/external-node/docker-compose-examples/configs/generate_secrets.sh diff --git a/docs/src/guides/external-node/docker-compose-examples/configs/mainnet_consensus_config.yaml b/docs/src/guides/external-node/docker-compose-examples/configs/mainnet_consensus_config.yaml new file mode 100644 index 000000000000..08f5861daa83 --- /dev/null +++ b/docs/src/guides/external-node/docker-compose-examples/configs/mainnet_consensus_config.yaml @@ -0,0 +1,5 @@ +server_addr: '0.0.0.0:3054' +public_addr: '127.0.0.1:3054' +debug_page_addr: '0.0.0.0:5000' +max_payload_size: 5000000 +gossip_dynamic_inbound_limit: 100 diff --git a/docs/src/guides/external-node/docker-compose-examples/configs/testnet_consensus_config.yaml b/docs/src/guides/external-node/docker-compose-examples/configs/testnet_consensus_config.yaml new file mode 100644 index 000000000000..08f5861daa83 --- /dev/null +++ b/docs/src/guides/external-node/docker-compose-examples/configs/testnet_consensus_config.yaml @@ -0,0 +1,5 @@ +server_addr: '0.0.0.0:3054' +public_addr: '127.0.0.1:3054' +debug_page_addr: '0.0.0.0:5000' +max_payload_size: 5000000 +gossip_dynamic_inbound_limit: 100 diff --git a/docs/guides/external-node/docker-compose-examples/grafana/provisioning/dashboards/Consensus.json b/docs/src/guides/external-node/docker-compose-examples/grafana/provisioning/dashboards/Consensus.json similarity index 100% rename from docs/guides/external-node/docker-compose-examples/grafana/provisioning/dashboards/Consensus.json rename to docs/src/guides/external-node/docker-compose-examples/grafana/provisioning/dashboards/Consensus.json diff --git a/docs/guides/external-node/docker-compose-examples/grafana/provisioning/dashboards/General.json b/docs/src/guides/external-node/docker-compose-examples/grafana/provisioning/dashboards/General.json similarity index 100% rename from docs/guides/external-node/docker-compose-examples/grafana/provisioning/dashboards/General.json rename to docs/src/guides/external-node/docker-compose-examples/grafana/provisioning/dashboards/General.json diff --git a/docs/guides/external-node/docker-compose-examples/grafana/provisioning/dashboards/default.yml b/docs/src/guides/external-node/docker-compose-examples/grafana/provisioning/dashboards/default.yml similarity index 100% rename from 
docs/guides/external-node/docker-compose-examples/grafana/provisioning/dashboards/default.yml rename to docs/src/guides/external-node/docker-compose-examples/grafana/provisioning/dashboards/default.yml diff --git a/docs/guides/external-node/docker-compose-examples/grafana/provisioning/datasources/prometheus.yml b/docs/src/guides/external-node/docker-compose-examples/grafana/provisioning/datasources/prometheus.yml similarity index 100% rename from docs/guides/external-node/docker-compose-examples/grafana/provisioning/datasources/prometheus.yml rename to docs/src/guides/external-node/docker-compose-examples/grafana/provisioning/datasources/prometheus.yml diff --git a/docs/guides/external-node/docker-compose-examples/mainnet-external-node-docker-compose.yml b/docs/src/guides/external-node/docker-compose-examples/mainnet-external-node-docker-compose.yml similarity index 97% rename from docs/guides/external-node/docker-compose-examples/mainnet-external-node-docker-compose.yml rename to docs/src/guides/external-node/docker-compose-examples/mainnet-external-node-docker-compose.yml index 9c8c5bb31425..5ee9de187bf0 100644 --- a/docs/guides/external-node/docker-compose-examples/mainnet-external-node-docker-compose.yml +++ b/docs/src/guides/external-node/docker-compose-examples/mainnet-external-node-docker-compose.yml @@ -52,7 +52,7 @@ services: # Generation of consensus secrets. # The secrets are generated iff the secrets file doesn't already exist. generate-secrets: - image: "matterlabs/external-node:2.0-v24.16.0" + image: "matterlabs/external-node:2.0-v25.1.0" entrypoint: [ "/configs/generate_secrets.sh", @@ -61,7 +61,7 @@ services: volumes: - ./configs:/configs external-node: - image: "matterlabs/external-node:2.0-v24.16.0" + image: "matterlabs/external-node:2.0-v25.1.0" entrypoint: [ "/usr/bin/entrypoint.sh", diff --git a/docs/guides/external-node/docker-compose-examples/prometheus/prometheus.yml b/docs/src/guides/external-node/docker-compose-examples/prometheus/prometheus.yml similarity index 100% rename from docs/guides/external-node/docker-compose-examples/prometheus/prometheus.yml rename to docs/src/guides/external-node/docker-compose-examples/prometheus/prometheus.yml diff --git a/docs/guides/external-node/docker-compose-examples/testnet-external-node-docker-compose.yml b/docs/src/guides/external-node/docker-compose-examples/testnet-external-node-docker-compose.yml similarity index 100% rename from docs/guides/external-node/docker-compose-examples/testnet-external-node-docker-compose.yml rename to docs/src/guides/external-node/docker-compose-examples/testnet-external-node-docker-compose.yml diff --git a/docs/guides/external-node/prepared_configs/mainnet-config.env b/docs/src/guides/external-node/prepared_configs/mainnet-config.env similarity index 98% rename from docs/guides/external-node/prepared_configs/mainnet-config.env rename to docs/src/guides/external-node/prepared_configs/mainnet-config.env index bce812084665..eac24f4ab7ed 100644 --- a/docs/guides/external-node/prepared_configs/mainnet-config.env +++ b/docs/src/guides/external-node/prepared_configs/mainnet-config.env @@ -70,7 +70,7 @@ RUST_LOG=zksync_core=debug,zksync_dal=info,zksync_eth_client=info,zksync_merkle_ RUST_BACKTRACE=full RUST_LIB_BACKTRACE=1 -# Settings related to gossip network, see `09_decentralization.md` +# Settings related to gossip network, see `10_decentralization.md` #EN_CONSENSUS_CONFIG_PATH=./mainnet_consensus_config.yaml #EN_CONSENSUS_SECRETS_PATH=./consensus_secrets.yaml diff --git 
a/docs/src/guides/external-node/prepared_configs/mainnet_consensus_config.yaml b/docs/src/guides/external-node/prepared_configs/mainnet_consensus_config.yaml
new file mode 100644
index 000000000000..08347a14efa0
--- /dev/null
+++ b/docs/src/guides/external-node/prepared_configs/mainnet_consensus_config.yaml
@@ -0,0 +1,4 @@
+server_addr: '0.0.0.0:3054'
+public_addr: ':3054'
+max_payload_size: 5000000
+gossip_dynamic_inbound_limit: 100
diff --git a/docs/guides/external-node/prepared_configs/testnet-goerli-config-deprecated.env b/docs/src/guides/external-node/prepared_configs/testnet-goerli-config-deprecated.env
similarity index 100%
rename from docs/guides/external-node/prepared_configs/testnet-goerli-config-deprecated.env
rename to docs/src/guides/external-node/prepared_configs/testnet-goerli-config-deprecated.env
diff --git a/docs/guides/external-node/prepared_configs/testnet-sepolia-config.env b/docs/src/guides/external-node/prepared_configs/testnet-sepolia-config.env
similarity index 98%
rename from docs/guides/external-node/prepared_configs/testnet-sepolia-config.env
rename to docs/src/guides/external-node/prepared_configs/testnet-sepolia-config.env
index 182012e2850c..c8f855b4a4a2 100644
--- a/docs/guides/external-node/prepared_configs/testnet-sepolia-config.env
+++ b/docs/src/guides/external-node/prepared_configs/testnet-sepolia-config.env
@@ -70,7 +70,7 @@ RUST_LOG=zksync_core=debug,zksync_dal=info,zksync_eth_client=info,zksync_merkle_
RUST_BACKTRACE=full
RUST_LIB_BACKTRACE=1
-# Settings related to gossip network, see `09_decentralization.md`
+# Settings related to gossip network, see `10_decentralization.md`
#EN_CONSENSUS_CONFIG_PATH=./testnet_consensus_config.yaml
#EN_CONSENSUS_SECRETS_PATH=./consensus_secrets.yaml
diff --git a/docs/src/guides/external-node/prepared_configs/testnet_consensus_config.yaml b/docs/src/guides/external-node/prepared_configs/testnet_consensus_config.yaml
new file mode 100644
index 000000000000..08347a14efa0
--- /dev/null
+++ b/docs/src/guides/external-node/prepared_configs/testnet_consensus_config.yaml
@@ -0,0 +1,4 @@
+server_addr: '0.0.0.0:3054'
+public_addr: ':3054'
+max_payload_size: 5000000
+gossip_dynamic_inbound_limit: 100
diff --git a/docs/src/guides/launch.md b/docs/src/guides/launch.md
new file mode 100644
index 000000000000..52872a53cf2a
--- /dev/null
+++ b/docs/src/guides/launch.md
@@ -0,0 +1,202 @@
+# Running the application
+
+This document covers common scenarios for launching ZKsync applications locally.
+
+## Prerequisites
+
+Prepare the dev environment prerequisites: see [Installing dependencies](./setup-dev.md).
+
+## Set up the local dev environment
+
+Run the required containers with:
+
+```bash
+zkstack containers
+```
+
+Then set up the ecosystem:
+
+```bash
+zkstack ecosystem init
+```
+
+To completely reset the dev environment:
+
+- Stop services:
+
+  ```bash
+  zkstack dev clean all
+  ```
+
+- Repeat the setup procedure above:
+
+  ```bash
+  zkstack containers
+  zkstack ecosystem init
+  ```
+
+### Run observability stack
+
+If you want to run the [Dockprom](https://github.com/stefanprodan/dockprom/) stack (Prometheus, Grafana) alongside the
+other containers, add the `--observability` parameter during initialization:
+
+```bash
+zkstack containers --observability
+```
+
+or select `yes` when prompted during the interactive execution of the command.
+
+That will also provision Grafana with
+[era-observability](https://github.com/matter-labs/era-observability/tree/main/dashboards) dashboards. You can then
+access it at `http://127.0.0.1:3000/` with credentials `admin/admin`.
+
+> If you don't see any data displayed on the Grafana dashboards, try setting the time range to "Last 30 minutes". You
+> also need `jq` installed on your system.
+
+## Ecosystem Configuration
+
+The ecosystem configuration is spread across multiple files and directories:
+
+1. Root level:
+
+   - `ZkStack.yaml`: Main configuration file for the entire ecosystem.
+
+2. `configs/` directory:
+
+   - `apps/`:
+     - `portal_config.json`: Configuration for the portal application.
+   - `contracts.yaml`: Defines smart contract settings and addresses.
+   - `erc20.yaml`: Configuration for ERC20 tokens.
+   - `initial_deployments.yaml`: Specifies initial ERC20 token deployments.
+   - `wallets.yaml`: Contains wallet configurations.
+
+3. `chains/<chain_name>/` directory:
+
+   - `artifacts/`: Contains build/execution artifacts.
+   - `configs/`: Chain-specific configuration files.
+     - `contracts.yaml`: Chain-specific smart contract settings.
+     - `external_node.yaml`: Configuration for external nodes.
+     - `general.yaml`: General chain configuration.
+     - `genesis.yaml`: Genesis configuration for the chain.
+     - `secrets.yaml`: Secrets and private keys for the chain.
+     - `wallets.yaml`: Wallet configurations for the chain.
+   - `db/main/`: Database files for the chain.
+   - `ZkStack.yaml`: Chain-specific ZkStack configuration.
+
+These configuration files are automatically generated during the ecosystem initialization (`zkstack ecosystem init`)
+and chain initialization (`zkstack chain init`) processes. They control various aspects of the ZKsync ecosystem,
+including:
+
+- Network settings
+- Smart contract deployments
+- Token configurations
+- Database settings
+- Application/Service-specific parameters
+
+It's important to note that while these files can be manually edited, any changes may be overwritten if the ecosystem
+or the chain is reinitialized. Always back up your modifications and exercise caution when making direct changes to
+these files.
+
+For specific configuration needs, it's recommended to use the appropriate `zkstack` commands or consult the
+documentation for safe ways to customize your setup.
+
+## Build and run the server
+
+Run the server:
+
+```bash
+zkstack server
+```
+
+The server's configuration files can be found in the `/chains/<chain_name>/configs` directory. These files are created
+when running the `zkstack chain init` command.
+
+### Modifying configuration files manually
+
+To manually modify configuration files:
+
+1. Locate the relevant config file in `/chains/<chain_name>/configs`
+2. Open the file in a text editor
+3. Make the necessary changes, following the existing format
+4. Save the file
+5. Restart the relevant services for the changes to take effect:
+
+```bash
+zkstack server
+```
+
+> NOTE: Manual changes to configuration files may be overwritten if the ecosystem or the chain is reinitialized.
+
+> WARNING: Some properties, such as ports, may require manual modification across different configuration files to
+> ensure consistency and avoid conflicts (see the sketch below).
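+
+As a sketch of that cross-file check (the chain name `era` and port `3050` below are placeholders, not values mandated
+by the docs), you can list every config file that mentions a port before changing it:
+
+```bash
+# List all chain config files mentioning the port, so every occurrence
+# can be updated consistently before restarting the server.
+grep -rn "3050" chains/era/configs/
+```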
+
+## Running the server with a Google Cloud Storage object store instead of the default in-memory store
+
+Get the `service_account.json` file containing the GCP credentials from the Kubernetes secret for the relevant
+environment (stage2/testnet2). Add that file to the default location `~/gcloud/service_account.json`, or update
+`object_store.toml` with the file location:
+
+```bash
+zkstack prover init --bucket-base-url={url} --credentials-file={path/to/service_account.json}
+```
+
+## Running the prover server
+
+Running on a machine with a GPU:
+
+```bash
+zkstack prover run --component=prover
+```
+
+> NOTE: Running on a machine without a GPU is currently not supported by `zkstack`.
+
+## Running the verification key generator
+
+```bash
+# Ensure that setup_2^26.key is in the current directory; the file can be downloaded from https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key
+
+# To generate all verification keys
+cargo run --release --bin zksync_verification_key_generator
+```
+
+## Generating binary verification keys for existing JSON verification keys
+
+```bash
+cargo run --release --bin zksync_json_to_binary_vk_converter -- -o /path/to/output-binary-vk
+```
+
+## Generating commitments for existing verification keys
+
+```bash
+cargo run --release --bin zksync_commitment_generator
+```
+
+## Running the contract verifier
+
+```bash
+zkstack contract-verifier run
+```
+
+## Troubleshooting
+
+### Connection Refused
+
+#### Problem
+
+```bash
+error sending request for url (http://127.0.0.1:8545/): error trying to connect: tcp connect error: Connection refused (os error 61)
+```
+
+#### Description
+
+No containers are currently running, which is the likely reason you're encountering this error.
+
+#### Solution
+
+Ensure that the necessary containers have been started and are functioning correctly to resolve the issue:
+
+```bash
+zkstack containers
+```
diff --git a/docs/guides/repositories.md b/docs/src/guides/repositories.md
similarity index 100%
rename from docs/guides/repositories.md
rename to docs/src/guides/repositories.md
diff --git a/docs/guides/setup-dev.md b/docs/src/guides/setup-dev.md
similarity index 84%
rename from docs/guides/setup-dev.md
rename to docs/src/guides/setup-dev.md
index 4eef211cd3d1..43350ac3314d 100644
--- a/docs/guides/setup-dev.md
+++ b/docs/src/guides/setup-dev.md
@@ -14,20 +14,20 @@ git config --global url."https://".insteadOf git://
# Rust
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
+
# NVM
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.5/install.sh | bash
+
# All necessary stuff
sudo apt-get update
-sudo apt-get install build-essential pkg-config cmake clang lldb lld libssl-dev postgresql apt-transport-https ca-certificates curl software-properties-common
+sudo apt-get install -y build-essential pkg-config cmake clang lldb lld libssl-dev libpq-dev apt-transport-https ca-certificates curl software-properties-common
+
# Install docker
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu focal stable"
sudo apt install docker-ce
sudo usermod -aG docker ${USER}
-# Stop default postgres (as we'll use the docker one)
-sudo systemctl stop postgresql
-sudo systemctl disable postgresql
# Start docker.
sudo systemctl start docker
@@ -45,9 +45,9 @@ cargo install cargo-nextest
# SQL tools
cargo install sqlx-cli --version 0.8.1
-# Foundry
-curl -L https://foundry.paradigm.xyz | bash
-foundryup --branch master
+# Foundry ZKsync
+curl -L https://raw.githubusercontent.com/matter-labs/foundry-zksync/main/install-foundry-zksync | bash
+foundryup-zksync
# Non CUDA (GPU) setup, can be skipped if the machine has a CUDA installed for provers
# Don't do that if you intend to run provers on your machine. Check the prover docs for a setup instead.
@@ -60,24 +60,24 @@ cd zksync-era
git submodule update --init --recursive
```
-Don't forget to [add env variables](#Environment) and look at [tips](#tips).
+Don't forget to look at [tips](#tips).
## Supported operating systems
-ZKsync currently can be launched on any \*nix operating system (e.g. any linux distribution or MacOS).
+ZKsync can currently be launched on any \*nix operating system (e.g. any Linux distribution or macOS).
If you're using Windows, then make sure to use WSL 2.
Additionally, if you are going to use WSL 2, make sure that your project is located in the _linux filesystem_, since
accessing NTFS partitions from within WSL is very slow.
-If you're using MacOS with an ARM processor (e.g. M1/M2), make sure that you are working in the _native_ environment
-(e.g. your terminal and IDE don't run in Rosetta, and your toolchain is native). Trying to work with ZKsync code via
+If you're using macOS with an ARM processor (e.g. M1/M2), make sure that you are working in the _native_ environment
+(e.g., your terminal and IDE don't run in Rosetta, and your toolchain is native). Trying to work with ZKsync code via
Rosetta may cause problems that are hard to spot and debug, so make sure to check everything before you start.
If you are a NixOS user or would like to have a reproducible environment, skip to the section about `nix`.
-## `Docker`
+## Docker
Install `docker`. It is recommended to follow the instructions from the
[official site](https://docs.docker.com/install/).
@@ -117,13 +117,13 @@ at this step.
If logging out does not resolve the issue, restarting the computer should.
-## `Node` & `Yarn`
+## Node.js & Yarn
1. Install `Node` (requires version `v20`). The recommended way is via [nvm](https://github.com/nvm-sh/nvm).
2. Install `yarn`. Can be done via `npm install -g yarn`. Make sure to get version 1.22.19 - you can change the version
   by running `yarn set version 1.22.19`.
-## `clang`
+## clang
In order to compile RocksDB, you must have LLVM available. On debian-based linux it can be installed as follows:
@@ -133,12 +133,12 @@ On debian-based linux:
sudo apt-get install build-essential pkg-config cmake clang lldb lld
```
-On mac:
+On macOS:
You need to have an up-to-date `Xcode`. You can install it directly from `App Store`. With Xcode command line tools, you
get the Clang compiler installed by default. Thus, having XCode you don't need to install `clang`.
-## `OpenSSL`
+## OpenSSL
Install OpenSSL:
@@ -154,9 +154,9 @@ On debian-based linux:
sudo apt-get install libssl-dev
```
-## `Rust`
+## Rust
-Install the latest `rust` version.
+Install the Rust toolchain version specified in `/rust-toolchain.toml` (a later stable version should also work).
Instructions can be found on the [official site](https://www.rust-lang.org/tools/install).
@@ -167,7 +167,7 @@ rustc --version
rustc 1.xx.y (xxxxxx 20xx-yy-zz) # Output may vary depending on actual version of rust
```
-If you are using MacOS with ARM processor (e.g. M1/M2), make sure that you use an `aarch64` toolchain. For example, when
+If you are using macOS with an ARM processor (e.g. M1/M2), make sure that you use an `aarch64` toolchain. For example, when
you run `rustup show`, you should see a similar input:
```bash
@@ -190,25 +190,26 @@ If you see `x86_64` mentioned in the output, probably you're running (or used to
that's the case, you should probably change the way you run terminal, and/or reinstall your IDE, and then reinstall the
Rust toolchain as well.
-## Postgres
+## PostgreSQL Client Library
-Install the latest postgres:
+For development purposes, you typically only need the PostgreSQL client library, not the full server installation.
+Here's how to install it:
-On mac:
+On macOS:
```bash
-brew install postgresql@14
+brew install libpq
```
-On debian-based linux:
+On Debian-based Linux:
```bash
-sudo apt-get install postgresql
+sudo apt-get install libpq-dev
```
### Cargo nextest
-[cargo-nextest](https://nexte.st/) is the next-generation test runner for Rust projects. `zk test rust` uses
+[cargo-nextest](https://nexte.st/) is the next-generation test runner for Rust projects. `zkstack dev test rust` uses
`cargo nextest` by default.
```bash
@@ -236,10 +237,13 @@ enable nix-ld.
Go to the zksync folder and run `nix develop`. After it finishes, you are in a shell that has all the dependencies.
-## Foundry
+## Foundry ZKsync
+
+ZKsync depends on Foundry ZKsync (a specialized fork of Foundry, tailored for ZKsync). Please follow this
+[installation guide](https://foundry-book.zksync.io/getting-started/installation) to get started with Foundry ZKsync.
-[Foundry](https://book.getfoundry.sh/getting-started/installation) can be utilized for deploying smart contracts. For
-commands related to deployment, you can pass flags for Foundry integration.
+Foundry ZKsync can also be used for deploying smart contracts. For commands related to deployment, you can pass flags
+for Foundry integration.
## Non-GPU setup
@@ -266,17 +270,6 @@ RUSTFLAGS as env var, or pass it in `config.toml` (either project level or globa
rustflags = ["--cfg=no_cuda"]
```
-## Environment
-
-Edit the lines below and add them to your shell profile file (e.g. `~/.bash_profile`, `~/.zshrc`):
-
-```bash
-# Add path here:
-export ZKSYNC_HOME=/path/to/zksync
-
-export PATH=$ZKSYNC_HOME/bin:$PATH
-```
-
## Tips
### Tip: `mold`
@@ -294,7 +287,7 @@ export RUSTFLAGS='-C link-arg=-fuse-ld=/usr/local/bin/mold'
export CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER="clang"
```
-## Tip: Speeding up building `RocksDB`
+### Tip: Speeding up building `RocksDB`
By default, each time you compile `rocksdb` crate, it will compile required C++ sources from scratch. It can be avoided
by using precompiled versions of library, and it will significantly improve your build times.
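
For instance, the `rocksdb` crate's build script can link a system-provided library when pointed at it via environment
variables. A minimal sketch, assuming a Debian-based Linux where prebuilt RocksDB and Snappy packages are available
(package names and library paths vary by distribution):

```bash
# Install prebuilt RocksDB and Snappy, then point the build script at them.
sudo apt-get install -y librocksdb-dev libsnappy-dev
export ROCKSDB_LIB_DIR=/usr/lib/x86_64-linux-gnu
export SNAPPY_LIB_DIR=/usr/lib/x86_64-linux-gnu
```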
diff --git a/docs/src/misc/contributors.md b/docs/src/misc/contributors.md new file mode 100644 index 000000000000..77e81149e465 --- /dev/null +++ b/docs/src/misc/contributors.md @@ -0,0 +1 @@ +# Contributors diff --git a/docs/specs/README.md b/docs/src/specs/README.md similarity index 100% rename from docs/specs/README.md rename to docs/src/specs/README.md diff --git a/docs/specs/blocks_batches.md b/docs/src/specs/blocks_batches.md similarity index 100% rename from docs/specs/blocks_batches.md rename to docs/src/specs/blocks_batches.md diff --git a/docs/specs/data_availability/README.md b/docs/src/specs/data_availability/README.md similarity index 100% rename from docs/specs/data_availability/README.md rename to docs/src/specs/data_availability/README.md diff --git a/docs/specs/data_availability/compression.md b/docs/src/specs/data_availability/compression.md similarity index 100% rename from docs/specs/data_availability/compression.md rename to docs/src/specs/data_availability/compression.md diff --git a/docs/specs/data_availability/overview.md b/docs/src/specs/data_availability/overview.md similarity index 100% rename from docs/specs/data_availability/overview.md rename to docs/src/specs/data_availability/overview.md diff --git a/docs/specs/data_availability/pubdata.md b/docs/src/specs/data_availability/pubdata.md similarity index 100% rename from docs/specs/data_availability/pubdata.md rename to docs/src/specs/data_availability/pubdata.md diff --git a/docs/specs/data_availability/reconstruction.md b/docs/src/specs/data_availability/reconstruction.md similarity index 100% rename from docs/specs/data_availability/reconstruction.md rename to docs/src/specs/data_availability/reconstruction.md diff --git a/docs/specs/data_availability/validium_zk_porter.md b/docs/src/specs/data_availability/validium_zk_porter.md similarity index 100% rename from docs/specs/data_availability/validium_zk_porter.md rename to docs/src/specs/data_availability/validium_zk_porter.md diff --git a/docs/specs/img/L2_Components.png b/docs/src/specs/img/L2_Components.png similarity index 100% rename from docs/specs/img/L2_Components.png rename to docs/src/specs/img/L2_Components.png diff --git a/docs/specs/img/diamondProxy.jpg b/docs/src/specs/img/diamondProxy.jpg similarity index 100% rename from docs/specs/img/diamondProxy.jpg rename to docs/src/specs/img/diamondProxy.jpg diff --git a/docs/specs/img/governance.jpg b/docs/src/specs/img/governance.jpg similarity index 100% rename from docs/specs/img/governance.jpg rename to docs/src/specs/img/governance.jpg diff --git a/docs/specs/img/zk-the-collective-action.jpeg b/docs/src/specs/img/zk-the-collective-action.jpeg similarity index 100% rename from docs/specs/img/zk-the-collective-action.jpeg rename to docs/src/specs/img/zk-the-collective-action.jpeg diff --git a/docs/specs/introduction.md b/docs/src/specs/introduction.md similarity index 100% rename from docs/specs/introduction.md rename to docs/src/specs/introduction.md diff --git a/docs/specs/l1_l2_communication/README.md b/docs/src/specs/l1_l2_communication/README.md similarity index 100% rename from docs/specs/l1_l2_communication/README.md rename to docs/src/specs/l1_l2_communication/README.md diff --git a/docs/specs/l1_l2_communication/l1_to_l2.md b/docs/src/specs/l1_l2_communication/l1_to_l2.md similarity index 100% rename from docs/specs/l1_l2_communication/l1_to_l2.md rename to docs/src/specs/l1_l2_communication/l1_to_l2.md diff --git a/docs/specs/l1_l2_communication/l2_to_l1.md 
b/docs/src/specs/l1_l2_communication/l2_to_l1.md similarity index 100% rename from docs/specs/l1_l2_communication/l2_to_l1.md rename to docs/src/specs/l1_l2_communication/l2_to_l1.md diff --git a/docs/specs/l1_l2_communication/overview_deposits_withdrawals.md b/docs/src/specs/l1_l2_communication/overview_deposits_withdrawals.md similarity index 100% rename from docs/specs/l1_l2_communication/overview_deposits_withdrawals.md rename to docs/src/specs/l1_l2_communication/overview_deposits_withdrawals.md diff --git a/docs/specs/l1_smart_contracts.md b/docs/src/specs/l1_smart_contracts.md similarity index 100% rename from docs/specs/l1_smart_contracts.md rename to docs/src/specs/l1_smart_contracts.md diff --git a/docs/specs/overview.md b/docs/src/specs/overview.md similarity index 100% rename from docs/specs/overview.md rename to docs/src/specs/overview.md diff --git a/docs/specs/prover/README.md b/docs/src/specs/prover/README.md similarity index 100% rename from docs/specs/prover/README.md rename to docs/src/specs/prover/README.md diff --git a/docs/specs/prover/boojum_function_check_if_satisfied.md b/docs/src/specs/prover/boojum_function_check_if_satisfied.md similarity index 100% rename from docs/specs/prover/boojum_function_check_if_satisfied.md rename to docs/src/specs/prover/boojum_function_check_if_satisfied.md diff --git a/docs/specs/prover/boojum_gadgets.md b/docs/src/specs/prover/boojum_gadgets.md similarity index 100% rename from docs/specs/prover/boojum_gadgets.md rename to docs/src/specs/prover/boojum_gadgets.md diff --git a/docs/specs/prover/circuit_testing.md b/docs/src/specs/prover/circuit_testing.md similarity index 100% rename from docs/specs/prover/circuit_testing.md rename to docs/src/specs/prover/circuit_testing.md diff --git a/docs/specs/prover/circuits/README.md b/docs/src/specs/prover/circuits/README.md similarity index 100% rename from docs/specs/prover/circuits/README.md rename to docs/src/specs/prover/circuits/README.md diff --git a/docs/specs/prover/circuits/code_decommitter.md b/docs/src/specs/prover/circuits/code_decommitter.md similarity index 100% rename from docs/specs/prover/circuits/code_decommitter.md rename to docs/src/specs/prover/circuits/code_decommitter.md diff --git a/docs/specs/prover/circuits/demux_log_queue.md b/docs/src/specs/prover/circuits/demux_log_queue.md similarity index 100% rename from docs/specs/prover/circuits/demux_log_queue.md rename to docs/src/specs/prover/circuits/demux_log_queue.md diff --git a/docs/specs/prover/circuits/ecrecover.md b/docs/src/specs/prover/circuits/ecrecover.md similarity index 100% rename from docs/specs/prover/circuits/ecrecover.md rename to docs/src/specs/prover/circuits/ecrecover.md diff --git a/docs/specs/prover/circuits/img/diagram.png b/docs/src/specs/prover/circuits/img/diagram.png similarity index 100% rename from docs/specs/prover/circuits/img/diagram.png rename to docs/src/specs/prover/circuits/img/diagram.png diff --git a/docs/specs/prover/circuits/img/flowchart.png b/docs/src/specs/prover/circuits/img/flowchart.png similarity index 100% rename from docs/specs/prover/circuits/img/flowchart.png rename to docs/src/specs/prover/circuits/img/flowchart.png diff --git a/docs/specs/prover/circuits/img/image.png b/docs/src/specs/prover/circuits/img/image.png similarity index 100% rename from docs/specs/prover/circuits/img/image.png rename to docs/src/specs/prover/circuits/img/image.png diff --git a/docs/specs/prover/circuits/keccak_round_function.md b/docs/src/specs/prover/circuits/keccak_round_function.md 
similarity index 100% rename from docs/specs/prover/circuits/keccak_round_function.md rename to docs/src/specs/prover/circuits/keccak_round_function.md diff --git a/docs/specs/prover/circuits/l1_messages_hasher.md b/docs/src/specs/prover/circuits/l1_messages_hasher.md similarity index 100% rename from docs/specs/prover/circuits/l1_messages_hasher.md rename to docs/src/specs/prover/circuits/l1_messages_hasher.md diff --git a/docs/specs/prover/circuits/log_sorter.md b/docs/src/specs/prover/circuits/log_sorter.md similarity index 100% rename from docs/specs/prover/circuits/log_sorter.md rename to docs/src/specs/prover/circuits/log_sorter.md diff --git a/docs/specs/prover/circuits/main_vm.md b/docs/src/specs/prover/circuits/main_vm.md similarity index 100% rename from docs/specs/prover/circuits/main_vm.md rename to docs/src/specs/prover/circuits/main_vm.md diff --git a/docs/specs/prover/circuits/overview.md b/docs/src/specs/prover/circuits/overview.md similarity index 100% rename from docs/specs/prover/circuits/overview.md rename to docs/src/specs/prover/circuits/overview.md diff --git a/docs/specs/prover/circuits/ram_permutation.md b/docs/src/specs/prover/circuits/ram_permutation.md similarity index 100% rename from docs/specs/prover/circuits/ram_permutation.md rename to docs/src/specs/prover/circuits/ram_permutation.md diff --git a/docs/specs/prover/circuits/sha256_round_function.md b/docs/src/specs/prover/circuits/sha256_round_function.md similarity index 100% rename from docs/specs/prover/circuits/sha256_round_function.md rename to docs/src/specs/prover/circuits/sha256_round_function.md diff --git a/docs/specs/prover/circuits/sort_decommitments.md b/docs/src/specs/prover/circuits/sort_decommitments.md similarity index 100% rename from docs/specs/prover/circuits/sort_decommitments.md rename to docs/src/specs/prover/circuits/sort_decommitments.md diff --git a/docs/specs/prover/circuits/sorting.md b/docs/src/specs/prover/circuits/sorting.md similarity index 100% rename from docs/specs/prover/circuits/sorting.md rename to docs/src/specs/prover/circuits/sorting.md diff --git a/docs/specs/prover/circuits/sorting_and_deduplicating.md b/docs/src/specs/prover/circuits/sorting_and_deduplicating.md similarity index 100% rename from docs/specs/prover/circuits/sorting_and_deduplicating.md rename to docs/src/specs/prover/circuits/sorting_and_deduplicating.md diff --git a/docs/specs/prover/circuits/storage_application.md b/docs/src/specs/prover/circuits/storage_application.md similarity index 100% rename from docs/specs/prover/circuits/storage_application.md rename to docs/src/specs/prover/circuits/storage_application.md diff --git a/docs/specs/prover/circuits/storage_sorter.md b/docs/src/specs/prover/circuits/storage_sorter.md similarity index 100% rename from docs/specs/prover/circuits/storage_sorter.md rename to docs/src/specs/prover/circuits/storage_sorter.md diff --git a/docs/specs/prover/getting_started.md b/docs/src/specs/prover/getting_started.md similarity index 100% rename from docs/specs/prover/getting_started.md rename to docs/src/specs/prover/getting_started.md diff --git a/docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(1).png b/docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(1).png similarity index 100% rename from docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(1).png rename to docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(1).png diff --git 
a/docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(11).png b/docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(11).png similarity index 100% rename from docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(11).png rename to docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(11).png diff --git a/docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(12).png b/docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(12).png similarity index 100% rename from docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(12).png rename to docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(12).png diff --git a/docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(13).png b/docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(13).png similarity index 100% rename from docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(13).png rename to docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(13).png diff --git a/docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(14).png b/docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(14).png similarity index 100% rename from docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(14).png rename to docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(14).png diff --git a/docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(16).png b/docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(16).png similarity index 100% rename from docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(16).png rename to docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(16).png diff --git a/docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(17).png b/docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(17).png similarity index 100% rename from docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(17).png rename to docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(17).png diff --git a/docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(2).png b/docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(2).png similarity index 100% rename from docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(2).png rename to docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(2).png diff --git a/docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(3).png b/docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(3).png similarity index 100% rename from docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(3).png rename to docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(3).png diff --git a/docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(4).png b/docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(4).png similarity index 100% rename from docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(4).png rename to 
docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(4).png diff --git a/docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(7).png b/docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(7).png similarity index 100% rename from docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(7).png rename to docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(7).png diff --git a/docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(8).png b/docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(8).png similarity index 100% rename from docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(8).png rename to docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(8).png diff --git a/docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(9).png b/docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(9).png similarity index 100% rename from docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(9).png rename to docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied(9).png diff --git a/docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied.png b/docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied.png similarity index 100% rename from docs/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied.png rename to docs/src/specs/prover/img/boojum_function_check_if_satisfied/Check_if_satisfied.png diff --git a/docs/specs/prover/img/circuit_testing/Contest(10).png b/docs/src/specs/prover/img/circuit_testing/Contest(10).png similarity index 100% rename from docs/specs/prover/img/circuit_testing/Contest(10).png rename to docs/src/specs/prover/img/circuit_testing/Contest(10).png diff --git a/docs/specs/prover/img/circuit_testing/Contest(11).png b/docs/src/specs/prover/img/circuit_testing/Contest(11).png similarity index 100% rename from docs/specs/prover/img/circuit_testing/Contest(11).png rename to docs/src/specs/prover/img/circuit_testing/Contest(11).png diff --git a/docs/specs/prover/img/circuit_testing/Contest(12).png b/docs/src/specs/prover/img/circuit_testing/Contest(12).png similarity index 100% rename from docs/specs/prover/img/circuit_testing/Contest(12).png rename to docs/src/specs/prover/img/circuit_testing/Contest(12).png diff --git a/docs/specs/prover/img/circuit_testing/Contest(4).png b/docs/src/specs/prover/img/circuit_testing/Contest(4).png similarity index 100% rename from docs/specs/prover/img/circuit_testing/Contest(4).png rename to docs/src/specs/prover/img/circuit_testing/Contest(4).png diff --git a/docs/specs/prover/img/circuit_testing/Contest(5).png b/docs/src/specs/prover/img/circuit_testing/Contest(5).png similarity index 100% rename from docs/specs/prover/img/circuit_testing/Contest(5).png rename to docs/src/specs/prover/img/circuit_testing/Contest(5).png diff --git a/docs/specs/prover/img/circuit_testing/Contest(6).png b/docs/src/specs/prover/img/circuit_testing/Contest(6).png similarity index 100% rename from docs/specs/prover/img/circuit_testing/Contest(6).png rename to docs/src/specs/prover/img/circuit_testing/Contest(6).png diff --git a/docs/specs/prover/img/circuit_testing/Contest(7).png b/docs/src/specs/prover/img/circuit_testing/Contest(7).png similarity index 100% rename from 
docs/specs/prover/img/circuit_testing/Contest(7).png rename to docs/src/specs/prover/img/circuit_testing/Contest(7).png diff --git a/docs/specs/prover/img/circuit_testing/Contest(8).png b/docs/src/specs/prover/img/circuit_testing/Contest(8).png similarity index 100% rename from docs/specs/prover/img/circuit_testing/Contest(8).png rename to docs/src/specs/prover/img/circuit_testing/Contest(8).png diff --git a/docs/specs/prover/img/circuit_testing/Contest(9).png b/docs/src/specs/prover/img/circuit_testing/Contest(9).png similarity index 100% rename from docs/specs/prover/img/circuit_testing/Contest(9).png rename to docs/src/specs/prover/img/circuit_testing/Contest(9).png diff --git "a/docs/specs/prover/img/intro_to_zkSync\342\200\231s_ZK/circuit.png" "b/docs/src/specs/prover/img/intro_to_zkSync\342\200\231s_ZK/circuit.png" similarity index 100% rename from "docs/specs/prover/img/intro_to_zkSync\342\200\231s_ZK/circuit.png" rename to "docs/src/specs/prover/img/intro_to_zkSync\342\200\231s_ZK/circuit.png" diff --git a/docs/specs/prover/overview.md b/docs/src/specs/prover/overview.md similarity index 100% rename from docs/specs/prover/overview.md rename to docs/src/specs/prover/overview.md diff --git a/docs/specs/prover/zk_terminology.md b/docs/src/specs/prover/zk_terminology.md similarity index 100% rename from docs/specs/prover/zk_terminology.md rename to docs/src/specs/prover/zk_terminology.md diff --git a/docs/specs/zk_chains/README.md b/docs/src/specs/zk_chains/README.md similarity index 100% rename from docs/specs/zk_chains/README.md rename to docs/src/specs/zk_chains/README.md diff --git a/docs/specs/zk_chains/gateway.md b/docs/src/specs/zk_chains/gateway.md similarity index 100% rename from docs/specs/zk_chains/gateway.md rename to docs/src/specs/zk_chains/gateway.md diff --git a/docs/specs/zk_chains/img/contractsExternal.png b/docs/src/specs/zk_chains/img/contractsExternal.png similarity index 100% rename from docs/specs/zk_chains/img/contractsExternal.png rename to docs/src/specs/zk_chains/img/contractsExternal.png diff --git a/docs/specs/zk_chains/img/deployWeth.png b/docs/src/specs/zk_chains/img/deployWeth.png similarity index 100% rename from docs/specs/zk_chains/img/deployWeth.png rename to docs/src/specs/zk_chains/img/deployWeth.png diff --git a/docs/specs/zk_chains/img/depositWeth.png b/docs/src/specs/zk_chains/img/depositWeth.png similarity index 100% rename from docs/specs/zk_chains/img/depositWeth.png rename to docs/src/specs/zk_chains/img/depositWeth.png diff --git a/docs/specs/zk_chains/img/hyperbridges.png b/docs/src/specs/zk_chains/img/hyperbridges.png similarity index 100% rename from docs/specs/zk_chains/img/hyperbridges.png rename to docs/src/specs/zk_chains/img/hyperbridges.png diff --git a/docs/specs/zk_chains/img/hyperbridging.png b/docs/src/specs/zk_chains/img/hyperbridging.png similarity index 100% rename from docs/specs/zk_chains/img/hyperbridging.png rename to docs/src/specs/zk_chains/img/hyperbridging.png diff --git a/docs/specs/zk_chains/img/newChain.png b/docs/src/specs/zk_chains/img/newChain.png similarity index 100% rename from docs/specs/zk_chains/img/newChain.png rename to docs/src/specs/zk_chains/img/newChain.png diff --git a/docs/specs/zk_chains/interop.md b/docs/src/specs/zk_chains/interop.md similarity index 100% rename from docs/specs/zk_chains/interop.md rename to docs/src/specs/zk_chains/interop.md diff --git a/docs/specs/zk_chains/overview.md b/docs/src/specs/zk_chains/overview.md similarity index 100% rename from docs/specs/zk_chains/overview.md 
rename to docs/src/specs/zk_chains/overview.md diff --git a/docs/specs/zk_chains/shared_bridge.md b/docs/src/specs/zk_chains/shared_bridge.md similarity index 100% rename from docs/specs/zk_chains/shared_bridge.md rename to docs/src/specs/zk_chains/shared_bridge.md diff --git a/docs/specs/zk_evm/README.md b/docs/src/specs/zk_evm/README.md similarity index 100% rename from docs/specs/zk_evm/README.md rename to docs/src/specs/zk_evm/README.md diff --git a/docs/specs/zk_evm/account_abstraction.md b/docs/src/specs/zk_evm/account_abstraction.md similarity index 100% rename from docs/specs/zk_evm/account_abstraction.md rename to docs/src/specs/zk_evm/account_abstraction.md diff --git a/docs/specs/zk_evm/bootloader.md b/docs/src/specs/zk_evm/bootloader.md similarity index 100% rename from docs/specs/zk_evm/bootloader.md rename to docs/src/specs/zk_evm/bootloader.md diff --git a/docs/specs/zk_evm/fee_model.md b/docs/src/specs/zk_evm/fee_model.md similarity index 100% rename from docs/specs/zk_evm/fee_model.md rename to docs/src/specs/zk_evm/fee_model.md diff --git a/docs/specs/zk_evm/precompiles.md b/docs/src/specs/zk_evm/precompiles.md similarity index 100% rename from docs/specs/zk_evm/precompiles.md rename to docs/src/specs/zk_evm/precompiles.md diff --git a/docs/specs/zk_evm/system_contracts.md b/docs/src/specs/zk_evm/system_contracts.md similarity index 100% rename from docs/specs/zk_evm/system_contracts.md rename to docs/src/specs/zk_evm/system_contracts.md diff --git a/docs/specs/zk_evm/vm_overview.md b/docs/src/specs/zk_evm/vm_overview.md similarity index 100% rename from docs/specs/zk_evm/vm_overview.md rename to docs/src/specs/zk_evm/vm_overview.md diff --git a/docs/specs/zk_evm/vm_specification/EraVM_formal_specification.pdf b/docs/src/specs/zk_evm/vm_specification/EraVM_formal_specification.pdf similarity index 100% rename from docs/specs/zk_evm/vm_specification/EraVM_formal_specification.pdf rename to docs/src/specs/zk_evm/vm_specification/EraVM_formal_specification.pdf diff --git a/docs/specs/zk_evm/vm_specification/README.md b/docs/src/specs/zk_evm/vm_specification/README.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/README.md rename to docs/src/specs/zk_evm/vm_specification/README.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/README.md b/docs/src/specs/zk_evm/vm_specification/compiler/README.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/README.md rename to docs/src/specs/zk_evm/vm_specification/compiler/README.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/code_separation.md b/docs/src/specs/zk_evm/vm_specification/compiler/code_separation.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/code_separation.md rename to docs/src/specs/zk_evm/vm_specification/compiler/code_separation.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/evmla_translator.md b/docs/src/specs/zk_evm/vm_specification/compiler/evmla_translator.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/evmla_translator.md rename to docs/src/specs/zk_evm/vm_specification/compiler/evmla_translator.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/exception_handling.md b/docs/src/specs/zk_evm/vm_specification/compiler/exception_handling.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/exception_handling.md rename to docs/src/specs/zk_evm/vm_specification/compiler/exception_handling.md diff --git 
a/docs/specs/zk_evm/vm_specification/compiler/instructions/README.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/README.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/README.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/README.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/evm/README.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/README.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/evm/README.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/README.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/evm/arithmetic.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/arithmetic.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/evm/arithmetic.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/arithmetic.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/evm/bitwise.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/bitwise.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/evm/bitwise.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/bitwise.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/evm/block.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/block.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/evm/block.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/block.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/evm/call.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/call.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/evm/call.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/call.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/evm/create.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/create.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/evm/create.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/create.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/evm/environment.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/environment.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/evm/environment.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/environment.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/evm/logging.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/logging.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/evm/logging.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/logging.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/evm/logical.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/logical.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/evm/logical.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/logical.md diff --git 
a/docs/specs/zk_evm/vm_specification/compiler/instructions/evm/memory.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/memory.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/evm/memory.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/memory.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/evm/overview.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/overview.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/evm/overview.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/overview.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/evm/return.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/return.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/evm/return.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/return.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/evm/sha3.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/sha3.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/evm/sha3.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/sha3.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/evm/stack.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/stack.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/evm/stack.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/evm/stack.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/evmla.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/evmla.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/evmla.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/evmla.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/extensions/README.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/extensions/README.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/extensions/README.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/extensions/README.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/extensions/call.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/extensions/call.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/extensions/call.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/extensions/call.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/extensions/overview.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/extensions/overview.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/extensions/overview.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/extensions/overview.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/extensions/verbatim.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/extensions/verbatim.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/extensions/verbatim.md rename to 
docs/src/specs/zk_evm/vm_specification/compiler/instructions/extensions/verbatim.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/overview.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/overview.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/overview.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/overview.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/instructions/yul.md b/docs/src/specs/zk_evm/vm_specification/compiler/instructions/yul.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/instructions/yul.md rename to docs/src/specs/zk_evm/vm_specification/compiler/instructions/yul.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/overview.md b/docs/src/specs/zk_evm/vm_specification/compiler/overview.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/overview.md rename to docs/src/specs/zk_evm/vm_specification/compiler/overview.md diff --git a/docs/specs/zk_evm/vm_specification/compiler/system_contracts.md b/docs/src/specs/zk_evm/vm_specification/compiler/system_contracts.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/compiler/system_contracts.md rename to docs/src/specs/zk_evm/vm_specification/compiler/system_contracts.md diff --git a/docs/specs/zk_evm/vm_specification/img/arch-overview.png b/docs/src/specs/zk_evm/vm_specification/img/arch-overview.png similarity index 100% rename from docs/specs/zk_evm/vm_specification/img/arch-overview.png rename to docs/src/specs/zk_evm/vm_specification/img/arch-overview.png diff --git a/docs/specs/zk_evm/vm_specification/img/arithmetic_opcode.png b/docs/src/specs/zk_evm/vm_specification/img/arithmetic_opcode.png similarity index 100% rename from docs/specs/zk_evm/vm_specification/img/arithmetic_opcode.png rename to docs/src/specs/zk_evm/vm_specification/img/arithmetic_opcode.png diff --git a/docs/specs/zk_evm/vm_specification/zkSync_era_virtual_machine_primer.md b/docs/src/specs/zk_evm/vm_specification/zkSync_era_virtual_machine_primer.md similarity index 100% rename from docs/specs/zk_evm/vm_specification/zkSync_era_virtual_machine_primer.md rename to docs/src/specs/zk_evm/vm_specification/zkSync_era_virtual_machine_primer.md diff --git a/docs/theme/head.hbs b/docs/theme/head.hbs new file mode 100644 index 000000000000..66ee37538adf --- /dev/null +++ b/docs/theme/head.hbs @@ -0,0 +1 @@ + diff --git a/etc/contracts-test-data/README.md b/etc/contracts-test-data/README.md deleted file mode 100644 index 532703ad210f..000000000000 --- a/etc/contracts-test-data/README.md +++ /dev/null @@ -1,4 +0,0 @@ -# Contracts test data - -This folder contains data for contracts that are being used for testing to check the correctness of the smart contract -flow in ZKsync. 
diff --git a/etc/contracts-test-data/contracts/basic-constructor/basic-constructor.sol b/etc/contracts-test-data/contracts/basic-constructor/basic-constructor.sol deleted file mode 100644 index d2fe2d0eefb9..000000000000 --- a/etc/contracts-test-data/contracts/basic-constructor/basic-constructor.sol +++ /dev/null @@ -1,16 +0,0 @@ -// SPDX-License-Identifier: MIT OR Apache-2.0 - -pragma solidity ^0.8.0; - -contract SimpleConstructor { - uint256 c; - - constructor(uint256 a, uint256 b, bool shouldRevert) { - c = a * b; - require(!shouldRevert, "reverted deploy"); - } - - function get() public view returns (uint256) { - return c; - } -} diff --git a/etc/contracts-test-data/contracts/create/Foo.sol b/etc/contracts-test-data/contracts/create/Foo.sol deleted file mode 100644 index 1ae4868e5bf6..000000000000 --- a/etc/contracts-test-data/contracts/create/Foo.sol +++ /dev/null @@ -1,8 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity >=0.8.1; -pragma abicoder v2; - -contract Foo { - string public name = "Foo"; -} diff --git a/etc/contracts-test-data/contracts/create/create.sol b/etc/contracts-test-data/contracts/create/create.sol deleted file mode 100644 index ef03e7c457ce..000000000000 --- a/etc/contracts-test-data/contracts/create/create.sol +++ /dev/null @@ -1,17 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity >=0.8.1; -pragma abicoder v2; - -// import Foo.sol from current directory -import "./Foo.sol"; - -contract Import { - // Initialize Foo.sol - Foo public foo = new Foo(); - - // Test Foo.sol by getting it's name. - function getFooName() public view returns (string memory) { - return foo.name(); - } -} \ No newline at end of file diff --git a/etc/contracts-test-data/contracts/custom-account/interfaces/IERC20.sol b/etc/contracts-test-data/contracts/custom-account/interfaces/IERC20.sol deleted file mode 100644 index b816bfed0863..000000000000 --- a/etc/contracts-test-data/contracts/custom-account/interfaces/IERC20.sol +++ /dev/null @@ -1,82 +0,0 @@ -// SPDX-License-Identifier: MIT -// OpenZeppelin Contracts (last updated v4.6.0) (token/ERC20/IERC20.sol) - -pragma solidity ^0.8.0; - -/** - * @dev Interface of the ERC20 standard as defined in the EIP. - */ -interface IERC20 { - /** - * @dev Emitted when `value` tokens are moved from one account (`from`) to - * another (`to`). - * - * Note that `value` may be zero. - */ - event Transfer(address indexed from, address indexed to, uint256 value); - - /** - * @dev Emitted when the allowance of a `spender` for an `owner` is set by - * a call to {approve}. `value` is the new allowance. - */ - event Approval(address indexed owner, address indexed spender, uint256 value); - - /** - * @dev Returns the amount of tokens in existence. - */ - function totalSupply() external view returns (uint256); - - /** - * @dev Returns the amount of tokens owned by `account`. - */ - function balanceOf(address account) external view returns (uint256); - - /** - * @dev Moves `amount` tokens from the caller's account to `to`. - * - * Returns a boolean value indicating whether the operation succeeded. - * - * Emits a {Transfer} event. - */ - function transfer(address to, uint256 amount) external returns (bool); - - /** - * @dev Returns the remaining number of tokens that `spender` will be - * allowed to spend on behalf of `owner` through {transferFrom}. This is - * zero by default. - * - * This value changes when {approve} or {transferFrom} are called. 
-     */
-    function allowance(address owner, address spender) external view returns (uint256);
-
-    /**
-     * @dev Sets `amount` as the allowance of `spender` over the caller's tokens.
-     *
-     * Returns a boolean value indicating whether the operation succeeded.
-     *
-     * IMPORTANT: Beware that changing an allowance with this method brings the risk
-     * that someone may use both the old and the new allowance by unfortunate
-     * transaction ordering. One possible solution to mitigate this race
-     * condition is to first reduce the spender's allowance to 0 and set the
-     * desired value afterwards:
-     * https://github.com/ethereum/EIPs/issues/20#issuecomment-263524729
-     *
-     * Emits an {Approval} event.
-     */
-    function approve(address spender, uint256 amount) external returns (bool);
-
-    /**
-     * @dev Moves `amount` tokens from `from` to `to` using the
-     * allowance mechanism. `amount` is then deducted from the caller's
-     * allowance.
-     *
-     * Returns a boolean value indicating whether the operation succeeded.
-     *
-     * Emits a {Transfer} event.
-     */
-    function transferFrom(
-        address from,
-        address to,
-        uint256 amount
-    ) external returns (bool);
-}
diff --git a/etc/contracts-test-data/contracts/estimator/estimator.sol b/etc/contracts-test-data/contracts/estimator/estimator.sol
deleted file mode 100644
index 7fc7dfffc64b..000000000000
--- a/etc/contracts-test-data/contracts/estimator/estimator.sol
+++ /dev/null
@@ -1,30 +0,0 @@
-// SPDX-License-Identifier: UNLICENSED
-
-// This contract is used to estimate the protocol properties
-// related to the fee calculation, such as block capacity
-// and different operations costs.
-
-pragma solidity ^0.8.0;
-
-// Copied from `contracts/zksync/contracts/L2ContractHelper.sol`.
-interface IL2Messenger {
-    function sendToL1(bytes memory _message) external returns (bytes32);
-}
-
-uint160 constant SYSTEM_CONTRACTS_OFFSET = 0x8000; // 2^15
-IL2Messenger constant L2_MESSENGER = IL2Messenger(address(SYSTEM_CONTRACTS_OFFSET + 0x08));
-
-// TODO: Should be set to the actual value (SMA-1185).
-// Represents the maximum amount of L2->L1 messages that can happen in one block.
-uint256 constant MAX_L2_L1_MESSAGES_IN_BLOCK = 256;
-
-contract Estimator {
-    function estimateBlockCapacity() public {
-        // Block capacity is defined by several parameters, but the "cheapest" way to seal the block
-        // is to send a limited amount of messages to the L1.
-        // Here we're going to do just it.
-        for (uint256 i = 0; i < MAX_L2_L1_MESSAGES_IN_BLOCK; i++) {
-            L2_MESSENGER.sendToL1(bytes(""));
-        }
-    }
-}
diff --git a/etc/contracts-test-data/contracts/events/events.sol b/etc/contracts-test-data/contracts/events/events.sol
deleted file mode 100644
index 93a451d54695..000000000000
--- a/etc/contracts-test-data/contracts/events/events.sol
+++ /dev/null
@@ -1,15 +0,0 @@
-// SPDX-License-Identifier: MIT
-
-pragma solidity ^0.8.0;
-
-contract Emitter {
-    event Trivial();
-    event Simple(uint256 Number, address Account);
-    event Indexed(uint256 indexed Number, address Account);
-
-    function test(uint256 number) public {
-        emit Trivial();
-        emit Simple(number, address(0xdeadbeef));
-        emit Indexed(number, address(0xc0ffee));
-    }
-}
diff --git a/etc/contracts-test-data/contracts/events/sample-calldata b/etc/contracts-test-data/contracts/events/sample-calldata
deleted file mode 100644
index c137101ba026..000000000000
Binary files a/etc/contracts-test-data/contracts/events/sample-calldata and /dev/null differ
diff --git a/etc/contracts-test-data/contracts/loadnext/loadnext_contract.sol b/etc/contracts-test-data/contracts/loadnext/loadnext_contract.sol
deleted file mode 100644
index b14286a45038..000000000000
--- a/etc/contracts-test-data/contracts/loadnext/loadnext_contract.sol
+++ /dev/null
@@ -1,56 +0,0 @@
-// SPDX-License-Identifier: MIT
-
-pragma solidity ^0.8.0;
-pragma abicoder v2;
-
-contract LoadnextContract {
-    event Event(uint val);
-    uint[] readArray;
-    uint[] writeArray;
-
-    constructor (uint reads) {
-        for (uint i = 0; i < reads; i++) {
-            readArray.push(i);
-        }
-    }
-
-    function execute(uint reads, uint writes, uint hashes, uint events, uint max_recursion, uint deploys) external returns(uint) {
-        if (max_recursion > 0) {
-            return this.execute(reads, writes, hashes, events, max_recursion - 1, deploys);
-        }
-
-        uint sum = 0;
-
-        // Somehow use result of storage read for compiler to not optimize this place.
-        for (uint i = 0; i < reads; i++) {
-            sum += readArray[i];
-        }
-
-        for (uint i = 0; i < writes; i++) {
-            writeArray.push(i);
-        }
-
-        for (uint i = 0; i < events; i++) {
-            emit Event(i);
-        }
-
-        // Somehow use result of keccak for compiler to not optimize this place.
-        for (uint i = 0; i < hashes; i++) {
-            sum += uint8(keccak256(abi.encodePacked("Message for encoding"))[0]);
-        }
-
-        for (uint i = 0; i < deploys; i++) {
-            Foo foo = new Foo();
-        }
-        return sum;
-    }
-
-    function burnGas(uint256 gasToBurn) external {
-        uint256 initialGas = gasleft();
-        while(initialGas - gasleft() < gasToBurn) {}
-    }
-}
-
-contract Foo {
-    string public name = "Foo";
-}
diff --git a/etc/contracts-test-data/contracts/long-return-data/long-return-data.sol b/etc/contracts-test-data/contracts/long-return-data/long-return-data.sol
deleted file mode 100644
index 793bf191cbd8..000000000000
--- a/etc/contracts-test-data/contracts/long-return-data/long-return-data.sol
+++ /dev/null
@@ -1,13 +0,0 @@
-// SPDX-License-Identifier: UNLICENSED
-pragma solidity ^0.8.0;
-
-contract LongReturnData{
-    function longReturnData() external returns (bool, bytes memory) {
-        // do some recursion, let's have more layers
-        (bool success, bytes memory _tmp) = this.longReturnData{gas: 79500000}();
-        require(success == false); // they should fail by design
-        assembly {
-            return(0, 0xffffffffffffffff)
-        }
-    }
-}
diff --git a/etc/contracts-test-data/counter/counter.sol b/etc/contracts-test-data/counter/counter.sol
deleted file mode 100644
index ec9219d7a199..000000000000
--- a/etc/contracts-test-data/counter/counter.sol
+++ /dev/null
@@ -1,27 +0,0 @@
-// SPDX-License-Identifier: UNLICENSED
-
-pragma solidity ^0.8.0;
-
-contract Counter {
-    uint256 value;
-
-    function increment(uint256 x) public {
-        value += x;
-    }
-
-    function incrementWithRevertPayable(uint256 x, bool shouldRevert) public payable returns (uint256) {
-        return incrementWithRevert(x, shouldRevert);
-    }
-
-    function incrementWithRevert(uint256 x, bool shouldRevert) public returns (uint256) {
-        value += x;
-        if (shouldRevert) {
-            revert("This method always reverts");
-        }
-        return value;
-    }
-
-    function get() public view returns (uint256) {
-        return value;
-    }
-}
diff --git a/etc/contracts-test-data/hardhat.config.ts b/etc/contracts-test-data/hardhat.config.ts
deleted file mode 100644
index 1883c1f6cd4e..000000000000
--- a/etc/contracts-test-data/hardhat.config.ts
+++ /dev/null
@@ -1,35 +0,0 @@
-import '@matterlabs/hardhat-zksync-solc';
-
-const COMPILER_VERSION = '1.5.0';
-const PRE_RELEASE_VERSION = 'prerelease-a167aa3-code4rena';
-function getZksolcUrl(): string {
-    // @ts-ignore
-    const platform = { darwin: 'macosx', linux: 'linux', win32: 'windows' }[process.platform];
-    // @ts-ignore
-    const toolchain = { linux: '-musl', win32: '-gnu', darwin: '' }[process.platform];
-    const arch = process.arch === 'x64' ? 'amd64' : process.arch;
-    const ext = process.platform === 'win32' ? '.exe' : '';
-
-    return `https://github.com/matter-labs/era-compiler-solidity/releases/download/${PRE_RELEASE_VERSION}/zksolc-${platform}-${arch}${toolchain}-v${COMPILER_VERSION}${ext}`;
-}
-
-export default {
-    zksolc: {
-        compilerSource: 'binary',
-        settings: {
-            compilerPath: getZksolcUrl(),
-            isSystem: true
-        }
-    },
-    networks: {
-        hardhat: {
-            zksync: true
-        }
-    },
-    solidity: {
-        version: '0.8.24',
-        settings: {
-            evmVersion: 'cancun'
-        }
-    }
-};
diff --git a/etc/contracts-test-data/package.json b/etc/contracts-test-data/package.json
deleted file mode 100644
index 543a982e4b77..000000000000
--- a/etc/contracts-test-data/package.json
+++ /dev/null
@@ -1,16 +0,0 @@
-{
-  "name": "contracts-test-data",
-  "version": "0.1.0",
-  "license": "MIT",
-  "dependencies": {
-    "@openzeppelin/contracts": "^4.8.0",
-    "hardhat": "=2.22.2"
-  },
-  "devDependencies": {
-    "@matterlabs/hardhat-zksync-solc": "^0.3.15"
-  },
-  "scripts": {
-    "build": "hardhat compile",
-    "clean": "hardhat clean"
-  }
-}
diff --git a/etc/contracts-test-data/yarn.lock b/etc/contracts-test-data/yarn.lock
deleted file mode 100644
index 47c70d2d63eb..000000000000
--- a/etc/contracts-test-data/yarn.lock
+++ /dev/null
@@ -1,2757 +0,0 @@
-# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
-# yarn lockfile v1
-
-
-"@balena/dockerignore@^1.0.2":
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/@balena/dockerignore/-/dockerignore-1.0.2.tgz#9ffe4726915251e8eb69f44ef3547e0da2c03e0d"
-  integrity sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==
-
-"@chainsafe/as-sha256@^0.3.1":
-  version "0.3.1"
-  resolved "https://registry.yarnpkg.com/@chainsafe/as-sha256/-/as-sha256-0.3.1.tgz#3639df0e1435cab03f4d9870cc3ac079e57a6fc9"
-  integrity sha512-hldFFYuf49ed7DAakWVXSJODuq3pzJEguD8tQ7h+sGkM18vja+OFoJI9krnGmgzyuZC2ETX0NOIcCTy31v2Mtg==
-
-"@chainsafe/persistent-merkle-tree@^0.4.2":
-  version "0.4.2"
-  resolved "https://registry.yarnpkg.com/@chainsafe/persistent-merkle-tree/-/persistent-merkle-tree-0.4.2.tgz#4c9ee80cc57cd3be7208d98c40014ad38f36f7ff"
-  integrity sha512-lLO3ihKPngXLTus/L7WHKaw9PnNJWizlOF1H9NNzHP6Xvh82vzg9F2bzkXhYIFshMZ2gTCEz8tq6STe7r5NDfQ==
-  dependencies:
-    "@chainsafe/as-sha256" "^0.3.1"
-
-"@chainsafe/persistent-merkle-tree@^0.5.0":
-  version "0.5.0"
-  resolved "https://registry.yarnpkg.com/@chainsafe/persistent-merkle-tree/-/persistent-merkle-tree-0.5.0.tgz#2b4a62c9489a5739dedd197250d8d2f5427e9f63"
-  integrity sha512-l0V1b5clxA3iwQLXP40zYjyZYospQLZXzBVIhhr9kDg/1qHZfzzHw0jj4VPBijfYCArZDlPkRi1wZaV2POKeuw==
-  dependencies:
-    "@chainsafe/as-sha256" "^0.3.1"
-
-"@chainsafe/ssz@^0.10.0":
-  version "0.10.2"
-  resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.10.2.tgz#c782929e1bb25fec66ba72e75934b31fd087579e"
-  integrity sha512-/NL3Lh8K+0q7A3LsiFq09YXS9fPE+ead2rr7vM2QK8PLzrNsw3uqrif9bpRX5UxgeRjM+vYi+boCM3+GM4ovXg==
-  dependencies:
-    "@chainsafe/as-sha256" "^0.3.1"
-    "@chainsafe/persistent-merkle-tree" "^0.5.0"
-
-"@chainsafe/ssz@^0.9.2":
-  version "0.9.4"
-  resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.9.4.tgz#696a8db46d6975b600f8309ad3a12f7c0e310497"
-  integrity sha512-77Qtg2N1ayqs4Bg/wvnWfg5Bta7iy7IRh8XqXh7oNMeP2HBbBwx8m6yTpA8p0EHItWPEBkgZd5S5/LSlp3GXuQ==
-  dependencies:
-    "@chainsafe/as-sha256" "^0.3.1"
-    "@chainsafe/persistent-merkle-tree" "^0.4.2"
-    case "^1.6.3"
-
-"@ethersproject/abi@5.7.0", "@ethersproject/abi@^5.1.2", "@ethersproject/abi@^5.7.0":
-  version "5.7.0"
-  resolved
"https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.7.0.tgz#b3f3e045bbbeed1af3947335c247ad625a44e449" - integrity sha512-351ktp42TiRcYB3H1OP8yajPeAQstMW/yCFokj/AthP9bLHzQFPlOrxOcwYEDkUAICmOHljvN4K39OMTMUa9RA== - dependencies: - "@ethersproject/address" "^5.7.0" - "@ethersproject/bignumber" "^5.7.0" - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/constants" "^5.7.0" - "@ethersproject/hash" "^5.7.0" - "@ethersproject/keccak256" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - "@ethersproject/properties" "^5.7.0" - "@ethersproject/strings" "^5.7.0" - -"@ethersproject/abstract-provider@5.7.0", "@ethersproject/abstract-provider@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.7.0.tgz#b0a8550f88b6bf9d51f90e4795d48294630cb9ef" - integrity sha512-R41c9UkchKCpAqStMYUpdunjo3pkEvZC3FAwZn5S5MGbXoMQOHIdHItezTETxAO5bevtMApSyEhn9+CHcDsWBw== - dependencies: - "@ethersproject/bignumber" "^5.7.0" - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - "@ethersproject/networks" "^5.7.0" - "@ethersproject/properties" "^5.7.0" - "@ethersproject/transactions" "^5.7.0" - "@ethersproject/web" "^5.7.0" - -"@ethersproject/abstract-signer@5.7.0", "@ethersproject/abstract-signer@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.7.0.tgz#13f4f32117868452191a4649723cb086d2b596b2" - integrity sha512-a16V8bq1/Cz+TGCkE2OPMTOUDLS3grCpdjoJCYNnVBbdYEMSgKrU0+B90s8b6H+ByYTBZN7a3g76jdIJi7UfKQ== - dependencies: - "@ethersproject/abstract-provider" "^5.7.0" - "@ethersproject/bignumber" "^5.7.0" - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - "@ethersproject/properties" "^5.7.0" - -"@ethersproject/address@5.7.0", "@ethersproject/address@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.7.0.tgz#19b56c4d74a3b0a46bfdbb6cfcc0a153fc697f37" - integrity sha512-9wYhYt7aghVGo758POM5nqcOMaE168Q6aRLJZwUmiqSrAungkG74gSSeKEIR7ukixesdRZGPgVqme6vmxs1fkA== - dependencies: - "@ethersproject/bignumber" "^5.7.0" - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/keccak256" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - "@ethersproject/rlp" "^5.7.0" - -"@ethersproject/base64@5.7.0", "@ethersproject/base64@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/base64/-/base64-5.7.0.tgz#ac4ee92aa36c1628173e221d0d01f53692059e1c" - integrity sha512-Dr8tcHt2mEbsZr/mwTPIQAf3Ai0Bks/7gTw9dSqk1mQvhW3XvRlmDJr/4n+wg1JmCl16NZue17CDh8xb/vZ0sQ== - dependencies: - "@ethersproject/bytes" "^5.7.0" - -"@ethersproject/basex@5.7.0", "@ethersproject/basex@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/basex/-/basex-5.7.0.tgz#97034dc7e8938a8ca943ab20f8a5e492ece4020b" - integrity sha512-ywlh43GwZLv2Voc2gQVTKBoVQ1mti3d8HK5aMxsfu/nRDnMmNqaSJ3r3n85HBByT8OpoY96SXM1FogC533T4zw== - dependencies: - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/properties" "^5.7.0" - -"@ethersproject/bignumber@5.7.0", "@ethersproject/bignumber@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.7.0.tgz#e2f03837f268ba655ffba03a57853e18a18dc9c2" - integrity sha512-n1CAdIHRWjSucQO3MC1zPSVgV/6dy/fjL9pMrPP9peL+QxEg9wOsVqwD4+818B6LUEtaXzVHQiuivzRoxPxUGw== - dependencies: - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - bn.js "^5.2.1" - -"@ethersproject/bytes@5.7.0", "@ethersproject/bytes@^5.7.0": - version "5.7.0" - resolved 
"https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.7.0.tgz#a00f6ea8d7e7534d6d87f47188af1148d71f155d" - integrity sha512-nsbxwgFXWh9NyYWo+U8atvmMsSdKJprTcICAkvbBffT75qDocbuggBU0SJiVK2MuTrp0q+xvLkTnGMPK1+uA9A== - dependencies: - "@ethersproject/logger" "^5.7.0" - -"@ethersproject/constants@5.7.0", "@ethersproject/constants@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.7.0.tgz#df80a9705a7e08984161f09014ea012d1c75295e" - integrity sha512-DHI+y5dBNvkpYUMiRQyxRBYBefZkJfo70VUkUAsRjcPs47muV9evftfZ0PJVCXYbAiCgght0DtcF9srFQmIgWA== - dependencies: - "@ethersproject/bignumber" "^5.7.0" - -"@ethersproject/contracts@5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/contracts/-/contracts-5.7.0.tgz#c305e775abd07e48aa590e1a877ed5c316f8bd1e" - integrity sha512-5GJbzEU3X+d33CdfPhcyS+z8MzsTrBGk/sc+G+59+tPa9yFkl6HQ9D6L0QMgNTA9q8dT0XKxxkyp883XsQvbbg== - dependencies: - "@ethersproject/abi" "^5.7.0" - "@ethersproject/abstract-provider" "^5.7.0" - "@ethersproject/abstract-signer" "^5.7.0" - "@ethersproject/address" "^5.7.0" - "@ethersproject/bignumber" "^5.7.0" - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/constants" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - "@ethersproject/properties" "^5.7.0" - "@ethersproject/transactions" "^5.7.0" - -"@ethersproject/hash@5.7.0", "@ethersproject/hash@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.7.0.tgz#eb7aca84a588508369562e16e514b539ba5240a7" - integrity sha512-qX5WrQfnah1EFnO5zJv1v46a8HW0+E5xuBBDTwMFZLuVTx0tbU2kkx15NqdjxecrLGatQN9FGQKpb1FKdHCt+g== - dependencies: - "@ethersproject/abstract-signer" "^5.7.0" - "@ethersproject/address" "^5.7.0" - "@ethersproject/base64" "^5.7.0" - "@ethersproject/bignumber" "^5.7.0" - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/keccak256" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - "@ethersproject/properties" "^5.7.0" - "@ethersproject/strings" "^5.7.0" - -"@ethersproject/hdnode@5.7.0", "@ethersproject/hdnode@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/hdnode/-/hdnode-5.7.0.tgz#e627ddc6b466bc77aebf1a6b9e47405ca5aef9cf" - integrity sha512-OmyYo9EENBPPf4ERhR7oj6uAtUAhYGqOnIS+jE5pTXvdKBS99ikzq1E7Iv0ZQZ5V36Lqx1qZLeak0Ra16qpeOg== - dependencies: - "@ethersproject/abstract-signer" "^5.7.0" - "@ethersproject/basex" "^5.7.0" - "@ethersproject/bignumber" "^5.7.0" - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - "@ethersproject/pbkdf2" "^5.7.0" - "@ethersproject/properties" "^5.7.0" - "@ethersproject/sha2" "^5.7.0" - "@ethersproject/signing-key" "^5.7.0" - "@ethersproject/strings" "^5.7.0" - "@ethersproject/transactions" "^5.7.0" - "@ethersproject/wordlists" "^5.7.0" - -"@ethersproject/json-wallets@5.7.0", "@ethersproject/json-wallets@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/json-wallets/-/json-wallets-5.7.0.tgz#5e3355287b548c32b368d91014919ebebddd5360" - integrity sha512-8oee5Xgu6+RKgJTkvEMl2wDgSPSAQ9MB/3JYjFV9jlKvcYHUXZC+cQp0njgmxdHkYWn8s6/IqIZYm0YWCjO/0g== - dependencies: - "@ethersproject/abstract-signer" "^5.7.0" - "@ethersproject/address" "^5.7.0" - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/hdnode" "^5.7.0" - "@ethersproject/keccak256" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - "@ethersproject/pbkdf2" "^5.7.0" - "@ethersproject/properties" "^5.7.0" - "@ethersproject/random" "^5.7.0" - "@ethersproject/strings" "^5.7.0" - "@ethersproject/transactions" "^5.7.0" - aes-js 
"3.0.0" - scrypt-js "3.0.1" - -"@ethersproject/keccak256@5.7.0", "@ethersproject/keccak256@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/keccak256/-/keccak256-5.7.0.tgz#3186350c6e1cd6aba7940384ec7d6d9db01f335a" - integrity sha512-2UcPboeL/iW+pSg6vZ6ydF8tCnv3Iu/8tUmLLzWWGzxWKFFqOBQFLo6uLUv6BDrLgCDfN28RJ/wtByx+jZ4KBg== - dependencies: - "@ethersproject/bytes" "^5.7.0" - js-sha3 "0.8.0" - -"@ethersproject/logger@5.7.0", "@ethersproject/logger@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.7.0.tgz#6ce9ae168e74fecf287be17062b590852c311892" - integrity sha512-0odtFdXu/XHtjQXJYA3u9G0G8btm0ND5Cu8M7i5vhEcE8/HmF4Lbdqanwyv4uQTr2tx6b7fQRmgLrsnpQlmnig== - -"@ethersproject/networks@5.7.1", "@ethersproject/networks@^5.7.0": - version "5.7.1" - resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.7.1.tgz#118e1a981d757d45ccea6bb58d9fd3d9db14ead6" - integrity sha512-n/MufjFYv3yFcUyfhnXotyDlNdFb7onmkSy8aQERi2PjNcnWQ66xXxa3XlS8nCcA8aJKJjIIMNJTC7tu80GwpQ== - dependencies: - "@ethersproject/logger" "^5.7.0" - -"@ethersproject/pbkdf2@5.7.0", "@ethersproject/pbkdf2@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/pbkdf2/-/pbkdf2-5.7.0.tgz#d2267d0a1f6e123f3771007338c47cccd83d3102" - integrity sha512-oR/dBRZR6GTyaofd86DehG72hY6NpAjhabkhxgr3X2FpJtJuodEl2auADWBZfhDHgVCbu3/H/Ocq2uC6dpNjjw== - dependencies: - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/sha2" "^5.7.0" - -"@ethersproject/properties@5.7.0", "@ethersproject/properties@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.7.0.tgz#a6e12cb0439b878aaf470f1902a176033067ed30" - integrity sha512-J87jy8suntrAkIZtecpxEPxY//szqr1mlBaYlQ0r4RCaiD2hjheqF9s1LVE8vVuJCXisjIP+JgtK/Do54ej4Sw== - dependencies: - "@ethersproject/logger" "^5.7.0" - -"@ethersproject/providers@5.7.2", "@ethersproject/providers@^5.7.1", "@ethersproject/providers@^5.7.2": - version "5.7.2" - resolved "https://registry.yarnpkg.com/@ethersproject/providers/-/providers-5.7.2.tgz#f8b1a4f275d7ce58cf0a2eec222269a08beb18cb" - integrity sha512-g34EWZ1WWAVgr4aptGlVBF8mhl3VWjv+8hoAnzStu8Ah22VHBsuGzP17eb6xDVRzw895G4W7vvx60lFFur/1Rg== - dependencies: - "@ethersproject/abstract-provider" "^5.7.0" - "@ethersproject/abstract-signer" "^5.7.0" - "@ethersproject/address" "^5.7.0" - "@ethersproject/base64" "^5.7.0" - "@ethersproject/basex" "^5.7.0" - "@ethersproject/bignumber" "^5.7.0" - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/constants" "^5.7.0" - "@ethersproject/hash" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - "@ethersproject/networks" "^5.7.0" - "@ethersproject/properties" "^5.7.0" - "@ethersproject/random" "^5.7.0" - "@ethersproject/rlp" "^5.7.0" - "@ethersproject/sha2" "^5.7.0" - "@ethersproject/strings" "^5.7.0" - "@ethersproject/transactions" "^5.7.0" - "@ethersproject/web" "^5.7.0" - bech32 "1.1.4" - ws "7.4.6" - -"@ethersproject/random@5.7.0", "@ethersproject/random@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/random/-/random-5.7.0.tgz#af19dcbc2484aae078bb03656ec05df66253280c" - integrity sha512-19WjScqRA8IIeWclFme75VMXSBvi4e6InrUNuaR4s5pTF2qNhcGdCUwdxUVGtDDqC00sDLCO93jPQoDUH4HVmQ== - dependencies: - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - -"@ethersproject/rlp@5.7.0", "@ethersproject/rlp@^5.7.0": - version "5.7.0" - resolved 
"https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.7.0.tgz#de39e4d5918b9d74d46de93af80b7685a9c21304" - integrity sha512-rBxzX2vK8mVF7b0Tol44t5Tb8gomOHkj5guL+HhzQ1yBh/ydjGnpw6at+X6Iw0Kp3OzzzkcKp8N9r0W4kYSs9w== - dependencies: - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - -"@ethersproject/sha2@5.7.0", "@ethersproject/sha2@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/sha2/-/sha2-5.7.0.tgz#9a5f7a7824ef784f7f7680984e593a800480c9fb" - integrity sha512-gKlH42riwb3KYp0reLsFTokByAKoJdgFCwI+CCiX/k+Jm2mbNs6oOaCjYQSlI1+XBVejwH2KrmCbMAT/GnRDQw== - dependencies: - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - hash.js "1.1.7" - -"@ethersproject/signing-key@5.7.0", "@ethersproject/signing-key@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.7.0.tgz#06b2df39411b00bc57c7c09b01d1e41cf1b16ab3" - integrity sha512-MZdy2nL3wO0u7gkB4nA/pEf8lu1TlFswPNmy8AiYkfKTdO6eXBJyUdmHO/ehm/htHw9K/qF8ujnTyUAD+Ry54Q== - dependencies: - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - "@ethersproject/properties" "^5.7.0" - bn.js "^5.2.1" - elliptic "6.5.4" - hash.js "1.1.7" - -"@ethersproject/solidity@5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/solidity/-/solidity-5.7.0.tgz#5e9c911d8a2acce2a5ebb48a5e2e0af20b631cb8" - integrity sha512-HmabMd2Dt/raavyaGukF4XxizWKhKQ24DoLtdNbBmNKUOPqwjsKQSdV9GQtj9CBEea9DlzETlVER1gYeXXBGaA== - dependencies: - "@ethersproject/bignumber" "^5.7.0" - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/keccak256" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - "@ethersproject/sha2" "^5.7.0" - "@ethersproject/strings" "^5.7.0" - -"@ethersproject/strings@5.7.0", "@ethersproject/strings@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.7.0.tgz#54c9d2a7c57ae8f1205c88a9d3a56471e14d5ed2" - integrity sha512-/9nu+lj0YswRNSH0NXYqrh8775XNyEdUQAuf3f+SmOrnVewcJ5SBNAjF7lpgehKi4abvNNXyf+HX86czCdJ8Mg== - dependencies: - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/constants" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - -"@ethersproject/transactions@5.7.0", "@ethersproject/transactions@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.7.0.tgz#91318fc24063e057885a6af13fdb703e1f993d3b" - integrity sha512-kmcNicCp1lp8qanMTC3RIikGgoJ80ztTyvtsFvCYpSCfkjhD0jZ2LOrnbcuxuToLIUYYf+4XwD1rP+B/erDIhQ== - dependencies: - "@ethersproject/address" "^5.7.0" - "@ethersproject/bignumber" "^5.7.0" - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/constants" "^5.7.0" - "@ethersproject/keccak256" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - "@ethersproject/properties" "^5.7.0" - "@ethersproject/rlp" "^5.7.0" - "@ethersproject/signing-key" "^5.7.0" - -"@ethersproject/units@5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/units/-/units-5.7.0.tgz#637b563d7e14f42deeee39245275d477aae1d8b1" - integrity sha512-pD3xLMy3SJu9kG5xDGI7+xhTEmGXlEqXU4OfNapmfnxLVY4EMSSRp7j1k7eezutBPH7RBN/7QPnwR7hzNlEFeg== - dependencies: - "@ethersproject/bignumber" "^5.7.0" - "@ethersproject/constants" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - -"@ethersproject/wallet@5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/wallet/-/wallet-5.7.0.tgz#4e5d0790d96fe21d61d38fb40324e6c7ef350b2d" - integrity 
sha512-MhmXlJXEJFBFVKrDLB4ZdDzxcBxQ3rLyCkhNqVu3CDYvR97E+8r01UgrI+TI99Le+aYm/in/0vp86guJuM7FCA== - dependencies: - "@ethersproject/abstract-provider" "^5.7.0" - "@ethersproject/abstract-signer" "^5.7.0" - "@ethersproject/address" "^5.7.0" - "@ethersproject/bignumber" "^5.7.0" - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/hash" "^5.7.0" - "@ethersproject/hdnode" "^5.7.0" - "@ethersproject/json-wallets" "^5.7.0" - "@ethersproject/keccak256" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - "@ethersproject/properties" "^5.7.0" - "@ethersproject/random" "^5.7.0" - "@ethersproject/signing-key" "^5.7.0" - "@ethersproject/transactions" "^5.7.0" - "@ethersproject/wordlists" "^5.7.0" - -"@ethersproject/web@5.7.1", "@ethersproject/web@^5.7.0": - version "5.7.1" - resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.7.1.tgz#de1f285b373149bee5928f4eb7bcb87ee5fbb4ae" - integrity sha512-Gueu8lSvyjBWL4cYsWsjh6MtMwM0+H4HvqFPZfB6dV8ctbP9zFAO73VG1cMWae0FLPCtz0peKPpZY8/ugJJX2w== - dependencies: - "@ethersproject/base64" "^5.7.0" - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - "@ethersproject/properties" "^5.7.0" - "@ethersproject/strings" "^5.7.0" - -"@ethersproject/wordlists@5.7.0", "@ethersproject/wordlists@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/wordlists/-/wordlists-5.7.0.tgz#8fb2c07185d68c3e09eb3bfd6e779ba2774627f5" - integrity sha512-S2TFNJNfHWVHNE6cNDjbVlZ6MgE17MIxMbMg2zv3wn+3XSJGosL1m9ZVv3GXCf/2ymSsQ+hRI5IzoMJTG6aoVA== - dependencies: - "@ethersproject/bytes" "^5.7.0" - "@ethersproject/hash" "^5.7.0" - "@ethersproject/logger" "^5.7.0" - "@ethersproject/properties" "^5.7.0" - "@ethersproject/strings" "^5.7.0" - -"@fastify/busboy@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@fastify/busboy/-/busboy-2.0.0.tgz#f22824caff3ae506b18207bad4126dbc6ccdb6b8" - integrity sha512-JUFJad5lv7jxj926GPgymrWQxxjPYuJNiNjNMzqT+HiuP6Vl3dk5xzG+8sTX96np0ZAluvaMzPsjhHZ5rNuNQQ== - -"@matterlabs/hardhat-zksync-solc@^0.3.15": - version "0.3.17" - resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-0.3.17.tgz#72f199544dc89b268d7bfc06d022a311042752fd" - integrity sha512-aZgQ0yfXW5xPkfuEH1d44ncWV4T2LzKZd0VVPo4PL5cUrYs2/II1FaEDp5zsf3FxOR1xT3mBsjuSrtJkk4AL8Q== - dependencies: - "@nomiclabs/hardhat-docker" "^2.0.0" - chalk "4.1.2" - dockerode "^3.3.4" - -"@metamask/eth-sig-util@^4.0.0": - version "4.0.1" - resolved "https://registry.yarnpkg.com/@metamask/eth-sig-util/-/eth-sig-util-4.0.1.tgz#3ad61f6ea9ad73ba5b19db780d40d9aae5157088" - integrity sha512-tghyZKLHZjcdlDqCA3gNZmLeR0XvOE9U1qoQO9ohyAZT6Pya+H9vkBPcsyXytmYLNgVoin7CKCmweo/R43V+tQ== - dependencies: - ethereumjs-abi "^0.6.8" - ethereumjs-util "^6.2.1" - ethjs-util "^0.1.6" - tweetnacl "^1.0.3" - tweetnacl-util "^0.15.1" - -"@noble/hashes@1.2.0", "@noble/hashes@~1.2.0": - version "1.2.0" - resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.2.0.tgz#a3150eeb09cc7ab207ebf6d7b9ad311a9bdbed12" - integrity sha512-FZfhjEDbT5GRswV3C6uvLPHMiVD6lQBmpoX5+eSiPaMTXte/IKqI5dykDxzZB/WBeK/CDuQRBWarPdi3FNY2zQ== - -"@noble/secp256k1@1.7.1", "@noble/secp256k1@~1.7.0": - version "1.7.1" - resolved "https://registry.yarnpkg.com/@noble/secp256k1/-/secp256k1-1.7.1.tgz#b251c70f824ce3ca7f8dc3df08d58f005cc0507c" - integrity sha512-hOUk6AyBFmqVrv7k5WAw/LpszxVbj9gGN4JRkIX52fdFAj1UA61KXmZDvqVEm+pOyec3+fIeZB02LYa/pWOArw== - -"@nomicfoundation/ethereumjs-block@5.0.1": - version "5.0.1" - resolved 
"https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-block/-/ethereumjs-block-5.0.1.tgz#6f89664f55febbd723195b6d0974773d29ee133d" - integrity sha512-u1Yioemi6Ckj3xspygu/SfFvm8vZEO8/Yx5a1QLzi6nVU0jz3Pg2OmHKJ5w+D9Ogk1vhwRiqEBAqcb0GVhCyHw== - dependencies: - "@nomicfoundation/ethereumjs-common" "4.0.1" - "@nomicfoundation/ethereumjs-rlp" "5.0.1" - "@nomicfoundation/ethereumjs-trie" "6.0.1" - "@nomicfoundation/ethereumjs-tx" "5.0.1" - "@nomicfoundation/ethereumjs-util" "9.0.1" - ethereum-cryptography "0.1.3" - ethers "^5.7.1" - -"@nomicfoundation/ethereumjs-blockchain@7.0.1": - version "7.0.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-blockchain/-/ethereumjs-blockchain-7.0.1.tgz#80e0bd3535bfeb9baa29836b6f25123dab06a726" - integrity sha512-NhzndlGg829XXbqJEYrF1VeZhAwSPgsK/OB7TVrdzft3y918hW5KNd7gIZ85sn6peDZOdjBsAXIpXZ38oBYE5A== - dependencies: - "@nomicfoundation/ethereumjs-block" "5.0.1" - "@nomicfoundation/ethereumjs-common" "4.0.1" - "@nomicfoundation/ethereumjs-ethash" "3.0.1" - "@nomicfoundation/ethereumjs-rlp" "5.0.1" - "@nomicfoundation/ethereumjs-trie" "6.0.1" - "@nomicfoundation/ethereumjs-tx" "5.0.1" - "@nomicfoundation/ethereumjs-util" "9.0.1" - abstract-level "^1.0.3" - debug "^4.3.3" - ethereum-cryptography "0.1.3" - level "^8.0.0" - lru-cache "^5.1.1" - memory-level "^1.0.0" - -"@nomicfoundation/ethereumjs-common@4.0.1": - version "4.0.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-common/-/ethereumjs-common-4.0.1.tgz#4702d82df35b07b5407583b54a45bf728e46a2f0" - integrity sha512-OBErlkfp54GpeiE06brBW/TTbtbuBJV5YI5Nz/aB2evTDo+KawyEzPjBlSr84z/8MFfj8wS2wxzQX1o32cev5g== - dependencies: - "@nomicfoundation/ethereumjs-util" "9.0.1" - crc-32 "^1.2.0" - -"@nomicfoundation/ethereumjs-ethash@3.0.1": - version "3.0.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-ethash/-/ethereumjs-ethash-3.0.1.tgz#65ca494d53e71e8415c9a49ef48bc921c538fc41" - integrity sha512-KDjGIB5igzWOp8Ik5I6QiRH5DH+XgILlplsHR7TEuWANZA759G6krQ6o8bvj+tRUz08YygMQu/sGd9mJ1DYT8w== - dependencies: - "@nomicfoundation/ethereumjs-block" "5.0.1" - "@nomicfoundation/ethereumjs-rlp" "5.0.1" - "@nomicfoundation/ethereumjs-util" "9.0.1" - abstract-level "^1.0.3" - bigint-crypto-utils "^3.0.23" - ethereum-cryptography "0.1.3" - -"@nomicfoundation/ethereumjs-evm@2.0.1": - version "2.0.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-evm/-/ethereumjs-evm-2.0.1.tgz#f35681e203363f69ce2b3d3bf9f44d4e883ca1f1" - integrity sha512-oL8vJcnk0Bx/onl+TgQOQ1t/534GKFaEG17fZmwtPFeH8S5soiBYPCLUrvANOl4sCp9elYxIMzIiTtMtNNN8EQ== - dependencies: - "@ethersproject/providers" "^5.7.1" - "@nomicfoundation/ethereumjs-common" "4.0.1" - "@nomicfoundation/ethereumjs-tx" "5.0.1" - "@nomicfoundation/ethereumjs-util" "9.0.1" - debug "^4.3.3" - ethereum-cryptography "0.1.3" - mcl-wasm "^0.7.1" - rustbn.js "~0.2.0" - -"@nomicfoundation/ethereumjs-rlp@5.0.1": - version "5.0.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-rlp/-/ethereumjs-rlp-5.0.1.tgz#0b30c1cf77d125d390408e391c4bb5291ef43c28" - integrity sha512-xtxrMGa8kP4zF5ApBQBtjlSbN5E2HI8m8FYgVSYAnO6ssUoY5pVPGy2H8+xdf/bmMa22Ce8nWMH3aEW8CcqMeQ== - -"@nomicfoundation/ethereumjs-statemanager@2.0.1": - version "2.0.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-statemanager/-/ethereumjs-statemanager-2.0.1.tgz#8824a97938db4471911e2d2f140f79195def5935" - integrity sha512-B5ApMOnlruVOR7gisBaYwFX+L/AP7i/2oAahatssjPIBVDF6wTX1K7Qpa39E/nzsH8iYuL3krkYeUFIdO3EMUQ== - 
dependencies: - "@nomicfoundation/ethereumjs-common" "4.0.1" - "@nomicfoundation/ethereumjs-rlp" "5.0.1" - debug "^4.3.3" - ethereum-cryptography "0.1.3" - ethers "^5.7.1" - js-sdsl "^4.1.4" - -"@nomicfoundation/ethereumjs-trie@6.0.1": - version "6.0.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-trie/-/ethereumjs-trie-6.0.1.tgz#662c55f6b50659fd4b22ea9f806a7401cafb7717" - integrity sha512-A64It/IMpDVODzCgxDgAAla8jNjNtsoQZIzZUfIV5AY6Coi4nvn7+VReBn5itlxMiL2yaTlQr9TRWp3CSI6VoA== - dependencies: - "@nomicfoundation/ethereumjs-rlp" "5.0.1" - "@nomicfoundation/ethereumjs-util" "9.0.1" - "@types/readable-stream" "^2.3.13" - ethereum-cryptography "0.1.3" - readable-stream "^3.6.0" - -"@nomicfoundation/ethereumjs-tx@5.0.1": - version "5.0.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-tx/-/ethereumjs-tx-5.0.1.tgz#7629dc2036b4a33c34e9f0a592b43227ef4f0c7d" - integrity sha512-0HwxUF2u2hrsIM1fsasjXvlbDOq1ZHFV2dd1yGq8CA+MEYhaxZr8OTScpVkkxqMwBcc5y83FyPl0J9MZn3kY0w== - dependencies: - "@chainsafe/ssz" "^0.9.2" - "@ethersproject/providers" "^5.7.2" - "@nomicfoundation/ethereumjs-common" "4.0.1" - "@nomicfoundation/ethereumjs-rlp" "5.0.1" - "@nomicfoundation/ethereumjs-util" "9.0.1" - ethereum-cryptography "0.1.3" - -"@nomicfoundation/ethereumjs-util@9.0.1": - version "9.0.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-util/-/ethereumjs-util-9.0.1.tgz#530cda8bae33f8b5020a8f199ed1d0a2ce48ec89" - integrity sha512-TwbhOWQ8QoSCFhV/DDfSmyfFIHjPjFBj957219+V3jTZYZ2rf9PmDtNOeZWAE3p3vlp8xb02XGpd0v6nTUPbsA== - dependencies: - "@chainsafe/ssz" "^0.10.0" - "@nomicfoundation/ethereumjs-rlp" "5.0.1" - ethereum-cryptography "0.1.3" - -"@nomicfoundation/ethereumjs-vm@7.0.1": - version "7.0.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-vm/-/ethereumjs-vm-7.0.1.tgz#7d035e0993bcad10716c8b36e61dfb87fa3ca05f" - integrity sha512-rArhyn0jPsS/D+ApFsz3yVJMQ29+pVzNZ0VJgkzAZ+7FqXSRtThl1C1prhmlVr3YNUlfpZ69Ak+RUT4g7VoOuQ== - dependencies: - "@nomicfoundation/ethereumjs-block" "5.0.1" - "@nomicfoundation/ethereumjs-blockchain" "7.0.1" - "@nomicfoundation/ethereumjs-common" "4.0.1" - "@nomicfoundation/ethereumjs-evm" "2.0.1" - "@nomicfoundation/ethereumjs-rlp" "5.0.1" - "@nomicfoundation/ethereumjs-statemanager" "2.0.1" - "@nomicfoundation/ethereumjs-trie" "6.0.1" - "@nomicfoundation/ethereumjs-tx" "5.0.1" - "@nomicfoundation/ethereumjs-util" "9.0.1" - debug "^4.3.3" - ethereum-cryptography "0.1.3" - mcl-wasm "^0.7.1" - rustbn.js "~0.2.0" - -"@nomicfoundation/solidity-analyzer-darwin-arm64@0.1.1": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-darwin-arm64/-/solidity-analyzer-darwin-arm64-0.1.1.tgz#4c858096b1c17fe58a474fe81b46815f93645c15" - integrity sha512-KcTodaQw8ivDZyF+D76FokN/HdpgGpfjc/gFCImdLUyqB6eSWVaZPazMbeAjmfhx3R0zm/NYVzxwAokFKgrc0w== - -"@nomicfoundation/solidity-analyzer-darwin-x64@0.1.1": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-darwin-x64/-/solidity-analyzer-darwin-x64-0.1.1.tgz#6e25ccdf6e2d22389c35553b64fe6f3fdaec432c" - integrity sha512-XhQG4BaJE6cIbjAVtzGOGbK3sn1BO9W29uhk9J8y8fZF1DYz0Doj8QDMfpMu+A6TjPDs61lbsmeYodIDnfveSA== - -"@nomicfoundation/solidity-analyzer-freebsd-x64@0.1.1": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-freebsd-x64/-/solidity-analyzer-freebsd-x64-0.1.1.tgz#0a224ea50317139caeebcdedd435c28a039d169c" - integrity 
sha512-GHF1VKRdHW3G8CndkwdaeLkVBi5A9u2jwtlS7SLhBc8b5U/GcoL39Q+1CSO3hYqePNP+eV5YI7Zgm0ea6kMHoA== - -"@nomicfoundation/solidity-analyzer-linux-arm64-gnu@0.1.1": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-linux-arm64-gnu/-/solidity-analyzer-linux-arm64-gnu-0.1.1.tgz#dfa085d9ffab9efb2e7b383aed3f557f7687ac2b" - integrity sha512-g4Cv2fO37ZsUENQ2vwPnZc2zRenHyAxHcyBjKcjaSmmkKrFr64yvzeNO8S3GBFCo90rfochLs99wFVGT/0owpg== - -"@nomicfoundation/solidity-analyzer-linux-arm64-musl@0.1.1": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-linux-arm64-musl/-/solidity-analyzer-linux-arm64-musl-0.1.1.tgz#c9e06b5d513dd3ab02a7ac069c160051675889a4" - integrity sha512-WJ3CE5Oek25OGE3WwzK7oaopY8xMw9Lhb0mlYuJl/maZVo+WtP36XoQTb7bW/i8aAdHW5Z+BqrHMux23pvxG3w== - -"@nomicfoundation/solidity-analyzer-linux-x64-gnu@0.1.1": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-linux-x64-gnu/-/solidity-analyzer-linux-x64-gnu-0.1.1.tgz#8d328d16839e52571f72f2998c81e46bf320f893" - integrity sha512-5WN7leSr5fkUBBjE4f3wKENUy9HQStu7HmWqbtknfXkkil+eNWiBV275IOlpXku7v3uLsXTOKpnnGHJYI2qsdA== - -"@nomicfoundation/solidity-analyzer-linux-x64-musl@0.1.1": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-linux-x64-musl/-/solidity-analyzer-linux-x64-musl-0.1.1.tgz#9b49d0634b5976bb5ed1604a1e1b736f390959bb" - integrity sha512-KdYMkJOq0SYPQMmErv/63CwGwMm5XHenEna9X9aB8mQmhDBrYrlAOSsIPgFCUSL0hjxE3xHP65/EPXR/InD2+w== - -"@nomicfoundation/solidity-analyzer-win32-arm64-msvc@0.1.1": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-win32-arm64-msvc/-/solidity-analyzer-win32-arm64-msvc-0.1.1.tgz#e2867af7264ebbcc3131ef837878955dd6a3676f" - integrity sha512-VFZASBfl4qiBYwW5xeY20exWhmv6ww9sWu/krWSesv3q5hA0o1JuzmPHR4LPN6SUZj5vcqci0O6JOL8BPw+APg== - -"@nomicfoundation/solidity-analyzer-win32-ia32-msvc@0.1.1": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-win32-ia32-msvc/-/solidity-analyzer-win32-ia32-msvc-0.1.1.tgz#0685f78608dd516c8cdfb4896ed451317e559585" - integrity sha512-JnFkYuyCSA70j6Si6cS1A9Gh1aHTEb8kOTBApp/c7NRTFGNMH8eaInKlyuuiIbvYFhlXW4LicqyYuWNNq9hkpQ== - -"@nomicfoundation/solidity-analyzer-win32-x64-msvc@0.1.1": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-win32-x64-msvc/-/solidity-analyzer-win32-x64-msvc-0.1.1.tgz#c9a44f7108646f083b82e851486e0f6aeb785836" - integrity sha512-HrVJr6+WjIXGnw3Q9u6KQcbZCtk0caVWhCdFADySvRyUxJ8PnzlaP+MhwNE8oyT8OZ6ejHBRrrgjSqDCFXGirw== - -"@nomicfoundation/solidity-analyzer@^0.1.0": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer/-/solidity-analyzer-0.1.1.tgz#f5f4d36d3f66752f59a57e7208cd856f3ddf6f2d" - integrity sha512-1LMtXj1puAxyFusBgUIy5pZk3073cNXYnXUpuNKFghHbIit/xZgbk0AokpUADbNm3gyD6bFWl3LRFh3dhVdREg== - optionalDependencies: - "@nomicfoundation/solidity-analyzer-darwin-arm64" "0.1.1" - "@nomicfoundation/solidity-analyzer-darwin-x64" "0.1.1" - "@nomicfoundation/solidity-analyzer-freebsd-x64" "0.1.1" - "@nomicfoundation/solidity-analyzer-linux-arm64-gnu" "0.1.1" - "@nomicfoundation/solidity-analyzer-linux-arm64-musl" "0.1.1" - "@nomicfoundation/solidity-analyzer-linux-x64-gnu" "0.1.1" - "@nomicfoundation/solidity-analyzer-linux-x64-musl" "0.1.1" - "@nomicfoundation/solidity-analyzer-win32-arm64-msvc" "0.1.1" - 
"@nomicfoundation/solidity-analyzer-win32-ia32-msvc" "0.1.1" - "@nomicfoundation/solidity-analyzer-win32-x64-msvc" "0.1.1" - -"@nomiclabs/hardhat-docker@^2.0.0": - version "2.0.2" - resolved "https://registry.yarnpkg.com/@nomiclabs/hardhat-docker/-/hardhat-docker-2.0.2.tgz#ae964be17951275a55859ff7358e9e7c77448846" - integrity sha512-XgGEpRT3wlA1VslyB57zyAHV+oll8KnV1TjwnxxC1tpAL04/lbdwpdO5KxInVN8irMSepqFpsiSkqlcnvbE7Ng== - dependencies: - dockerode "^2.5.8" - fs-extra "^7.0.1" - node-fetch "^2.6.0" - -"@openzeppelin/contracts@^4.8.0": - version "4.9.3" - resolved "https://registry.yarnpkg.com/@openzeppelin/contracts/-/contracts-4.9.3.tgz#00d7a8cf35a475b160b3f0293a6403c511099364" - integrity sha512-He3LieZ1pP2TNt5JbkPA4PNT9WC3gOTOlDcFGJW4Le4QKqwmiNJCRt44APfxMxvq7OugU/cqYuPcSBzOw38DAg== - -"@scure/base@~1.1.0": - version "1.1.3" - resolved "https://registry.yarnpkg.com/@scure/base/-/base-1.1.3.tgz#8584115565228290a6c6c4961973e0903bb3df2f" - integrity sha512-/+SgoRjLq7Xlf0CWuLHq2LUZeL/w65kfzAPG5NH9pcmBhs+nunQTn4gvdwgMTIXnt9b2C/1SeL2XiysZEyIC9Q== - -"@scure/bip32@1.1.5": - version "1.1.5" - resolved "https://registry.yarnpkg.com/@scure/bip32/-/bip32-1.1.5.tgz#d2ccae16dcc2e75bc1d75f5ef3c66a338d1ba300" - integrity sha512-XyNh1rB0SkEqd3tXcXMi+Xe1fvg+kUIcoRIEujP1Jgv7DqW2r9lg3Ah0NkFaCs9sTkQAQA8kw7xiRXzENi9Rtw== - dependencies: - "@noble/hashes" "~1.2.0" - "@noble/secp256k1" "~1.7.0" - "@scure/base" "~1.1.0" - -"@scure/bip39@1.1.1": - version "1.1.1" - resolved "https://registry.yarnpkg.com/@scure/bip39/-/bip39-1.1.1.tgz#b54557b2e86214319405db819c4b6a370cf340c5" - integrity sha512-t+wDck2rVkh65Hmv280fYdVdY25J9YeEUIgn2LG1WM6gxFkGzcksoDiUkWVpVp3Oex9xGC68JU2dSbUfwZ2jPg== - dependencies: - "@noble/hashes" "~1.2.0" - "@scure/base" "~1.1.0" - -"@sentry/core@5.30.0": - version "5.30.0" - resolved "https://registry.yarnpkg.com/@sentry/core/-/core-5.30.0.tgz#6b203664f69e75106ee8b5a2fe1d717379b331f3" - integrity sha512-TmfrII8w1PQZSZgPpUESqjB+jC6MvZJZdLtE/0hZ+SrnKhW3x5WlYLvTXZpcWePYBku7rl2wn1RZu6uT0qCTeg== - dependencies: - "@sentry/hub" "5.30.0" - "@sentry/minimal" "5.30.0" - "@sentry/types" "5.30.0" - "@sentry/utils" "5.30.0" - tslib "^1.9.3" - -"@sentry/hub@5.30.0": - version "5.30.0" - resolved "https://registry.yarnpkg.com/@sentry/hub/-/hub-5.30.0.tgz#2453be9b9cb903404366e198bd30c7ca74cdc100" - integrity sha512-2tYrGnzb1gKz2EkMDQcfLrDTvmGcQPuWxLnJKXJvYTQDGLlEvi2tWz1VIHjunmOvJrB5aIQLhm+dcMRwFZDCqQ== - dependencies: - "@sentry/types" "5.30.0" - "@sentry/utils" "5.30.0" - tslib "^1.9.3" - -"@sentry/minimal@5.30.0": - version "5.30.0" - resolved "https://registry.yarnpkg.com/@sentry/minimal/-/minimal-5.30.0.tgz#ce3d3a6a273428e0084adcb800bc12e72d34637b" - integrity sha512-BwWb/owZKtkDX+Sc4zCSTNcvZUq7YcH3uAVlmh/gtR9rmUvbzAA3ewLuB3myi4wWRAMEtny6+J/FN/x+2wn9Xw== - dependencies: - "@sentry/hub" "5.30.0" - "@sentry/types" "5.30.0" - tslib "^1.9.3" - -"@sentry/node@^5.18.1": - version "5.30.0" - resolved "https://registry.yarnpkg.com/@sentry/node/-/node-5.30.0.tgz#4ca479e799b1021285d7fe12ac0858951c11cd48" - integrity sha512-Br5oyVBF0fZo6ZS9bxbJZG4ApAjRqAnqFFurMVJJdunNb80brh7a5Qva2kjhm+U6r9NJAB5OmDyPkA1Qnt+QVg== - dependencies: - "@sentry/core" "5.30.0" - "@sentry/hub" "5.30.0" - "@sentry/tracing" "5.30.0" - "@sentry/types" "5.30.0" - "@sentry/utils" "5.30.0" - cookie "^0.4.1" - https-proxy-agent "^5.0.0" - lru_map "^0.3.3" - tslib "^1.9.3" - -"@sentry/tracing@5.30.0": - version "5.30.0" - resolved "https://registry.yarnpkg.com/@sentry/tracing/-/tracing-5.30.0.tgz#501d21f00c3f3be7f7635d8710da70d9419d4e1f" - integrity 
sha512-dUFowCr0AIMwiLD7Fs314Mdzcug+gBVo/+NCMyDw8tFxJkwWAKl7Qa2OZxLQ0ZHjakcj1hNKfCQJ9rhyfOl4Aw== - dependencies: - "@sentry/hub" "5.30.0" - "@sentry/minimal" "5.30.0" - "@sentry/types" "5.30.0" - "@sentry/utils" "5.30.0" - tslib "^1.9.3" - -"@sentry/types@5.30.0": - version "5.30.0" - resolved "https://registry.yarnpkg.com/@sentry/types/-/types-5.30.0.tgz#19709bbe12a1a0115bc790b8942917da5636f402" - integrity sha512-R8xOqlSTZ+htqrfteCWU5Nk0CDN5ApUTvrlvBuiH1DyP6czDZ4ktbZB0hAgBlVcK0U+qpD3ag3Tqqpa5Q67rPw== - -"@sentry/utils@5.30.0": - version "5.30.0" - resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-5.30.0.tgz#9a5bd7ccff85ccfe7856d493bffa64cabc41e980" - integrity sha512-zaYmoH0NWWtvnJjC9/CBseXMtKHm/tm40sz3YfJRxeQjyzRqNQPgivpd9R/oDJCYj999mzdW382p/qi2ypjLww== - dependencies: - "@sentry/types" "5.30.0" - tslib "^1.9.3" - -"@types/bn.js@^4.11.3": - version "4.11.6" - resolved "https://registry.yarnpkg.com/@types/bn.js/-/bn.js-4.11.6.tgz#c306c70d9358aaea33cd4eda092a742b9505967c" - integrity sha512-pqr857jrp2kPuO9uRjZ3PwnJTjoQy+fcdxvBTvHm6dkmEL9q+hDD/2j/0ELOBPtPnS8LjCX0gI9nbl8lVkadpg== - dependencies: - "@types/node" "*" - -"@types/bn.js@^5.1.0": - version "5.1.5" - resolved "https://registry.yarnpkg.com/@types/bn.js/-/bn.js-5.1.5.tgz#2e0dacdcce2c0f16b905d20ff87aedbc6f7b4bf0" - integrity sha512-V46N0zwKRF5Q00AZ6hWtN0T8gGmDUaUzLWQvHFo5yThtVwK/VCenFY3wXVbOvNfajEpsTfQM4IN9k/d6gUVX3A== - dependencies: - "@types/node" "*" - -"@types/lru-cache@^5.1.0": - version "5.1.1" - resolved "https://registry.yarnpkg.com/@types/lru-cache/-/lru-cache-5.1.1.tgz#c48c2e27b65d2a153b19bfc1a317e30872e01eef" - integrity sha512-ssE3Vlrys7sdIzs5LOxCzTVMsU7i9oa/IaW92wF32JFb3CVczqOkru2xspuKczHEbG3nvmPY7IFqVmGGHdNbYw== - -"@types/node@*": - version "20.9.0" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.9.0.tgz#bfcdc230583aeb891cf51e73cfdaacdd8deae298" - integrity sha512-nekiGu2NDb1BcVofVcEKMIwzlx4NjHlcjhoxxKBNLtz15Y1z7MYf549DFvkHSId02Ax6kGwWntIBPC3l/JZcmw== - dependencies: - undici-types "~5.26.4" - -"@types/pbkdf2@^3.0.0": - version "3.1.2" - resolved "https://registry.yarnpkg.com/@types/pbkdf2/-/pbkdf2-3.1.2.tgz#2dc43808e9985a2c69ff02e2d2027bd4fe33e8dc" - integrity sha512-uRwJqmiXmh9++aSu1VNEn3iIxWOhd8AHXNSdlaLfdAAdSTY9jYVeGWnzejM3dvrkbqE3/hyQkQQ29IFATEGlew== - dependencies: - "@types/node" "*" - -"@types/readable-stream@^2.3.13": - version "2.3.15" - resolved "https://registry.yarnpkg.com/@types/readable-stream/-/readable-stream-2.3.15.tgz#3d79c9ceb1b6a57d5f6e6976f489b9b5384321ae" - integrity sha512-oM5JSKQCcICF1wvGgmecmHldZ48OZamtMxcGGVICOJA8o8cahXC1zEVAif8iwoc5j8etxFaRFnf095+CDsuoFQ== - dependencies: - "@types/node" "*" - safe-buffer "~5.1.1" - -"@types/secp256k1@^4.0.1": - version "4.0.6" - resolved "https://registry.yarnpkg.com/@types/secp256k1/-/secp256k1-4.0.6.tgz#d60ba2349a51c2cbc5e816dcd831a42029d376bf" - integrity sha512-hHxJU6PAEUn0TP4S/ZOzuTUvJWuZ6eIKeNKb5RBpODvSl6hp1Wrw4s7ATY50rklRCScUDpHzVA/DQdSjJ3UoYQ== - dependencies: - "@types/node" "*" - -JSONStream@1.3.2: - version "1.3.2" - resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.2.tgz#c102371b6ec3a7cf3b847ca00c20bb0fce4c6dea" - integrity sha512-mn0KSip7N4e0UDPZHnqDsHECo5uGQrixQKnAskOM1BIB8hd7QKbd6il8IPRPudPHOeHiECoCFqhyMaRO9+nWyA== - dependencies: - jsonparse "^1.2.0" - through ">=2.2.7 <3" - -abort-controller@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392" - integrity 
sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg== - dependencies: - event-target-shim "^5.0.0" - -abstract-level@^1.0.0, abstract-level@^1.0.2, abstract-level@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/abstract-level/-/abstract-level-1.0.3.tgz#78a67d3d84da55ee15201486ab44c09560070741" - integrity sha512-t6jv+xHy+VYwc4xqZMn2Pa9DjcdzvzZmQGRjTFc8spIbRGHgBrEKbPq+rYXc7CCo0lxgYvSgKVg9qZAhpVQSjA== - dependencies: - buffer "^6.0.3" - catering "^2.1.0" - is-buffer "^2.0.5" - level-supports "^4.0.0" - level-transcoder "^1.0.1" - module-error "^1.0.1" - queue-microtask "^1.2.3" - -adm-zip@^0.4.16: - version "0.4.16" - resolved "https://registry.yarnpkg.com/adm-zip/-/adm-zip-0.4.16.tgz#cf4c508fdffab02c269cbc7f471a875f05570365" - integrity sha512-TFi4HBKSGfIKsK5YCkKaaFG2m4PEDyViZmEwof3MTIgzimHLto6muaHVpbrljdIvIrFZzEq/p4nafOeLcYegrg== - -aes-js@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/aes-js/-/aes-js-3.0.0.tgz#e21df10ad6c2053295bcbb8dab40b09dbea87e4d" - integrity sha512-H7wUZRn8WpTq9jocdxQ2c8x2sKo9ZVmzfRE13GiNJXfp7NcKYEdvl3vspKjXox6RIG2VtaRe4JFvxG4rqp2Zuw== - -agent-base@6: - version "6.0.2" - resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" - integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== - dependencies: - debug "4" - -aggregate-error@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" - integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== - dependencies: - clean-stack "^2.0.0" - indent-string "^4.0.0" - -ansi-colors@4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348" - integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA== - -ansi-colors@^4.1.1: - version "4.1.3" - resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.3.tgz#37611340eb2243e70cc604cad35d63270d48781b" - integrity sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw== - -ansi-escapes@^4.3.0: - version "4.3.2" - resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" - integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== - dependencies: - type-fest "^0.21.3" - -ansi-regex@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" - integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== - -ansi-styles@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" - integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== - dependencies: - color-convert "^1.9.0" - -ansi-styles@^4.0.0, ansi-styles@^4.1.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" - integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== - dependencies: - color-convert "^2.0.1" - -anymatch@~3.1.2: 
- version "3.1.3" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" - integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== - dependencies: - normalize-path "^3.0.0" - picomatch "^2.0.4" - -argparse@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" - integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== - -asn1@^0.2.6: - version "0.2.6" - resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.6.tgz#0d3a7bb6e64e02a90c0303b31f292868ea09a08d" - integrity sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ== - dependencies: - safer-buffer "~2.1.0" - -balanced-match@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" - integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== - -base-x@^3.0.2: - version "3.0.9" - resolved "https://registry.yarnpkg.com/base-x/-/base-x-3.0.9.tgz#6349aaabb58526332de9f60995e548a53fe21320" - integrity sha512-H7JU6iBHTal1gp56aKoaa//YUxEaAOUiydvrV/pILqIHXTtqxSkATOnDA2u+jZ/61sD+L/412+7kzXRtWukhpQ== - dependencies: - safe-buffer "^5.0.1" - -base64-js@^1.3.1: - version "1.5.1" - resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" - integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== - -bcrypt-pbkdf@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" - integrity sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w== - dependencies: - tweetnacl "^0.14.3" - -bech32@1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/bech32/-/bech32-1.1.4.tgz#e38c9f37bf179b8eb16ae3a772b40c356d4832e9" - integrity sha512-s0IrSOzLlbvX7yp4WBfPITzpAU8sqQcpsmwXDiKwrG4r491vwCO/XpejasRNl0piBMe/DvP4Tz0mIS/X1DPJBQ== - -bigint-crypto-utils@^3.0.23: - version "3.3.0" - resolved "https://registry.yarnpkg.com/bigint-crypto-utils/-/bigint-crypto-utils-3.3.0.tgz#72ad00ae91062cf07f2b1def9594006c279c1d77" - integrity sha512-jOTSb+drvEDxEq6OuUybOAv/xxoh3cuYRUIPyu8sSHQNKM303UQ2R1DAo45o1AkcIXw6fzbaFI1+xGGdaXs2lg== - -binary-extensions@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" - integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== - -bl@^1.0.0: - version "1.2.3" - resolved "https://registry.yarnpkg.com/bl/-/bl-1.2.3.tgz#1e8dd80142eac80d7158c9dccc047fb620e035e7" - integrity sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww== - dependencies: - readable-stream "^2.3.5" - safe-buffer "^5.1.1" - -bl@^4.0.3: - version "4.1.0" - resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" - integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== - dependencies: - buffer "^5.5.0" - inherits "^2.0.4" - readable-stream "^3.4.0" - -blakejs@^1.1.0: - version "1.2.1" - resolved 
"https://registry.yarnpkg.com/blakejs/-/blakejs-1.2.1.tgz#5057e4206eadb4a97f7c0b6e197a505042fc3814" - integrity sha512-QXUSXI3QVc/gJME0dBpXrag1kbzOqCjCX8/b54ntNyW6sjtoqxqRk3LTmXzaJoh71zMsDCjM+47jS7XiwN/+fQ== - -bn.js@^4.11.0, bn.js@^4.11.8, bn.js@^4.11.9: - version "4.12.0" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" - integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== - -bn.js@^5.2.0, bn.js@^5.2.1: - version "5.2.1" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.1.tgz#0bc527a6a0d18d0aa8d5b0538ce4a77dccfa7b70" - integrity sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ== - -brace-expansion@^1.1.7: - version "1.1.11" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -brace-expansion@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" - integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== - dependencies: - balanced-match "^1.0.0" - -braces@~3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" - integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== - dependencies: - fill-range "^7.0.1" - -brorand@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" - integrity sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w== - -browser-level@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/browser-level/-/browser-level-1.0.1.tgz#36e8c3183d0fe1c405239792faaab5f315871011" - integrity sha512-XECYKJ+Dbzw0lbydyQuJzwNXtOpbMSq737qxJN11sIRTErOMShvDpbzTlgju7orJKvx4epULolZAuJGLzCmWRQ== - dependencies: - abstract-level "^1.0.2" - catering "^2.1.1" - module-error "^1.0.2" - run-parallel-limit "^1.1.0" - -browser-stdout@1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60" - integrity sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw== - -browserify-aes@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" - integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== - dependencies: - buffer-xor "^1.0.3" - cipher-base "^1.0.0" - create-hash "^1.1.0" - evp_bytestokey "^1.0.3" - inherits "^2.0.1" - safe-buffer "^5.0.1" - -bs58@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/bs58/-/bs58-4.0.1.tgz#be161e76c354f6f788ae4071f63f34e8c4f0a42a" - integrity sha512-Ok3Wdf5vOIlBrgCvTq96gBkJw+JUEzdBgyaza5HLtPm7yTHkjRy8+JzNyHF7BHa0bNWOQIp3m5YF0nnFcOIKLw== - dependencies: - base-x "^3.0.2" - -bs58check@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/bs58check/-/bs58check-2.1.2.tgz#53b018291228d82a5aa08e7d796fdafda54aebfc" - integrity 
sha512-0TS1jicxdU09dwJMNZtVAfzPi6Q6QeN0pM1Fkzrjn+XYHvzMKPU3pHVpva+769iNVSfIYWf7LJ6WR+BuuMf8cA== - dependencies: - bs58 "^4.0.0" - create-hash "^1.1.0" - safe-buffer "^5.1.2" - -buffer-alloc-unsafe@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0" - integrity sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg== - -buffer-alloc@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz#890dd90d923a873e08e10e5fd51a57e5b7cce0ec" - integrity sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow== - dependencies: - buffer-alloc-unsafe "^1.1.0" - buffer-fill "^1.0.0" - -buffer-fill@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c" - integrity sha512-T7zexNBwiiaCOGDg9xNX9PBmjrubblRkENuptryuI64URkXDFum9il/JGL8Lm8wYfAXpredVXXZz7eMHilimiQ== - -buffer-from@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" - integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== - -buffer-xor@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" - integrity sha512-571s0T7nZWK6vB67HI5dyUF7wXiNcfaPPPTl6zYCNApANjIvYJTg7hlud/+cJpdAhS7dVzqMLmfhfHR3rAcOjQ== - -buffer@^5.5.0: - version "5.7.1" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" - integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== - dependencies: - base64-js "^1.3.1" - ieee754 "^1.1.13" - -buffer@^6.0.3: - version "6.0.3" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6" - integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA== - dependencies: - base64-js "^1.3.1" - ieee754 "^1.2.1" - -buildcheck@~0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/buildcheck/-/buildcheck-0.0.6.tgz#89aa6e417cfd1e2196e3f8fe915eb709d2fe4238" - integrity sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A== - -bytes@3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" - integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== - -camelcase@^6.0.0: - version "6.3.0" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" - integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== - -case@^1.6.3: - version "1.6.3" - resolved "https://registry.yarnpkg.com/case/-/case-1.6.3.tgz#0a4386e3e9825351ca2e6216c60467ff5f1ea1c9" - integrity sha512-mzDSXIPaFwVDvZAHqZ9VlbyF4yyXRuX6IvB06WvPYkqJVO24kX1PPhv9bfpKNFZyxYFmmgo03HUiD8iklmJYRQ== - -catering@^2.1.0, catering@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/catering/-/catering-2.1.1.tgz#66acba06ed5ee28d5286133982a927de9a04b510" - integrity sha512-K7Qy8O9p76sL3/3m7/zLKbRkyOlSZAgzEaLhyj2mXS8PsCud2Eo4hAb8aLtZqHh0QGqLcb9dlJSu6lHRVENm1w== - -chalk@4.1.2, 
chalk@^4.1.0: - version "4.1.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" - integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -chalk@^2.4.2: - version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - -chokidar@3.5.3, chokidar@^3.4.0: - version "3.5.3" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" - integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== - dependencies: - anymatch "~3.1.2" - braces "~3.0.2" - glob-parent "~5.1.2" - is-binary-path "~2.1.0" - is-glob "~4.0.1" - normalize-path "~3.0.0" - readdirp "~3.6.0" - optionalDependencies: - fsevents "~2.3.2" - -chownr@^1.0.1, chownr@^1.1.1: - version "1.1.4" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" - integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== - -ci-info@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" - integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== - -cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" - integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== - dependencies: - inherits "^2.0.1" - safe-buffer "^5.0.1" - -classic-level@^1.2.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/classic-level/-/classic-level-1.3.0.tgz#5e36680e01dc6b271775c093f2150844c5edd5c8" - integrity sha512-iwFAJQYtqRTRM0F6L8h4JCt00ZSGdOyqh7yVrhhjrOpFhmBjNlRUey64MCiyo6UmQHMJ+No3c81nujPv+n9yrg== - dependencies: - abstract-level "^1.0.2" - catering "^2.1.0" - module-error "^1.0.1" - napi-macros "^2.2.2" - node-gyp-build "^4.3.0" - -clean-stack@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" - integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== - -cliui@^7.0.2: - version "7.0.4" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" - integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== - dependencies: - string-width "^4.2.0" - strip-ansi "^6.0.0" - wrap-ansi "^7.0.0" - -color-convert@^1.9.0: - version "1.9.3" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" - integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== - dependencies: - color-name "1.1.3" - -color-convert@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" - integrity 
sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== - dependencies: - color-name "~1.1.4" - -color-name@1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== - -color-name@~1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" - integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== - -command-exists@^1.2.8: - version "1.2.9" - resolved "https://registry.yarnpkg.com/command-exists/-/command-exists-1.2.9.tgz#c50725af3808c8ab0260fd60b01fbfa25b954f69" - integrity sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w== - -commander@3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/commander/-/commander-3.0.2.tgz#6837c3fb677ad9933d1cfba42dd14d5117d6b39e" - integrity sha512-Gar0ASD4BDyKC4hl4DwHqDrmvjoxWKZigVnAbn5H1owvm4CxCPdb0HQDehwNYMJpla5+M2tPmPARzhtYuwpHow== - -concat-map@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== - -concat-stream@~1.6.2: - version "1.6.2" - resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" - integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== - dependencies: - buffer-from "^1.0.0" - inherits "^2.0.3" - readable-stream "^2.2.2" - typedarray "^0.0.6" - -cookie@^0.4.1: - version "0.4.2" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.2.tgz#0e41f24de5ecf317947c82fc789e06a884824432" - integrity sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA== - -core-util-is@~1.0.0: - version "1.0.3" - resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" - integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== - -cpu-features@~0.0.8: - version "0.0.9" - resolved "https://registry.yarnpkg.com/cpu-features/-/cpu-features-0.0.9.tgz#5226b92f0f1c63122b0a3eb84cb8335a4de499fc" - integrity sha512-AKjgn2rP2yJyfbepsmLfiYcmtNn/2eUvocUyM/09yB0YDiz39HteK/5/T4Onf0pmdYDMgkBoGvRLvEguzyL7wQ== - dependencies: - buildcheck "~0.0.6" - nan "^2.17.0" - -crc-32@^1.2.0: - version "1.2.2" - resolved "https://registry.yarnpkg.com/crc-32/-/crc-32-1.2.2.tgz#3cad35a934b8bf71f25ca524b6da51fb7eace2ff" - integrity sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ== - -create-hash@^1.1.0, create-hash@^1.1.2, create-hash@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" - integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== - dependencies: - cipher-base "^1.0.1" - inherits "^2.0.1" - md5.js "^1.3.4" - ripemd160 "^2.0.1" - sha.js "^2.4.0" - -create-hmac@^1.1.4, create-hmac@^1.1.7: - version "1.1.7" - resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" - 
integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== - dependencies: - cipher-base "^1.0.3" - create-hash "^1.1.0" - inherits "^2.0.1" - ripemd160 "^2.0.0" - safe-buffer "^5.0.1" - sha.js "^2.4.8" - -debug@4, debug@4.3.4, debug@^4.1.1, debug@^4.3.3: - version "4.3.4" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" - integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== - dependencies: - ms "2.1.2" - -debug@^3.2.6: - version "3.2.7" - resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" - integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== - dependencies: - ms "^2.1.1" - -decamelize@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-4.0.0.tgz#aa472d7bf660eb15f3494efd531cab7f2a709837" - integrity sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ== - -depd@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" - integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== - -diff@5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/diff/-/diff-5.0.0.tgz#7ed6ad76d859d030787ec35855f5b1daf31d852b" - integrity sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w== - -docker-modem@^1.0.8: - version "1.0.9" - resolved "https://registry.yarnpkg.com/docker-modem/-/docker-modem-1.0.9.tgz#a1f13e50e6afb6cf3431b2d5e7aac589db6aaba8" - integrity sha512-lVjqCSCIAUDZPAZIeyM125HXfNvOmYYInciphNrLrylUtKyW66meAjSPXWchKVzoIYZx69TPnAepVSSkeawoIw== - dependencies: - JSONStream "1.3.2" - debug "^3.2.6" - readable-stream "~1.0.26-4" - split-ca "^1.0.0" - -docker-modem@^3.0.0: - version "3.0.8" - resolved "https://registry.yarnpkg.com/docker-modem/-/docker-modem-3.0.8.tgz#ef62c8bdff6e8a7d12f0160988c295ea8705e77a" - integrity sha512-f0ReSURdM3pcKPNS30mxOHSbaFLcknGmQjwSfmbcdOw1XWKXVhukM3NJHhr7NpY9BIyyWQb0EBo3KQvvuU5egQ== - dependencies: - debug "^4.1.1" - readable-stream "^3.5.0" - split-ca "^1.0.1" - ssh2 "^1.11.0" - -dockerode@^2.5.8: - version "2.5.8" - resolved "https://registry.yarnpkg.com/dockerode/-/dockerode-2.5.8.tgz#1b661e36e1e4f860e25f56e0deabe9f87f1d0acc" - integrity sha512-+7iOUYBeDTScmOmQqpUYQaE7F4vvIt6+gIZNHWhqAQEI887tiPFB9OvXI/HzQYqfUNvukMK+9myLW63oTJPZpw== - dependencies: - concat-stream "~1.6.2" - docker-modem "^1.0.8" - tar-fs "~1.16.3" - -dockerode@^3.3.4: - version "3.3.5" - resolved "https://registry.yarnpkg.com/dockerode/-/dockerode-3.3.5.tgz#7ae3f40f2bec53ae5e9a741ce655fff459745629" - integrity sha512-/0YNa3ZDNeLr/tSckmD69+Gq+qVNhvKfAHNeZJBnp7EOP6RGKV8ORrJHkUn20So5wU+xxT7+1n5u8PjHbfjbSA== - dependencies: - "@balena/dockerignore" "^1.0.2" - docker-modem "^3.0.0" - tar-fs "~2.0.1" - -elliptic@6.5.4, elliptic@^6.5.2, elliptic@^6.5.4: - version "6.5.4" - resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb" - integrity sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== - dependencies: - bn.js "^4.11.9" - brorand "^1.1.0" - hash.js "^1.0.0" - hmac-drbg "^1.0.1" - inherits "^2.0.4" - minimalistic-assert "^1.0.1" - minimalistic-crypto-utils "^1.0.1" - -emoji-regex@^8.0.0: - version 
"8.0.0" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" - integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== - -end-of-stream@^1.0.0, end-of-stream@^1.1.0, end-of-stream@^1.4.1: - version "1.4.4" - resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" - integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== - dependencies: - once "^1.4.0" - -enquirer@^2.3.0: - version "2.4.1" - resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.4.1.tgz#93334b3fbd74fc7097b224ab4a8fb7e40bf4ae56" - integrity sha512-rRqJg/6gd538VHvR3PSrdRBb/1Vy2YfzHqzvbhGIQpDRKIa4FgV/54b5Q1xYSxOOwKvjXweS26E0Q+nAMwp2pQ== - dependencies: - ansi-colors "^4.1.1" - strip-ansi "^6.0.1" - -env-paths@^2.2.0: - version "2.2.1" - resolved "https://registry.yarnpkg.com/env-paths/-/env-paths-2.2.1.tgz#420399d416ce1fbe9bc0a07c62fa68d67fd0f8f2" - integrity sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A== - -escalade@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" - integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== - -escape-string-regexp@4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" - integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== - -escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== - -ethereum-cryptography@0.1.3, ethereum-cryptography@^0.1.3: - version "0.1.3" - resolved "https://registry.yarnpkg.com/ethereum-cryptography/-/ethereum-cryptography-0.1.3.tgz#8d6143cfc3d74bf79bbd8edecdf29e4ae20dd191" - integrity sha512-w8/4x1SGGzc+tO97TASLja6SLd3fRIK2tLVcV2Gx4IB21hE19atll5Cq9o3d0ZmAYC/8aw0ipieTSiekAea4SQ== - dependencies: - "@types/pbkdf2" "^3.0.0" - "@types/secp256k1" "^4.0.1" - blakejs "^1.1.0" - browserify-aes "^1.2.0" - bs58check "^2.1.2" - create-hash "^1.2.0" - create-hmac "^1.1.7" - hash.js "^1.1.7" - keccak "^3.0.0" - pbkdf2 "^3.0.17" - randombytes "^2.1.0" - safe-buffer "^5.1.2" - scrypt-js "^3.0.0" - secp256k1 "^4.0.1" - setimmediate "^1.0.5" - -ethereum-cryptography@^1.0.3: - version "1.2.0" - resolved "https://registry.yarnpkg.com/ethereum-cryptography/-/ethereum-cryptography-1.2.0.tgz#5ccfa183e85fdaf9f9b299a79430c044268c9b3a" - integrity sha512-6yFQC9b5ug6/17CQpCyE3k9eKBMdhyVjzUy1WkiuY/E4vj/SXDBbCw8QEIaXqf0Mf2SnY6RmpDcwlUmBSS0EJw== - dependencies: - "@noble/hashes" "1.2.0" - "@noble/secp256k1" "1.7.1" - "@scure/bip32" "1.1.5" - "@scure/bip39" "1.1.1" - -ethereumjs-abi@^0.6.8: - version "0.6.8" - resolved "https://registry.yarnpkg.com/ethereumjs-abi/-/ethereumjs-abi-0.6.8.tgz#71bc152db099f70e62f108b7cdfca1b362c6fcae" - integrity sha512-Tx0r/iXI6r+lRsdvkFDlut0N08jWMnKRZ6Gkq+Nmw75lZe4e6o3EkSnkaBP5NF6+m5PTGAr9JP43N3LyeoglsA== - dependencies: - bn.js "^4.11.8" - ethereumjs-util "^6.0.0" - -ethereumjs-util@^6.0.0, ethereumjs-util@^6.2.1: - version "6.2.1" - resolved 
"https://registry.yarnpkg.com/ethereumjs-util/-/ethereumjs-util-6.2.1.tgz#fcb4e4dd5ceacb9d2305426ab1a5cd93e3163b69" - integrity sha512-W2Ktez4L01Vexijrm5EB6w7dg4n/TgpoYU4avuT5T3Vmnw/eCRtiBrJfQYS/DCSvDIOLn2k57GcHdeBcgVxAqw== - dependencies: - "@types/bn.js" "^4.11.3" - bn.js "^4.11.0" - create-hash "^1.1.2" - elliptic "^6.5.2" - ethereum-cryptography "^0.1.3" - ethjs-util "0.1.6" - rlp "^2.2.3" - -ethers@^5.7.1: - version "5.7.2" - resolved "https://registry.yarnpkg.com/ethers/-/ethers-5.7.2.tgz#3a7deeabbb8c030d4126b24f84e525466145872e" - integrity sha512-wswUsmWo1aOK8rR7DIKiWSw9DbLWe6x98Jrn8wcTflTVvaXhAMaB5zGAXy0GYQEQp9iO1iSHWVyARQm11zUtyg== - dependencies: - "@ethersproject/abi" "5.7.0" - "@ethersproject/abstract-provider" "5.7.0" - "@ethersproject/abstract-signer" "5.7.0" - "@ethersproject/address" "5.7.0" - "@ethersproject/base64" "5.7.0" - "@ethersproject/basex" "5.7.0" - "@ethersproject/bignumber" "5.7.0" - "@ethersproject/bytes" "5.7.0" - "@ethersproject/constants" "5.7.0" - "@ethersproject/contracts" "5.7.0" - "@ethersproject/hash" "5.7.0" - "@ethersproject/hdnode" "5.7.0" - "@ethersproject/json-wallets" "5.7.0" - "@ethersproject/keccak256" "5.7.0" - "@ethersproject/logger" "5.7.0" - "@ethersproject/networks" "5.7.1" - "@ethersproject/pbkdf2" "5.7.0" - "@ethersproject/properties" "5.7.0" - "@ethersproject/providers" "5.7.2" - "@ethersproject/random" "5.7.0" - "@ethersproject/rlp" "5.7.0" - "@ethersproject/sha2" "5.7.0" - "@ethersproject/signing-key" "5.7.0" - "@ethersproject/solidity" "5.7.0" - "@ethersproject/strings" "5.7.0" - "@ethersproject/transactions" "5.7.0" - "@ethersproject/units" "5.7.0" - "@ethersproject/wallet" "5.7.0" - "@ethersproject/web" "5.7.1" - "@ethersproject/wordlists" "5.7.0" - -ethjs-util@0.1.6, ethjs-util@^0.1.6: - version "0.1.6" - resolved "https://registry.yarnpkg.com/ethjs-util/-/ethjs-util-0.1.6.tgz#f308b62f185f9fe6237132fb2a9818866a5cd536" - integrity sha512-CUnVOQq7gSpDHZVVrQW8ExxUETWrnrvXYvYz55wOU8Uj4VCgw56XC2B/fVqQN+f7gmrnRHSLVnFAwsCuNwji8w== - dependencies: - is-hex-prefixed "1.0.0" - strip-hex-prefix "1.0.0" - -event-target-shim@^5.0.0: - version "5.0.1" - resolved "https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789" - integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ== - -evp_bytestokey@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" - integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== - dependencies: - md5.js "^1.3.4" - safe-buffer "^5.1.1" - -fill-range@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" - integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== - dependencies: - to-regex-range "^5.0.1" - -find-up@5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" - integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== - dependencies: - locate-path "^6.0.0" - path-exists "^4.0.0" - -find-up@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" - integrity 
sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ== - dependencies: - locate-path "^2.0.0" - -flat@^5.0.2: - version "5.0.2" - resolved "https://registry.yarnpkg.com/flat/-/flat-5.0.2.tgz#8ca6fe332069ffa9d324c327198c598259ceb241" - integrity sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ== - -follow-redirects@^1.12.1: - version "1.15.3" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.3.tgz#fe2f3ef2690afce7e82ed0b44db08165b207123a" - integrity sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q== - -fp-ts@1.19.3: - version "1.19.3" - resolved "https://registry.yarnpkg.com/fp-ts/-/fp-ts-1.19.3.tgz#261a60d1088fbff01f91256f91d21d0caaaaa96f" - integrity sha512-H5KQDspykdHuztLTg+ajGN0Z2qUjcEf3Ybxc6hLt0k7/zPkn29XnKnxlBPyW2XIddWrGaJBzBl4VLYOtk39yZg== - -fp-ts@^1.0.0: - version "1.19.5" - resolved "https://registry.yarnpkg.com/fp-ts/-/fp-ts-1.19.5.tgz#3da865e585dfa1fdfd51785417357ac50afc520a" - integrity sha512-wDNqTimnzs8QqpldiId9OavWK2NptormjXnRJTQecNjzwfyp6P/8s/zG8e4h3ja3oqkKaY72UlTjQYt/1yXf9A== - -fs-constants@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" - integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== - -fs-extra@^0.30.0: - version "0.30.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-0.30.0.tgz#f233ffcc08d4da7d432daa449776989db1df93f0" - integrity sha512-UvSPKyhMn6LEd/WpUaV9C9t3zATuqoqfWc3QdPhPLb58prN9tqYPlPWi8Krxi44loBoUzlobqZ3+8tGpxxSzwA== - dependencies: - graceful-fs "^4.1.2" - jsonfile "^2.1.0" - klaw "^1.0.0" - path-is-absolute "^1.0.0" - rimraf "^2.2.8" - -fs-extra@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" - integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== - dependencies: - graceful-fs "^4.1.2" - jsonfile "^4.0.0" - universalify "^0.1.0" - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== - -fsevents@~2.3.2: - version "2.3.3" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" - integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== - -functional-red-black-tree@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" - integrity sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g== - -get-caller-file@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" - integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== - -glob-parent@~5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" - integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== - 
dependencies: - is-glob "^4.0.1" - -glob@7.2.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023" - integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.4" - once "^1.3.0" - path-is-absolute "^1.0.0" - -glob@^7.1.3: - version "7.2.3" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" - integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.1.1" - once "^1.3.0" - path-is-absolute "^1.0.0" - -graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.1.9: - version "4.2.11" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" - integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== - -hardhat@=2.16.0: - version "2.16.0" - resolved "https://registry.yarnpkg.com/hardhat/-/hardhat-2.16.0.tgz#c5611d433416b31f6ce92f733b1f1b5236ad6230" - integrity sha512-7VQEJPQRAZdtrYUZaU9GgCpP3MBNy/pTdscARNJQMWKj5C+R7V32G5uIZKIqZ4QiqXa6CBfxxe+G+ahxUbHZHA== - dependencies: - "@ethersproject/abi" "^5.1.2" - "@metamask/eth-sig-util" "^4.0.0" - "@nomicfoundation/ethereumjs-block" "5.0.1" - "@nomicfoundation/ethereumjs-blockchain" "7.0.1" - "@nomicfoundation/ethereumjs-common" "4.0.1" - "@nomicfoundation/ethereumjs-evm" "2.0.1" - "@nomicfoundation/ethereumjs-rlp" "5.0.1" - "@nomicfoundation/ethereumjs-statemanager" "2.0.1" - "@nomicfoundation/ethereumjs-trie" "6.0.1" - "@nomicfoundation/ethereumjs-tx" "5.0.1" - "@nomicfoundation/ethereumjs-util" "9.0.1" - "@nomicfoundation/ethereumjs-vm" "7.0.1" - "@nomicfoundation/solidity-analyzer" "^0.1.0" - "@sentry/node" "^5.18.1" - "@types/bn.js" "^5.1.0" - "@types/lru-cache" "^5.1.0" - abort-controller "^3.0.0" - adm-zip "^0.4.16" - aggregate-error "^3.0.0" - ansi-escapes "^4.3.0" - chalk "^2.4.2" - chokidar "^3.4.0" - ci-info "^2.0.0" - debug "^4.1.1" - enquirer "^2.3.0" - env-paths "^2.2.0" - ethereum-cryptography "^1.0.3" - ethereumjs-abi "^0.6.8" - find-up "^2.1.0" - fp-ts "1.19.3" - fs-extra "^7.0.1" - glob "7.2.0" - immutable "^4.0.0-rc.12" - io-ts "1.10.4" - keccak "^3.0.2" - lodash "^4.17.11" - mnemonist "^0.38.0" - mocha "^10.0.0" - p-map "^4.0.0" - raw-body "^2.4.1" - resolve "1.17.0" - semver "^6.3.0" - solc "0.7.3" - source-map-support "^0.5.13" - stacktrace-parser "^0.1.10" - tsort "0.0.1" - undici "^5.14.0" - uuid "^8.3.2" - ws "^7.4.6" - -has-flag@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" - integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== - -has-flag@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" - integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== - -hash-base@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.1.0.tgz#55c381d9e06e1d2997a883b4a3fddfe7f0d3af33" - integrity sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA== - dependencies: - inherits "^2.0.4" - 
readable-stream "^3.6.0" - safe-buffer "^5.2.0" - -hash.js@1.1.7, hash.js@^1.0.0, hash.js@^1.0.3, hash.js@^1.1.7: - version "1.1.7" - resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" - integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== - dependencies: - inherits "^2.0.3" - minimalistic-assert "^1.0.1" - -he@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" - integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== - -hmac-drbg@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" - integrity sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg== - dependencies: - hash.js "^1.0.3" - minimalistic-assert "^1.0.0" - minimalistic-crypto-utils "^1.0.1" - -http-errors@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" - integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== - dependencies: - depd "2.0.0" - inherits "2.0.4" - setprototypeof "1.2.0" - statuses "2.0.1" - toidentifier "1.0.1" - -https-proxy-agent@^5.0.0: - version "5.0.1" - resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" - integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== - dependencies: - agent-base "6" - debug "4" - -iconv-lite@0.4.24: - version "0.4.24" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" - integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== - dependencies: - safer-buffer ">= 2.1.2 < 3" - -ieee754@^1.1.13, ieee754@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" - integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== - -immutable@^4.0.0-rc.12: - version "4.3.4" - resolved "https://registry.yarnpkg.com/immutable/-/immutable-4.3.4.tgz#2e07b33837b4bb7662f288c244d1ced1ef65a78f" - integrity sha512-fsXeu4J4i6WNWSikpI88v/PcVflZz+6kMhUfIwc5SY+poQRPnaf5V7qds6SUyUN3cVxEzuCab7QIoLOQ+DQ1wA== - -indent-string@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" - integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== - -inflight@^1.0.4: - version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== - dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3: - version "2.0.4" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" - integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== - -io-ts@1.10.4: - version 
"1.10.4" - resolved "https://registry.yarnpkg.com/io-ts/-/io-ts-1.10.4.tgz#cd5401b138de88e4f920adbcb7026e2d1967e6e2" - integrity sha512-b23PteSnYXSONJ6JQXRAlvJhuw8KOtkqa87W4wDtvMrud/DTJd5X+NpOOI+O/zZwVq6v0VLAaJ+1EDViKEuN9g== - dependencies: - fp-ts "^1.0.0" - -is-binary-path@~2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" - integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== - dependencies: - binary-extensions "^2.0.0" - -is-buffer@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" - integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ== - -is-extglob@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" - integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== - -is-fullwidth-code-point@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" - integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== - -is-glob@^4.0.1, is-glob@~4.0.1: - version "4.0.3" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" - integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== - dependencies: - is-extglob "^2.1.1" - -is-hex-prefixed@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-hex-prefixed/-/is-hex-prefixed-1.0.0.tgz#7d8d37e6ad77e5d127148913c573e082d777f554" - integrity sha512-WvtOiug1VFrE9v1Cydwm+FnXd3+w9GaeVUss5W4v/SLy3UW00vP+6iNF2SdnfiBoLy4bTqVdkftNGTUeOFVsbA== - -is-number@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" - integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== - -is-plain-obj@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287" - integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== - -is-unicode-supported@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz#3f26c76a809593b52bfa2ecb5710ed2779b522a7" - integrity sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw== - -isarray@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" - integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== - -isarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" - integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== - -js-sdsl@^4.1.4: - version "4.4.2" - resolved "https://registry.yarnpkg.com/js-sdsl/-/js-sdsl-4.4.2.tgz#2e3c031b1f47d3aca8b775532e3ebb0818e7f847" - integrity 
sha512-dwXFwByc/ajSV6m5bcKAPwe4yDDF6D614pxmIi5odytzxRlwqF6nwoiCek80Ixc7Cvma5awClxrzFtxCQvcM8w== - -js-sha3@0.8.0: - version "0.8.0" - resolved "https://registry.yarnpkg.com/js-sha3/-/js-sha3-0.8.0.tgz#b9b7a5da73afad7dedd0f8c463954cbde6818840" - integrity sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q== - -js-yaml@4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" - integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== - dependencies: - argparse "^2.0.1" - -jsonfile@^2.1.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-2.4.0.tgz#3736a2b428b87bbda0cc83b53fa3d633a35c2ae8" - integrity sha512-PKllAqbgLgxHaj8TElYymKCAgrASebJrWpTnEkOaTowt23VKXXN0sUeriJ+eh7y6ufb/CC5ap11pz71/cM0hUw== - optionalDependencies: - graceful-fs "^4.1.6" - -jsonfile@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" - integrity sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg== - optionalDependencies: - graceful-fs "^4.1.6" - -jsonparse@^1.2.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280" - integrity sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg== - -keccak@^3.0.0, keccak@^3.0.2: - version "3.0.4" - resolved "https://registry.yarnpkg.com/keccak/-/keccak-3.0.4.tgz#edc09b89e633c0549da444432ecf062ffadee86d" - integrity sha512-3vKuW0jV8J3XNTzvfyicFR5qvxrSAGl7KIhvgOu5cmWwM7tZRj3fMbj/pfIf4be7aznbc+prBWGjywox/g2Y6Q== - dependencies: - node-addon-api "^2.0.0" - node-gyp-build "^4.2.0" - readable-stream "^3.6.0" - -klaw@^1.0.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/klaw/-/klaw-1.3.1.tgz#4088433b46b3b1ba259d78785d8e96f73ba02439" - integrity sha512-TED5xi9gGQjGpNnvRWknrwAB1eL5GciPfVFOt3Vk1OJCVDQbzuSfrF3hkUQKlsgKrG1F+0t5W0m+Fje1jIt8rw== - optionalDependencies: - graceful-fs "^4.1.9" - -level-supports@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/level-supports/-/level-supports-4.0.1.tgz#431546f9d81f10ff0fea0e74533a0e875c08c66a" - integrity sha512-PbXpve8rKeNcZ9C1mUicC9auIYFyGpkV9/i6g76tLgANwWhtG2v7I4xNBUlkn3lE2/dZF3Pi0ygYGtLc4RXXdA== - -level-transcoder@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/level-transcoder/-/level-transcoder-1.0.1.tgz#f8cef5990c4f1283d4c86d949e73631b0bc8ba9c" - integrity sha512-t7bFwFtsQeD8cl8NIoQ2iwxA0CL/9IFw7/9gAjOonH0PWTTiRfY7Hq+Ejbsxh86tXobDQ6IOiddjNYIfOBs06w== - dependencies: - buffer "^6.0.3" - module-error "^1.0.1" - -level@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/level/-/level-8.0.0.tgz#41b4c515dabe28212a3e881b61c161ffead14394" - integrity sha512-ypf0jjAk2BWI33yzEaaotpq7fkOPALKAgDBxggO6Q9HGX2MRXn0wbP1Jn/tJv1gtL867+YOjOB49WaUF3UoJNQ== - dependencies: - browser-level "^1.0.1" - classic-level "^1.2.0" - -locate-path@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" - integrity sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA== - dependencies: - p-locate "^2.0.0" - path-exists "^3.0.0" - -locate-path@^6.0.0: - version "6.0.0" - resolved 
"https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" - integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== - dependencies: - p-locate "^5.0.0" - -lodash@^4.17.11: - version "4.17.21" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" - integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== - -log-symbols@4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.1.0.tgz#3fbdbb95b4683ac9fc785111e792e558d4abd503" - integrity sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg== - dependencies: - chalk "^4.1.0" - is-unicode-supported "^0.1.0" - -lru-cache@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" - integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== - dependencies: - yallist "^3.0.2" - -lru_map@^0.3.3: - version "0.3.3" - resolved "https://registry.yarnpkg.com/lru_map/-/lru_map-0.3.3.tgz#b5c8351b9464cbd750335a79650a0ec0e56118dd" - integrity sha512-Pn9cox5CsMYngeDbmChANltQl+5pi6XmTrraMSzhPmMBbmgcxmqWry0U3PGapCU1yB4/LqCcom7qhHZiF/jGfQ== - -mcl-wasm@^0.7.1: - version "0.7.9" - resolved "https://registry.yarnpkg.com/mcl-wasm/-/mcl-wasm-0.7.9.tgz#c1588ce90042a8700c3b60e40efb339fc07ab87f" - integrity sha512-iJIUcQWA88IJB/5L15GnJVnSQJmf/YaxxV6zRavv83HILHaJQb6y0iFyDMdDO0gN8X37tdxmAOrH/P8B6RB8sQ== - -md5.js@^1.3.4: - version "1.3.5" - resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" - integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== - dependencies: - hash-base "^3.0.0" - inherits "^2.0.1" - safe-buffer "^5.1.2" - -memory-level@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/memory-level/-/memory-level-1.0.0.tgz#7323c3fd368f9af2f71c3cd76ba403a17ac41692" - integrity sha512-UXzwewuWeHBz5krr7EvehKcmLFNoXxGcvuYhC41tRnkrTbJohtS7kVn9akmgirtRygg+f7Yjsfi8Uu5SGSQ4Og== - dependencies: - abstract-level "^1.0.0" - functional-red-black-tree "^1.0.1" - module-error "^1.0.1" - -memorystream@^0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/memorystream/-/memorystream-0.3.1.tgz#86d7090b30ce455d63fbae12dda51a47ddcaf9b2" - integrity sha512-S3UwM3yj5mtUSEfP41UZmt/0SCoVYUcU1rkXv+BQ5Ig8ndL4sPoJNBUJERafdPb5jjHJGuMgytgKvKIf58XNBw== - -minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" - integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== - -minimalistic-crypto-utils@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" - integrity sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg== - -minimatch@5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.0.1.tgz#fb9022f7528125187c92bd9e9b6366be1cf3415b" - integrity sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g== - dependencies: - 
brace-expansion "^2.0.1" - -minimatch@^3.0.4, minimatch@^3.1.1: - version "3.1.2" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" - integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== - dependencies: - brace-expansion "^1.1.7" - -minimist@^1.2.6: - version "1.2.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" - integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== - -mkdirp-classic@^0.5.2: - version "0.5.3" - resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" - integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== - -mkdirp@^0.5.1: - version "0.5.6" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" - integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== - dependencies: - minimist "^1.2.6" - -mnemonist@^0.38.0: - version "0.38.5" - resolved "https://registry.yarnpkg.com/mnemonist/-/mnemonist-0.38.5.tgz#4adc7f4200491237fe0fa689ac0b86539685cade" - integrity sha512-bZTFT5rrPKtPJxj8KSV0WkPyNxl72vQepqqVUAW2ARUpUSF2qXMB6jZj7hW5/k7C1rtpzqbD/IIbJwLXUjCHeg== - dependencies: - obliterator "^2.0.0" - -mocha@^10.0.0: - version "10.2.0" - resolved "https://registry.yarnpkg.com/mocha/-/mocha-10.2.0.tgz#1fd4a7c32ba5ac372e03a17eef435bd00e5c68b8" - integrity sha512-IDY7fl/BecMwFHzoqF2sg/SHHANeBoMMXFlS9r0OXKDssYE1M5O43wUY/9BVPeIvfH2zmEbBfseqN9gBQZzXkg== - dependencies: - ansi-colors "4.1.1" - browser-stdout "1.3.1" - chokidar "3.5.3" - debug "4.3.4" - diff "5.0.0" - escape-string-regexp "4.0.0" - find-up "5.0.0" - glob "7.2.0" - he "1.2.0" - js-yaml "4.1.0" - log-symbols "4.1.0" - minimatch "5.0.1" - ms "2.1.3" - nanoid "3.3.3" - serialize-javascript "6.0.0" - strip-json-comments "3.1.1" - supports-color "8.1.1" - workerpool "6.2.1" - yargs "16.2.0" - yargs-parser "20.2.4" - yargs-unparser "2.0.0" - -module-error@^1.0.1, module-error@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/module-error/-/module-error-1.0.2.tgz#8d1a48897ca883f47a45816d4fb3e3c6ba404d86" - integrity sha512-0yuvsqSCv8LbaOKhnsQ/T5JhyFlCYLPXK3U2sgV10zoKQwzs/MyfuQUOZQ1V/6OCOJsK/TRgNVrPuPDqtdMFtA== - -ms@2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" - integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== - -ms@2.1.3, ms@^2.1.1: - version "2.1.3" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" - integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== - -nan@^2.17.0: - version "2.18.0" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.18.0.tgz#26a6faae7ffbeb293a39660e88a76b82e30b7554" - integrity sha512-W7tfG7vMOGtD30sHoZSSc/JVYiyDPEyQVso/Zz+/uQd0B0L46gtC+pHha5FFMRpil6fm/AoEcRWyOVi4+E/f8w== - -nanoid@3.3.3: - version "3.3.3" - resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.3.tgz#fd8e8b7aa761fe807dba2d1b98fb7241bb724a25" - integrity sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w== - -napi-macros@^2.2.2: - version "2.2.2" - resolved 
"https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.2.2.tgz#817fef20c3e0e40a963fbf7b37d1600bd0201044" - integrity sha512-hmEVtAGYzVQpCKdbQea4skABsdXW4RUh5t5mJ2zzqowJS2OyXZTU1KhDVFhx+NlWZ4ap9mqR9TcDO3LTTttd+g== - -node-addon-api@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-2.0.2.tgz#432cfa82962ce494b132e9d72a15b29f71ff5d32" - integrity sha512-Ntyt4AIXyaLIuMHF6IOoTakB3K+RWxwtsHNRxllEoA6vPwP9o4866g6YWDLUdnucilZhmkxiHwHr11gAENw+QA== - -node-fetch@^2.6.0: - version "2.7.0" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" - integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== - dependencies: - whatwg-url "^5.0.0" - -node-gyp-build@^4.2.0, node-gyp-build@^4.3.0: - version "4.6.1" - resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.6.1.tgz#24b6d075e5e391b8d5539d98c7fc5c210cac8a3e" - integrity sha512-24vnklJmyRS8ViBNI8KbtK/r/DmXQMRiOMXTNz2nrTnAYUwjmEEbnnpB/+kt+yWRv73bPsSPRFddrcIbAxSiMQ== - -normalize-path@^3.0.0, normalize-path@~3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" - integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== - -obliterator@^2.0.0: - version "2.0.4" - resolved "https://registry.yarnpkg.com/obliterator/-/obliterator-2.0.4.tgz#fa650e019b2d075d745e44f1effeb13a2adbe816" - integrity sha512-lgHwxlxV1qIg1Eap7LgIeoBWIMFibOjbrYPIPJZcI1mmGAI2m3lNYpK12Y+GBdPQ0U1hRwSord7GIaawz962qQ== - -once@^1.3.0, once@^1.3.1, once@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== - dependencies: - wrappy "1" - -os-tmpdir@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" - integrity sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g== - -p-limit@^1.1.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8" - integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q== - dependencies: - p-try "^1.0.0" - -p-limit@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" - integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== - dependencies: - yocto-queue "^0.1.0" - -p-locate@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" - integrity sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg== - dependencies: - p-limit "^1.1.0" - -p-locate@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" - integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== - dependencies: - p-limit "^3.0.2" - -p-map@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" - integrity 
sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ== - dependencies: - aggregate-error "^3.0.0" - -p-try@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" - integrity sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww== - -path-exists@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" - integrity sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ== - -path-exists@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" - integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== - -path-is-absolute@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== - -path-parse@^1.0.6: - version "1.0.7" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" - integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== - -pbkdf2@^3.0.17: - version "3.1.2" - resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.1.2.tgz#dd822aa0887580e52f1a039dc3eda108efae3075" - integrity sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA== - dependencies: - create-hash "^1.1.2" - create-hmac "^1.1.4" - ripemd160 "^2.0.1" - safe-buffer "^5.0.1" - sha.js "^2.4.8" - -picomatch@^2.0.4, picomatch@^2.2.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" - integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== - -process-nextick-args@~2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" - integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== - -pump@^1.0.0: - version "1.0.3" - resolved "https://registry.yarnpkg.com/pump/-/pump-1.0.3.tgz#5dfe8311c33bbf6fc18261f9f34702c47c08a954" - integrity sha512-8k0JupWme55+9tCVE+FS5ULT3K6AbgqrGa58lTT49RpyfwwcGedHqaC5LlQNdEAumn/wFsu6aPwkuPMioy8kqw== - dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" - -pump@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" - integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== - dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" - -queue-microtask@^1.2.2, queue-microtask@^1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" - integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== - -randombytes@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" - integrity 
sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== - dependencies: - safe-buffer "^5.1.0" - -raw-body@^2.4.1: - version "2.5.2" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a" - integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA== - dependencies: - bytes "3.1.2" - http-errors "2.0.0" - iconv-lite "0.4.24" - unpipe "1.0.0" - -readable-stream@^2.2.2, readable-stream@^2.3.0, readable-stream@^2.3.5: - version "2.3.8" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.8.tgz#91125e8042bba1b9887f49345f6277027ce8be9b" - integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" - -readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.5.0, readable-stream@^3.6.0: - version "3.6.2" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" - integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== - dependencies: - inherits "^2.0.3" - string_decoder "^1.1.1" - util-deprecate "^1.0.1" - -readable-stream@~1.0.26-4: - version "1.0.34" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c" - integrity sha512-ok1qVCJuRkNmvebYikljxJA/UEsKwLl2nI1OmaqAu4/UE+h0wKCHok4XkL/gvi39OacXvw59RJUOFUkDib2rHg== - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.1" - isarray "0.0.1" - string_decoder "~0.10.x" - -readdirp@~3.6.0: - version "3.6.0" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" - integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== - dependencies: - picomatch "^2.2.1" - -require-directory@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" - integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== - -require-from-string@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" - integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== - -resolve@1.17.0: - version "1.17.0" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.17.0.tgz#b25941b54968231cc2d1bb76a79cb7f2c0bf8444" - integrity sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w== - dependencies: - path-parse "^1.0.6" - -rimraf@^2.2.8: - version "2.7.1" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" - integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== - dependencies: - glob "^7.1.3" - -ripemd160@^2.0.0, ripemd160@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" - integrity 
sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== - dependencies: - hash-base "^3.0.0" - inherits "^2.0.1" - -rlp@^2.2.3: - version "2.2.7" - resolved "https://registry.yarnpkg.com/rlp/-/rlp-2.2.7.tgz#33f31c4afac81124ac4b283e2bd4d9720b30beaf" - integrity sha512-d5gdPmgQ0Z+AklL2NVXr/IoSjNZFfTVvQWzL/AM2AOcSzYP2xjlb0AC8YyCLc41MSNf6P6QVtjgPdmVtzb+4lQ== - dependencies: - bn.js "^5.2.0" - -run-parallel-limit@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/run-parallel-limit/-/run-parallel-limit-1.1.0.tgz#be80e936f5768623a38a963262d6bef8ff11e7ba" - integrity sha512-jJA7irRNM91jaKc3Hcl1npHsFLOXOoTkPCUL1JEa1R82O2miplXXRaGdjW/KM/98YQWDhJLiSs793CnXfblJUw== - dependencies: - queue-microtask "^1.2.2" - -rustbn.js@~0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/rustbn.js/-/rustbn.js-0.2.0.tgz#8082cb886e707155fd1cb6f23bd591ab8d55d0ca" - integrity sha512-4VlvkRUuCJvr2J6Y0ImW7NvTCriMi7ErOAqWk1y69vAdoNIzCF3yPmgeNzx+RQTLEDFq5sHfscn1MwHxP9hNfA== - -safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.0, safe-buffer@~5.2.0: - version "5.2.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" - integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== - -safe-buffer@~5.1.0, safe-buffer@~5.1.1: - version "5.1.2" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" - integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== - -"safer-buffer@>= 2.1.2 < 3", safer-buffer@~2.1.0: - version "2.1.2" - resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" - integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== - -scrypt-js@3.0.1, scrypt-js@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/scrypt-js/-/scrypt-js-3.0.1.tgz#d314a57c2aef69d1ad98a138a21fe9eafa9ee312" - integrity sha512-cdwTTnqPu0Hyvf5in5asVdZocVDTNRmR7XEcJuIzMjJeSHybHl7vpB66AzwTaIg6CLSbtjcxc8fqcySfnTkccA== - -secp256k1@^4.0.1: - version "4.0.3" - resolved "https://registry.yarnpkg.com/secp256k1/-/secp256k1-4.0.3.tgz#c4559ecd1b8d3c1827ed2d1b94190d69ce267303" - integrity sha512-NLZVf+ROMxwtEj3Xa562qgv2BK5e2WNmXPiOdVIPLgs6lyTzMvBq0aWTYMI5XCP9jZMVKOcqZLw/Wc4vDkuxhA== - dependencies: - elliptic "^6.5.4" - node-addon-api "^2.0.0" - node-gyp-build "^4.2.0" - -semver@^5.5.0: - version "5.7.2" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" - integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== - -semver@^6.3.0: - version "6.3.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" - integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== - -serialize-javascript@6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8" - integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag== - dependencies: - randombytes "^2.1.0" - -setimmediate@^1.0.5: - version "1.0.5" - resolved 
"https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" - integrity sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA== - -setprototypeof@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" - integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== - -sha.js@^2.4.0, sha.js@^2.4.8: - version "2.4.11" - resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" - integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== - dependencies: - inherits "^2.0.1" - safe-buffer "^5.0.1" - -solc@0.7.3: - version "0.7.3" - resolved "https://registry.yarnpkg.com/solc/-/solc-0.7.3.tgz#04646961bd867a744f63d2b4e3c0701ffdc7d78a" - integrity sha512-GAsWNAjGzIDg7VxzP6mPjdurby3IkGCjQcM8GFYZT6RyaoUZKmMU6Y7YwG+tFGhv7dwZ8rmR4iwFDrrD99JwqA== - dependencies: - command-exists "^1.2.8" - commander "3.0.2" - follow-redirects "^1.12.1" - fs-extra "^0.30.0" - js-sha3 "0.8.0" - memorystream "^0.3.1" - require-from-string "^2.0.0" - semver "^5.5.0" - tmp "0.0.33" - -source-map-support@^0.5.13: - version "0.5.21" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" - integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map@^0.6.0: - version "0.6.1" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - -split-ca@^1.0.0, split-ca@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/split-ca/-/split-ca-1.0.1.tgz#6c83aff3692fa61256e0cd197e05e9de157691a6" - integrity sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ== - -ssh2@^1.11.0: - version "1.14.0" - resolved "https://registry.yarnpkg.com/ssh2/-/ssh2-1.14.0.tgz#8f68440e1b768b66942c9e4e4620b2725b3555bb" - integrity sha512-AqzD1UCqit8tbOKoj6ztDDi1ffJZ2rV2SwlgrVVrHPkV5vWqGJOVp5pmtj18PunkPJAuKQsnInyKV+/Nb2bUnA== - dependencies: - asn1 "^0.2.6" - bcrypt-pbkdf "^1.0.2" - optionalDependencies: - cpu-features "~0.0.8" - nan "^2.17.0" - -stacktrace-parser@^0.1.10: - version "0.1.10" - resolved "https://registry.yarnpkg.com/stacktrace-parser/-/stacktrace-parser-0.1.10.tgz#29fb0cae4e0d0b85155879402857a1639eb6051a" - integrity sha512-KJP1OCML99+8fhOHxwwzyWrlUuVX5GQ0ZpJTd1DFXhdkrvg1szxfHhawXUZ3g9TkXORQd4/WG68jMlQZ2p8wlg== - dependencies: - type-fest "^0.7.1" - -statuses@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" - integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== - -string-width@^4.1.0, string-width@^4.2.0: - version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - -string_decoder@^1.1.1: - 
version "1.3.0" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" - integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== - dependencies: - safe-buffer "~5.2.0" - -string_decoder@~0.10.x: - version "0.10.31" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" - integrity sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ== - -string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" - -strip-ansi@^6.0.0, strip-ansi@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - -strip-hex-prefix@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/strip-hex-prefix/-/strip-hex-prefix-1.0.0.tgz#0c5f155fef1151373377de9dbb588da05500e36f" - integrity sha512-q8d4ue7JGEiVcypji1bALTos+0pWtyGlivAWyPuTkHzuTCJqrK9sWxYQZUq6Nq3cuyv3bm734IhHvHtGGURU6A== - dependencies: - is-hex-prefixed "1.0.0" - -strip-json-comments@3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" - integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== - -supports-color@8.1.1: - version "8.1.1" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" - integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== - dependencies: - has-flag "^4.0.0" - -supports-color@^5.3.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" - integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== - dependencies: - has-flag "^3.0.0" - -supports-color@^7.1.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" - integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== - dependencies: - has-flag "^4.0.0" - -tar-fs@~1.16.3: - version "1.16.3" - resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-1.16.3.tgz#966a628841da2c4010406a82167cbd5e0c72d509" - integrity sha512-NvCeXpYx7OsmOh8zIOP/ebG55zZmxLE0etfWRbWok+q2Qo8x/vOR/IJT1taADXPe+jsiu9axDb3X4B+iIgNlKw== - dependencies: - chownr "^1.0.1" - mkdirp "^0.5.1" - pump "^1.0.0" - tar-stream "^1.1.2" - -tar-fs@~2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.0.1.tgz#e44086c1c60d31a4f0cf893b1c4e155dabfae9e2" - integrity sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA== - dependencies: - chownr "^1.1.1" - mkdirp-classic "^0.5.2" - pump "^3.0.0" - tar-stream "^2.0.0" - -tar-stream@^1.1.2: - version "1.6.2" - resolved 
"https://registry.yarnpkg.com/tar-stream/-/tar-stream-1.6.2.tgz#8ea55dab37972253d9a9af90fdcd559ae435c555" - integrity sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A== - dependencies: - bl "^1.0.0" - buffer-alloc "^1.2.0" - end-of-stream "^1.0.0" - fs-constants "^1.0.0" - readable-stream "^2.3.0" - to-buffer "^1.1.1" - xtend "^4.0.0" - -tar-stream@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" - integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== - dependencies: - bl "^4.0.3" - end-of-stream "^1.4.1" - fs-constants "^1.0.0" - inherits "^2.0.3" - readable-stream "^3.1.1" - -"through@>=2.2.7 <3": - version "2.3.8" - resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" - integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== - -tmp@0.0.33: - version "0.0.33" - resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" - integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== - dependencies: - os-tmpdir "~1.0.2" - -to-buffer@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/to-buffer/-/to-buffer-1.1.1.tgz#493bd48f62d7c43fcded313a03dcadb2e1213a80" - integrity sha512-lx9B5iv7msuFYE3dytT+KE5tap+rNYw+K4jVkb9R/asAb+pbBSM17jtunHplhBe6RRJdZx3Pn2Jph24O32mOVg== - -to-regex-range@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" - integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== - dependencies: - is-number "^7.0.0" - -toidentifier@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" - integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== - -tr46@~0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" - integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== - -tslib@^1.9.3: - version "1.14.1" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" - integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== - -tsort@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/tsort/-/tsort-0.0.1.tgz#e2280f5e817f8bf4275657fd0f9aebd44f5a2786" - integrity sha512-Tyrf5mxF8Ofs1tNoxA13lFeZ2Zrbd6cKbuH3V+MQ5sb6DtBj5FjrXVsRWT8YvNAQTqNoz66dz1WsbigI22aEnw== - -tweetnacl-util@^0.15.1: - version "0.15.1" - resolved "https://registry.yarnpkg.com/tweetnacl-util/-/tweetnacl-util-0.15.1.tgz#b80fcdb5c97bcc508be18c44a4be50f022eea00b" - integrity sha512-RKJBIj8lySrShN4w6i/BonWp2Z/uxwC3h4y7xsRrpP59ZboCd0GpEVsOnMDYLMmKBpYhb5TgHzZXy7wTfYFBRw== - -tweetnacl@^0.14.3: - version "0.14.5" - resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" - integrity sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA== - -tweetnacl@^1.0.3: - version "1.0.3" - resolved 
"https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-1.0.3.tgz#ac0af71680458d8a6378d0d0d050ab1407d35596" - integrity sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw== - -type-fest@^0.21.3: - version "0.21.3" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" - integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== - -type-fest@^0.7.1: - version "0.7.1" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.7.1.tgz#8dda65feaf03ed78f0a3f9678f1869147f7c5c48" - integrity sha512-Ne2YiiGN8bmrmJJEuTWTLJR32nh/JdL1+PSicowtNb0WFpn59GK8/lfD61bVtzguz7b3PBt74nxpv/Pw5po5Rg== - -typedarray@^0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" - integrity sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA== - -undici-types@~5.26.4: - version "5.26.5" - resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" - integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== - -undici@^5.14.0: - version "5.27.2" - resolved "https://registry.yarnpkg.com/undici/-/undici-5.27.2.tgz#a270c563aea5b46cc0df2550523638c95c5d4411" - integrity sha512-iS857PdOEy/y3wlM3yRp+6SNQQ6xU0mmZcwRSriqk+et/cwWAtwmIGf6WkoDN2EK/AMdCO/dfXzIwi+rFMrjjQ== - dependencies: - "@fastify/busboy" "^2.0.0" - -universalify@^0.1.0: - version "0.1.2" - resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" - integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== - -unpipe@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" - integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== - -util-deprecate@^1.0.1, util-deprecate@~1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== - -uuid@^8.3.2: - version "8.3.2" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" - integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== - -webidl-conversions@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" - integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== - -whatwg-url@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" - integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== - dependencies: - tr46 "~0.0.3" - webidl-conversions "^3.0.0" - -workerpool@6.2.1: - version "6.2.1" - resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.2.1.tgz#46fc150c17d826b86a008e5a4508656777e9c343" - integrity sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw== - -wrap-ansi@^7.0.0: - 
version "7.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" - integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - -wrappy@1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== - -ws@7.4.6: - version "7.4.6" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.6.tgz#5654ca8ecdeee47c33a9a4bf6d28e2be2980377c" - integrity sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A== - -ws@^7.4.6: - version "7.5.9" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" - integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== - -xtend@^4.0.0: - version "4.0.2" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" - integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== - -y18n@^5.0.5: - version "5.0.8" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" - integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== - -yallist@^3.0.2: - version "3.1.1" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" - integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== - -yargs-parser@20.2.4: - version "20.2.4" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.4.tgz#b42890f14566796f85ae8e3a25290d205f154a54" - integrity sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA== - -yargs-parser@^20.2.2: - version "20.2.9" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" - integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== - -yargs-unparser@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/yargs-unparser/-/yargs-unparser-2.0.0.tgz#f131f9226911ae5d9ad38c432fe809366c2325eb" - integrity sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA== - dependencies: - camelcase "^6.0.0" - decamelize "^4.0.0" - flat "^5.0.2" - is-plain-obj "^2.1.0" - -yargs@16.2.0: - version "16.2.0" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" - integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== - dependencies: - cliui "^7.0.2" - escalade "^3.1.1" - get-caller-file "^2.0.5" - require-directory "^2.1.1" - string-width "^4.2.0" - y18n "^5.0.5" - yargs-parser "^20.2.2" - -yocto-queue@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" - integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== diff --git a/etc/env/base/proof_data_handler.toml b/etc/env/base/proof_data_handler.toml index b56ac26fb177..767d1d16da2e 100644 --- 
a/etc/env/base/proof_data_handler.toml
+++ b/etc/env/base/proof_data_handler.toml
@@ -1,5 +1,6 @@
 [proof_data_handler]
 http_port = 3320
 proof_generation_timeout_in_secs = 18000
-tee_proof_generation_timeout_in_secs = 600
+tee_proof_generation_timeout_in_secs = 60
+tee_batch_permanently_ignored_timeout_in_hours = 240
 tee_support = true
diff --git a/etc/env/file_based/general.yaml b/etc/env/file_based/general.yaml
index a4005e9477a8..23e8b3ee420c 100644
--- a/etc/env/file_based/general.yaml
+++ b/etc/env/file_based/general.yaml
@@ -166,7 +166,8 @@ witness_vector_generator:
 data_handler:
   http_port: 3320
   proof_generation_timeout_in_secs: 18000
-  tee_proof_generation_timeout_in_secs: 600
+  tee_proof_generation_timeout_in_secs: 60
+  tee_batch_permanently_ignored_timeout_in_hours: 240
   tee_support: true
 prover_gateway:
   api_url: http://127.0.0.1:3320
diff --git a/etc/lint-config/ignore.yaml b/etc/lint-config/ignore.yaml
index b4456a6c3fd4..009d0dbb0946 100644
--- a/etc/lint-config/ignore.yaml
+++ b/etc/lint-config/ignore.yaml
@@ -24,5 +24,7 @@ dirs: [
   "artifacts-zk",
   "cache-zk",
   "contracts/",
-  "era-observability"
+  "era-observability",
+  "docs/js",
+  "prover/docs/js"
 ]
diff --git a/get_all_blobs/.gitignore b/get_all_blobs/.gitignore
new file mode 100644
index 000000000000..a1ee59a11803
--- /dev/null
+++ b/get_all_blobs/.gitignore
@@ -0,0 +1 @@
+blob_data.json
diff --git a/get_all_blobs/Cargo.toml b/get_all_blobs/Cargo.toml
new file mode 100644
index 000000000000..d629650f6ff1
--- /dev/null
+++ b/get_all_blobs/Cargo.toml
@@ -0,0 +1,28 @@
+[package]
+name = "get_all_blobs"
+version.workspace = true
+edition.workspace = true
+authors.workspace = true
+homepage.workspace = true
+repository.workspace = true
+license.workspace = true
+keywords.workspace = true
+categories.workspace = true
+
+[dependencies]
+anyhow.workspace = true
+tokio = { version = "1", features = ["full"] }
+axum.workspace = true
+rustls.workspace = true
+rlp.workspace = true
+hex.workspace = true
+
+reqwest.workspace = true
+serde = { version = "1.0", features = ["derive"] }
+serde_json.workspace = true
+
+tonic = { version = "0.12.1", features = ["tls", "channel", "tls-roots"] }
+prost = "0.13.1"
+kzgpad-rs = { git = "https://github.com/Layr-Labs/kzgpad-rs.git", tag = "v0.1.0" }
+alloy = { version = "0.3", features = ["full"] }
+futures = "0.3"
diff --git a/get_all_blobs/abi/commitBatchesSharedBridge.json b/get_all_blobs/abi/commitBatchesSharedBridge.json
new file mode 100644
index 000000000000..877ce399c1c6
--- /dev/null
+++ b/get_all_blobs/abi/commitBatchesSharedBridge.json
@@ -0,0 +1,119 @@
+[
+  {
+    "inputs": [
+      {
+        "internalType": "uint256",
+        "name": "_chainId",
+        "type": "uint256"
+      },
+      {
+        "components": [
+          {
+            "internalType": "uint64",
+            "name": "batchNumber",
+            "type": "uint64"
+          },
+          {
+            "internalType": "bytes32",
+            "name": "batchHash",
+            "type": "bytes32"
+          },
+          {
+            "internalType": "uint64",
+            "name": "indexRepeatedStorageChanges",
+            "type": "uint64"
+          },
+          {
+            "internalType": "uint256",
+            "name": "numberOfLayer1Txs",
+            "type": "uint256"
+          },
+          {
+            "internalType": "bytes32",
+            "name": "priorityOperationsHash",
+            "type": "bytes32"
+          },
+          {
+            "internalType": "bytes32",
+            "name": "l2LogsTreeRoot",
+            "type": "bytes32"
+          },
+          {
+            "internalType": "uint256",
+            "name": "timestamp",
+            "type": "uint256"
+          },
+          {
+            "internalType": "bytes32",
+            "name": "commitment",
+            "type": "bytes32"
+          }
+        ],
+        "internalType": "struct IExecutor.StoredBatchInfo",
+        "name": "",
+        "type": "tuple"
+      },
+      {
+        "components": [
+          {
"uint64", + "name": "batchNumber", + "type": "uint64" + }, + { + "internalType": "uint64", + "name": "timestamp", + "type": "uint64" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "newStateRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "bootloaderHeapInitialContentsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "eventsQueueStateHash", + "type": "bytes32" + }, + { + "internalType": "bytes", + "name": "systemLogs", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "pubdataCommitments", + "type": "bytes" + } + ], + "internalType": "struct IExecutor.CommitBatchInfo[]", + "name": "_newBatchesData", + "type": "tuple[]" + } + ], + "name": "commitBatchesSharedBridge", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + } +] diff --git a/get_all_blobs/src/blob_info.rs b/get_all_blobs/src/blob_info.rs new file mode 100644 index 000000000000..caefe3391deb --- /dev/null +++ b/get_all_blobs/src/blob_info.rs @@ -0,0 +1,504 @@ +use std::fmt; + +use rlp::{Decodable, DecoderError, Encodable, Rlp, RlpStream}; + +use crate::generated::{ + common::G1Commitment as DisperserG1Commitment, + disperser::{ + BatchHeader as DisperserBatchHeader, BatchMetadata as DisperserBatchMetadata, + BlobHeader as DisperserBlobHeader, BlobInfo as DisperserBlobInfo, + BlobQuorumParam as DisperserBlobQuorumParam, + BlobVerificationProof as DisperserBlobVerificationProof, + }, +}; + +#[derive(Debug)] +pub enum ConversionError { + NotPresentError, +} + +impl fmt::Display for ConversionError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ConversionError::NotPresentError => write!(f, "Failed to convert BlobInfo"), + } + } +} + +#[derive(Debug, PartialEq, Clone)] +pub struct G1Commitment { + pub x: Vec, + pub y: Vec, +} + +impl G1Commitment { + pub fn to_bytes(&self) -> Vec { + let mut bytes = vec![]; + bytes.extend(&self.x.len().to_be_bytes()); + bytes.extend(&self.x); + bytes.extend(&self.y.len().to_be_bytes()); + bytes.extend(&self.y); + + bytes + } +} + +impl Decodable for G1Commitment { + fn decode(rlp: &Rlp) -> Result { + let x: Vec = rlp.val_at(0)?; // Decode first element as Vec + let y: Vec = rlp.val_at(1)?; // Decode second element as Vec + + Ok(G1Commitment { x, y }) + } +} + +impl Encodable for G1Commitment { + fn rlp_append(&self, s: &mut RlpStream) { + s.begin_list(2); + s.append(&self.x); + s.append(&self.y); + } +} + +impl From for G1Commitment { + fn from(value: DisperserG1Commitment) -> Self { + Self { + x: value.x, + y: value.y, + } + } +} + +#[derive(Debug, PartialEq, Clone)] +pub struct BlobQuorumParam { + pub quorum_number: u32, + pub adversary_threshold_percentage: u32, + pub confirmation_threshold_percentage: u32, + pub chunk_length: u32, +} + +impl BlobQuorumParam { + pub fn to_bytes(&self) -> Vec { + let mut bytes = vec![]; + bytes.extend(&self.quorum_number.to_be_bytes()); + bytes.extend(&self.adversary_threshold_percentage.to_be_bytes()); + bytes.extend(&self.confirmation_threshold_percentage.to_be_bytes()); + bytes.extend(&self.chunk_length.to_be_bytes()); + + bytes + } +} + +impl Decodable for BlobQuorumParam { + fn decode(rlp: &Rlp) -> Result { + Ok(BlobQuorumParam { + quorum_number: rlp.val_at(0)?, + 
+
+#[derive(Debug, PartialEq, Clone)]
+pub struct BlobQuorumParam {
+    pub quorum_number: u32,
+    pub adversary_threshold_percentage: u32,
+    pub confirmation_threshold_percentage: u32,
+    pub chunk_length: u32,
+}
+
+impl BlobQuorumParam {
+    pub fn to_bytes(&self) -> Vec<u8> {
+        let mut bytes = vec![];
+        bytes.extend(&self.quorum_number.to_be_bytes());
+        bytes.extend(&self.adversary_threshold_percentage.to_be_bytes());
+        bytes.extend(&self.confirmation_threshold_percentage.to_be_bytes());
+        bytes.extend(&self.chunk_length.to_be_bytes());
+
+        bytes
+    }
+}
+
+impl Decodable for BlobQuorumParam {
+    fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {
+        Ok(BlobQuorumParam {
+            quorum_number: rlp.val_at(0)?,
+            adversary_threshold_percentage: rlp.val_at(1)?,
+            confirmation_threshold_percentage: rlp.val_at(2)?,
+            chunk_length: rlp.val_at(3)?,
+        })
+    }
+}
+
+impl Encodable for BlobQuorumParam {
+    fn rlp_append(&self, s: &mut RlpStream) {
+        s.begin_list(4);
+        s.append(&self.quorum_number);
+        s.append(&self.adversary_threshold_percentage);
+        s.append(&self.confirmation_threshold_percentage);
+        s.append(&self.chunk_length);
+    }
+}
+
+impl From<DisperserBlobQuorumParam> for BlobQuorumParam {
+    fn from(value: DisperserBlobQuorumParam) -> Self {
+        Self {
+            quorum_number: value.quorum_number,
+            adversary_threshold_percentage: value.adversary_threshold_percentage,
+            confirmation_threshold_percentage: value.confirmation_threshold_percentage,
+            chunk_length: value.chunk_length,
+        }
+    }
+}
+
+#[derive(Debug, PartialEq, Clone)]
+pub struct BlobHeader {
+    pub commitment: G1Commitment,
+    pub data_length: u32,
+    pub blob_quorum_params: Vec<BlobQuorumParam>,
+}
+
+impl BlobHeader {
+    pub fn to_bytes(&self) -> Vec<u8> {
+        let mut bytes = vec![];
+        bytes.extend(self.commitment.to_bytes());
+        bytes.extend(&self.data_length.to_be_bytes());
+        bytes.extend(&self.blob_quorum_params.len().to_be_bytes());
+
+        for quorum in &self.blob_quorum_params {
+            bytes.extend(quorum.to_bytes());
+        }
+
+        bytes
+    }
+}
+
+impl Decodable for BlobHeader {
+    fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {
+        let commitment: G1Commitment = rlp.val_at(0)?;
+        let data_length: u32 = rlp.val_at(1)?;
+        let blob_quorum_params: Vec<BlobQuorumParam> = rlp.list_at(2)?;
+
+        Ok(BlobHeader {
+            commitment,
+            data_length,
+            blob_quorum_params,
+        })
+    }
+}
+
+impl Encodable for BlobHeader {
+    fn rlp_append(&self, s: &mut RlpStream) {
+        s.begin_list(3);
+        s.append(&self.commitment);
+        s.append(&self.data_length);
+        s.append_list(&self.blob_quorum_params);
+    }
+}
+
+impl TryFrom<DisperserBlobHeader> for BlobHeader {
+    type Error = ConversionError;
+    fn try_from(value: DisperserBlobHeader) -> Result<Self, Self::Error> {
+        if value.commitment.is_none() {
+            return Err(ConversionError::NotPresentError);
+        }
+        let blob_quorum_params: Vec<BlobQuorumParam> = value
+            .blob_quorum_params
+            .iter()
+            .map(|param| BlobQuorumParam::from(param.clone()))
+            .collect();
+        Ok(Self {
+            commitment: G1Commitment::from(value.commitment.unwrap()),
+            data_length: value.data_length,
+            blob_quorum_params,
+        })
+    }
+}
+
+#[derive(Debug, PartialEq, Clone)]
+pub struct BatchHeader {
+    pub batch_root: Vec<u8>,
+    pub quorum_numbers: Vec<u8>,
+    pub quorum_signed_percentages: Vec<u8>,
+    pub reference_block_number: u32,
+}
+
+impl BatchHeader {
+    pub fn to_bytes(&self) -> Vec<u8> {
+        let mut bytes = vec![];
+        bytes.extend(&self.batch_root.len().to_be_bytes());
+        bytes.extend(&self.batch_root);
+        bytes.extend(&self.quorum_numbers.len().to_be_bytes());
+        bytes.extend(&self.quorum_numbers);
+        bytes.extend(&self.quorum_signed_percentages.len().to_be_bytes());
+        bytes.extend(&self.quorum_signed_percentages);
+        bytes.extend(&self.reference_block_number.to_be_bytes());
+
+        bytes
+    }
+}
+
+impl Decodable for BatchHeader {
+    fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {
+        Ok(BatchHeader {
+            batch_root: rlp.val_at(0)?,
+            quorum_numbers: rlp.val_at(1)?,
+            quorum_signed_percentages: rlp.val_at(2)?,
+            reference_block_number: rlp.val_at(3)?,
+        })
+    }
+}
+
+impl Encodable for BatchHeader {
+    fn rlp_append(&self, s: &mut RlpStream) {
+        s.begin_list(4);
+        s.append(&self.batch_root);
+        s.append(&self.quorum_numbers);
+        s.append(&self.quorum_signed_percentages);
+        s.append(&self.reference_block_number);
+    }
+}
+
+impl From<DisperserBatchHeader> for BatchHeader {
+    fn from(value: DisperserBatchHeader) -> Self {
+        Self {
+            batch_root: value.batch_root,
+            quorum_numbers: value.quorum_numbers,
+            quorum_signed_percentages: value.quorum_signed_percentages,
+            reference_block_number: value.reference_block_number,
+        }
+    }
+}
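+
+// `BatchMetadata` below mirrors `disperser.BatchMetadata` from the generated
+// protobuf types: `signatory_record_hash` hashes the public keys of the operators
+// that did not sign the batch, `fee` holds the bytes of a big.Int fee value, and
+// `confirmation_block_number` is the Ethereum block at which the batch was
+// confirmed onchain (see the field docs in `generated/disperser.rs`).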
+
+#[derive(Debug, PartialEq, Clone)]
+pub struct BatchMetadata {
+    pub batch_header: BatchHeader,
+    pub signatory_record_hash: Vec<u8>,
+    pub fee: Vec<u8>,
+    pub confirmation_block_number: u32,
+    pub batch_header_hash: Vec<u8>,
+}
+
+impl BatchMetadata {
+    pub fn to_bytes(&self) -> Vec<u8> {
+        let mut bytes = vec![];
+        bytes.extend(self.batch_header.to_bytes());
+        bytes.extend(&self.signatory_record_hash);
+        bytes.extend(&self.confirmation_block_number.to_be_bytes());
+
+        bytes
+    }
+}
+
+impl Decodable for BatchMetadata {
+    fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {
+        let batch_header: BatchHeader = rlp.val_at(0)?;
+
+        Ok(BatchMetadata {
+            batch_header,
+            signatory_record_hash: rlp.val_at(1)?,
+            fee: rlp.val_at(2)?,
+            confirmation_block_number: rlp.val_at(3)?,
+            batch_header_hash: rlp.val_at(4)?,
+        })
+    }
+}
+
+impl Encodable for BatchMetadata {
+    fn rlp_append(&self, s: &mut RlpStream) {
+        s.begin_list(5);
+        s.append(&self.batch_header);
+        s.append(&self.signatory_record_hash);
+        s.append(&self.fee);
+        s.append(&self.confirmation_block_number);
+        s.append(&self.batch_header_hash);
+    }
+}
+
+impl TryFrom<DisperserBatchMetadata> for BatchMetadata {
+    type Error = ConversionError;
+    fn try_from(value: DisperserBatchMetadata) -> Result<Self, Self::Error> {
+        if value.batch_header.is_none() {
+            return Err(ConversionError::NotPresentError);
+        }
+        Ok(Self {
+            batch_header: BatchHeader::from(value.batch_header.unwrap()),
+            signatory_record_hash: value.signatory_record_hash,
+            fee: value.fee,
+            confirmation_block_number: value.confirmation_block_number,
+            batch_header_hash: value.batch_header_hash,
+        })
+    }
+}
+
+#[derive(Debug, PartialEq, Clone)]
+pub struct BlobVerificationProof {
+    pub batch_id: u32,
+    pub blob_index: u32,
+    pub batch_medatada: BatchMetadata,
+    pub inclusion_proof: Vec<u8>,
+    pub quorum_indexes: Vec<u8>,
+}
+
+impl BlobVerificationProof {
+    pub fn to_bytes(&self) -> Vec<u8> {
+        let mut bytes = vec![];
+        bytes.extend(&self.batch_id.to_be_bytes());
+        bytes.extend(&self.blob_index.to_be_bytes());
+        bytes.extend(self.batch_medatada.to_bytes());
+        bytes.extend(&self.inclusion_proof.len().to_be_bytes());
+        bytes.extend(&self.inclusion_proof);
+        bytes.extend(&self.quorum_indexes.len().to_be_bytes());
+        bytes.extend(&self.quorum_indexes);
+
+        bytes
+    }
+}
+
+impl Decodable for BlobVerificationProof {
+    fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {
+        Ok(BlobVerificationProof {
+            batch_id: rlp.val_at(0)?,
+            blob_index: rlp.val_at(1)?,
+            batch_medatada: rlp.val_at(2)?,
+            inclusion_proof: rlp.val_at(3)?,
+            quorum_indexes: rlp.val_at(4)?,
+        })
+    }
+}
+
+impl Encodable for BlobVerificationProof {
+    fn rlp_append(&self, s: &mut RlpStream) {
+        s.begin_list(5);
+        s.append(&self.batch_id);
+        s.append(&self.blob_index);
+        s.append(&self.batch_medatada);
+        s.append(&self.inclusion_proof);
+        s.append(&self.quorum_indexes);
+    }
+}
+
+impl TryFrom<DisperserBlobVerificationProof> for BlobVerificationProof {
+    type Error = ConversionError;
+    fn try_from(value: DisperserBlobVerificationProof) -> Result<Self, Self::Error> {
+        if value.batch_metadata.is_none() {
+            return Err(ConversionError::NotPresentError);
+        }
+        Ok(Self {
+            batch_id: value.batch_id,
+            blob_index: value.blob_index,
+            batch_medatada: BatchMetadata::try_from(value.batch_metadata.unwrap())?,
+            inclusion_proof: value.inclusion_proof,
+            quorum_indexes: value.quorum_indexes,
+        })
+    }
+}
+
+#[derive(Debug, PartialEq, Clone)]
+pub struct BlobInfo {
+    pub blob_header: BlobHeader,
+    pub blob_verification_proof: BlobVerificationProof,
+}
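+
+// The hex-encoded RLP bytes of a `BlobInfo` are what this tool passes around as
+// the blob "commitment" string: `client.rs` decodes that string and then fetches
+// the blob by the embedded batch header hash and blob index. Roughly (with
+// `commitment_hex` as an assumed hex-string input):
+//
+//     let info: BlobInfo = rlp::decode(&hex::decode(commitment_hex)?)?;
+//     let index = info.blob_verification_proof.blob_index;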
+
+impl BlobInfo {
+    pub fn to_bytes(&self) -> Vec<u8> {
+        let mut bytes = vec![];
+        let blob_header_bytes = self.blob_header.to_bytes();
+        bytes.extend(blob_header_bytes.len().to_be_bytes());
+        bytes.extend(blob_header_bytes);
+        let blob_verification_proof_bytes = self.blob_verification_proof.to_bytes();
+        bytes.extend(blob_verification_proof_bytes);
+        bytes
+    }
+}
+
+impl Decodable for BlobInfo {
+    fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {
+        let blob_header: BlobHeader = rlp.val_at(0)?;
+        let blob_verification_proof: BlobVerificationProof = rlp.val_at(1)?;
+
+        Ok(BlobInfo {
+            blob_header,
+            blob_verification_proof,
+        })
+    }
+}
+
+impl Encodable for BlobInfo {
+    fn rlp_append(&self, s: &mut RlpStream) {
+        s.begin_list(2);
+        s.append(&self.blob_header);
+        s.append(&self.blob_verification_proof);
+    }
+}
+
+impl TryFrom<DisperserBlobInfo> for BlobInfo {
+    type Error = ConversionError;
+    fn try_from(value: DisperserBlobInfo) -> Result<Self, Self::Error> {
+        if value.blob_header.is_none() || value.blob_verification_proof.is_none() {
+            return Err(ConversionError::NotPresentError);
+        }
+        Ok(Self {
+            blob_header: BlobHeader::try_from(value.blob_header.unwrap())?,
+            blob_verification_proof: BlobVerificationProof::try_from(
+                value.blob_verification_proof.unwrap(),
+            )?,
+        })
+    }
+}
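+
+// The round-trip test below feeds realistic sample data (32-byte roots and
+// hashes, two quorums) through every nested `Encodable`/`Decodable` impl in this
+// file at once, so a positional mismatch in any of them fails the assertion.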
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn test_blob_info_encoding_and_decoding() {
+        let blob_info = BlobInfo {
+            blob_header: BlobHeader {
+                commitment: G1Commitment {
+                    x: vec![
+                        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+                        0, 0, 0, 0, 0, 0, 0,
+                    ],
+                    y: vec![
+                        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+                        0, 0, 0, 0, 0, 0, 0,
+                    ],
+                },
+                data_length: 4,
+                blob_quorum_params: vec![
+                    BlobQuorumParam {
+                        quorum_number: 0,
+                        adversary_threshold_percentage: 33,
+                        confirmation_threshold_percentage: 55,
+                        chunk_length: 1,
+                    },
+                    BlobQuorumParam {
+                        quorum_number: 1,
+                        adversary_threshold_percentage: 33,
+                        confirmation_threshold_percentage: 55,
+                        chunk_length: 1,
+                    },
+                ],
+            },
+            blob_verification_proof: BlobVerificationProof {
+                batch_id: 66507,
+                blob_index: 92,
+                batch_medatada: BatchMetadata {
+                    batch_header: BatchHeader {
+                        batch_root: vec![
+                            179, 187, 53, 98, 192, 80, 151, 28, 125, 192, 115, 29, 129, 238, 216,
+                            8, 213, 210, 203, 143, 181, 19, 146, 113, 98, 131, 39, 238, 149, 248,
+                            211, 43,
+                        ],
+                        quorum_numbers: vec![0, 1],
+                        quorum_signed_percentages: vec![100, 100],
+                        reference_block_number: 2624794,
+                    },
+                    signatory_record_hash: vec![
+                        172, 32, 172, 142, 197, 52, 84, 143, 120, 26, 190, 9, 143, 217, 62, 19, 17,
+                        107, 105, 67, 203, 5, 172, 249, 6, 60, 105, 240, 134, 34, 66, 133,
+                    ],
+                    fee: vec![0],
+                    confirmation_block_number: 2624876,
+                    batch_header_hash: vec![
+                        122, 115, 2, 85, 233, 75, 121, 85, 51, 81, 248, 170, 198, 252, 42, 16, 1,
+                        146, 96, 218, 159, 44, 41, 40, 94, 247, 147, 11, 255, 68, 40, 177,
+                    ],
+                },
+                inclusion_proof: vec![
+                    203, 160, 237, 48, 117, 255, 75, 254, 117, 144, 164, 77, 29, 146, 36, 48, 190,
+                    140, 50, 100, 144, 237, 125, 125, 75, 54, 210, 247, 147, 23, 48, 189, 120, 4,
+                    125, 123, 195, 244, 207, 239, 145, 109, 0, 21, 11, 162, 109, 79, 192, 100, 138,
+                    157, 203, 22, 17, 114, 234, 72, 174, 231, 209, 133, 99, 118, 201, 160, 137,
+                    128, 112, 84, 34, 136, 174, 139, 96, 26, 246, 148, 134, 52, 200, 229, 160, 145,
+                    5, 120, 18, 187, 51, 11, 109, 91, 237, 171, 215, 207, 90, 95, 146, 54, 135,
+                    166, 66, 157, 255, 237, 69, 183, 141, 45, 162, 145, 71, 16, 87, 184, 120, 84,
+                    156, 220, 159, 4, 99, 48, 191, 203, 136, 112, 127, 226, 192, 184, 110, 6, 177,
+                    182, 109, 207, 197, 239, 161, 132, 17, 89, 56, 137, 205, 202, 101, 97, 60, 162,
+                    253, 23, 169, 75, 236, 211, 126, 121, 132, 191, 68, 167, 200, 16, 154, 149,
+                    202, 197, 7, 191, 26, 8, 67, 3, 37, 137, 16, 153, 30, 209, 238, 53, 233, 148,
+                    198, 253, 94, 216, 73, 25, 190, 205, 132, 208, 255, 219, 170, 98, 17, 160, 179,
+                    183, 200, 17, 99, 36, 130, 216, 223, 72, 222, 250, 73, 78, 79, 72, 253, 105,
+                    245, 84, 244, 196,
+                ],
+                quorum_indexes: vec![0, 1],
+            },
+        };
+
+        let encoded_blob_info = rlp::encode(&blob_info);
+        let decoded_blob_info: BlobInfo = rlp::decode(&encoded_blob_info).unwrap();
+
+        assert_eq!(blob_info, decoded_blob_info);
+    }
+}
diff --git a/get_all_blobs/src/client.rs b/get_all_blobs/src/client.rs
new file mode 100644
index 000000000000..0996b150a472
--- /dev/null
+++ b/get_all_blobs/src/client.rs
@@ -0,0 +1,52 @@
+use std::str::FromStr;
+
+use tonic::transport::{Channel, ClientTlsConfig, Endpoint};
+
+use crate::{
+    blob_info::BlobInfo,
+    generated::{disperser, disperser::disperser_client::DisperserClient},
+};
+
+#[derive(Debug, Clone)]
+pub struct EigenClientRetriever {
+    client: DisperserClient<Channel>,
+}
+
+impl EigenClientRetriever {
+    pub async fn new(disperser_rpc: &str) -> anyhow::Result<Self> {
+        let endpoint = Endpoint::from_str(disperser_rpc)?.tls_config(ClientTlsConfig::new())?;
+        let client = DisperserClient::connect(endpoint)
+            .await
+            .map_err(|e| anyhow::anyhow!("Failed to connect to Disperser server: {}", e))?;
+
+        Ok(EigenClientRetriever { client })
+    }
+
+    pub async fn get_blob_data(&self, blob_id: &str) -> anyhow::Result<Option<Vec<u8>>> {
+        let commit = hex::decode(blob_id)?;
+
+        let blob_info: BlobInfo = rlp::decode(&commit)?;
+        let blob_index = blob_info.blob_verification_proof.blob_index;
+        let batch_header_hash = blob_info
+            .blob_verification_proof
+            .batch_medatada
+            .batch_header_hash;
+        let get_response = self
+            .client
+            .clone()
+            .retrieve_blob(disperser::RetrieveBlobRequest {
+                batch_header_hash,
+                blob_index,
+            })
+            .await
+            .unwrap()
+            .into_inner();
+
+        if get_response.data.is_empty() {
+            panic!("Empty data returned from Disperser")
+        }
+
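+        // The disperser returns the blob in padded form: every 32-byte chunk must
+        // be a valid bn254 field element (see the `DisperseBlobRequest.data` docs
+        // in `generated/disperser.rs`), so an empty byte is inserted per 31 data
+        // bytes on dispersal (the kzgpad convention); the call below undoes that
+        // padding to recover the original payload.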
+        let data = kzgpad_rs::remove_empty_byte_from_padded_bytes(&get_response.data);
+        Ok(Some(data))
+    }
+}
diff --git a/get_all_blobs/src/generated/common.rs b/get_all_blobs/src/generated/common.rs
new file mode 100644
index 000000000000..0599b9af4127
--- /dev/null
+++ b/get_all_blobs/src/generated/common.rs
@@ -0,0 +1,63 @@
+// This file is @generated by prost-build.
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct G1Commitment {
+    /// The X coordinate of the KZG commitment. This is the raw byte representation of the field element.
+    #[prost(bytes = "vec", tag = "1")]
+    pub x: ::prost::alloc::vec::Vec<u8>,
+    /// The Y coordinate of the KZG commitment. This is the raw byte representation of the field element.
+    #[prost(bytes = "vec", tag = "2")]
+    pub y: ::prost::alloc::vec::Vec<u8>,
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct G2Commitment {
+    /// The A0 element of the X coordinate of G2 point.
+    #[prost(bytes = "vec", tag = "1")]
+    pub x_a0: ::prost::alloc::vec::Vec<u8>,
+    /// The A1 element of the X coordinate of G2 point.
+    #[prost(bytes = "vec", tag = "2")]
+    pub x_a1: ::prost::alloc::vec::Vec<u8>,
+    /// The A0 element of the Y coordinate of G2 point.
+    #[prost(bytes = "vec", tag = "3")]
+    pub y_a0: ::prost::alloc::vec::Vec<u8>,
+    /// The A1 element of the Y coordinate of G2 point.
+    #[prost(bytes = "vec", tag = "4")]
+    pub y_a1: ::prost::alloc::vec::Vec<u8>,
+}
+/// BlobCommitment represents commitment of a specific blob, containing its
+/// KZG commitment, degree proof, the actual degree, and data length in number of symbols.
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct BlobCommitment {
+    #[prost(message, optional, tag = "1")]
+    pub commitment: ::core::option::Option<G1Commitment>,
+    #[prost(message, optional, tag = "2")]
+    pub length_commitment: ::core::option::Option<G2Commitment>,
+    #[prost(message, optional, tag = "3")]
+    pub length_proof: ::core::option::Option<G2Commitment>,
+    #[prost(uint32, tag = "4")]
+    pub data_length: u32,
+}
+/// BlobCertificate is what gets attested by the network
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct BlobCertificate {
+    #[prost(uint32, tag = "1")]
+    pub version: u32,
+    #[prost(bytes = "vec", tag = "2")]
+    pub blob_key: ::prost::alloc::vec::Vec<u8>,
+    #[prost(message, optional, tag = "3")]
+    pub blob_commitment: ::core::option::Option<BlobCommitment>,
+    #[prost(uint32, repeated, tag = "4")]
+    pub quorum_numbers: ::prost::alloc::vec::Vec<u32>,
+    #[prost(uint32, tag = "5")]
+    pub reference_block_number: u32,
+}
+/// A chunk of a blob.
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct ChunkData {
+    #[prost(bytes = "vec", tag = "1")]
+    pub data: ::prost::alloc::vec::Vec<u8>,
+}
diff --git a/get_all_blobs/src/generated/disperser.rs b/get_all_blobs/src/generated/disperser.rs
new file mode 100644
index 000000000000..b2ff5edc183c
--- /dev/null
+++ b/get_all_blobs/src/generated/disperser.rs
@@ -0,0 +1,486 @@
+// This file is @generated by prost-build.
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct AuthenticatedRequest {
+    #[prost(oneof = "authenticated_request::Payload", tags = "1, 2")]
+    pub payload: ::core::option::Option<authenticated_request::Payload>,
+}
+/// Nested message and enum types in `AuthenticatedRequest`.
+pub mod authenticated_request {
+    #[allow(clippy::derive_partial_eq_without_eq)]
+    #[derive(Clone, PartialEq, ::prost::Oneof)]
+    pub enum Payload {
+        #[prost(message, tag = "1")]
+        DisperseRequest(super::DisperseBlobRequest),
+        #[prost(message, tag = "2")]
+        AuthenticationData(super::AuthenticationData),
+    }
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct AuthenticatedReply {
+    #[prost(oneof = "authenticated_reply::Payload", tags = "1, 2")]
+    pub payload: ::core::option::Option<authenticated_reply::Payload>,
+}
+/// Nested message and enum types in `AuthenticatedReply`.
+pub mod authenticated_reply {
+    #[allow(clippy::derive_partial_eq_without_eq)]
+    #[derive(Clone, PartialEq, ::prost::Oneof)]
+    pub enum Payload {
+        #[prost(message, tag = "1")]
+        BlobAuthHeader(super::BlobAuthHeader),
+        #[prost(message, tag = "2")]
+        DisperseReply(super::DisperseBlobReply),
+    }
+}
+/// BlobAuthHeader contains information about the blob for the client to verify and sign.
+/// - Once payments are enabled, the BlobAuthHeader will contain the KZG commitment to the blob, which the client
+/// will verify and sign. Having the client verify the KZG commitment instead of calculating it avoids
+/// the need for the client to have the KZG structured reference string (SRS), which can be large.
+/// The signed KZG commitment prevents the disperser from sending a different blob to the DA Nodes
+/// than the one the client sent.
+/// - In the meantime, the BlobAuthHeader contains a simple challenge parameter that is used to prevent
+/// replay attacks in the event that a signature is leaked.
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct BlobAuthHeader {
+    #[prost(uint32, tag = "1")]
+    pub challenge_parameter: u32,
+}
+/// AuthenticationData contains the signature of the BlobAuthHeader.
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct AuthenticationData {
+    #[prost(bytes = "vec", tag = "1")]
+    pub authentication_data: ::prost::alloc::vec::Vec<u8>,
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct DisperseBlobRequest {
+    /// The data to be dispersed.
+    /// The size of data must be <= 2MiB. Every 32 bytes of data chunk is interpreted as an integer in big endian format
+    /// where the lower address has more significant bits. The integer must stay in the valid range to be interpreted
+    /// as a field element on the bn254 curve. The valid range is
+    /// 0 <= x < 21888242871839275222246405745257275088548364400416034343698204186575808495617
+    /// containing slightly less than 254 bits and more than 253 bits. If any one of the 32 bytes chunk is outside the range,
+    /// the whole request is deemed as invalid, and rejected.
+    #[prost(bytes = "vec", tag = "1")]
+    pub data: ::prost::alloc::vec::Vec<u8>,
+    /// The quorums to which the blob will be sent, in addition to the required quorums which are configured
+    /// on the EigenDA smart contract. If required quorums are included here, an error will be returned.
+    /// The disperser will ensure that the encoded blobs for each quorum are all processed
+    /// within the same batch.
+    #[prost(uint32, repeated, tag = "2")]
+    pub custom_quorum_numbers: ::prost::alloc::vec::Vec<u32>,
+    /// The account ID of the client. This should be a hex-encoded string of the ECDSA public key
+    /// corresponding to the key used by the client to sign the BlobAuthHeader.
+    #[prost(string, tag = "3")]
+    pub account_id: ::prost::alloc::string::String,
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct DisperseBlobReply {
+    /// The status of the blob associated with the request_id.
+    #[prost(enumeration = "BlobStatus", tag = "1")]
+    pub result: i32,
+    /// The request ID generated by the disperser.
+    /// Once a request is accepted (although not processed), a unique request ID will be
+    /// generated.
+    /// Two different DisperseBlobRequests (determined by the hash of the DisperseBlobRequest)
+    /// will have different IDs, and the same DisperseBlobRequest sent repeatedly at different
+    /// times will also have different IDs.
+    /// The client should use this ID to query the processing status of the request (via
+    /// the GetBlobStatus API).
+    #[prost(bytes = "vec", tag = "2")]
+    pub request_id: ::prost::alloc::vec::Vec<u8>,
+}
+/// BlobStatusRequest is used to query the status of a blob.
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct BlobStatusRequest {
+    #[prost(bytes = "vec", tag = "1")]
+    pub request_id: ::prost::alloc::vec::Vec<u8>,
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct BlobStatusReply {
+    /// The status of the blob.
+    #[prost(enumeration = "BlobStatus", tag = "1")]
+    pub status: i32,
+    /// The blob info needed for clients to confirm the blob against the EigenDA contracts.
+    #[prost(message, optional, tag = "2")]
+    pub info: ::core::option::Option<BlobInfo>,
+}
+/// RetrieveBlobRequest contains parameters to retrieve the blob.
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct RetrieveBlobRequest {
+    #[prost(bytes = "vec", tag = "1")]
+    pub batch_header_hash: ::prost::alloc::vec::Vec<u8>,
+    #[prost(uint32, tag = "2")]
+    pub blob_index: u32,
+}
+/// RetrieveBlobReply contains the retrieved blob data
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct RetrieveBlobReply {
+    #[prost(bytes = "vec", tag = "1")]
+    pub data: ::prost::alloc::vec::Vec<u8>,
+}
+/// BlobInfo contains information needed to confirm the blob against the EigenDA contracts
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct BlobInfo {
+    #[prost(message, optional, tag = "1")]
+    pub blob_header: ::core::option::Option<BlobHeader>,
+    #[prost(message, optional, tag = "2")]
+    pub blob_verification_proof: ::core::option::Option<BlobVerificationProof>,
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct BlobHeader {
+    /// KZG commitment of the blob.
+    #[prost(message, optional, tag = "1")]
+    pub commitment: ::core::option::Option<super::common::G1Commitment>,
+    /// The length of the blob in symbols (each symbol is 32 bytes).
+    #[prost(uint32, tag = "2")]
+    pub data_length: u32,
+    /// The params of the quorums that this blob participates in.
+    #[prost(message, repeated, tag = "3")]
+    pub blob_quorum_params: ::prost::alloc::vec::Vec<BlobQuorumParam>,
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct BlobQuorumParam {
+    /// The ID of the quorum.
+    #[prost(uint32, tag = "1")]
+    pub quorum_number: u32,
+    /// The max percentage of stake within the quorum that can be held by or delegated
+    /// to adversarial operators. Currently, this and the next parameter are standardized
+    /// across the quorum using values read from the EigenDA contracts.
+    #[prost(uint32, tag = "2")]
+    pub adversary_threshold_percentage: u32,
+    /// The min percentage of stake that must attest in order to consider
+    /// the dispersal is successful.
+    #[prost(uint32, tag = "3")]
+    pub confirmation_threshold_percentage: u32,
+    /// The length of each chunk.
+    #[prost(uint32, tag = "4")]
+    pub chunk_length: u32,
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct BlobVerificationProof {
+    /// batch_id is an incremental ID assigned to a batch by EigenDAServiceManager
+    #[prost(uint32, tag = "1")]
+    pub batch_id: u32,
+    /// The index of the blob in the batch (which is logically an ordered list of blobs).
+    #[prost(uint32, tag = "2")]
+    pub blob_index: u32,
+    #[prost(message, optional, tag = "3")]
+    pub batch_metadata: ::core::option::Option<BatchMetadata>,
+    /// inclusion_proof is a merkle proof for a blob header's inclusion in a batch
+    #[prost(bytes = "vec", tag = "4")]
+    pub inclusion_proof: ::prost::alloc::vec::Vec<u8>,
+    /// indexes of quorums in BatchHeader.quorum_numbers that match the quorums in BlobHeader.blob_quorum_params
+    /// Ex. BlobHeader.blob_quorum_params = [
+    ///      {
+    ///        quorum_number = 0,
+    ///        ...
+    ///      },
+    ///      {
+    ///        quorum_number = 3,
+    ///        ...
+    ///      },
+    ///      {
+    ///        quorum_number = 5,
+    ///        ...
+    ///      },
+    ///    ]
+    ///    BatchHeader.quorum_numbers = \[0, 5, 3\] => 0x000503
+    ///    Then, quorum_indexes = \[0, 2, 1\] => 0x000201
+    #[prost(bytes = "vec", tag = "5")]
+    pub quorum_indexes: ::prost::alloc::vec::Vec<u8>,
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct BatchMetadata {
+    #[prost(message, optional, tag = "1")]
+    pub batch_header: ::core::option::Option<BatchHeader>,
+    /// The hash of all public keys of the operators that did not sign the batch.
+    #[prost(bytes = "vec", tag = "2")]
+    pub signatory_record_hash: ::prost::alloc::vec::Vec<u8>,
+    /// The fee payment paid by users for dispersing this batch. It's the bytes
+    /// representation of a big.Int value.
+    #[prost(bytes = "vec", tag = "3")]
+    pub fee: ::prost::alloc::vec::Vec<u8>,
+    /// The Ethereum block number at which the batch is confirmed onchain.
+    #[prost(uint32, tag = "4")]
+    pub confirmation_block_number: u32,
+    /// This is the hash of the ReducedBatchHeader defined onchain, see:
+    ///
+    /// This is the message that the operators will sign their signatures on.
+    #[prost(bytes = "vec", tag = "5")]
+    pub batch_header_hash: ::prost::alloc::vec::Vec<u8>,
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct BatchHeader {
+    /// The root of the merkle tree with the hashes of blob headers as leaves.
+    #[prost(bytes = "vec", tag = "1")]
+    pub batch_root: ::prost::alloc::vec::Vec<u8>,
+    /// All quorums associated with blobs in this batch. Sorted in ascending order.
+    /// Ex. \[0, 2, 1\] => 0x000102
+    #[prost(bytes = "vec", tag = "2")]
+    pub quorum_numbers: ::prost::alloc::vec::Vec<u8>,
+    /// The percentage of stake that has signed for this batch.
+    /// The quorum_signed_percentages\[i\] is percentage for the quorum_numbers\[i\].
+    #[prost(bytes = "vec", tag = "3")]
+    pub quorum_signed_percentages: ::prost::alloc::vec::Vec<u8>,
+    /// The Ethereum block number at which the batch was created.
+    /// The Disperser will encode and disperse the blobs based on the onchain info
+    /// (e.g. operator stakes) at this block number.
+    #[prost(uint32, tag = "4")]
+    pub reference_block_number: u32,
+}
+/// BlobStatus represents the status of a blob.
+/// The status of a blob is updated as the blob is processed by the disperser.
+/// The status of a blob can be queried by the client using the GetBlobStatus API.
+/// Intermediate states are states that the blob can be in while being processed, and it can be updated to a different state:
+/// - PROCESSING
+/// - DISPERSING
+/// - CONFIRMED
+/// Terminal states are states that will not be updated to a different state:
+/// - FAILED
+/// - FINALIZED
+/// - INSUFFICIENT_SIGNATURES
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
+#[repr(i32)]
+pub enum BlobStatus {
+    Unknown = 0,
+    /// PROCESSING means that the blob is currently being processed by the disperser
+    Processing = 1,
+    /// CONFIRMED means that the blob has been dispersed to DA Nodes and the dispersed
+    /// batch containing the blob has been confirmed onchain
+    Confirmed = 2,
+    /// FAILED means that the blob has failed permanently (for reasons other than insufficient
+    /// signatures, which is a separate state)
+    Failed = 3,
+    /// FINALIZED means that the block containing the blob's confirmation transaction has been finalized on Ethereum
+    Finalized = 4,
+    /// INSUFFICIENT_SIGNATURES means that the confirmation threshold for the blob was not met
+    /// for at least one quorum.
+    InsufficientSignatures = 5,
+    /// DISPERSING means that the blob is currently being dispersed to DA Nodes and being confirmed onchain
+    Dispersing = 6,
+}
+impl BlobStatus {
+    /// String value of the enum field names used in the ProtoBuf definition.
+    ///
+    /// The values are not transformed in any way and thus are considered stable
+    /// (if the ProtoBuf definition does not change) and safe for programmatic use.
+    pub fn as_str_name(&self) -> &'static str {
+        match self {
+            BlobStatus::Unknown => "UNKNOWN",
+            BlobStatus::Processing => "PROCESSING",
+            BlobStatus::Confirmed => "CONFIRMED",
+            BlobStatus::Failed => "FAILED",
+            BlobStatus::Finalized => "FINALIZED",
+            BlobStatus::InsufficientSignatures => "INSUFFICIENT_SIGNATURES",
+            BlobStatus::Dispersing => "DISPERSING",
+        }
+    }
+    /// Creates an enum from field names used in the ProtoBuf definition.
+    pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
+        match value {
+            "UNKNOWN" => Some(Self::Unknown),
+            "PROCESSING" => Some(Self::Processing),
+            "CONFIRMED" => Some(Self::Confirmed),
+            "FAILED" => Some(Self::Failed),
+            "FINALIZED" => Some(Self::Finalized),
+            "INSUFFICIENT_SIGNATURES" => Some(Self::InsufficientSignatures),
+            "DISPERSING" => Some(Self::Dispersing),
+            _ => None,
+        }
+    }
+}
+/// Generated client implementations.
+pub mod disperser_client {
+    #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]
+    use tonic::codegen::{http::Uri, *};
+    /// Disperser defines the public APIs for dispersing blobs.
+    #[derive(Debug, Clone)]
+    pub struct DisperserClient<T> {
+        inner: tonic::client::Grpc<T>,
+    }
+    impl DisperserClient<tonic::transport::Channel> {
+        /// Attempt to create a new client by connecting to a given endpoint.
+        pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>
+        where
+            D: TryInto<tonic::transport::Endpoint>,
+            D::Error: Into<StdError>,
+        {
+            let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
+            Ok(Self::new(conn))
+        }
+    }
+    impl<T> DisperserClient<T>
+    where
+        T: tonic::client::GrpcService<tonic::body::BoxBody>,
+        T::Error: Into<StdError>,
+        T::ResponseBody: Body<Data = Bytes> + Send + 'static,
+        <T::ResponseBody as Body>::Error: Into<StdError> + Send,
+    {
+        pub fn new(inner: T) -> Self {
+            let inner = tonic::client::Grpc::new(inner);
+            Self { inner }
+        }
+        pub fn with_origin(inner: T, origin: Uri) -> Self {
+            let inner = tonic::client::Grpc::with_origin(inner, origin);
+            Self { inner }
+        }
+        pub fn with_interceptor<F>(
+            inner: T,
+            interceptor: F,
+        ) -> DisperserClient<InterceptedService<T, F>>
+        where
+            F: tonic::service::Interceptor,
+            T::ResponseBody: Default,
+            T: tonic::codegen::Service<
+                http::Request<tonic::body::BoxBody>,
+                Response = http::Response<
+                    <T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,
+                >,
+            >,
+            <T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:
+                Into<StdError> + Send + Sync,
+        {
+            DisperserClient::new(InterceptedService::new(inner, interceptor))
+        }
+        /// Compress requests with the given encoding.
+        ///
+        /// This requires the server to support it otherwise it might respond with an
+        /// error.
+        #[must_use]
+        pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
+            self.inner = self.inner.send_compressed(encoding);
+            self
+        }
+        /// Enable decompressing responses.
+        #[must_use]
+        pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
+            self.inner = self.inner.accept_compressed(encoding);
+            self
+        }
+        /// Limits the maximum size of a decoded message.
+        ///
+        /// Default: `4MB`
+        #[must_use]
+        pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
+            self.inner = self.inner.max_decoding_message_size(limit);
+            self
+        }
+        /// Limits the maximum size of an encoded message.
+        ///
+        /// Default: `usize::MAX`
+        #[must_use]
+        pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
+            self.inner = self.inner.max_encoding_message_size(limit);
+            self
+        }
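+        // Usage sketch: how the rest of this tool drives the generated client
+        // (see `client.rs` for the real call site; `endpoint` is assumed to be a
+        // reachable disperser URL such as `EIGENDA_API_URL` in `main.rs`):
+        //
+        //     let mut client = DisperserClient::connect(endpoint).await?;
+        //     let blob = client
+        //         .retrieve_blob(RetrieveBlobRequest { batch_header_hash, blob_index })
+        //         .await?
+        //         .into_inner();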
+        /// This API accepts blob to disperse from clients.
+        /// This executes the dispersal async, i.e. it returns once the request
+        /// is accepted. The client could use GetBlobStatus() API to poll the
+        /// processing status of the blob.
+        pub async fn disperse_blob(
+            &mut self,
+            request: impl tonic::IntoRequest<super::DisperseBlobRequest>,
+        ) -> std::result::Result<tonic::Response<super::DisperseBlobReply>, tonic::Status> {
+            self.inner.ready().await.map_err(|e| {
+                tonic::Status::new(
+                    tonic::Code::Unknown,
+                    format!("Service was not ready: {}", e.into()),
+                )
+            })?;
+            let codec = tonic::codec::ProstCodec::default();
+            let path = http::uri::PathAndQuery::from_static("/disperser.Disperser/DisperseBlob");
+            let mut req = request.into_request();
+            req.extensions_mut()
+                .insert(GrpcMethod::new("disperser.Disperser", "DisperseBlob"));
+            self.inner.unary(req, path, codec).await
+        }
+        /// DisperseBlobAuthenticated is similar to DisperseBlob, except that it requires the
+        /// client to authenticate itself via the AuthenticationData message. The protocol is as follows:
+        /// 1. The client sends a DisperseBlobAuthenticated request with the DisperseBlobRequest message
+        /// 2. The Disperser sends back a BlobAuthHeader message containing information for the client to
+        ///    verify and sign.
+        /// 3. The client verifies the BlobAuthHeader and sends back the signed BlobAuthHeader in an
+        ///    AuthenticationData message.
+        /// 4. The Disperser verifies the signature and returns a DisperseBlobReply message.
+        pub async fn disperse_blob_authenticated(
+            &mut self,
+            request: impl tonic::IntoStreamingRequest<Message = super::AuthenticatedRequest>,
+        ) -> std::result::Result<
+            tonic::Response<tonic::codec::Streaming<super::AuthenticatedReply>>,
+            tonic::Status,
+        > {
+            self.inner.ready().await.map_err(|e| {
+                tonic::Status::new(
+                    tonic::Code::Unknown,
+                    format!("Service was not ready: {}", e.into()),
+                )
+            })?;
+            let codec = tonic::codec::ProstCodec::default();
+            let path = http::uri::PathAndQuery::from_static(
+                "/disperser.Disperser/DisperseBlobAuthenticated",
+            );
+            let mut req = request.into_streaming_request();
+            req.extensions_mut().insert(GrpcMethod::new(
+                "disperser.Disperser",
+                "DisperseBlobAuthenticated",
+            ));
+            self.inner.streaming(req, path, codec).await
+        }
+        /// This API is meant to be polled for the blob status.
+        pub async fn get_blob_status(
+            &mut self,
+            request: impl tonic::IntoRequest<super::BlobStatusRequest>,
+        ) -> std::result::Result<tonic::Response<super::BlobStatusReply>, tonic::Status> {
+            self.inner.ready().await.map_err(|e| {
+                tonic::Status::new(
+                    tonic::Code::Unknown,
+                    format!("Service was not ready: {}", e.into()),
+                )
+            })?;
+            let codec = tonic::codec::ProstCodec::default();
+            let path = http::uri::PathAndQuery::from_static("/disperser.Disperser/GetBlobStatus");
+            let mut req = request.into_request();
+            req.extensions_mut()
+                .insert(GrpcMethod::new("disperser.Disperser", "GetBlobStatus"));
+            self.inner.unary(req, path, codec).await
+        }
+        /// This retrieves the requested blob from the Disperser's backend.
+        /// This is a more efficient way to retrieve blobs than directly retrieving
+        /// from the DA Nodes (see detail about this approach in
+        /// api/proto/retriever/retriever.proto).
+        /// The blob should have been initially dispersed via this Disperser service
+        /// for this API to work.
+        pub async fn retrieve_blob(
+            &mut self,
+            request: impl tonic::IntoRequest<super::RetrieveBlobRequest>,
+        ) -> std::result::Result<tonic::Response<super::RetrieveBlobReply>, tonic::Status> {
+            self.inner.ready().await.map_err(|e| {
+                tonic::Status::new(
+                    tonic::Code::Unknown,
+                    format!("Service was not ready: {}", e.into()),
+                )
+            })?;
+            let codec = tonic::codec::ProstCodec::default();
+            let path = http::uri::PathAndQuery::from_static("/disperser.Disperser/RetrieveBlob");
+            let mut req = request.into_request();
+            req.extensions_mut()
+                .insert(GrpcMethod::new("disperser.Disperser", "RetrieveBlob"));
+            self.inner.unary(req, path, codec).await
+        }
+    }
+}
diff --git a/get_all_blobs/src/generated/mod.rs b/get_all_blobs/src/generated/mod.rs
new file mode 100644
index 000000000000..d77a351741d9
--- /dev/null
+++ b/get_all_blobs/src/generated/mod.rs
@@ -0,0 +1,3 @@
+pub(crate) mod common;
+pub(crate) mod disperser;
+// pub(crate) mod eigendaservicemanager;
diff --git a/get_all_blobs/src/main.rs b/get_all_blobs/src/main.rs
new file mode 100644
index 000000000000..215108d554ed
--- /dev/null
+++ b/get_all_blobs/src/main.rs
@@ -0,0 +1,164 @@
+use std::{fs, str::FromStr};
+
+use alloy::{
+    dyn_abi::JsonAbiExt,
+    json_abi::JsonAbi,
+    network::Ethereum,
+    primitives::Address,
+    providers::{Provider, RootProvider},
+};
+use client::EigenClientRetriever;
+use serde::{Deserialize, Serialize};
+
+mod blob_info;
+mod client;
+mod generated;
+
+#[derive(Debug, Serialize, Deserialize)]
+struct BlobData {
+    pub commitment: String,
+    pub blob: String,
+}
+
+const EIGENDA_API_URL: &str = "https://disperser-holesky.eigenda.xyz:443";
+const BLOB_DATA_JSON: &str = "blob_data.json";
+const ABI_JSON: &str = "./abi/commitBatchesSharedBridge.json";
+const COMMIT_BATCHES_SELECTOR: &str = "6edd4f12";
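+
+// `COMMIT_BATCHES_SELECTOR` is the 4-byte selector this tool expects for
+// `commitBatchesSharedBridge` calls (the function described by `ABI_JSON`), and
+// `EIGENDA_API_URL` points at the Holesky disperser. A hypothetical invocation,
+// with all three arguments as placeholders:
+//
+//     cargo run -- 0x<validator_timelock> https://<l1-rpc> <first_block_to_scan>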
+
+async fn get_blob(commitment: &str) -> anyhow::Result<Vec<u8>> {
+    let client = EigenClientRetriever::new(EIGENDA_API_URL).await?;
+    let data = client
+        .get_blob_data(commitment)
+        .await?
+        .ok_or_else(|| anyhow::anyhow!("Blob not found"))?;
+    Ok(data)
+}
+
+async fn get_transactions(
+    provider: &RootProvider<
+        alloy::transports::http::Http<alloy::transports::http::reqwest::Client>,
+        Ethereum,
+    >,
+    validator_timelock_address: Address,
+    block_start: u64,
+) -> anyhow::Result<()> {
+    let latest_block = provider.get_block_number().await?;
+    let mut json_array = Vec::new();
+
+    let mut i = 0;
+    for block_number in block_start..=latest_block {
+        i += 1;
+        if i % 50 == 0 {
+            println!(
+                "\x1b[32mProcessed up to block {} of {}\x1b[0m",
+                block_number, latest_block
+            );
+        }
+        if let Ok(Some(block)) = provider
+            .get_block_by_number(block_number.into(), true)
+            .await
+        {
+            for tx in block.transactions.into_transactions() {
+                if let Some(to) = tx.to {
+                    if to == validator_timelock_address {
+                        let input = tx.input;
+                        let selector = &input[0..4];
+                        if selector == hex::decode(COMMIT_BATCHES_SELECTOR)? {
+                            if let Ok(decoded) = decode_blob_data_input(&input[4..]).await {
+                                for blob in decoded {
+                                    json_array.push(blob);
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    if json_array.is_empty() {
+        println!("\x1b[31mNo transactions found.\x1b[0m");
+        return Ok(());
+    }
+
+    let json_string = serde_json::to_string_pretty(&json_array)?;
+    fs::write(BLOB_DATA_JSON, json_string)?;
+    println!("\x1b[32mData stored in blob_data.json file.\x1b[0m");
+
+    Ok(())
+}
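+
+// `decode_blob_data_input` ABI-decodes the calldata (selector already stripped
+// by the caller) against `commitBatchesSharedBridge`, takes the third argument
+// (`_newBatchesData`), and then tries each component of the first
+// `CommitBatchInfo` tuple as a pubdata commitment, silently skipping components
+// that do not decode to a retrievable blob.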
+                            if let Ok(decoded) = decode_blob_data_input(&input[4..]).await {
+                                for blob in decoded {
+                                    json_array.push(blob);
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    if json_array.is_empty() {
+        println!("\x1b[31mNo transactions found.\x1b[0m");
+        return Ok(());
+    }
+
+    let json_string = serde_json::to_string_pretty(&json_array)?;
+    fs::write(BLOB_DATA_JSON, json_string)?;
+    println!("\x1b[32mData stored in blob_data.json file.\x1b[0m");
+
+    Ok(())
+}
+
+async fn decode_blob_data_input(input: &[u8]) -> anyhow::Result<Vec<BlobData>> {
+    let json = std::fs::read_to_string(ABI_JSON)?;
+    let json_abi: JsonAbi = serde_json::from_str(&json)?;
+    let function = json_abi
+        .functions
+        .iter()
+        .find(|f| f.0 == "commitBatchesSharedBridge")
+        .ok_or(anyhow::anyhow!("Function not found"))?
+        .1;
+
+    let decoded = function[0].abi_decode_input(input, true)?;
+    let commit_batch_info = decoded[2].as_array().ok_or(anyhow::anyhow!(
+        "CommitBatchInfo cannot be represented as an array"
+    ))?[0]
+        .as_tuple()
+        .ok_or(anyhow::anyhow!(
+            "CommitBatchInfo components cannot be represented as a tuple"
+        ))?;
+
+    let mut blobs = vec![];
+
+    for pubdata_commitments in commit_batch_info.iter() {
+        let pubdata_commitments_bytes = pubdata_commitments.as_bytes();
+        if let Ok(blob_data) = get_blob_from_pubdata_commitment(pubdata_commitments_bytes).await {
+            blobs.push(blob_data)
+        }
+    }
+
+    Ok(blobs)
+}
+
+async fn get_blob_from_pubdata_commitment(
+    pubdata_commitments_bytes: Option<&[u8]>,
+) -> anyhow::Result<BlobData> {
+    if pubdata_commitments_bytes.is_none() {
+        return Err(anyhow::anyhow!(
+            "pubdata commitments cannot be represented as bytes"
+        ));
+    }
+    let pubdata_commitments_bytes = pubdata_commitments_bytes.unwrap();
+    let commitment = hex::decode(&pubdata_commitments_bytes[1..])?;
+    let commitment = hex::encode(&commitment);
+    let blob = get_blob(&commitment).await?;
+    Ok(BlobData {
+        commitment,
+        blob: hex::encode(blob),
+    })
+}
+
+#[tokio::main]
+async fn main() -> anyhow::Result<()> {
+    let args: Vec<String> = std::env::args().collect();
+
+    if args.len() != 4 {
+        eprintln!("Usage: cargo run <validator_timelock_address> <rpc_url> <block_start>");
+        std::process::exit(1);
+    }
+
+    let validator_timelock_address = Address::from_str(&args[1])?;
+
+    let _ = rustls::crypto::aws_lc_rs::default_provider().install_default();
+
+    let url = alloy::transports::http::reqwest::Url::from_str(&args[2])?;
+    let provider: RootProvider<
+        alloy::transports::http::Http<alloy::transports::http::Client>,
+        Ethereum,
+    > = RootProvider::new_http(url);
+
+    let block_start = args[3].parse::<u64>()?;
+
+    get_transactions(&provider, validator_timelock_address, block_start).await?;
+
+    Ok(())
+}
diff --git a/infrastructure/zk/src/compiler.ts b/infrastructure/zk/src/compiler.ts
index 9a90154909ba..881908eeacea 100644
--- a/infrastructure/zk/src/compiler.ts
+++ b/infrastructure/zk/src/compiler.ts
@@ -2,7 +2,6 @@ import { Command } from 'commander';
 import * as utils from 'utils';
 
 export async function compileTestContracts() {
-    await utils.spawn('yarn workspace contracts-test-data build');
     await utils.spawn('yarn ts-integration build');
     await utils.spawn('yarn ts-integration build-yul');
 }
diff --git a/package.json b/package.json
index af745160c30d..9e3428e614cc 100644
--- a/package.json
+++ b/package.json
@@ -9,7 +9,6 @@
         "contracts/l1-contracts",
         "contracts/l2-contracts",
         "contracts/system-contracts",
-        "etc/contracts-test-data",
         "etc/ERC20",
         "etc/utils",
         "infrastructure/zk",
diff --git a/prover/CHANGELOG.md b/prover/CHANGELOG.md
index 6687b1450ba0..d30076cddcf1 100644
--- a/prover/CHANGELOG.md
+++ b/prover/CHANGELOG.md
@@ -1,5 +1,23 @@
 # Changelog
 
+## 
[17.1.0](https://github.com/matter-labs/zksync-era/compare/prover-v17.0.0...prover-v17.1.0) (2024-11-18) + + +### Features + +* Add min_replicas for SimpleScaler, apply_min_to_namespace config ([#3282](https://github.com/matter-labs/zksync-era/issues/3282)) ([bc00c4a](https://github.com/matter-labs/zksync-era/commit/bc00c4a44a212def3cc25567f3b271530d76b6a4)) +* allow vm2 tracers to stop execution ([#3183](https://github.com/matter-labs/zksync-era/issues/3183)) ([9dae839](https://github.com/matter-labs/zksync-era/commit/9dae839935d82a1e73be220d17567f3382131039)) +* **contract-verifier:** Support Solidity contracts with EVM bytecode in contract verifier ([#3225](https://github.com/matter-labs/zksync-era/issues/3225)) ([8a3a82c](https://github.com/matter-labs/zksync-era/commit/8a3a82ca16479183e96505bc91011fc07bfc6889)) +* **prover:** Add cluster name autodetection ([#3227](https://github.com/matter-labs/zksync-era/issues/3227)) ([bd32aec](https://github.com/matter-labs/zksync-era/commit/bd32aecdf982c51202c1a69d12fcf1d878fe6d05)) +* **prover:** Add queue metric to report autoscaler view of the queue. ([#3206](https://github.com/matter-labs/zksync-era/issues/3206)) ([2721396](https://github.com/matter-labs/zksync-era/commit/272139690e028d3bdebdb6bcb1824fec23cefd0f)) +* ProverJobProcessor & circuit prover ([#3287](https://github.com/matter-labs/zksync-era/issues/3287)) ([98823f9](https://github.com/matter-labs/zksync-era/commit/98823f95c0b95feeb37eb9086cc88d4ac5220904)) +* **prover:** Move prover_autoscaler config into crate ([#3222](https://github.com/matter-labs/zksync-era/issues/3222)) ([1b33b5e](https://github.com/matter-labs/zksync-era/commit/1b33b5e9ec04bea0010350798332a90413c482d3)) + + +### Bug Fixes + +* **prover:** Remove unneeded dependencies, add default for graceful_shutdown_timeout ([#3242](https://github.com/matter-labs/zksync-era/issues/3242)) ([1bfff0e](https://github.com/matter-labs/zksync-era/commit/1bfff0e007e2fb5a4b4b885cf5c69a5cd290888b)) + ## [17.0.0](https://github.com/matter-labs/zksync-era/compare/prover-v16.6.0...prover-v17.0.0) (2024-10-31) diff --git a/prover/Cargo.lock b/prover/Cargo.lock index f119d4bd1951..559304d653e7 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -6511,9 +6511,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" +checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" dependencies = [ "futures-core", "pin-project-lite", @@ -7816,6 +7816,7 @@ version = "0.1.0" dependencies = [ "anyhow", "chrono", + "const-decoder 0.4.0", "ethabi", "hex", "num_enum 0.7.2", @@ -7823,6 +7824,7 @@ dependencies = [ "serde", "serde_json", "serde_with", + "sha2 0.10.8", "strum", "thiserror", "tiny-keccak 2.0.2", @@ -7866,6 +7868,7 @@ dependencies = [ "tracing", "vise", "zkevm_test_harness", + "zksync_circuit_prover_service", "zksync_config", "zksync_core_leftovers", "zksync_env_config", @@ -7873,10 +7876,32 @@ dependencies = [ "zksync_prover_dal", "zksync_prover_fri_types", "zksync_prover_fri_utils", + "zksync_prover_job_processor", "zksync_prover_keystore", "zksync_queued_job_processor", "zksync_types", "zksync_utils", + "zksync_vlog", +] + +[[package]] +name = "zksync_circuit_prover_service" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "shivini", + "tokio", + "tokio-util", + "tracing", + "vise", + "zkevm_test_harness", + 
"zksync_object_store", + "zksync_prover_dal", + "zksync_prover_fri_types", + "zksync_prover_job_processor", + "zksync_prover_keystore", + "zksync_types", ] [[package]] @@ -7994,11 +8019,11 @@ name = "zksync_contracts" version = "0.1.0" dependencies = [ "envy", - "ethabi", "hex", "once_cell", "serde", "serde_json", + "zksync_basic_types", "zksync_utils", ] @@ -8030,7 +8055,6 @@ dependencies = [ "sha2 0.10.8", "thiserror", "zksync_basic_types", - "zksync_utils", ] [[package]] @@ -8077,7 +8101,6 @@ dependencies = [ "zksync_protobuf_build", "zksync_system_constants", "zksync_types", - "zksync_utils", "zksync_vm_interface", ] @@ -8231,7 +8254,6 @@ dependencies = [ "zksync_mini_merkle_tree", "zksync_system_constants", "zksync_types", - "zksync_utils", "zksync_vm2", "zksync_vm_interface", ] @@ -8534,6 +8556,21 @@ dependencies = [ "zksync_vlog", ] +[[package]] +name = "zksync_prover_job_processor" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "futures 0.3.30", + "strum", + "tokio", + "tokio-stream", + "tokio-util", + "tracing", + "vise", +] + [[package]] name = "zksync_prover_keystore" version = "0.1.0" @@ -8592,7 +8629,6 @@ version = "0.1.0" dependencies = [ "once_cell", "zksync_basic_types", - "zksync_utils", ] [[package]] @@ -8624,7 +8660,6 @@ dependencies = [ "zksync_protobuf", "zksync_protobuf_build", "zksync_system_constants", - "zksync_utils", ] [[package]] @@ -8632,20 +8667,12 @@ name = "zksync_utils" version = "0.1.0" dependencies = [ "anyhow", - "bigdecimal", - "const-decoder 0.4.0", "futures 0.3.30", - "hex", - "num", "once_cell", "reqwest 0.12.5", - "serde", "serde_json", - "thiserror", "tokio", "tracing", - "zk_evm 0.133.0", - "zksync_basic_types", "zksync_vlog", ] diff --git a/prover/Cargo.toml b/prover/Cargo.toml index e53efaae1968..15e819d77f7d 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -58,6 +58,7 @@ strum_macros = "0.26" tempfile = "3" tokio = "1" tokio-util = "0.7.11" +tokio-stream = "0.1.16" toml_edit = "0.14.4" tracing = "0.1" tracing-subscriber = "0.3" @@ -100,6 +101,8 @@ zksync_prover_fri_types = { path = "crates/lib/prover_fri_types" } zksync_prover_fri_utils = { path = "crates/lib/prover_fri_utils" } zksync_prover_keystore = { path = "crates/lib/keystore" } zksync_vk_setup_data_generator_server_fri = { path = "crates/bin/vk_setup_data_generator_server_fri" } +zksync_prover_job_processor = { path = "crates/lib/prover_job_processor" } +zksync_circuit_prover_service = { path = "crates/lib/circuit_prover_service" } zksync_prover_job_monitor = { path = "crates/bin/prover_job_monitor" } # for `perf` profiling diff --git a/prover/crates/bin/circuit_prover/Cargo.toml b/prover/crates/bin/circuit_prover/Cargo.toml index a5751a4cd9a6..8fecc7a7a6a1 100644 --- a/prover/crates/bin/circuit_prover/Cargo.toml +++ b/prover/crates/bin/circuit_prover/Cargo.toml @@ -1,5 +1,6 @@ [package] name = "zksync_circuit_prover" +description = "ZKsync circuit prover binary implementation" version.workspace = true edition.workspace = true authors.workspace = true @@ -8,6 +9,7 @@ repository.workspace = true license.workspace = true keywords.workspace = true categories.workspace = true +publish = false [dependencies] tokio = { workspace = true, features = ["macros", "time"] } @@ -29,6 +31,9 @@ zksync_prover_keystore = { workspace = true, features = ["gpu"] } zksync_env_config.workspace = true zksync_core_leftovers.workspace = true zksync_utils.workspace = true +zksync_circuit_prover_service.workspace = true +zksync_prover_job_processor.workspace = true 
+zksync_vlog.workspace = true vise.workspace = true shivini = { workspace = true, features = [ diff --git a/prover/crates/bin/circuit_prover/src/circuit_prover.rs b/prover/crates/bin/circuit_prover/src/circuit_prover.rs deleted file mode 100644 index 1a5f8aa0d974..000000000000 --- a/prover/crates/bin/circuit_prover/src/circuit_prover.rs +++ /dev/null @@ -1,397 +0,0 @@ -use std::{sync::Arc, time::Instant}; - -use anyhow::Context; -use shivini::{ - gpu_proof_config::GpuProofConfig, gpu_prove_from_external_witness_data, ProverContext, - ProverContextConfig, -}; -use tokio::{sync::mpsc::Receiver, task::JoinHandle}; -use tokio_util::sync::CancellationToken; -use zkevm_test_harness::prover_utils::{verify_base_layer_proof, verify_recursion_layer_proof}; -use zksync_object_store::ObjectStore; -use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; -use zksync_prover_fri_types::{ - circuit_definitions::{ - base_layer_proof_config, - boojum::{ - cs::implementations::{pow::NoPow, witness::WitnessVec}, - field::goldilocks::GoldilocksField, - worker::Worker, - }, - circuit_definitions::{ - base_layer::ZkSyncBaseLayerProof, recursion_layer::ZkSyncRecursionLayerProof, - }, - recursion_layer_proof_config, - }, - CircuitWrapper, FriProofWrapper, ProverArtifacts, WitnessVectorArtifactsTemp, -}; -use zksync_prover_keystore::GoldilocksGpuProverSetupData; -use zksync_types::protocol_version::ProtocolSemanticVersion; -use zksync_utils::panic_extractor::try_extract_panic_message; - -use crate::{ - metrics::CIRCUIT_PROVER_METRICS, - types::{DefaultTranscript, DefaultTreeHasher, Proof, VerificationKey}, - SetupDataCache, -}; - -/// In charge of proving circuits, given a Witness Vector source. -/// Both job runner & job executor. -#[derive(Debug)] -pub struct CircuitProver { - connection_pool: ConnectionPool, - object_store: Arc, - protocol_version: ProtocolSemanticVersion, - /// Witness Vector source receiver - receiver: Receiver, - /// Setup Data used for proving & proof verification - setup_data_cache: SetupDataCache, -} - -impl CircuitProver { - pub fn new( - connection_pool: ConnectionPool, - object_store: Arc, - protocol_version: ProtocolSemanticVersion, - receiver: Receiver, - max_allocation: Option, - setup_data_cache: SetupDataCache, - ) -> anyhow::Result<(Self, ProverContext)> { - // VRAM allocation - let prover_context = match max_allocation { - Some(max_allocation) => ProverContext::create_with_config( - ProverContextConfig::default().with_maximum_device_allocation(max_allocation), - ) - .context("failed initializing fixed gpu prover context")?, - None => ProverContext::create().context("failed initializing gpu prover context")?, - }; - Ok(( - Self { - connection_pool, - object_store, - protocol_version, - receiver, - setup_data_cache, - }, - prover_context, - )) - } - - /// Continuously polls `receiver` for Witness Vectors and proves them. - /// All job executions are persisted. 
- pub async fn run(mut self, cancellation_token: CancellationToken) -> anyhow::Result<()> { - while !cancellation_token.is_cancelled() { - let time = Instant::now(); - - let artifact = self - .receiver - .recv() - .await - .context("no Witness Vector Generators are available")?; - tracing::info!( - "Circuit Prover received job {:?} after: {:?}", - artifact.prover_job.job_id, - time.elapsed() - ); - CIRCUIT_PROVER_METRICS.job_wait_time.observe(time.elapsed()); - - self.prove(artifact, cancellation_token.clone()) - .await - .context("failed to prove circuit proof")?; - } - tracing::info!("Circuit Prover shut down."); - Ok(()) - } - - /// Proves a job, with persistence of execution. - async fn prove( - &self, - artifact: WitnessVectorArtifactsTemp, - cancellation_token: CancellationToken, - ) -> anyhow::Result<()> { - let time = Instant::now(); - let block_number = artifact.prover_job.block_number; - let job_id = artifact.prover_job.job_id; - let job_start_time = artifact.time; - let setup_data_key = artifact.prover_job.setup_data_key.crypto_setup_key(); - let setup_data = self - .setup_data_cache - .get(&setup_data_key) - .context(format!( - "failed to get setup data for key {setup_data_key:?}" - ))? - .clone(); - let task = tokio::task::spawn_blocking(move || { - let _span = tracing::info_span!("prove_circuit_proof", %block_number).entered(); - Self::prove_circuit_proof(artifact, setup_data).context("failed to prove circuit") - }); - - self.finish_task( - job_id, - time, - job_start_time, - task, - cancellation_token.clone(), - ) - .await?; - tracing::info!( - "Circuit Prover finished job {:?} in: {:?}", - job_id, - time.elapsed() - ); - CIRCUIT_PROVER_METRICS - .job_finished_time - .observe(time.elapsed()); - CIRCUIT_PROVER_METRICS - .full_proving_time - .observe(job_start_time.elapsed()); - Ok(()) - } - - /// Proves a job using crypto primitives (proof generation & proof verification). - #[tracing::instrument( - name = "Prover::prove_circuit_proof", - skip_all, - fields(l1_batch = % witness_vector_artifacts.prover_job.block_number) - )] - pub fn prove_circuit_proof( - witness_vector_artifacts: WitnessVectorArtifactsTemp, - setup_data: Arc, - ) -> anyhow::Result { - let time = Instant::now(); - let WitnessVectorArtifactsTemp { - witness_vector, - prover_job, - .. - } = witness_vector_artifacts; - - let job_id = prover_job.job_id; - let circuit_wrapper = prover_job.circuit_wrapper; - let block_number = prover_job.block_number; - - let (proof, circuit_id) = - Self::generate_proof(&circuit_wrapper, witness_vector, &setup_data) - .context(format!("failed to generate proof for job id {job_id}"))?; - - Self::verify_proof(&circuit_wrapper, &proof, &setup_data.vk).context(format!( - "failed to verify proof with job_id {job_id}, circuit_id: {circuit_id}" - ))?; - - let proof_wrapper = match &circuit_wrapper { - CircuitWrapper::Base(_) => { - FriProofWrapper::Base(ZkSyncBaseLayerProof::from_inner(circuit_id, proof)) - } - CircuitWrapper::Recursive(_) => { - FriProofWrapper::Recursive(ZkSyncRecursionLayerProof::from_inner(circuit_id, proof)) - } - CircuitWrapper::BasePartial(_) => { - return Self::partial_proof_error(); - } - }; - CIRCUIT_PROVER_METRICS - .crypto_primitives_time - .observe(time.elapsed()); - Ok(ProverArtifacts::new(block_number, proof_wrapper)) - } - - /// Generates a proof from crypto primitives. 
- fn generate_proof( - circuit_wrapper: &CircuitWrapper, - witness_vector: WitnessVec, - setup_data: &Arc, - ) -> anyhow::Result<(Proof, u8)> { - let time = Instant::now(); - - let worker = Worker::new(); - - let (gpu_proof_config, proof_config, circuit_id) = match circuit_wrapper { - CircuitWrapper::Base(circuit) => ( - GpuProofConfig::from_base_layer_circuit(circuit), - base_layer_proof_config(), - circuit.numeric_circuit_type(), - ), - CircuitWrapper::Recursive(circuit) => ( - GpuProofConfig::from_recursive_layer_circuit(circuit), - recursion_layer_proof_config(), - circuit.numeric_circuit_type(), - ), - CircuitWrapper::BasePartial(_) => { - return Self::partial_proof_error(); - } - }; - - let proof = - gpu_prove_from_external_witness_data::( - &gpu_proof_config, - &witness_vector, - proof_config, - &setup_data.setup, - &setup_data.vk, - (), - &worker, - ) - .context("crypto primitive: failed to generate proof")?; - CIRCUIT_PROVER_METRICS - .generate_proof_time - .observe(time.elapsed()); - Ok((proof.into(), circuit_id)) - } - - /// Verifies a proof from crypto primitives - fn verify_proof( - circuit_wrapper: &CircuitWrapper, - proof: &Proof, - verification_key: &VerificationKey, - ) -> anyhow::Result<()> { - let time = Instant::now(); - - let is_valid = match circuit_wrapper { - CircuitWrapper::Base(base_circuit) => { - verify_base_layer_proof::(base_circuit, proof, verification_key) - } - CircuitWrapper::Recursive(recursive_circuit) => { - verify_recursion_layer_proof::(recursive_circuit, proof, verification_key) - } - CircuitWrapper::BasePartial(_) => { - return Self::partial_proof_error(); - } - }; - - CIRCUIT_PROVER_METRICS - .verify_proof_time - .observe(time.elapsed()); - - if !is_valid { - return Err(anyhow::anyhow!("crypto primitive: failed to verify proof")); - } - Ok(()) - } - - /// This code path should never trigger. All proofs are hydrated during Witness Vector Generator. - /// If this triggers, it means that proof hydration in Witness Vector Generator was not done -- logic bug. - fn partial_proof_error() -> anyhow::Result { - Err(anyhow::anyhow!("received unexpected dehydrated proof")) - } - - /// Runs task to completion and persists result. - /// NOTE: Task may be cancelled mid-flight. - async fn finish_task( - &self, - job_id: u32, - time: Instant, - job_start_time: Instant, - task: JoinHandle>, - cancellation_token: CancellationToken, - ) -> anyhow::Result<()> { - tokio::select! { - _ = cancellation_token.cancelled() => { - tracing::info!("Stop signal received, shutting down Circuit Prover..."); - return Ok(()) - } - result = task => { - let error_message = match result { - Ok(Ok(prover_artifact)) => { - tracing::info!("Circuit Prover executed job {:?} in: {:?}", job_id, time.elapsed()); - CIRCUIT_PROVER_METRICS.execution_time.observe(time.elapsed()); - self - .save_result(job_id, job_start_time, prover_artifact) - .await.context("failed to save result")?; - return Ok(()) - } - Ok(Err(error)) => error.to_string(), - Err(error) => try_extract_panic_message(error), - }; - tracing::error!( - "Circuit Prover failed on job {:?} with error {:?}", - job_id, - error_message - ); - - self.save_failure(job_id, error_message).await.context("failed to save failure")?; - } - } - - Ok(()) - } - - /// Persists proof generated. - /// Job metadata is saved to database, whilst artifacts go to object store. 
- async fn save_result( - &self, - job_id: u32, - job_start_time: Instant, - artifacts: ProverArtifacts, - ) -> anyhow::Result<()> { - let time = Instant::now(); - let mut connection = self - .connection_pool - .connection() - .await - .context("failed to get db connection")?; - let proof = artifacts.proof_wrapper; - - let (_circuit_type, is_scheduler_proof) = match &proof { - FriProofWrapper::Base(base) => (base.numeric_circuit_type(), false), - FriProofWrapper::Recursive(recursive_circuit) => match recursive_circuit { - ZkSyncRecursionLayerProof::SchedulerCircuit(_) => { - (recursive_circuit.numeric_circuit_type(), true) - } - _ => (recursive_circuit.numeric_circuit_type(), false), - }, - }; - - let upload_time = Instant::now(); - let blob_url = self - .object_store - .put(job_id, &proof) - .await - .context("failed to upload to object store")?; - CIRCUIT_PROVER_METRICS - .artifact_upload_time - .observe(upload_time.elapsed()); - - let mut transaction = connection - .start_transaction() - .await - .context("failed to start db transaction")?; - transaction - .fri_prover_jobs_dal() - .save_proof(job_id, job_start_time.elapsed(), &blob_url) - .await; - if is_scheduler_proof { - transaction - .fri_proof_compressor_dal() - .insert_proof_compression_job( - artifacts.block_number, - &blob_url, - self.protocol_version, - ) - .await; - } - transaction - .commit() - .await - .context("failed to commit db transaction")?; - - tracing::info!( - "Circuit Prover saved job {:?} after {:?}", - job_id, - time.elapsed() - ); - CIRCUIT_PROVER_METRICS.save_time.observe(time.elapsed()); - - Ok(()) - } - - /// Persists job execution error to database. - async fn save_failure(&self, job_id: u32, error: String) -> anyhow::Result<()> { - self.connection_pool - .connection() - .await - .context("failed to get db connection")? - .fri_prover_jobs_dal() - .save_proof_error(job_id, error) - .await; - Ok(()) - } -} diff --git a/prover/crates/bin/circuit_prover/src/lib.rs b/prover/crates/bin/circuit_prover/src/lib.rs index 7d7ce1d96686..c25afe6e9b3b 100644 --- a/prover/crates/bin/circuit_prover/src/lib.rs +++ b/prover/crates/bin/circuit_prover/src/lib.rs @@ -1,13 +1,5 @@ -#![allow(incomplete_features)] // We have to use generic const exprs. 
-#![feature(generic_const_exprs)]
-pub use backoff::Backoff;
-pub use circuit_prover::CircuitProver;
 pub use metrics::PROVER_BINARY_METRICS;
 pub use types::{FinalizationHintsCache, SetupDataCache};
-pub use witness_vector_generator::WitnessVectorGenerator;
 
-mod backoff;
-mod circuit_prover;
 mod metrics;
 mod types;
-mod witness_vector_generator;
diff --git a/prover/crates/bin/circuit_prover/src/main.rs b/prover/crates/bin/circuit_prover/src/main.rs
index e26f29ca995d..a445ceca3abe 100644
--- a/prover/crates/bin/circuit_prover/src/main.rs
+++ b/prover/crates/bin/circuit_prover/src/main.rs
@@ -6,11 +6,10 @@ use std::{
 
 use anyhow::Context as _;
 use clap::Parser;
+use shivini::{ProverContext, ProverContextConfig};
 use tokio_util::sync::CancellationToken;
-use zksync_circuit_prover::{
-    Backoff, CircuitProver, FinalizationHintsCache, SetupDataCache, WitnessVectorGenerator,
-    PROVER_BINARY_METRICS,
-};
+use zksync_circuit_prover::{FinalizationHintsCache, SetupDataCache, PROVER_BINARY_METRICS};
+use zksync_circuit_prover_service::job_runner::{circuit_prover_runner, WvgRunnerBuilder};
 use zksync_config::{
     configs::{FriProverConfig, ObservabilityConfig},
     ObjectStoreConfig,
@@ -21,83 +20,110 @@ use zksync_prover_dal::{ConnectionPool, Prover};
 use zksync_prover_fri_types::PROVER_PROTOCOL_SEMANTIC_VERSION;
 use zksync_prover_keystore::keystore::Keystore;
 use zksync_utils::wait_for_tasks::ManagedTasks;
+use zksync_vlog::prometheus::PrometheusExporterConfig;
+
+/// On most commodity hardware, a WVG job can take ~30 seconds to complete.
+/// GPU processing takes ~1 second.
+/// A typical setup is ~25 WVGs & 1 GPU.
+/// Worst case, a full set of 25 WVG jobs was just picked up (so ~30 seconds to finish them),
+/// plus another ~25 seconds for the GPU to prove them all.
+const GRACEFUL_SHUTDOWN_DURATION: Duration = Duration::from_secs(55);
+
+/// With the current setup, only a single job is expected to be in flight.
+/// This guarantees that memory consumption is fixed (1 job in memory, no more).
+/// Additionally, it helps with estimating graceful shutdown time.
+/// As a free side effect, if the machine dies, only 1 job is left in "pending" state.
const CHANNEL_SIZE: usize = 1;
 
 #[derive(Debug, Parser)]
 #[command(author = "Matter Labs", version)]
 struct Cli {
-    #[arg(long)]
+    /// Path to the configuration file.
+    #[arg(short = 'c', long)]
     pub(crate) config_path: Option<PathBuf>,
-    #[arg(long)]
+    /// Path to the secrets file.
+    #[arg(short = 's', long)]
     pub(crate) secrets_path: Option<PathBuf>,
-    /// Number of WVG jobs to run in parallel.
-    /// Default value is 1.
-    #[arg(long, default_value_t = 1)]
-    pub(crate) witness_vector_generator_count: usize,
+    /// Number of light witness vector generators to run in parallel.
+    /// Each corresponds to 1 CPU thread & ~2GB of RAM.
+    #[arg(short = 'l', long, default_value_t = 1)]
+    light_wvg_count: usize,
+    /// Number of heavy witness vector generators to run in parallel.
+    /// Each corresponds to 1 CPU thread & ~9GB of RAM.
+    #[arg(short = 'h', long, default_value_t = 1)]
+    heavy_wvg_count: usize,
     /// Max VRAM to allocate. Useful if you want to limit the size of VRAM used.
     /// None corresponds to allocating all available VRAM.
-    #[arg(long)]
+    #[arg(short = 'm', long)]
     pub(crate) max_allocation: Option<usize>,
 }
 
 #[tokio::main]
 async fn main() -> anyhow::Result<()> {
-    let time = Instant::now();
+    let start_time = Instant::now();
     let opt = Cli::parse();
     let (observability_config, prover_config, object_store_config) =
         load_configs(opt.config_path)?;
-
     let _observability_guard = observability_config
         .install()
         .context("failed to install observability")?;
 
-    let wvg_count = opt.witness_vector_generator_count as u32;
-
-    let (connection_pool, object_store, setup_data_cache, hints) = load_resources(
+    let (connection_pool, object_store, prover_context, setup_data_cache, hints) = load_resources(
         opt.secrets_path,
+        opt.max_allocation,
         object_store_config,
         prover_config.setup_data_path.into(),
-        wvg_count,
     )
     .await
     .context("failed to load configs")?;
 
-    PROVER_BINARY_METRICS.start_up.observe(time.elapsed());
+    PROVER_BINARY_METRICS
+        .startup_time
+        .observe(start_time.elapsed());
 
     let cancellation_token = CancellationToken::new();
-    let backoff = Backoff::new(Duration::from_secs(5), Duration::from_secs(30));
 
-    let mut tasks = vec![];
+    let exporter_config = PrometheusExporterConfig::pull(prover_config.prometheus_port);
+    let (metrics_stop_sender, metrics_stop_receiver) = tokio::sync::watch::channel(false);
 
-    let (sender, receiver) = tokio::sync::mpsc::channel(5);
+    let mut tasks = vec![tokio::spawn(exporter_config.run(metrics_stop_receiver))];
 
-    tracing::info!("Starting {wvg_count} Witness Vector Generators.");
+    let (witness_vector_sender, witness_vector_receiver) = tokio::sync::mpsc::channel(CHANNEL_SIZE);
 
-    for _ in 0..wvg_count {
-        let wvg = WitnessVectorGenerator::new(
-            object_store.clone(),
-            connection_pool.clone(),
-            PROVER_PROTOCOL_SEMANTIC_VERSION,
-            sender.clone(),
-            hints.clone(),
-        );
-        tasks.push(tokio::spawn(
-            wvg.run(cancellation_token.clone(), backoff.clone()),
-        ));
-    }
+    tracing::info!(
+        "Starting {} light WVGs and {} heavy WVGs.",
+        opt.light_wvg_count,
+        opt.heavy_wvg_count
+    );
+
+    let builder = WvgRunnerBuilder::new(
+        connection_pool.clone(),
+        object_store.clone(),
+        PROVER_PROTOCOL_SEMANTIC_VERSION,
+        hints.clone(),
+        witness_vector_sender,
+        cancellation_token.clone(),
+    );
+
+    let light_wvg_runner = builder.light_wvg_runner(opt.light_wvg_count);
+    let heavy_wvg_runner = builder.heavy_wvg_runner(opt.heavy_wvg_count);
+
+    tasks.extend(light_wvg_runner.run());
+    tasks.extend(heavy_wvg_runner.run());
 
-    // NOTE: Prover Context is the way VRAM is allocated. If it is dropped, the claim on VRAM allocation is dropped as well.
-    // It has to be kept until prover dies. Whilst it may be kept in prover struct, during cancellation, prover can `drop`, but the thread doing the processing can still be alive.
-    // This setup prevents segmentation faults and other nasty behavior during shutdown.
-    let (prover, _prover_context) = CircuitProver::new(
+    // Necessary, as the builder holds a connection_pool that keeps 1 connection active by default.
+    drop(builder);
+
+    let circuit_prover_runner = circuit_prover_runner(
         connection_pool,
         object_store,
         PROVER_PROTOCOL_SEMANTIC_VERSION,
-        receiver,
-        opt.max_allocation,
         setup_data_cache,
-    )
-    .context("failed to create circuit prover")?;
-    tasks.push(tokio::spawn(prover.run(cancellation_token.clone())));
+        witness_vector_receiver,
+        prover_context,
+    );
+
+    tasks.extend(circuit_prover_runner.run());
 
     let mut tasks = ManagedTasks::new(tasks);
     tokio::select! {
@@ -114,12 +140,17 @@ async fn main() -> anyhow::Result<()> {
             }
         }
     }
-    PROVER_BINARY_METRICS.run_time.observe(time.elapsed());
-    tasks.complete(Duration::from_secs(5)).await;
-
+    let shutdown_time = Instant::now();
+    tasks.complete(GRACEFUL_SHUTDOWN_DURATION).await;
+    PROVER_BINARY_METRICS
+        .shutdown_time
+        .observe(shutdown_time.elapsed());
+    PROVER_BINARY_METRICS.run_time.observe(start_time.elapsed());
+    metrics_stop_sender
+        .send(true)
+        .context("failed to stop metrics")?;
     Ok(())
 }
-
 /// Loads configs necessary for proving.
 /// - observability config - for observability setup
 /// - prover config - necessary for setup data
@@ -143,20 +174,21 @@ fn load_configs(
     tracing::info!("Loaded configs.");
     Ok((observability_config, prover_config, object_store_config))
 }
-
 /// Loads resources necessary for proving.
 /// - connection pool - necessary to pick & store jobs from database
 /// - object store - necessary for loading and storing artifacts to object store
+/// - prover context - necessary for circuit proving; VRAM allocation
 /// - setup data - necessary for circuit proving
 /// - finalization hints - necessary for generating witness vectors
 async fn load_resources(
     secrets_path: Option<PathBuf>,
+    max_gpu_vram_allocation: Option<usize>,
     object_store_config: ObjectStoreConfig,
     setup_data_path: PathBuf,
-    wvg_count: u32,
 ) -> anyhow::Result<(
     ConnectionPool<Prover>,
     Arc<dyn ObjectStore>,
+    ProverContext,
     SetupDataCache,
     FinalizationHintsCache,
 )> {
@@ -165,9 +197,8 @@ async fn load_resources(
     let database_url = database_secrets
         .prover_url
         .context("no prover DB URL present")?;
-
-    // 1 connection for the prover and one for each vector generator
-    let max_connections = 1 + wvg_count;
+    // 2 connections for the witness vector generator job pickers (1 each) and 1 for the GPU circuit prover job saver.
+    let max_connections = 3;
     let connection_pool = ConnectionPool::<Prover>::builder(database_url, max_connections)
         .build()
         .await
@@ -178,23 +209,34 @@ async fn load_resources(
         .await
         .context("failed to create object store")?;
 
-    tracing::info!("Loading mappings from disk...");
+    let prover_context = match max_gpu_vram_allocation {
+        Some(max_allocation) => ProverContext::create_with_config(
+            ProverContextConfig::default().with_maximum_device_allocation(max_allocation),
+        )
+        .context("failed initializing fixed gpu prover context")?,
+        None => ProverContext::create().context("failed initializing gpu prover context")?,
+    };
+
+    tracing::info!("Loading setup data from disk...");
     let keystore = Keystore::locate().with_setup_path(Some(setup_data_path));
     let setup_data_cache = keystore
         .load_all_setup_key_mapping()
         .await
         .context("failed to load setup key mapping")?;
+
+    tracing::info!("Loading finalization hints from disk...");
     let finalization_hints = keystore
         .load_all_finalization_hints_mapping()
        .await
         .context("failed to load finalization hints mapping")?;
-    tracing::info!("Loaded mappings from disk.");
+    tracing::info!("Finished loading mappings from disk.");
 
     Ok((
         connection_pool,
         object_store,
+        prover_context,
         setup_data_cache,
         finalization_hints,
     ))
diff --git a/prover/crates/bin/circuit_prover/src/metrics.rs b/prover/crates/bin/circuit_prover/src/metrics.rs
index e9f445914795..f9b8c38e3e34 100644
--- a/prover/crates/bin/circuit_prover/src/metrics.rs
+++ b/prover/crates/bin/circuit_prover/src/metrics.rs
@@ -2,79 +2,20 @@ use std::time::Duration;
 
 use vise::{Buckets, Histogram, Metrics};
 
+/// Instruments the prover binary lifecycle.
 #[derive(Debug, Metrics)]
 #[metrics(prefix = "prover_binary")]
 pub struct ProverBinaryMetrics {
     /// How long does it take for the prover to load data before it can produce proofs?
     #[metrics(buckets = Buckets::LATENCIES)]
-    pub start_up: Histogram<Duration>,
-    /// How long has the prover been running?
+    pub startup_time: Histogram<Duration>,
+    /// How long did the prover binary run for?
     #[metrics(buckets = Buckets::LATENCIES)]
     pub run_time: Histogram<Duration>,
-}
-
-#[vise::register]
-pub static PROVER_BINARY_METRICS: vise::Global<ProverBinaryMetrics> = vise::Global::new();
-
-#[derive(Debug, Metrics)]
-#[metrics(prefix = "witness_vector_generator")]
-pub struct WitnessVectorGeneratorMetrics {
-    /// How long does witness vector generator waits before a job is available?
-    #[metrics(buckets = Buckets::LATENCIES)]
-    pub job_wait_time: Histogram<Duration>,
-    /// How long does it take to load object store artifacts for a witness vector job?
-    #[metrics(buckets = Buckets::LATENCIES)]
-    pub artifact_download_time: Histogram<Duration>,
-    /// How long does the crypto witness generation primitive take?
-    #[metrics(buckets = Buckets::LATENCIES)]
-    pub crypto_primitive_time: Histogram<Duration>,
-    /// How long does it take for a job to be executed, from the moment it's loaded?
-    #[metrics(buckets = Buckets::LATENCIES)]
-    pub execution_time: Histogram<Duration>,
-    /// How long does it take to send a job to prover?
-    /// This is relevant because prover queue can apply back-pressure.
-    #[metrics(buckets = Buckets::LATENCIES)]
-    pub send_time: Histogram<Duration>,
-    /// How long does it take for a job to be considered finished, from the moment it's been loaded?
-    #[metrics(buckets = Buckets::LATENCIES)]
-    pub job_finished_time: Histogram<Duration>,
-}
-
-#[vise::register]
-pub static WITNESS_VECTOR_GENERATOR_METRICS: vise::Global<WitnessVectorGeneratorMetrics> =
-    vise::Global::new();
-
-#[derive(Debug, Metrics)]
-#[metrics(prefix = "circuit_prover")]
-pub struct CircuitProverMetrics {
-    /// How long does circuit prover wait before a job is available?
-    #[metrics(buckets = Buckets::LATENCIES)]
-    pub job_wait_time: Histogram<Duration>,
-    /// How long does the crypto primitives (proof generation & verification) take?
-    #[metrics(buckets = Buckets::LATENCIES)]
-    pub crypto_primitives_time: Histogram<Duration>,
-    /// How long does proof generation (crypto primitive) take?
-    #[metrics(buckets = Buckets::LATENCIES)]
-    pub generate_proof_time: Histogram<Duration>,
-    /// How long does verify proof (crypto primitive) take?
+    /// How long does it take the prover to shut down gracefully?
     #[metrics(buckets = Buckets::LATENCIES)]
-    pub verify_proof_time: Histogram<Duration>,
-    /// How long does it take for a job to be executed, from the moment it's loaded?
-    #[metrics(buckets = Buckets::LATENCIES)]
-    pub execution_time: Histogram<Duration>,
-    /// How long does it take to upload proof to object store?
-    #[metrics(buckets = Buckets::LATENCIES)]
-    pub artifact_upload_time: Histogram<Duration>,
-    /// How long does it take to save a job?
-    #[metrics(buckets = Buckets::LATENCIES)]
-    pub save_time: Histogram<Duration>,
-    /// How long does it take for a job to be considered finished, from the moment it's been loaded?
-    #[metrics(buckets = Buckets::LATENCIES)]
-    pub job_finished_time: Histogram<Duration>,
-    /// How long does it take a job to go from witness generation to having the proof saved?
-    #[metrics(buckets = Buckets::LATENCIES)]
-    pub full_proving_time: Histogram<Duration>,
+    pub shutdown_time: Histogram<Duration>,
 }
 
 #[vise::register]
-pub static CIRCUIT_PROVER_METRICS: vise::Global<CircuitProverMetrics> = vise::Global::new();
+pub static PROVER_BINARY_METRICS: vise::Global<ProverBinaryMetrics> = vise::Global::new();
diff --git a/prover/crates/bin/circuit_prover/src/types.rs b/prover/crates/bin/circuit_prover/src/types.rs
index 52cdd48b6b50..e4e1fdc13b8f 100644
--- a/prover/crates/bin/circuit_prover/src/types.rs
+++ b/prover/crates/bin/circuit_prover/src/types.rs
@@ -1,31 +1,12 @@
 use std::{collections::HashMap, sync::Arc};
 
 use zksync_prover_fri_types::{
-    circuit_definitions::boojum::{
-        algebraic_props::{
-            round_function::AbsorptionModeOverwrite, sponge::GoldilocksPoseidon2Sponge,
-        },
-        cs::implementations::{
-            proof::Proof as CryptoProof, setup::FinalizationHintsForProver,
-            transcript::GoldilocksPoisedon2Transcript,
-            verifier::VerificationKey as CryptoVerificationKey,
-        },
-        field::goldilocks::{GoldilocksExt2, GoldilocksField},
-    },
+    circuit_definitions::boojum::cs::implementations::setup::FinalizationHintsForProver,
     ProverServiceDataKey,
 };
 use zksync_prover_keystore::GoldilocksGpuProverSetupData;
 
-// prover types
-pub type DefaultTranscript = GoldilocksPoisedon2Transcript;
-pub type DefaultTreeHasher = GoldilocksPoseidon2Sponge;
-
-type F = GoldilocksField;
-type H = GoldilocksPoseidon2Sponge;
-type Ext = GoldilocksExt2;
-pub type Proof = CryptoProof<F, H, Ext>;
-pub type VerificationKey = CryptoVerificationKey<F, H>;
-
+// TODO: To be moved to the circuit_prover_service lib & adjusted to the new type idiom.
 // cache types
 pub type SetupDataCache = HashMap<ProverServiceDataKey, Arc<GoldilocksGpuProverSetupData>>;
 pub type FinalizationHintsCache = HashMap<ProverServiceDataKey, Arc<FinalizationHintsForProver>>;
diff --git a/prover/crates/bin/circuit_prover/src/witness_vector_generator.rs b/prover/crates/bin/circuit_prover/src/witness_vector_generator.rs
deleted file mode 100644
index cb2d2a256df9..000000000000
--- a/prover/crates/bin/circuit_prover/src/witness_vector_generator.rs
+++ /dev/null
@@ -1,345 +0,0 @@
-use std::{collections::HashMap, sync::Arc, time::Instant};
-
-use anyhow::Context;
-use tokio::{sync::mpsc::Sender, task::JoinHandle};
-use tokio_util::sync::CancellationToken;
-use zksync_object_store::ObjectStore;
-use zksync_prover_dal::{ConnectionPool, Prover, ProverDal};
-use zksync_prover_fri_types::{
-    circuit_definitions::{
-        boojum::{
-            cs::implementations::setup::FinalizationHintsForProver,
-            field::goldilocks::GoldilocksField,
-            gadgets::queue::full_state_queue::FullStateCircuitQueueRawWitness,
-        },
-        circuit_definitions::base_layer::ZkSyncBaseLayerCircuit,
-    },
-    get_current_pod_name,
-    keys::RamPermutationQueueWitnessKey,
-    CircuitAuxData, CircuitWrapper, ProverJob, ProverServiceDataKey, RamPermutationQueueWitness,
-    WitnessVectorArtifactsTemp,
-};
-use zksync_types::{protocol_version::ProtocolSemanticVersion, L1BatchNumber};
-use zksync_utils::panic_extractor::try_extract_panic_message;
-
-use crate::{metrics::WITNESS_VECTOR_GENERATOR_METRICS, Backoff, FinalizationHintsCache};
-
-/// In charge of generating Witness Vectors and sending them to Circuit Prover.
-/// Both job runner & job executor.
-#[derive(Debug)] -pub struct WitnessVectorGenerator { - object_store: Arc, - connection_pool: ConnectionPool, - protocol_version: ProtocolSemanticVersion, - /// Finalization Hints used for Witness Vector generation - finalization_hints_cache: FinalizationHintsCache, - /// Witness Vector sender for Circuit Prover - sender: Sender, - pod_name: String, -} - -impl WitnessVectorGenerator { - pub fn new( - object_store: Arc, - connection_pool: ConnectionPool, - protocol_version: ProtocolSemanticVersion, - sender: Sender, - finalization_hints: HashMap>, - ) -> Self { - Self { - object_store, - connection_pool, - protocol_version, - finalization_hints_cache: finalization_hints, - sender, - pod_name: get_current_pod_name(), - } - } - - /// Continuously polls database for new prover jobs and generates witness vectors for them. - /// All job executions are persisted. - pub async fn run( - self, - cancellation_token: CancellationToken, - mut backoff: Backoff, - ) -> anyhow::Result<()> { - let mut get_job_timer = Instant::now(); - while !cancellation_token.is_cancelled() { - if let Some(prover_job) = self - .get_job() - .await - .context("failed to get next witness generation job")? - { - tracing::info!( - "Witness Vector Generator received job {:?} after: {:?}", - prover_job.job_id, - get_job_timer.elapsed() - ); - WITNESS_VECTOR_GENERATOR_METRICS - .job_wait_time - .observe(get_job_timer.elapsed()); - if let e @ Err(_) = self.generate(prover_job, cancellation_token.clone()).await { - // this means that the witness vector receiver is closed, no need to report the error, just return - if cancellation_token.is_cancelled() { - return Ok(()); - } - e.context("failed to generate witness")? - } - - // waiting for a job timer starts as soon as the other is finished - get_job_timer = Instant::now(); - backoff.reset(); - continue; - }; - self.backoff(&mut backoff, cancellation_token.clone()).await; - } - tracing::info!("Witness Vector Generator shut down."); - Ok(()) - } - - /// Retrieves a prover job from database, loads artifacts from object store and hydrates them. - async fn get_job(&self) -> anyhow::Result> { - let mut connection = self - .connection_pool - .connection() - .await - .context("failed to get db connection")?; - let prover_job_metadata = match connection - .fri_prover_jobs_dal() - .get_job(self.protocol_version, &self.pod_name) - .await - { - None => return Ok(None), - Some(job) => job, - }; - - let time = Instant::now(); - let circuit_wrapper = self - .object_store - .get(prover_job_metadata.into()) - .await - .context("failed to get circuit_wrapper from object store")?; - let artifact = match circuit_wrapper { - a @ CircuitWrapper::Base(_) => a, - a @ CircuitWrapper::Recursive(_) => a, - CircuitWrapper::BasePartial((circuit, aux_data)) => self - .fill_witness(circuit, aux_data, prover_job_metadata.block_number) - .await - .context("failed to fill witness")?, - }; - WITNESS_VECTOR_GENERATOR_METRICS - .artifact_download_time - .observe(time.elapsed()); - - let setup_data_key = ProverServiceDataKey { - circuit_id: prover_job_metadata.circuit_id, - round: prover_job_metadata.aggregation_round, - } - .crypto_setup_key(); - let prover_job = ProverJob::new( - prover_job_metadata.block_number, - prover_job_metadata.id, - artifact, - setup_data_key, - ); - Ok(Some(prover_job)) - } - - /// Prover artifact hydration. 
- async fn fill_witness( - &self, - circuit: ZkSyncBaseLayerCircuit, - aux_data: CircuitAuxData, - l1_batch_number: L1BatchNumber, - ) -> anyhow::Result { - if let ZkSyncBaseLayerCircuit::RAMPermutation(circuit_instance) = circuit { - let sorted_witness_key = RamPermutationQueueWitnessKey { - block_number: l1_batch_number, - circuit_subsequence_number: aux_data.circuit_subsequence_number as usize, - is_sorted: true, - }; - let sorted_witness: RamPermutationQueueWitness = self - .object_store - .get(sorted_witness_key) - .await - .context("failed to load sorted witness key")?; - - let unsorted_witness_key = RamPermutationQueueWitnessKey { - block_number: l1_batch_number, - circuit_subsequence_number: aux_data.circuit_subsequence_number as usize, - is_sorted: false, - }; - let unsorted_witness: RamPermutationQueueWitness = self - .object_store - .get(unsorted_witness_key) - .await - .context("failed to load unsorted witness key")?; - - let mut witness = circuit_instance.witness.take().unwrap(); - witness.unsorted_queue_witness = FullStateCircuitQueueRawWitness { - elements: unsorted_witness.witness.into(), - }; - witness.sorted_queue_witness = FullStateCircuitQueueRawWitness { - elements: sorted_witness.witness.into(), - }; - circuit_instance.witness.store(Some(witness)); - - return Ok(CircuitWrapper::Base( - ZkSyncBaseLayerCircuit::RAMPermutation(circuit_instance), - )); - } - Err(anyhow::anyhow!( - "unexpected circuit received with partial witness, expected RAM permutation, got {:?}", - circuit.short_description() - )) - } - - /// Generates witness vector, with persistence of execution. - async fn generate( - &self, - prover_job: ProverJob, - cancellation_token: CancellationToken, - ) -> anyhow::Result<()> { - let start_time = Instant::now(); - let finalization_hints = self - .finalization_hints_cache - .get(&prover_job.setup_data_key) - .context(format!( - "failed to get finalization hints for key {:?}", - &prover_job.setup_data_key - ))? - .clone(); - let job_id = prover_job.job_id; - let task = tokio::task::spawn_blocking(move || { - let block_number = prover_job.block_number; - let _span = tracing::info_span!("witness_vector_generator", %block_number).entered(); - Self::generate_witness_vector(prover_job, finalization_hints) - }); - - self.finish_task(job_id, start_time, task, cancellation_token.clone()) - .await?; - - tracing::info!( - "Witness Vector Generator finished job {:?} in: {:?}", - job_id, - start_time.elapsed() - ); - WITNESS_VECTOR_GENERATOR_METRICS - .job_finished_time - .observe(start_time.elapsed()); - Ok(()) - } - - /// Generates witness vector using crypto primitives. - #[tracing::instrument( - skip_all, - fields(l1_batch = % prover_job.block_number) - )] - pub fn generate_witness_vector( - prover_job: ProverJob, - finalization_hints: Arc, - ) -> anyhow::Result { - let time = Instant::now(); - let cs = match prover_job.circuit_wrapper.clone() { - CircuitWrapper::Base(base_circuit) => { - base_circuit.synthesis::(&finalization_hints) - } - CircuitWrapper::Recursive(recursive_circuit) => { - recursive_circuit.synthesis::(&finalization_hints) - } - // circuit must be hydrated during `get_job` - CircuitWrapper::BasePartial(_) => { - return Err(anyhow::anyhow!("received unexpected dehydrated proof")); - } - }; - WITNESS_VECTOR_GENERATOR_METRICS - .crypto_primitive_time - .observe(time.elapsed()); - Ok(WitnessVectorArtifactsTemp::new( - cs.witness.unwrap(), - prover_job, - time, - )) - } - - /// Runs task to completion and persists result. 
-    /// NOTE: Task may be cancelled mid-flight.
-    async fn finish_task(
-        &self,
-        job_id: u32,
-        time: Instant,
-        task: JoinHandle<anyhow::Result<WitnessVectorArtifactsTemp>>,
-        cancellation_token: CancellationToken,
-    ) -> anyhow::Result<()> {
-        tokio::select! {
-            _ = cancellation_token.cancelled() => {
-                tracing::info!("Stop signal received, shutting down Witness Vector Generator...");
-                return Ok(())
-            }
-            result = task => {
-                let error_message = match result {
-                    Ok(Ok(witness_vector)) => {
-                        tracing::info!("Witness Vector Generator executed job {:?} in: {:?}", job_id, time.elapsed());
-                        WITNESS_VECTOR_GENERATOR_METRICS.execution_time.observe(time.elapsed());
-                        self
-                            .save_result(witness_vector, job_id)
-                            .await
-                            .context("failed to save result")?;
-                        return Ok(())
-                    }
-                    Ok(Err(error)) => error.to_string(),
-                    Err(error) => try_extract_panic_message(error),
-                };
-                tracing::error!("Witness Vector Generator failed on job {job_id:?} with error {error_message:?}");
-
-                self.save_failure(job_id, error_message).await.context("failed to save failure")?;
-            }
-        }
-
-        Ok(())
-    }
-
-    /// Sends proof to Circuit Prover.
-    async fn save_result(
-        &self,
-        artifacts: WitnessVectorArtifactsTemp,
-        job_id: u32,
-    ) -> anyhow::Result<()> {
-        let time = Instant::now();
-        self.sender
-            .send(artifacts)
-            .await
-            .context("failed to send witness vector to prover")?;
-        tracing::info!(
-            "Witness Vector Generator sent job {:?} after {:?}",
-            job_id,
-            time.elapsed()
-        );
-        WITNESS_VECTOR_GENERATOR_METRICS
-            .send_time
-            .observe(time.elapsed());
-        Ok(())
-    }
-
-    /// Persists job execution error to database
-    async fn save_failure(&self, job_id: u32, error: String) -> anyhow::Result<()> {
-        self.connection_pool
-            .connection()
-            .await
-            .context("failed to get db connection")?
-            .fri_prover_jobs_dal()
-            .save_proof_error(job_id, error)
-            .await;
-        Ok(())
-    }
-
-    /// Backs off, whilst being cancellation aware.
-    async fn backoff(&self, backoff: &mut Backoff, cancellation_token: CancellationToken) {
-        let backoff_duration = backoff.delay();
-        tracing::info!("Backing off for {:?}...", backoff_duration);
-        // Error here corresponds to a timeout w/o receiving task cancel; we're OK with this.
-        tokio::time::timeout(backoff_duration, cancellation_token.cancelled())
-            .await
-            .ok();
-    }
-}
diff --git a/prover/crates/bin/prover_autoscaler/README.md b/prover/crates/bin/prover_autoscaler/README.md
new file mode 100644
index 000000000000..3d1a9afe5a30
--- /dev/null
+++ b/prover/crates/bin/prover_autoscaler/README.md
@@ -0,0 +1,237 @@
+# Prover Autoscaler
+
+Prover Autoscaler automatically scales Prover-related Kubernetes Deployments according to the load, preferring the
+cluster with the best chance of getting Nodes to run on. If a cluster runs out of resources, it moves the load to the
+next one.
+
+## Design
+
+Prover Autoscaler consists of the main Scaler part and an Agent running in each cluster.
+
+### Agent
+
+Agents watch the status of Deployments, Pods, and out-of-resources Events via the Kubernetes API, and perform scaling
+on request from the Scaler. They watch only the namespaces specified in the config. An Agent listens on 2 ports:
+`prometheus_port` to export metrics (the path is `/metrics`), and `http_port` with 3 paths: `/healthz`, `/cluster` to
+get the cluster status, and `/scale` to scale Deployments up or down.
+
+### Scaler
+
+Scaler collects cluster statuses from Agents and job queues from prover-job-monitor, calculates the needed number of
+replicas, and sends scale requests to Agents.
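+
+The needed-replica calculation itself is simple; a sketch (illustrative only and reduced to a single cluster — the
+real Scaler also weighs GPU types, cluster priorities, and long-pending Pods, as described in the algorithm section
+below):
+
+```rust
+/// Simplified replica calculation: `queue` is the job count reported by
+/// prover-job-monitor, `speed` is the per-replica divider from the config.
+fn target_replicas(queue: u64, speed: u64, min_replicas: u64, max_replicas: u64) -> u64 {
+    // Round up: any non-zero remainder still needs one more replica.
+    queue.div_ceil(speed).clamp(min_replicas, max_replicas)
+}
+
+fn main() {
+    assert_eq!(target_replicas(42, 10, 1, 20), 5); // 42 jobs at speed 10 -> 5 replicas
+    assert_eq!(target_replicas(0, 10, 1, 20), 1); // an empty queue still keeps the minimum
+}
+```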
+
+Requests flow diagram:
+
+```mermaid
+sequenceDiagram
+    participant prover-job-monitor
+    participant Scaler
+    box cluster1
+    participant Agent1
+    participant K8s API1
+    end
+    box cluster2
+    participant Agent2
+    participant K8s API2
+    end
+    loop Watch
+        Agent1->>K8s API1: Watch namespaces
+    end
+    loop Watch
+        Agent2->>K8s API2: Watch namespaces
+    end
+    loop Recalculate
+        Scaler->>prover-job-monitor: /report
+        Scaler->>Agent1: /cluster
+        Scaler->>Agent2: /cluster
+        Scaler->>Agent1: /scale
+    end
+```
+
+Scaler supports 2 types of scaling algorithms: GPU and Simple. The GPU algorithm is usually used for the prover
+itself, while all other Deployments use the Simple algorithm.
+
+The Simple algorithm tries to scale the Deployment up to `queue / speed` replicas (rounded up) in the best cluster. If
+there is not enough capacity, it continues in the next best cluster, and so on. On each run it selects the "best
+cluster" using priority, number of capacity issues, and cluster size. The capacity is limited by config (`max_provers`
+or `max_replicas`) and also by the availability of machines in the cluster. Autoscaler detects that a cluster is
+running out of particular machines by watching for `FailedScaleUp` events and by checking whether a Pod has been stuck
+in Pending for longer than `long_pending_duration`. If insufficient capacity is detected, Pods that are not running
+will be moved to another cluster.
+
+The GPU algorithm works similarly to the Simple one, but it also recognises different GPU types and distributes the
+load across L4 GPUs first, then T4, V100, P100, and A100, if available.
+
+Different namespaces run different protocol versions and are completely independent. Normally only one namespace is
+active; both are active only during a protocol upgrade. Each namespace has to have the correct version of the binaries
+installed, see the `protocol_versions` config option.
+
+## Dependencies
+
+- [prover-job-monitor](.../prover_job_monitor/)
+- Kubernetes API
+- GCP API (optional)
+
+## Permissions
+
+Agents need the following Kubernetes permissions:
+
+```yaml
+- apiGroups:
+    - ''
+  resources:
+    - pods
+    - events
+    - namespaces
+    - nodes
+  verbs:
+    - get
+    - watch
+    - list
+- apiGroups:
+    - apps
+  resources:
+    - deployments
+    - replicasets
+  verbs:
+    - get
+    - list
+    - watch
+    - patch
+    - update
+```
+
+## Configuration
+
+Prover Autoscaler requires a config file provided via the `--config-path` flag; the supported format is YAML. You also
+need to specify which job to run, Scaler or Agent, using the `--job=scaler` or `--job=agent` flag respectively.
+
+### Common configuration
+
+- `graceful_shutdown_timeout` is the time to wait for all tasks to finish before forcing shutdown. Default: 5s.
+- `observability` section configures the `log_format` (`plain` or `json`) and per-module log levels with
+  `log_directives`.
+
+Example:
+
+```yaml
+graceful_shutdown_timeout: 5s
+observability:
+  log_format: plain
+  log_directives: 'zksync_prover_autoscaler=debug'
+```
+
+### Agent configuration
+
+`agent_config` section configures Agent parameters (a sketch of the scale operation itself follows the list):
+
+- `prometheus_port` is the port on which Prometheus metrics are served (the path is `/metrics`).
+- `http_port` is the main port for the Scaler to connect to.
+- `namespaces` is the list of namespaces to watch.
+- `dry_run` if enabled, the Agent will not change the number of replicas, just report success. Default: true.
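+
+For intuition, a scale request ultimately boils down to patching a Deployment's replica count through the Kubernetes
+API. A minimal sketch of such an operation (illustrative only, using the `kube` and `k8s-openapi` crates; the real
+Agent additionally handles `dry_run`, watches, and metrics):
+
+```rust
+use k8s_openapi::api::apps::v1::Deployment;
+use kube::{
+    api::{Api, Patch, PatchParams},
+    Client,
+};
+use serde_json::json;
+
+/// Patches a Deployment's replica count, as an Agent's `/scale` handler
+/// conceptually does.
+async fn scale_deployment(
+    client: Client,
+    namespace: &str,
+    deployment: &str,
+    replicas: i32,
+) -> anyhow::Result<()> {
+    let deployments: Api<Deployment> = Api::namespaced(client, namespace);
+    let patch = json!({ "spec": { "replicas": replicas } });
+    deployments
+        .patch(deployment, &PatchParams::default(), &Patch::Merge(&patch))
+        .await?;
+    Ok(())
+}
+```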
+
+Example:
+
+```yaml
+agent_config:
+  prometheus_port: 8080
+  http_port: 8081
+  namespaces:
+    - prover-old
+    - prover-new
+  dry_run: true
+```
+
+### Scaler configuration
+
+`scaler_config` section configures Scaler parameters:
+
+- `dry_run` if enabled, the Scaler will not send any scale requests. Default: false.
+- `prometheus_port` is the port on which Prometheus metrics are served (the path is `/metrics`).
+- `prover_job_monitor_url` is the full URL used to get the queue report from prover-job-monitor.
+- `agents` is the list of Agents to send requests to.
+- `scaler_run_interval` is the interval between re-calculations. Default: 10s.
+- `protocol_versions` maps namespaces to the protocol version each one processes. It must correspond to the binary
+  versions running there!
+- `cluster_priorities` maps cluster names to priorities; lower priorities are used first.
+- `apply_min_to_namespace` specifies the current primary namespace; the configured minimum numbers of provers/replicas
+  apply only to it.
+- `min_provers` is the minimum number of provers to run even if the queue is empty. Default: 0.
+- `max_provers` is a map from cluster name to a map from GPU type to the maximum number of provers.
+- `prover_speed` maps GPU types to speed dividers. Default: 500.
+- `long_pending_duration` is the time after which a pending Pod is considered long-pending and will be relocated to a
+  different cluster. Default: 10m.
+- `scaler_targets` subsection is a list of Simple targets:
+  - `queue_report_field` is the name of the corresponding queue report section. See the example for possible options.
+  - `deployment` is the name of the Deployment to scale.
+  - `min_replicas` is the minimum number of replicas to run even if the queue is empty. Default: 0.
+  - `max_replicas` is a map from cluster name to the maximum number of replicas.
+  - `speed` is the divider for the corresponding queue.
+
+Example:
+
+```yaml
+scaler_config:
+  dry_run: true
+  prometheus_port: 8082
+  prover_job_monitor_url: http://prover-job-monitor.default.svc.cluster.local:3074/queue_report
+  agents:
+    - http://prover-autoscaler-agent.cluster1.com
+    - http://prover-autoscaler-agent.cluster2.com
+    - http://prover-autoscaler-agent.cluster3.com
+  scaler_run_interval: 30s
+  protocol_versions:
+    prover-old: 0.24.2
+    prover-new: 0.25.0
+  cluster_priorities:
+    cluster1: 0
+    cluster2: 100
+    cluster3: 200
+  apply_min_to_namespace: prover-new
+  min_provers: 1
+  max_provers:
+    cluster1:
+      L4: 1
+      T4: 200
+    cluster2:
+      L4: 100
+      T4: 200
+    cluster3:
+      L4: 100
+      T4: 100
+  prover_speed:
+    L4: 500
+    T4: 400
+  long_pending_duration: 10m
+  scaler_targets:
+    - queue_report_field: basic_witness_jobs
+      deployment: witness-generator-basic-fri
+      min_replicas: 1
+      max_replicas:
+        cluster1: 10
+        cluster2: 20
+      speed: 10
+    - queue_report_field: leaf_witness_jobs
+      deployment: witness-generator-leaf-fri
+      max_replicas:
+        cluster1: 10
+      speed: 10
+    - queue_report_field: node_witness_jobs
+      deployment: witness-generator-node-fri
+      max_replicas:
+        cluster1: 10
+      speed: 10
+    - queue_report_field: recursion_tip_witness_jobs
+      deployment: witness-generator-recursion-tip-fri
+      max_replicas:
+        cluster1: 10
+      speed: 10
+    - queue_report_field: scheduler_witness_jobs
+      deployment: witness-generator-scheduler-fri
+      max_replicas:
+        cluster1: 10
+      speed: 10
+    - queue_report_field: proof_compressor_jobs
+      deployment: proof-fri-gpu-compressor
+      max_replicas:
+        cluster1: 10
+        cluster2: 10
+      speed: 5
+```
diff --git a/prover/crates/bin/prover_autoscaler/src/config.rs b/prover/crates/bin/prover_autoscaler/src/config.rs
index 777ffe89fc91..ff3bccf79c83 100644
--- a/prover/crates/bin/prover_autoscaler/src/config.rs
+++ 
b/prover/crates/bin/prover_autoscaler/src/config.rs @@ -59,8 +59,11 @@ pub struct ProverAutoscalerScalerConfig { pub prover_speed: HashMap, /// Maximum number of provers which can be run per cluster/GPU. pub max_provers: HashMap>, - /// Minimum number of provers per namespace. - pub min_provers: HashMap, + /// Minimum number of provers globally. + #[serde(default)] + pub min_provers: u32, + /// Name of primary namespace, all min numbers are applied to it. + pub apply_min_to_namespace: Option, /// Duration after which pending pod considered long pending. #[serde( with = "humantime_serde", @@ -132,6 +135,9 @@ pub enum QueueReportFields { pub struct ScalerTarget { pub queue_report_field: QueueReportFields, pub deployment: String, + /// Min replicas globally. + #[serde(default)] + pub min_replicas: usize, /// Max replicas per cluster. pub max_replicas: HashMap, /// The queue will be divided by the speed and rounded up to get number of replicas. diff --git a/prover/crates/bin/prover_autoscaler/src/global/scaler.rs b/prover/crates/bin/prover_autoscaler/src/global/scaler.rs index 829b95dd7514..074da383b740 100644 --- a/prover/crates/bin/prover_autoscaler/src/global/scaler.rs +++ b/prover/crates/bin/prover_autoscaler/src/global/scaler.rs @@ -73,7 +73,8 @@ pub struct Scaler { pub struct GpuScaler { /// Which cluster to use first. cluster_priorities: HashMap, - min_provers: HashMap, + apply_min_to_namespace: Option, + min_provers: u32, max_provers: HashMap>, prover_speed: HashMap, long_pending_duration: chrono::Duration, @@ -84,6 +85,8 @@ pub struct SimpleScaler { deployment: String, /// Which cluster to use first. cluster_priorities: HashMap, + apply_min_to_namespace: Option, + min_replicas: usize, max_replicas: HashMap, speed: usize, long_pending_duration: chrono::Duration, @@ -126,6 +129,7 @@ impl Scaler { simple_scalers.push(SimpleScaler::new( c, config.cluster_priorities.clone(), + config.apply_min_to_namespace.clone(), chrono::Duration::seconds(config.long_pending_duration.as_secs() as i64), )) } @@ -144,6 +148,7 @@ impl GpuScaler { pub fn new(config: ProverAutoscalerScalerConfig) -> Self { Self { cluster_priorities: config.cluster_priorities, + apply_min_to_namespace: config.apply_min_to_namespace, min_provers: config.min_provers, max_provers: config.max_provers, prover_speed: config.prover_speed, @@ -287,10 +292,12 @@ impl GpuScaler { // Increase queue size, if it's too small, to make sure that required min_provers are // running. - let queue: u64 = self.min_provers.get(namespace).map_or(queue, |min| { + let queue: u64 = if self.apply_min_to_namespace.as_deref() == Some(namespace.as_str()) { self.normalize_queue(Gpu::L4, queue) - .max(self.provers_to_speed(Gpu::L4, *min)) - }); + .max(self.provers_to_speed(Gpu::L4, self.min_provers)) + } else { + queue + }; let mut total: i64 = 0; let mut provers: HashMap = HashMap::new(); @@ -424,12 +431,15 @@ impl SimpleScaler { pub fn new( config: &ScalerTarget, cluster_priorities: HashMap, + apply_min_to_namespace: Option, long_pending_duration: chrono::Duration, ) -> Self { Self { queue_report_field: config.queue_report_field, deployment: config.deployment.clone(), cluster_priorities, + apply_min_to_namespace, + min_replicas: config.min_replicas, max_replicas: config.max_replicas.clone(), speed: config.speed, long_pending_duration, @@ -521,6 +531,15 @@ impl SimpleScaler { &sorted_clusters ); + // Increase queue size, if it's too small, to make sure that required min_provers are + // running. 
+ let queue: u64 = if self.apply_min_to_namespace.as_deref() == Some(namespace.as_str()) { + self.normalize_queue(queue) + .max(self.pods_to_speed(self.min_replicas)) + } else { + queue + }; + let mut total: i64 = 0; let mut pods: HashMap = HashMap::new(); for cluster in &sorted_clusters { @@ -719,7 +738,8 @@ mod tests { fn test_run() { let scaler = GpuScaler::new(ProverAutoscalerScalerConfig { cluster_priorities: [("foo".into(), 0), ("bar".into(), 10)].into(), - min_provers: [("prover-other".into(), 2)].into(), + apply_min_to_namespace: Some("prover-other".into()), + min_provers: 2, max_provers: [ ("foo".into(), [(Gpu::L4, 100)].into()), ("bar".into(), [(Gpu::L4, 100)].into()), @@ -857,7 +877,8 @@ mod tests { fn test_run_min_provers() { let scaler = GpuScaler::new(ProverAutoscalerScalerConfig { cluster_priorities: [("foo".into(), 0), ("bar".into(), 10)].into(), - min_provers: [("prover".into(), 2)].into(), + apply_min_to_namespace: Some("prover".into()), + min_provers: 2, max_provers: [ ("foo".into(), [(Gpu::L4, 100)].into()), ("bar".into(), [(Gpu::L4, 100)].into()), @@ -1052,7 +1073,8 @@ mod tests { fn test_run_need_move() { let scaler = GpuScaler::new(ProverAutoscalerScalerConfig { cluster_priorities: [("foo".into(), 0), ("bar".into(), 10)].into(), - min_provers: [("prover".into(), 2)].into(), + apply_min_to_namespace: Some("prover".into()), + min_provers: 2, max_provers: [ ("foo".into(), [(Gpu::L4, 100)].into()), ("bar".into(), [(Gpu::L4, 100)].into()), diff --git a/prover/crates/lib/circuit_prover_service/Cargo.toml b/prover/crates/lib/circuit_prover_service/Cargo.toml new file mode 100644 index 000000000000..ca7d1ede02f1 --- /dev/null +++ b/prover/crates/lib/circuit_prover_service/Cargo.toml @@ -0,0 +1,31 @@ +[package] +name = "zksync_circuit_prover_service" +description = "ZKsync circuit prover service implementation" +version.workspace = true +edition.workspace = true +authors.workspace = true +homepage.workspace = true +repository.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true + +[dependencies] +zksync_prover_job_processor.workspace = true +zksync_prover_fri_types.workspace = true +zksync_prover_keystore.workspace = true +zksync_prover_dal.workspace = true +zksync_types.workspace = true +zksync_object_store.workspace = true + +async-trait.workspace = true +anyhow.workspace = true +tokio = { workspace = true, features = ["macros", "time"] } +tokio-util.workspace = true +tracing.workspace = true + +shivini = { workspace = true, features = [ + "circuit_definitions", +] } +zkevm_test_harness.workspace = true +vise.workspace = true diff --git a/prover/crates/lib/circuit_prover_service/README.md b/prover/crates/lib/circuit_prover_service/README.md new file mode 100644 index 000000000000..3cc8a80e966d --- /dev/null +++ b/prover/crates/lib/circuit_prover_service/README.md @@ -0,0 +1,96 @@ +# Circuit Prover Service + +This crate provides the building blocks for running circuit provers. Circuit proving is the heaviest part of the proving +process, being both the most time intensive and resource heavy part. + +The primitives exported by this lib are job runners, namely: + +- light_wvg_runner +- heavy_wvg_runner +- circuit_prover_runner + +The rest of the codebase simply covers the internals of creating a runner, which is an implementation of +`ProverJobProcessor`. + +## Witness Vector Generator Runner + +Runners related to synthesizing Witness Vector (the CPU heavy part of circuit proving). 
They are tied to the
+`prover_jobs_fri` table and operate over the `ProverJobsFri` object storage bucket.
+
+Witness Vector Generators vary widely in resource usage. Node proofs are the heavy jobs (~9GB RAM), whilst all others
+are rather light (~2GB RAM).
+
+There are 2 ways to deal with this:
+
+1. run `leftover RAM / 9` jobs, which under-utilizes RAM but simplifies the implementation
+2. run multiple light WVG jobs alongside a small number of heavy WVG jobs.
+
+This implementation favors option 2. As such, the `MetadataLoader` abstraction was introduced to force loading either
+lighter or heavier jobs. The heavy loader prioritizes node jobs; if none are available, it falls back to light jobs in
+order to maximize utilization.
+
+### Job Picker
+
+Interacts with the database to get a job (as described above), loads the data from the object store and then hydrates
+the circuit. In the current implementation, RAM Permutation circuits are sent separately in order to save RAM in basic
+witness generation & reduce the amount of storage used by the object store. A further optimization will be introduced
+later on, which will remove the need for witness hydration on circuits.
+
+### Executor
+
+Straightforward: synthesizes the witness vector from the circuit.
+
+### Job Saver
+
+On success, provides the data to the GPU circuit prover over a channel. On failure, marks the job as failed in the
+database, so that it can later be retried (as marked by Prover Job Monitor).
+
+## GPU Circuit Prover
+
+Runners related to generating & verifying the circuit proof. They are tied to the `prover_jobs_fri` table and operate
+over the `ProverJobs` object storage bucket.
+
+### Job Picker
+
+Waits on information from (multiple) WVGs sent via a channel.
+
+### Executor
+
+Generates & verifies the circuit proof (on GPU).
+
+### Job Saver
+
+Persists information back to the `prover_jobs_fri` table. Note that a job is picked by a WVG & finished by a CP.
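+
+### Putting it together
+
+For orientation, a minimal wiring sketch based on the signatures in `job_runner.rs`. The counts, channel capacity and
+local variable names are illustrative; the pools, caches and prover context are assumed to be built by the caller:
+
+```rust
+// Channel over which WVGs hand (witness vector, metadata) pairs to the prover.
+let (sender, receiver) = tokio::sync::mpsc::channel(5);
+let cancellation_token = CancellationToken::new();
+
+let builder = WvgRunnerBuilder::new(
+    connection_pool.clone(),
+    object_store.clone(),
+    protocol_version,
+    finalization_hints_cache,
+    sender,
+    cancellation_token.clone(),
+);
+// Light WVGs keep the GPU fed; the heavy one picks node jobs when available.
+let light_wvg_runner = builder.light_wvg_runner(2);
+let heavy_wvg_runner = builder.heavy_wvg_runner(1);
+
+// A single GPU prover consumes witness vectors from `receiver`.
+let prover_runner = circuit_prover_runner(
+    connection_pool,
+    object_store,
+    protocol_version,
+    setup_data_cache,
+    receiver,
+    prover_context,
+);
+```
+
+The channel is the only coupling between the WVG runners and the prover runner, which allows the number of WVGs to be
+tuned independently of the (single) GPU prover.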
+ +## Diagram + +```mermaid +sequenceDiagram + box Resources + participant db as Database + participant os as Object Store + end + box Heavy/Light Witness Vector Generator + participant wvg_p as Job Picker + participant wvg_e as Executor + participant wvg_s as Job Saver + end + box Circuit Prover + participant cp_p as Job Picker + participant cp_e as Executor + participant cp_s as Job Saver + end + wvg_p-->>db: Get job metadata + wvg_p-->>os: Get circuit + wvg_p-->>wvg_p: Hydrate circuit & get finalization hints + wvg_p-->>wvg_e: Provide metadata & circuit + wvg_e-->>wvg_e: Synthesize witness vector + wvg_e-->>wvg_s: Provide metadata & witness vector & circuit + wvg_s-->>cp_p: Provide metadata & witness vector & circuit + cp_p-->>cp_p: Get setup data + cp_p-->>cp_e: Provide metadata & witness vector & circuit + cp_e-->>cp_e: Prove & verify circuit proof + cp_e-->>cp_s: Provide metadata & proof + cp_s-->>os: Save proof + cp_s-->>db: Update job metadata +``` diff --git a/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_executor.rs b/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_executor.rs new file mode 100644 index 000000000000..043232a5003c --- /dev/null +++ b/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_executor.rs @@ -0,0 +1,73 @@ +use std::time::Instant; + +use anyhow::Context; +use shivini::ProverContext; +use zksync_prover_fri_types::FriProofWrapper; +use zksync_prover_job_processor::Executor; +use zksync_types::prover_dal::FriProverJobMetadata; + +use crate::{ + metrics::CIRCUIT_PROVER_METRICS, types::circuit_prover_payload::GpuCircuitProverPayload, +}; + +/// GpuCircuitProver executor implementation. +/// Generates circuit proof & verifies it. +/// NOTE: It requires prover context, which is the way Shivini allocates VRAM. 
+pub struct GpuCircuitProverExecutor { + _prover_context: ProverContext, +} + +impl GpuCircuitProverExecutor { + pub fn new(prover_context: ProverContext) -> Self { + Self { + _prover_context: prover_context, + } + } +} + +impl Executor for GpuCircuitProverExecutor { + type Input = GpuCircuitProverPayload; + type Output = FriProofWrapper; + type Metadata = FriProverJobMetadata; + + #[tracing::instrument( + name = "gpu_circuit_prover_executor", + skip_all, + fields(l1_batch = % metadata.block_number) + )] + fn execute( + &self, + input: Self::Input, + metadata: Self::Metadata, + ) -> anyhow::Result { + let start_time = Instant::now(); + tracing::info!( + "Started executing gpu circuit prover job {}, on batch {}, for circuit {}, at round {}", + metadata.id, + metadata.block_number, + metadata.circuit_id, + metadata.aggregation_round + ); + let GpuCircuitProverPayload { + circuit, + witness_vector, + setup_data, + } = input; + + let proof_wrapper = circuit + .prove(witness_vector, setup_data) + .context("failed to gpu prove circuit")?; + tracing::info!( + "Finished executing gpu circuit prover job {}, on batch {}, for circuit {}, at round {} after {:?}", + metadata.id, + metadata.block_number, + metadata.circuit_id, + metadata.aggregation_round, + start_time.elapsed() + ); + CIRCUIT_PROVER_METRICS + .prove_and_verify_time + .observe(start_time.elapsed()); + Ok(proof_wrapper) + } +} diff --git a/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_job_picker.rs b/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_job_picker.rs new file mode 100644 index 000000000000..76dc0cda66d3 --- /dev/null +++ b/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_job_picker.rs @@ -0,0 +1,92 @@ +use std::{collections::HashMap, sync::Arc, time::Instant}; + +use anyhow::Context; +use async_trait::async_trait; +use zksync_prover_fri_types::ProverServiceDataKey; +use zksync_prover_job_processor::JobPicker; +use zksync_prover_keystore::GoldilocksGpuProverSetupData; +use zksync_types::prover_dal::FriProverJobMetadata; + +use crate::{ + gpu_circuit_prover::GpuCircuitProverExecutor, + metrics::CIRCUIT_PROVER_METRICS, + types::{ + circuit_prover_payload::GpuCircuitProverPayload, + witness_vector_generator_execution_output::WitnessVectorGeneratorExecutionOutput, + }, +}; + +/// GpuCircuitProver job picker implementation. +/// Retrieves job & data from WVG job saver. 
+#[derive(Debug)] +pub struct GpuCircuitProverJobPicker { + receiver: + tokio::sync::mpsc::Receiver<(WitnessVectorGeneratorExecutionOutput, FriProverJobMetadata)>, + setup_data_cache: HashMap>, +} + +impl GpuCircuitProverJobPicker { + pub fn new( + receiver: tokio::sync::mpsc::Receiver<( + WitnessVectorGeneratorExecutionOutput, + FriProverJobMetadata, + )>, + setup_data_cache: HashMap>, + ) -> Self { + Self { + receiver, + setup_data_cache, + } + } +} + +#[async_trait] +impl JobPicker for GpuCircuitProverJobPicker { + type ExecutorType = GpuCircuitProverExecutor; + + async fn pick_job( + &mut self, + ) -> anyhow::Result> { + let start_time = Instant::now(); + tracing::info!("Started picking gpu circuit prover job"); + + let (wvg_output, metadata) = self + .receiver + .recv() + .await + .context("no witness vector generators are available, stopping...")?; + let WitnessVectorGeneratorExecutionOutput { + circuit, + witness_vector, + } = wvg_output; + + let key = ProverServiceDataKey { + circuit_id: metadata.circuit_id, + round: metadata.aggregation_round, + } + .crypto_setup_key(); + let setup_data = self + .setup_data_cache + .get(&key) + .context("failed to retrieve setup data from cache")? + .clone(); + + let payload = GpuCircuitProverPayload { + circuit, + witness_vector, + setup_data, + }; + tracing::info!( + "Finished picking gpu circuit prover job {}, on batch {}, for circuit {}, at round {} in {:?}", + metadata.id, + metadata.block_number, + metadata.circuit_id, + metadata.aggregation_round, + start_time.elapsed() + ); + CIRCUIT_PROVER_METRICS + .load_time + .observe(start_time.elapsed()); + Ok(Some((payload, metadata))) + } +} diff --git a/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_job_saver.rs b/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_job_saver.rs new file mode 100644 index 000000000000..0ba28a0d9f5a --- /dev/null +++ b/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/gpu_circuit_prover_job_saver.rs @@ -0,0 +1,126 @@ +use std::{sync::Arc, time::Instant}; + +use anyhow::Context; +use async_trait::async_trait; +use zksync_object_store::ObjectStore; +use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; +use zksync_prover_fri_types::FriProofWrapper; +use zksync_prover_job_processor::JobSaver; +use zksync_types::{protocol_version::ProtocolSemanticVersion, prover_dal::FriProverJobMetadata}; + +use crate::{gpu_circuit_prover::GpuCircuitProverExecutor, metrics::CIRCUIT_PROVER_METRICS}; + +/// GpuCircuitProver job saver implementation. +/// Persists the job execution to database. In case of success, artifacts are uploaded to object store. 
+#[derive(Debug)] +pub struct GpuCircuitProverJobSaver { + connection_pool: ConnectionPool, + object_store: Arc, + protocol_version: ProtocolSemanticVersion, +} + +impl GpuCircuitProverJobSaver { + pub fn new( + connection_pool: ConnectionPool, + object_store: Arc, + protocol_version: ProtocolSemanticVersion, + ) -> Self { + Self { + connection_pool, + object_store, + protocol_version, + } + } +} + +#[async_trait] +impl JobSaver for GpuCircuitProverJobSaver { + type ExecutorType = GpuCircuitProverExecutor; + + #[tracing::instrument( + name = "gpu_circuit_prover_job_saver", + skip_all, + fields(l1_batch = % data.1.block_number) + )] + async fn save_job_result( + &self, + data: (anyhow::Result, FriProverJobMetadata), + ) -> anyhow::Result<()> { + let start_time = Instant::now(); + let (result, metadata) = data; + tracing::info!( + "Started saving gpu circuit prover job {}, on batch {}, for circuit {}, at round {}", + metadata.id, + metadata.block_number, + metadata.circuit_id, + metadata.aggregation_round + ); + + match result { + Ok(proof_wrapper) => { + let mut connection = self + .connection_pool + .connection() + .await + .context("failed to get db connection")?; + + let is_scheduler_proof = metadata.is_scheduler_proof()?; + + let blob_url = self + .object_store + .put(metadata.id, &proof_wrapper) + .await + .context("failed to upload to object store")?; + + let mut transaction = connection + .start_transaction() + .await + .context("failed to start db transaction")?; + transaction + .fri_prover_jobs_dal() + .save_proof(metadata.id, metadata.pick_time.elapsed(), &blob_url) + .await; + if is_scheduler_proof { + transaction + .fri_proof_compressor_dal() + .insert_proof_compression_job( + metadata.block_number, + &blob_url, + self.protocol_version, + ) + .await; + } + transaction + .commit() + .await + .context("failed to commit db transaction")?; + } + Err(error) => { + let error_message = error.to_string(); + tracing::error!("GPU circuit prover failed: {:?}", error_message); + self.connection_pool + .connection() + .await + .context("failed to get db connection")? 
+ .fri_prover_jobs_dal() + .save_proof_error(metadata.id, error_message) + .await; + } + }; + tracing::info!( + "Finished saving gpu circuit prover job {}, on batch {}, for circuit {}, at round {} after {:?}", + metadata.id, + metadata.block_number, + metadata.circuit_id, + metadata.aggregation_round, + start_time.elapsed() + ); + CIRCUIT_PROVER_METRICS + .save_time + .observe(start_time.elapsed()); + CIRCUIT_PROVER_METRICS + .full_time + .observe(metadata.pick_time.elapsed()); + Ok(()) + } +} diff --git a/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/mod.rs b/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/mod.rs new file mode 100644 index 000000000000..7dff12aa2cc6 --- /dev/null +++ b/prover/crates/lib/circuit_prover_service/src/gpu_circuit_prover/mod.rs @@ -0,0 +1,8 @@ +pub use gpu_circuit_prover_executor::GpuCircuitProverExecutor; +pub use gpu_circuit_prover_job_picker::GpuCircuitProverJobPicker; +pub use gpu_circuit_prover_job_saver::GpuCircuitProverJobSaver; + +mod gpu_circuit_prover_executor; + +mod gpu_circuit_prover_job_picker; +mod gpu_circuit_prover_job_saver; diff --git a/prover/crates/lib/circuit_prover_service/src/job_runner.rs b/prover/crates/lib/circuit_prover_service/src/job_runner.rs new file mode 100644 index 000000000000..2e102fd40e33 --- /dev/null +++ b/prover/crates/lib/circuit_prover_service/src/job_runner.rs @@ -0,0 +1,144 @@ +use std::{collections::HashMap, sync::Arc}; + +use shivini::ProverContext; +use tokio_util::sync::CancellationToken; +use zksync_object_store::ObjectStore; +use zksync_prover_dal::{ConnectionPool, Prover}; +use zksync_prover_fri_types::{ + circuit_definitions::boojum::cs::implementations::setup::FinalizationHintsForProver, + get_current_pod_name, ProverServiceDataKey, +}; +use zksync_prover_job_processor::{Backoff, BackoffAndCancellable, JobRunner}; +use zksync_prover_keystore::GoldilocksGpuProverSetupData; +use zksync_types::{protocol_version::ProtocolSemanticVersion, prover_dal::FriProverJobMetadata}; + +use crate::{ + gpu_circuit_prover::{ + GpuCircuitProverExecutor, GpuCircuitProverJobPicker, GpuCircuitProverJobSaver, + }, + types::witness_vector_generator_execution_output::WitnessVectorGeneratorExecutionOutput, + witness_vector_generator::{ + HeavyWitnessVectorMetadataLoader, LightWitnessVectorMetadataLoader, + WitnessVectorGeneratorExecutor, WitnessVectorGeneratorJobPicker, + WitnessVectorGeneratorJobSaver, WitnessVectorMetadataLoader, + }, +}; + +/// Convenience struct helping with building Witness Vector Generator runners. +#[derive(Debug)] +pub struct WvgRunnerBuilder { + connection_pool: ConnectionPool, + object_store: Arc, + protocol_version: ProtocolSemanticVersion, + finalization_hints_cache: HashMap>, + sender: + tokio::sync::mpsc::Sender<(WitnessVectorGeneratorExecutionOutput, FriProverJobMetadata)>, + cancellation_token: CancellationToken, + pod_name: String, +} + +impl WvgRunnerBuilder { + pub fn new( + connection_pool: ConnectionPool, + object_store: Arc, + protocol_version: ProtocolSemanticVersion, + finalization_hints_cache: HashMap>, + sender: tokio::sync::mpsc::Sender<( + WitnessVectorGeneratorExecutionOutput, + FriProverJobMetadata, + )>, + cancellation_token: CancellationToken, + ) -> Self { + Self { + connection_pool, + object_store, + protocol_version, + finalization_hints_cache, + sender, + cancellation_token, + pod_name: get_current_pod_name(), + } + } + + /// Witness Vector Generator runner implementation for light jobs. 
+ pub fn light_wvg_runner( + &self, + count: usize, + ) -> JobRunner< + WitnessVectorGeneratorExecutor, + WitnessVectorGeneratorJobPicker, + WitnessVectorGeneratorJobSaver, + > { + let metadata_loader = + LightWitnessVectorMetadataLoader::new(self.pod_name.clone(), self.protocol_version); + + self.wvg_runner(count, metadata_loader) + } + + /// Witness Vector Generator runner implementation that prioritizes heavy jobs over light jobs. + pub fn heavy_wvg_runner( + &self, + count: usize, + ) -> JobRunner< + WitnessVectorGeneratorExecutor, + WitnessVectorGeneratorJobPicker, + WitnessVectorGeneratorJobSaver, + > { + let metadata_loader = + HeavyWitnessVectorMetadataLoader::new(self.pod_name.clone(), self.protocol_version); + + self.wvg_runner(count, metadata_loader) + } + + /// Creates a Witness Vector Generator job runner with specified MetadataLoader. + /// The MetadataLoader makes the difference between heavy & light WVG runner. + fn wvg_runner( + &self, + count: usize, + metadata_loader: ML, + ) -> JobRunner< + WitnessVectorGeneratorExecutor, + WitnessVectorGeneratorJobPicker, + WitnessVectorGeneratorJobSaver, + > { + let executor = WitnessVectorGeneratorExecutor; + let job_picker = WitnessVectorGeneratorJobPicker::new( + self.connection_pool.clone(), + self.object_store.clone(), + self.finalization_hints_cache.clone(), + metadata_loader, + ); + let job_saver = + WitnessVectorGeneratorJobSaver::new(self.connection_pool.clone(), self.sender.clone()); + let backoff = Backoff::default(); + + JobRunner::new( + executor, + job_picker, + job_saver, + count, + Some(BackoffAndCancellable::new( + backoff, + self.cancellation_token.clone(), + )), + ) + } +} + +/// Circuit Prover runner implementation. +pub fn circuit_prover_runner( + connection_pool: ConnectionPool, + object_store: Arc, + protocol_version: ProtocolSemanticVersion, + setup_data_cache: HashMap>, + receiver: tokio::sync::mpsc::Receiver<( + WitnessVectorGeneratorExecutionOutput, + FriProverJobMetadata, + )>, + prover_context: ProverContext, +) -> JobRunner { + let executor = GpuCircuitProverExecutor::new(prover_context); + let job_picker = GpuCircuitProverJobPicker::new(receiver, setup_data_cache); + let job_saver = GpuCircuitProverJobSaver::new(connection_pool, object_store, protocol_version); + JobRunner::new(executor, job_picker, job_saver, 1, None) +} diff --git a/prover/crates/lib/circuit_prover_service/src/lib.rs b/prover/crates/lib/circuit_prover_service/src/lib.rs new file mode 100644 index 000000000000..0d7b146cc43b --- /dev/null +++ b/prover/crates/lib/circuit_prover_service/src/lib.rs @@ -0,0 +1,7 @@ +#![allow(incomplete_features)] // Crypto code uses generic const exprs +#![feature(generic_const_exprs)] +mod gpu_circuit_prover; +pub mod job_runner; +mod metrics; +mod types; +mod witness_vector_generator; diff --git a/prover/crates/lib/circuit_prover_service/src/metrics.rs b/prover/crates/lib/circuit_prover_service/src/metrics.rs new file mode 100644 index 000000000000..c102422c4771 --- /dev/null +++ b/prover/crates/lib/circuit_prover_service/src/metrics.rs @@ -0,0 +1,46 @@ +use std::time::Duration; + +use vise::{Buckets, Histogram, Metrics}; + +/// Metrics for witness vector generator execution +#[derive(Debug, Metrics)] +#[metrics(prefix = "witness_vector_generator")] +pub struct WitnessVectorGeneratorMetrics { + /// How long does it take to load witness vector inputs? + #[metrics(buckets = Buckets::LATENCIES)] + pub pick_time: Histogram, + /// How long does it take to synthesize witness vector? 
+    #[metrics(buckets = Buckets::LATENCIES)]
+    pub synthesize_time: Histogram<Duration>,
+    /// How long does it take to send witness vectors to gpu prover?
+    #[metrics(buckets = Buckets::LATENCIES)]
+    pub transfer_time: Histogram<Duration>,
+    /// How long does it take to save witness vector failure?
+    #[metrics(buckets = Buckets::LATENCIES)]
+    pub save_time: Histogram<Duration>,
+}
+
+#[vise::register]
+pub static WITNESS_VECTOR_GENERATOR_METRICS: vise::Global<WitnessVectorGeneratorMetrics> =
+    vise::Global::new();
+
+/// Metrics for GPU circuit prover execution
+#[derive(Debug, Metrics)]
+#[metrics(prefix = "circuit_prover")]
+pub struct CircuitProverMetrics {
+    /// How long does it take to load prover inputs?
+    #[metrics(buckets = Buckets::LATENCIES)]
+    pub load_time: Histogram<Duration>,
+    /// How long does it take to prove & verify?
+    #[metrics(buckets = Buckets::LATENCIES)]
+    pub prove_and_verify_time: Histogram<Duration>,
+    /// How long does it take to save prover results?
+    #[metrics(buckets = Buckets::LATENCIES)]
+    pub save_time: Histogram<Duration>,
+    /// How long does it take to finish a prover job from witness vector to circuit prover?
+    #[metrics(buckets = Buckets::LATENCIES)]
+    pub full_time: Histogram<Duration>,
+}
+
+#[vise::register]
+pub static CIRCUIT_PROVER_METRICS: vise::Global<CircuitProverMetrics> = vise::Global::new();
diff --git a/prover/crates/lib/circuit_prover_service/src/types/circuit.rs b/prover/crates/lib/circuit_prover_service/src/types/circuit.rs
new file mode 100644
index 000000000000..19c05666b2c5
--- /dev/null
+++ b/prover/crates/lib/circuit_prover_service/src/types/circuit.rs
@@ -0,0 +1,152 @@
+use std::sync::Arc;
+
+use anyhow::Context;
+use shivini::{gpu_proof_config::GpuProofConfig, gpu_prove_from_external_witness_data};
+use zkevm_test_harness::{
+    boojum::cs::implementations::setup::FinalizationHintsForProver,
+    prover_utils::{verify_base_layer_proof, verify_recursion_layer_proof},
+};
+use zksync_prover_fri_types::{
+    circuit_definitions::{
+        base_layer_proof_config,
+        boojum::{
+            algebraic_props::{
+                round_function::AbsorptionModeOverwrite, sponge::GoldilocksPoseidon2Sponge,
+            },
+            cs::implementations::{
+                pow::NoPow, proof::Proof as CryptoProof, transcript::GoldilocksPoisedon2Transcript,
+                witness::WitnessVec,
+            },
+            field::goldilocks::{GoldilocksExt2, GoldilocksField},
+            worker::Worker,
+        },
+        circuit_definitions::{
+            base_layer::{ZkSyncBaseLayerCircuit, ZkSyncBaseLayerProof},
+            recursion_layer::{ZkSyncRecursionLayerProof, ZkSyncRecursiveLayerCircuit},
+        },
+        recursion_layer_proof_config,
+    },
+    FriProofWrapper,
+};
+use zksync_prover_keystore::GoldilocksGpuProverSetupData;
+
+type Transcript = GoldilocksPoisedon2Transcript;
+type Field = GoldilocksField;
+type Hasher = GoldilocksPoseidon2Sponge<AbsorptionModeOverwrite>;
+type Extension = GoldilocksExt2;
+type Proof = CryptoProof<Field, Hasher, Extension>;
+
+/// Hydrated circuit.
+/// Circuits are currently dehydrated for memory and storage reasons.
+/// Circuits are hydrated on the fly where necessary.
+// TODO: This enum will be merged with CircuitWrapper once BWG changes are done.
+#[allow(clippy::large_enum_variant)]
+pub enum Circuit {
+    Base(ZkSyncBaseLayerCircuit),
+    Recursive(ZkSyncRecursiveLayerCircuit),
+}
+
+impl Circuit {
+    /// Generates proof for given witness vector.
+    /// Expects setup_data to match witness vector.
+ pub(crate) fn prove( + &self, + witness_vector: WitnessVec, + setup_data: Arc, + ) -> anyhow::Result { + let worker = Worker::new(); + + match self { + Circuit::Base(circuit) => { + let proof = Self::prove_base(circuit, witness_vector, setup_data, worker)?; + let circuit_id = circuit.numeric_circuit_type(); + Ok(FriProofWrapper::Base(ZkSyncBaseLayerProof::from_inner( + circuit_id, proof, + ))) + } + Circuit::Recursive(circuit) => { + let proof = Self::prove_recursive(circuit, witness_vector, setup_data, worker)?; + let circuit_id = circuit.numeric_circuit_type(); + Ok(FriProofWrapper::Recursive( + ZkSyncRecursionLayerProof::from_inner(circuit_id, proof), + )) + } + } + } + + /// Prove & verify base circuit. + fn prove_base( + circuit: &ZkSyncBaseLayerCircuit, + witness_vector: WitnessVec, + setup_data: Arc, + worker: Worker, + ) -> anyhow::Result { + let span = tracing::info_span!("prove_base_circuit").entered(); + let gpu_proof_config = GpuProofConfig::from_base_layer_circuit(circuit); + let boojum_proof_config = base_layer_proof_config(); + let proof = gpu_prove_from_external_witness_data::( + &gpu_proof_config, + &witness_vector, + boojum_proof_config, + &setup_data.setup, + &setup_data.vk, + (), + &worker, + ) + .context("failed to generate base proof")? + .into(); + drop(span); + let _span = tracing::info_span!("verify_base_circuit").entered(); + if !verify_base_layer_proof::(circuit, &proof, &setup_data.vk) { + return Err(anyhow::anyhow!("failed to verify base proof")); + } + Ok(proof) + } + + /// Prove & verify recursive circuit. + fn prove_recursive( + circuit: &ZkSyncRecursiveLayerCircuit, + witness_vector: WitnessVec, + setup_data: Arc, + worker: Worker, + ) -> anyhow::Result { + let span = tracing::info_span!("prove_recursive_circuit").entered(); + let gpu_proof_config = GpuProofConfig::from_recursive_layer_circuit(circuit); + let boojum_proof_config = recursion_layer_proof_config(); + let proof = gpu_prove_from_external_witness_data::( + &gpu_proof_config, + &witness_vector, + boojum_proof_config, + &setup_data.setup, + &setup_data.vk, + (), + &worker, + ) + .context("failed to generate recursive proof")? + .into(); + drop(span); + let _span = tracing::info_span!("verify_recursive_circuit").entered(); + if !verify_recursion_layer_proof::(circuit, &proof, &setup_data.vk) { + return Err(anyhow::anyhow!("failed to verify recursive proof")); + } + Ok(proof) + } + + /// Synthesize vector for a given circuit. + /// Expects finalization hints to match circuit. 
+ pub(crate) fn synthesize_vector( + &self, + finalization_hints: Arc, + ) -> anyhow::Result> { + let _span = tracing::info_span!("synthesize_vector").entered(); + + let cs = match self { + Circuit::Base(circuit) => circuit.synthesis::(&finalization_hints), + Circuit::Recursive(circuit) => { + circuit.synthesis::(&finalization_hints) + } + }; + cs.witness + .context("circuit is missing witness post synthesis") + } +} diff --git a/prover/crates/lib/circuit_prover_service/src/types/circuit_prover_payload.rs b/prover/crates/lib/circuit_prover_service/src/types/circuit_prover_payload.rs new file mode 100644 index 000000000000..925b7b318ccc --- /dev/null +++ b/prover/crates/lib/circuit_prover_service/src/types/circuit_prover_payload.rs @@ -0,0 +1,15 @@ +use std::sync::Arc; + +use zksync_prover_fri_types::circuit_definitions::boojum::{ + cs::implementations::witness::WitnessVec, field::goldilocks::GoldilocksField, +}; +use zksync_prover_keystore::GoldilocksGpuProverSetupData; + +use crate::types::circuit::Circuit; + +/// Payload used as input for GPU circuit prover. +pub struct GpuCircuitProverPayload { + pub circuit: Circuit, + pub witness_vector: WitnessVec, + pub setup_data: Arc, +} diff --git a/prover/crates/lib/circuit_prover_service/src/types/mod.rs b/prover/crates/lib/circuit_prover_service/src/types/mod.rs new file mode 100644 index 000000000000..cbbf0d885f7a --- /dev/null +++ b/prover/crates/lib/circuit_prover_service/src/types/mod.rs @@ -0,0 +1,4 @@ +pub mod circuit; +pub mod circuit_prover_payload; +pub mod witness_vector_generator_execution_output; +pub mod witness_vector_generator_payload; diff --git a/prover/crates/lib/circuit_prover_service/src/types/witness_vector_generator_execution_output.rs b/prover/crates/lib/circuit_prover_service/src/types/witness_vector_generator_execution_output.rs new file mode 100644 index 000000000000..593f825f8f99 --- /dev/null +++ b/prover/crates/lib/circuit_prover_service/src/types/witness_vector_generator_execution_output.rs @@ -0,0 +1,11 @@ +use zksync_prover_fri_types::circuit_definitions::boojum::{ + cs::implementations::witness::WitnessVec, field::goldilocks::GoldilocksField, +}; + +use crate::types::circuit::Circuit; + +/// Witness vector generator output. Used as input for GPU circuit provers. +pub struct WitnessVectorGeneratorExecutionOutput { + pub circuit: Circuit, + pub witness_vector: WitnessVec, +} diff --git a/prover/crates/lib/circuit_prover_service/src/types/witness_vector_generator_payload.rs b/prover/crates/lib/circuit_prover_service/src/types/witness_vector_generator_payload.rs new file mode 100644 index 000000000000..409e178ac61a --- /dev/null +++ b/prover/crates/lib/circuit_prover_service/src/types/witness_vector_generator_payload.rs @@ -0,0 +1,11 @@ +use std::sync::Arc; + +use zksync_prover_fri_types::circuit_definitions::boojum::cs::implementations::setup::FinalizationHintsForProver; + +use crate::types::circuit::Circuit; + +/// Payload used as input for Witness Vector Generator. 
+pub struct WitnessVectorGeneratorPayload {
+    pub circuit: Circuit,
+    pub finalization_hints: Arc<FinalizationHintsForProver>,
+}
diff --git a/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/mod.rs b/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/mod.rs
new file mode 100644
index 000000000000..d5b140dac94f
--- /dev/null
+++ b/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/mod.rs
@@ -0,0 +1,11 @@
+pub use witness_vector_generator_executor::WitnessVectorGeneratorExecutor;
+pub use witness_vector_generator_job_picker::WitnessVectorGeneratorJobPicker;
+pub use witness_vector_generator_job_saver::WitnessVectorGeneratorJobSaver;
+pub use witness_vector_generator_metadata_loader::{
+    HeavyWitnessVectorMetadataLoader, LightWitnessVectorMetadataLoader, WitnessVectorMetadataLoader,
+};
+
+mod witness_vector_generator_executor;
+mod witness_vector_generator_job_picker;
+mod witness_vector_generator_job_saver;
+mod witness_vector_generator_metadata_loader;
diff --git a/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_executor.rs b/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_executor.rs
new file mode 100644
index 000000000000..e9dd7e31fd63
--- /dev/null
+++ b/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_executor.rs
@@ -0,0 +1,66 @@
+use std::time::Instant;
+
+use anyhow::Context;
+use zksync_prover_job_processor::Executor;
+use zksync_types::prover_dal::FriProverJobMetadata;
+
+use crate::{
+    metrics::WITNESS_VECTOR_GENERATOR_METRICS,
+    types::{
+        witness_vector_generator_execution_output::WitnessVectorGeneratorExecutionOutput,
+        witness_vector_generator_payload::WitnessVectorGeneratorPayload,
+    },
+};
+
+/// WitnessVectorGenerator executor implementation.
+/// Synthesizes witness vectors to later be used in GPU circuit proving.
+#[derive(Debug)] +pub struct WitnessVectorGeneratorExecutor; + +impl Executor for WitnessVectorGeneratorExecutor { + type Input = WitnessVectorGeneratorPayload; + type Output = WitnessVectorGeneratorExecutionOutput; + type Metadata = FriProverJobMetadata; + + #[tracing::instrument( + name = "witness_vector_generator_executor", + skip_all, + fields(l1_batch = % metadata.block_number) + )] + fn execute( + &self, + input: Self::Input, + metadata: Self::Metadata, + ) -> anyhow::Result { + let start_time = Instant::now(); + tracing::info!( + "Started executing witness vector generator job {}, on batch {}, for circuit {}, at round {}", + metadata.id, + metadata.block_number, + metadata.circuit_id, + metadata.aggregation_round + ); + let WitnessVectorGeneratorPayload { + circuit, + finalization_hints, + } = input; + let witness_vector = circuit + .synthesize_vector(finalization_hints) + .context("failed to generate witness vector")?; + tracing::info!( + "Finished executing witness vector generator job {}, on batch {}, for circuit {}, at round {} in {:?}", + metadata.id, + metadata.block_number, + metadata.circuit_id, + metadata.aggregation_round, + start_time.elapsed() + ); + WITNESS_VECTOR_GENERATOR_METRICS + .synthesize_time + .observe(start_time.elapsed()); + Ok(WitnessVectorGeneratorExecutionOutput { + circuit, + witness_vector, + }) + } +} diff --git a/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_job_picker.rs b/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_job_picker.rs new file mode 100644 index 000000000000..76e0f151c7ca --- /dev/null +++ b/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_job_picker.rs @@ -0,0 +1,167 @@ +use std::{collections::HashMap, sync::Arc, time::Instant}; + +use anyhow::Context; +use async_trait::async_trait; +use zksync_object_store::ObjectStore; +use zksync_prover_dal::{ConnectionPool, Prover}; +use zksync_prover_fri_types::{ + circuit_definitions::{ + boojum::{ + cs::implementations::setup::FinalizationHintsForProver, + gadgets::queue::full_state_queue::FullStateCircuitQueueRawWitness, + }, + circuit_definitions::base_layer::ZkSyncBaseLayerCircuit, + }, + keys::RamPermutationQueueWitnessKey, + CircuitAuxData, CircuitWrapper, ProverServiceDataKey, RamPermutationQueueWitness, +}; +use zksync_prover_job_processor::JobPicker; +use zksync_types::{prover_dal::FriProverJobMetadata, L1BatchNumber}; + +use crate::{ + metrics::WITNESS_VECTOR_GENERATOR_METRICS, + types::{circuit::Circuit, witness_vector_generator_payload::WitnessVectorGeneratorPayload}, + witness_vector_generator::{ + witness_vector_generator_metadata_loader::WitnessVectorMetadataLoader, + WitnessVectorGeneratorExecutor, + }, +}; + +/// WitnessVectorGenerator job picker implementation. +/// Picks job from database (via MetadataLoader) and gets data from object store. +#[derive(Debug)] +pub struct WitnessVectorGeneratorJobPicker { + connection_pool: ConnectionPool, + object_store: Arc, + finalization_hints_cache: HashMap>, + metadata_loader: ML, +} + +impl WitnessVectorGeneratorJobPicker { + pub fn new( + connection_pool: ConnectionPool, + object_store: Arc, + finalization_hints_cache: HashMap>, + metadata_loader: ML, + ) -> Self { + Self { + connection_pool, + object_store, + finalization_hints_cache, + metadata_loader, + } + } + + /// Hydrates job data with witness information which is stored separately. + /// This is done in order to save RAM & storage. 
+ // TODO: Once new BWG is done, this won't be necessary. + async fn fill_witness( + &self, + circuit: ZkSyncBaseLayerCircuit, + aux_data: CircuitAuxData, + l1_batch_number: L1BatchNumber, + ) -> anyhow::Result { + if let ZkSyncBaseLayerCircuit::RAMPermutation(circuit_instance) = circuit { + let sorted_witness_key = RamPermutationQueueWitnessKey { + block_number: l1_batch_number, + circuit_subsequence_number: aux_data.circuit_subsequence_number as usize, + is_sorted: true, + }; + let sorted_witness: RamPermutationQueueWitness = self + .object_store + .get(sorted_witness_key) + .await + .context("failed to load sorted witness key")?; + + let unsorted_witness_key = RamPermutationQueueWitnessKey { + block_number: l1_batch_number, + circuit_subsequence_number: aux_data.circuit_subsequence_number as usize, + is_sorted: false, + }; + let unsorted_witness: RamPermutationQueueWitness = self + .object_store + .get(unsorted_witness_key) + .await + .context("failed to load unsorted witness key")?; + + let mut witness = circuit_instance.witness.take().unwrap(); + witness.unsorted_queue_witness = FullStateCircuitQueueRawWitness { + elements: unsorted_witness.witness.into(), + }; + witness.sorted_queue_witness = FullStateCircuitQueueRawWitness { + elements: sorted_witness.witness.into(), + }; + circuit_instance.witness.store(Some(witness)); + + return Ok(Circuit::Base(ZkSyncBaseLayerCircuit::RAMPermutation( + circuit_instance, + ))); + } + Err(anyhow::anyhow!( + "unexpected circuit received with partial witness, expected RAM permutation, got {:?}", + circuit.short_description() + )) + } +} + +#[async_trait] +impl JobPicker for WitnessVectorGeneratorJobPicker { + type ExecutorType = WitnessVectorGeneratorExecutor; + async fn pick_job( + &mut self, + ) -> anyhow::Result> { + let start_time = Instant::now(); + tracing::info!("Started picking witness vector generator job"); + let connection = self + .connection_pool + .connection() + .await + .context("failed to get db connection")?; + let metadata = match self.metadata_loader.load_metadata(connection).await { + None => return Ok(None), + Some(metadata) => metadata, + }; + + let circuit_wrapper = self + .object_store + .get(metadata.into()) + .await + .context("failed to get circuit_wrapper from object store")?; + let circuit = match circuit_wrapper { + CircuitWrapper::Base(circuit) => Circuit::Base(circuit), + CircuitWrapper::Recursive(circuit) => Circuit::Recursive(circuit), + CircuitWrapper::BasePartial((circuit, aux_data)) => self + .fill_witness(circuit, aux_data, metadata.block_number) + .await + .context("failed to fill witness")?, + }; + + let key = ProverServiceDataKey { + circuit_id: metadata.circuit_id, + round: metadata.aggregation_round, + } + .crypto_setup_key(); + let finalization_hints = self + .finalization_hints_cache + .get(&key) + .context("failed to retrieve finalization key from cache")? 
+ .clone(); + + let payload = WitnessVectorGeneratorPayload { + circuit, + finalization_hints, + }; + tracing::info!( + "Finished picking witness vector generator job {}, on batch {}, for circuit {}, at round {} in {:?}", + metadata.id, + metadata.block_number, + metadata.circuit_id, + metadata.aggregation_round, + start_time.elapsed() + ); + WITNESS_VECTOR_GENERATOR_METRICS + .pick_time + .observe(start_time.elapsed()); + Ok(Some((payload, metadata))) + } +} diff --git a/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_job_saver.rs b/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_job_saver.rs new file mode 100644 index 000000000000..86e04472b299 --- /dev/null +++ b/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_job_saver.rs @@ -0,0 +1,114 @@ +use std::time::Instant; + +use anyhow::Context; +use async_trait::async_trait; +use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; +use zksync_prover_job_processor::JobSaver; +use zksync_types::prover_dal::FriProverJobMetadata; + +use crate::{ + metrics::WITNESS_VECTOR_GENERATOR_METRICS, + types::witness_vector_generator_execution_output::WitnessVectorGeneratorExecutionOutput, + witness_vector_generator::WitnessVectorGeneratorExecutor, +}; + +/// WitnessVectorGenerator job saver implementation. +/// On successful execution, sends data further to gpu circuit prover. +/// On error, marks the job as failed in database. +#[derive(Debug)] +pub struct WitnessVectorGeneratorJobSaver { + connection_pool: ConnectionPool, + sender: + tokio::sync::mpsc::Sender<(WitnessVectorGeneratorExecutionOutput, FriProverJobMetadata)>, +} + +impl WitnessVectorGeneratorJobSaver { + pub fn new( + connection_pool: ConnectionPool, + sender: tokio::sync::mpsc::Sender<( + WitnessVectorGeneratorExecutionOutput, + FriProverJobMetadata, + )>, + ) -> Self { + Self { + connection_pool, + sender, + } + } +} + +#[async_trait] +impl JobSaver for WitnessVectorGeneratorJobSaver { + type ExecutorType = WitnessVectorGeneratorExecutor; + + #[tracing::instrument( + name = "witness_vector_generator_save_job", + skip_all, + fields(l1_batch = % data.1.block_number) + )] + async fn save_job_result( + &self, + data: ( + anyhow::Result, + FriProverJobMetadata, + ), + ) -> anyhow::Result<()> { + let start_time = Instant::now(); + let (result, metadata) = data; + match result { + Ok(payload) => { + tracing::info!( + "Started transferring witness vector generator job {}, on batch {}, for circuit {}, at round {}", + metadata.id, + metadata.block_number, + metadata.circuit_id, + metadata.aggregation_round + ); + if self.sender.send((payload, metadata)).await.is_err() { + tracing::warn!("circuit prover shut down prematurely"); + return Ok(()); + } + tracing::info!( + "Finished transferring witness vector generator job {}, on batch {}, for circuit {}, at round {} in {:?}", + metadata.id, + metadata.block_number, + metadata.circuit_id, + metadata.aggregation_round, + start_time.elapsed() + ); + WITNESS_VECTOR_GENERATOR_METRICS + .transfer_time + .observe(start_time.elapsed()); + } + Err(err) => { + tracing::error!("Witness vector generation failed: {:?}", err); + tracing::info!( + "Started saving failure for witness vector generator job {}, on batch {}, for circuit {}, at round {}", + metadata.id, + metadata.block_number, + metadata.circuit_id, + metadata.aggregation_round + ); + self.connection_pool + .connection() + .await + .context("failed to get db 
connection")? + .fri_prover_jobs_dal() + .save_proof_error(metadata.id, err.to_string()) + .await; + tracing::info!( + "Finished saving failure for witness vector generator job {}, on batch {}, for circuit {}, at round {} in {:?}", + metadata.id, + metadata.block_number, + metadata.circuit_id, + metadata.aggregation_round, + start_time.elapsed() + ); + WITNESS_VECTOR_GENERATOR_METRICS + .save_time + .observe(start_time.elapsed()); + } + } + Ok(()) + } +} diff --git a/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_metadata_loader.rs b/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_metadata_loader.rs new file mode 100644 index 000000000000..bb0b6ec6e94c --- /dev/null +++ b/prover/crates/lib/circuit_prover_service/src/witness_vector_generator/witness_vector_generator_metadata_loader.rs @@ -0,0 +1,83 @@ +use async_trait::async_trait; +use zksync_prover_dal::{Connection, Prover, ProverDal}; +use zksync_types::{protocol_version::ProtocolSemanticVersion, prover_dal::FriProverJobMetadata}; + +/// Trait responsible for describing the job loading interface. +/// This is necessary as multiple strategies are necessary for loading jobs (which require different implementations). +#[async_trait] +pub trait WitnessVectorMetadataLoader: Sync + Send + 'static { + async fn load_metadata( + &self, + connection: Connection<'_, Prover>, + ) -> Option; +} + +/// Light job MetadataLoader. +/// +/// Most jobs are light, apart from nodes. This loader will only pick non nodes jobs. +#[derive(Debug)] +pub struct LightWitnessVectorMetadataLoader { + pod_name: String, + protocol_version: ProtocolSemanticVersion, +} + +impl LightWitnessVectorMetadataLoader { + pub fn new(pod_name: String, protocol_version: ProtocolSemanticVersion) -> Self { + Self { + pod_name, + protocol_version, + } + } +} + +#[async_trait] +impl WitnessVectorMetadataLoader for LightWitnessVectorMetadataLoader { + async fn load_metadata( + &self, + mut connection: Connection<'_, Prover>, + ) -> Option { + connection + .fri_prover_jobs_dal() + .get_light_job(self.protocol_version, &self.pod_name) + .await + } +} + +/// Heavy job MetadataLoader. +/// +/// Most jobs are light, apart from nodes. This loader will only prioritize node jobs. +/// If none are available, it will fall back to light jobs. 
+#[derive(Debug)] +pub struct HeavyWitnessVectorMetadataLoader { + pod_name: String, + protocol_version: ProtocolSemanticVersion, +} + +impl HeavyWitnessVectorMetadataLoader { + pub fn new(pod_name: String, protocol_version: ProtocolSemanticVersion) -> Self { + Self { + pod_name, + protocol_version, + } + } +} + +#[async_trait] +impl WitnessVectorMetadataLoader for HeavyWitnessVectorMetadataLoader { + async fn load_metadata( + &self, + mut connection: Connection<'_, Prover>, + ) -> Option { + let metadata = connection + .fri_prover_jobs_dal() + .get_heavy_job(self.protocol_version, &self.pod_name) + .await; + if metadata.is_some() { + return metadata; + } + connection + .fri_prover_jobs_dal() + .get_light_job(self.protocol_version, &self.pod_name) + .await + } +} diff --git a/prover/crates/lib/prover_dal/.sqlx/query-3b3193bfac70b5fe69bf3bb7ba5a234c19578572973094b21ddbb3876da6bb95.json b/prover/crates/lib/prover_dal/.sqlx/query-4d89c375af2c211a8a896cad7c99d2c9ff0d28f4662913ef7c2cf6fa1aa430d4.json similarity index 65% rename from prover/crates/lib/prover_dal/.sqlx/query-3b3193bfac70b5fe69bf3bb7ba5a234c19578572973094b21ddbb3876da6bb95.json rename to prover/crates/lib/prover_dal/.sqlx/query-4d89c375af2c211a8a896cad7c99d2c9ff0d28f4662913ef7c2cf6fa1aa430d4.json index 962979344b4b..f84489dd6523 100644 --- a/prover/crates/lib/prover_dal/.sqlx/query-3b3193bfac70b5fe69bf3bb7ba5a234c19578572973094b21ddbb3876da6bb95.json +++ b/prover/crates/lib/prover_dal/.sqlx/query-4d89c375af2c211a8a896cad7c99d2c9ff0d28f4662913ef7c2cf6fa1aa430d4.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'in_progress',\n attempts = attempts + 1,\n updated_at = NOW(),\n processing_started_at = NOW(),\n picked_by = $3\n WHERE\n id = (\n SELECT\n id\n FROM\n prover_jobs_fri\n WHERE\n status = 'queued'\n AND protocol_version = $1\n AND protocol_version_patch = $2\n ORDER BY\n l1_batch_number ASC,\n aggregation_round ASC,\n circuit_id ASC,\n id ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n prover_jobs_fri.id,\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.circuit_id,\n prover_jobs_fri.aggregation_round,\n prover_jobs_fri.sequence_number,\n prover_jobs_fri.depth,\n prover_jobs_fri.is_node_final_proof\n ", + "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'in_progress',\n attempts = attempts + 1,\n updated_at = NOW(),\n processing_started_at = NOW(),\n picked_by = $3\n WHERE\n id = (\n SELECT\n id\n FROM\n prover_jobs_fri\n WHERE\n status = 'queued'\n AND protocol_version = $1\n AND protocol_version_patch = $2\n AND aggregation_round = $4\n ORDER BY\n l1_batch_number ASC,\n circuit_id ASC,\n id ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n prover_jobs_fri.id,\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.circuit_id,\n prover_jobs_fri.aggregation_round,\n prover_jobs_fri.sequence_number,\n prover_jobs_fri.depth,\n prover_jobs_fri.is_node_final_proof\n ", "describe": { "columns": [ { @@ -43,7 +43,8 @@ "Left": [ "Int4", "Int4", - "Text" + "Text", + "Int2" ] }, "nullable": [ @@ -56,5 +57,5 @@ false ] }, - "hash": "3b3193bfac70b5fe69bf3bb7ba5a234c19578572973094b21ddbb3876da6bb95" + "hash": "4d89c375af2c211a8a896cad7c99d2c9ff0d28f4662913ef7c2cf6fa1aa430d4" } diff --git a/prover/crates/lib/prover_dal/.sqlx/query-79b5ad4ef1ba888c3ffdb27cf2203367ae4cf57703c532fe3dfe18924c3c9492.json b/prover/crates/lib/prover_dal/.sqlx/query-79b5ad4ef1ba888c3ffdb27cf2203367ae4cf57703c532fe3dfe18924c3c9492.json new file mode 100644 index 
000000000000..d1db20fbdbea --- /dev/null +++ b/prover/crates/lib/prover_dal/.sqlx/query-79b5ad4ef1ba888c3ffdb27cf2203367ae4cf57703c532fe3dfe18924c3c9492.json @@ -0,0 +1,61 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE prover_jobs_fri\n SET\n status = 'in_progress',\n attempts = attempts + 1,\n updated_at = NOW(),\n processing_started_at = NOW(),\n picked_by = $3\n WHERE\n id = (\n SELECT\n id\n FROM\n prover_jobs_fri\n WHERE\n status = 'queued'\n AND protocol_version = $1\n AND protocol_version_patch = $2\n AND aggregation_round != $4\n ORDER BY\n l1_batch_number ASC,\n aggregation_round ASC,\n circuit_id ASC,\n id ASC\n LIMIT\n 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING\n prover_jobs_fri.id,\n prover_jobs_fri.l1_batch_number,\n prover_jobs_fri.circuit_id,\n prover_jobs_fri.aggregation_round,\n prover_jobs_fri.sequence_number,\n prover_jobs_fri.depth,\n prover_jobs_fri.is_node_final_proof\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "l1_batch_number", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "circuit_id", + "type_info": "Int2" + }, + { + "ordinal": 3, + "name": "aggregation_round", + "type_info": "Int2" + }, + { + "ordinal": 4, + "name": "sequence_number", + "type_info": "Int4" + }, + { + "ordinal": 5, + "name": "depth", + "type_info": "Int4" + }, + { + "ordinal": 6, + "name": "is_node_final_proof", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Int4", + "Int4", + "Text", + "Int2" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false + ] + }, + "hash": "79b5ad4ef1ba888c3ffdb27cf2203367ae4cf57703c532fe3dfe18924c3c9492" +} diff --git a/prover/crates/lib/prover_dal/src/fri_prover_dal.rs b/prover/crates/lib/prover_dal/src/fri_prover_dal.rs index a0420b056125..8efa8e2f6837 100644 --- a/prover/crates/lib/prover_dal/src/fri_prover_dal.rs +++ b/prover/crates/lib/prover_dal/src/fri_prover_dal.rs @@ -1,5 +1,10 @@ #![doc = include_str!("../doc/FriProverDal.md")] -use std::{collections::HashMap, convert::TryFrom, str::FromStr, time::Duration}; +use std::{ + collections::HashMap, + convert::TryFrom, + str::FromStr, + time::{Duration, Instant}, +}; use zksync_basic_types::{ basic_fri_types::{ @@ -60,8 +65,11 @@ impl FriProverDal<'_, '_> { /// - within the lowest batch, look at the lowest aggregation level (move up the proof tree) /// - pick the same type of circuit for as long as possible, this maximizes GPU cache reuse /// - /// NOTE: Most of this function is a duplicate of `get_next_job()`. Get next job will be deleted together with old prover. - pub async fn get_job( + /// Most of this function is similar to `get_light_job()`. + /// The 2 differ in the type of jobs they will load. Node jobs are heavy in resource utilization. + /// + /// NOTE: This function retrieves only node jobs. 
+ pub async fn get_heavy_job( &mut self, protocol_version: ProtocolSemanticVersion, picked_by: &str, @@ -85,6 +93,84 @@ impl FriProverDal<'_, '_> { status = 'queued' AND protocol_version = $1 AND protocol_version_patch = $2 + AND aggregation_round = $4 + ORDER BY + l1_batch_number ASC, + circuit_id ASC, + id ASC + LIMIT + 1 + FOR UPDATE + SKIP LOCKED + ) + RETURNING + prover_jobs_fri.id, + prover_jobs_fri.l1_batch_number, + prover_jobs_fri.circuit_id, + prover_jobs_fri.aggregation_round, + prover_jobs_fri.sequence_number, + prover_jobs_fri.depth, + prover_jobs_fri.is_node_final_proof + "#, + protocol_version.minor as i32, + protocol_version.patch.0 as i32, + picked_by, + AggregationRound::NodeAggregation as i64, + ) + .fetch_optional(self.storage.conn()) + .await + .expect("failed to get prover job") + .map(|row| FriProverJobMetadata { + id: row.id as u32, + block_number: L1BatchNumber(row.l1_batch_number as u32), + circuit_id: row.circuit_id as u8, + aggregation_round: AggregationRound::try_from(i32::from(row.aggregation_round)) + .unwrap(), + sequence_number: row.sequence_number as usize, + depth: row.depth as u16, + is_node_final_proof: row.is_node_final_proof, + pick_time: Instant::now(), + }) + } + + /// Retrieves the next prover job to be proven. Called by WVGs. + /// + /// Prover jobs must be thought of as ordered. + /// Prover must prioritize proving such jobs that will make the chain move forward the fastest. + /// Current ordering: + /// - pick the lowest batch + /// - within the lowest batch, look at the lowest aggregation level (move up the proof tree) + /// - pick the same type of circuit for as long as possible, this maximizes GPU cache reuse + /// + /// Most of this function is similar to `get_heavy_job()`. + /// The 2 differ in the type of jobs they will load. Node jobs are heavy in resource utilization. + /// + /// NOTE: This function retrieves all jobs but nodes. 
+ pub async fn get_light_job( + &mut self, + protocol_version: ProtocolSemanticVersion, + picked_by: &str, + ) -> Option { + sqlx::query!( + r#" + UPDATE prover_jobs_fri + SET + status = 'in_progress', + attempts = attempts + 1, + updated_at = NOW(), + processing_started_at = NOW(), + picked_by = $3 + WHERE + id = ( + SELECT + id + FROM + prover_jobs_fri + WHERE + status = 'queued' + AND protocol_version = $1 + AND protocol_version_patch = $2 + AND aggregation_round != $4 ORDER BY l1_batch_number ASC, aggregation_round ASC, @@ -107,6 +193,7 @@ impl FriProverDal<'_, '_> { protocol_version.minor as i32, protocol_version.patch.0 as i32, picked_by, + AggregationRound::NodeAggregation as i64 ) .fetch_optional(self.storage.conn()) .await @@ -120,6 +207,7 @@ impl FriProverDal<'_, '_> { sequence_number: row.sequence_number as usize, depth: row.depth as u16, is_node_final_proof: row.is_node_final_proof, + pick_time: Instant::now(), }) } @@ -181,9 +269,9 @@ impl FriProverDal<'_, '_> { sequence_number: row.sequence_number as usize, depth: row.depth as u16, is_node_final_proof: row.is_node_final_proof, + pick_time: Instant::now(), }) } - pub async fn get_next_job_for_circuit_id_round( &mut self, circuits_to_pick: &[CircuitIdRoundTuple], @@ -271,6 +359,7 @@ impl FriProverDal<'_, '_> { sequence_number: row.sequence_number as usize, depth: row.depth as u16, is_node_final_proof: row.is_node_final_proof, + pick_time: Instant::now(), }) } @@ -359,6 +448,7 @@ impl FriProverDal<'_, '_> { sequence_number: row.sequence_number as usize, depth: row.depth as u16, is_node_final_proof: row.is_node_final_proof, + pick_time: Instant::now(), }) .unwrap() } diff --git a/prover/crates/lib/prover_job_processor/Cargo.toml b/prover/crates/lib/prover_job_processor/Cargo.toml new file mode 100644 index 000000000000..5197b33b1f95 --- /dev/null +++ b/prover/crates/lib/prover_job_processor/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "zksync_prover_job_processor" +description = "ZKsync Prover Job Processor" +version.workspace = true +edition.workspace = true +authors.workspace = true +homepage.workspace = true +repository.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true + +[dependencies] +async-trait.workspace = true +anyhow.workspace = true +futures.workspace = true +tokio.workspace = true +tokio-stream.workspace = true +tokio-util.workspace = true +tracing.workspace = true +vise.workspace = true +strum.workspace = true diff --git a/prover/crates/lib/prover_job_processor/README.md b/prover/crates/lib/prover_job_processor/README.md new file mode 100644 index 000000000000..5eea5476d05d --- /dev/null +++ b/prover/crates/lib/prover_job_processor/README.md @@ -0,0 +1,152 @@ +# Prover Job Processor + +Prover Job Processor aims to be a small "framework" that allows building prover components at break-neck speeds. + +## Context + +Previously, prover components were hand tailored and had similar issues spread across the codebase. The "framework"'s +purpose is to standardize implementations and lift the undifferentiated work from prover component developers. + +## How it works + +The "framework" exports 4 main primitives: + +- executor +- job_picker +- job_saver +- job_runner + +### Executor + +This is the most important trait. It is meant to execute the crypto primitives (or any other payloads) and defines what +the inputs are, what is the metadata that has to travel with it and what the output will be. 
Executors will receive
+information from the Job Picker and will provide their output further to the Job Saver.
+
+For example, this could be the witness vector generator (synthesis of the witness vector) or the circuit prover (GPU
+circuit proving & verification). Each defines what it needs as input to operate and what it will output.
+
+### Job Picker
+
+The starting point of the process. This trait is tied to an Executor and picks the metadata & input that correspond to
+it. The Job Picker picks the information and provides it to the Executor.
+
+As examples: for the witness vector generator it would be a query to the database & a query to object storage; for the
+circuit prover, it would be waiting on the communication channel between witness vector generator and circuit prover.
+
+### Job Saver
+
+The final point of the process. This trait is tied to an Executor and receives the metadata & output that correspond to
+it. The Job Saver receives information from the Executor and saves it.
+
+Continuing with the same examples: the witness vector generator would send the information to the communication channel
+between witness vector generator & circuit prover, whilst the circuit prover would simply store the information to the
+database & object store.
+
+### Job Runner
+
+A wrapper over all 3 traits above, ensuring they communicate with each other as expected & are spawned as long-running
+tasks.
+
+## Diagram
+
+```mermaid
+sequenceDiagram
+    participant p as Job Picker
+    participant e as Executor
+    participant s as Job Saver
+
+    p-->>p: Get metadata & input
+    p-->>e: Provide metadata & input
+    e-->>e: Execute
+    e-->>s: Provide metadata & output
+    s-->>s: Save output
+```
+
+## How to use it
+
+If you want to create a new prover component, you first need to define the communication boundaries:
+
+- metadata
+- input
+- output
+
+With these out of the way, you can specify the Executor and even integrate the crypto primitive. At this point you can
+fully cover it with unit tests to make sure the functionality works as intended.
+
+Moving forward, you'll need to understand where you get this information from and where you store it. These are your
+Job Picker & Job Saver. NOTE: just like for the executor, you only need to implement the logic of picking/executing/saving
+a single job; the "framework" takes care of looping it over and transmitting the details from one end to the other.
+
+Once done, provide them as arguments to JobRunner, call `your_job_runner.run()` and you're good to go.
+
+TODO: Add a full example once testing is in place (a rough sketch is included at the end of this README).
+
+## More (internal) details
+
+There are a few things that we've glossed over; let's get into the details:
+
+### Back-offs & cancelling
+
+As you might've guessed, from a production point of view you need to make sure that the process can die gracefully
+(k8s SIGTERM) without being a nuisance to your dependencies (think DB or object store). As such, the job picker can
+have an optional component responsible for back-off & cancelling.
+
+### How do components communicate
+
+Internally, `JobRunner` wraps all 3 primitives into tasks that loop over `while channel.recv() {}`. Each task is
+slightly special, but the logic is far from complex.
+
+### Limitations
+
+Back-off & cancelling are implemented only for the job picker. Whilst this might sound inconvenient, in practice it
+works great.
+When a cancel is received, the job picker stops picking jobs, the executor keeps executing until there are no more
+jobs in the receiver, and the saver saves jobs until there are no more jobs received from the executor.
+
+Back-off is currently hardcoded, but it is trivial to make it configurable.
+
+Whilst not a limitation, the first version is applied only to `circuit_provers`. It's very likely that more
+enhancements will be needed to accommodate the rest of the codebase. Treat this as work in progress.
+
+## Objectives
+
+The "framework" aims to achieve the following:
+
+1. Reduce code complexity & technical debt (modularize the codebase)
+2. Empower testability of the prover codebase
+3. Optimize prover components for speed and multi-datacenter/multi-cloud setups
+4. Increase speed of delivery of prover components
+5. Enable external shops to implement their own flavors of prover components
+
+### 1. Reduce code complexity & technical debt (modularize the codebase)
+
+Previously, most prover components were custom written. This meant that the same logic was reimplemented across
+multiple components. Whilst the "framework" doesn't fully solve the problem, it drastically reduces the amount of code
+needed to start a new component.
+
+The rest of the code duplication can be tackled in the future as part of the node framework.
+
+### 2. Empower testability of the prover codebase
+
+Due to the entangled nature of the code, the prover codebase was difficult to test. The current modular setup enables
+testing each component in isolation. (This is not entirely true yet, given that the cryptography dependencies are too
+heavy, but it will be true in the new prover implementation.)
+
+### 3. Optimize prover components for speed and multi-datacenter/multi-cloud setups
+
+Previously, provers ran "sync": load a job; once loaded, execute it; once executed, save its result. Whilst this is
+fine, all of these steps can be done in parallel. This becomes super important when the database and the running
+machine are far apart and the round trip to the database can account for up to 50% of the entire runtime. In a
+(future) multi-cloud setup, this becomes even more painful. As a bonus, we remove the current database bottleneck
+(previously caused by the number of connections).
+
+### 4. Increase speed of delivery of prover components
+
+The Boojum release was rather slow, and even releasing the current `circuit_prover` took longer than anticipated.
+Given the upcoming prover updates, this release sets us up for success going forward. Furthermore, experimenting with
+different setups becomes a matter of days rather than months.
+
+### 5. Enable external shops to implement their own flavors of prover components
+
+Most external folks have to fork zksync-era and keep an up-to-date fork if anything needs to be modified. The
+framework allows using the executors whilst defining custom pickers/savers. This will be a massive time-saver for any
+external shop that wants to innovate on top of zksync-era's provers.
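Since the README's worked example is still a TODO, here is a minimal end-to-end sketch of the intended usage. All of the concrete types below (`SquareExecutor`, `VecJobPicker`, `PrintSaver`) are invented for illustration and are not part of the crate; the sketch only assumes the `Executor`/`JobPicker`/`JobSaver`/`JobRunner` APIs introduced in this PR, plus `tokio`, `anyhow`, `async-trait`, and `tokio-util` as dependencies.

```rust
use std::time::Duration;

use async_trait::async_trait;
use tokio_util::sync::CancellationToken;
use zksync_prover_job_processor::{
    Backoff, BackoffAndCancellable, Executor, JobPicker, JobRunner, JobSaver,
};

/// Hypothetical executor: "proves" a job by squaring a number.
struct SquareExecutor;

impl Executor for SquareExecutor {
    type Input = u64;
    type Output = u64;
    type Metadata = String; // e.g. a job id that travels alongside the payload

    fn execute(&self, input: u64, _metadata: String) -> anyhow::Result<u64> {
        Ok(input * input)
    }
}

/// Hypothetical picker: drains an in-memory queue instead of querying a database.
struct VecJobPicker {
    queue: Vec<(u64, String)>,
}

#[async_trait]
impl JobPicker for VecJobPicker {
    type ExecutorType = SquareExecutor;

    async fn pick_job(&mut self) -> anyhow::Result<Option<(u64, String)>> {
        Ok(self.queue.pop())
    }
}

/// Hypothetical saver: prints results instead of writing to a database/object store.
struct PrintSaver;

#[async_trait]
impl JobSaver for PrintSaver {
    type ExecutorType = SquareExecutor;

    async fn save_job_result(&self, data: (anyhow::Result<u64>, String)) -> anyhow::Result<()> {
        let (result, job_id) = data;
        println!("job {job_id}: {result:?}");
        Ok(())
    }
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let picker = VecJobPicker {
        queue: vec![(2, "a".to_string()), (3, "b".to_string())],
    };
    let cancellation_token = CancellationToken::new();
    let backoff_and_cancellable = Some(BackoffAndCancellable::new(
        Backoff::default(),
        cancellation_token.clone(),
    ));

    // 4 concurrent workers; the picker backs off while the queue is empty.
    let handles =
        JobRunner::new(SquareExecutor, picker, PrintSaver, 4, backoff_and_cancellable).run();

    // Give the pipeline a moment to drain the queue, then signal shutdown.
    tokio::time::sleep(Duration::from_millis(100)).await;
    cancellation_token.cancel();
    for handle in handles {
        handle.await??;
    }
    Ok(())
}
```

The only custom logic above is picking, executing, and saving a single job; the three long-running tasks, channel wiring, and graceful shutdown all come from the "framework".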
diff --git a/prover/crates/bin/circuit_prover/src/backoff.rs b/prover/crates/lib/prover_job_processor/src/backoff_and_cancellable.rs
similarity index 60%
rename from prover/crates/bin/circuit_prover/src/backoff.rs
rename to prover/crates/lib/prover_job_processor/src/backoff_and_cancellable.rs
index 6ddb3d94be35..15d80404dc71 100644
--- a/prover/crates/bin/circuit_prover/src/backoff.rs
+++ b/prover/crates/lib/prover_job_processor/src/backoff_and_cancellable.rs
@@ -1,5 +1,24 @@
 use std::{ops::Mul, time::Duration};
 
+use tokio_util::sync::CancellationToken;
+
+/// Utility struct that provides cancellation awareness & backoff capabilities.
+/// They usually go hand in hand; having a wrapper over both simplifies implementation.
+#[derive(Debug, Clone)]
+pub struct BackoffAndCancellable {
+    pub(crate) backoff: Backoff,
+    pub(crate) cancellation_token: CancellationToken,
+}
+
+impl BackoffAndCancellable {
+    pub fn new(backoff: Backoff, cancellation_token: CancellationToken) -> Self {
+        Self {
+            backoff,
+            cancellation_token,
+        }
+    }
+}
+
 /// Backoff - convenience structure that takes care of backoff timings.
 #[derive(Debug, Clone)]
 pub struct Backoff {
@@ -7,12 +26,10 @@ pub struct Backoff {
     current_delay: Duration,
     max_delay: Duration,
 }
-
 impl Backoff {
     /// The delay multiplication coefficient.
     // Currently it's hardcoded, but could be provided in the constructor.
    const DELAY_MULTIPLIER: u32 = 2;
-
     /// Create a backoff with base_delay (first delay) and max_delay (maximum delay possible).
     pub fn new(base_delay: Duration, max_delay: Duration) -> Self {
         Backoff {
@@ -37,3 +54,10 @@ impl Backoff {
         self.current_delay = self.base_delay;
     }
 }
+
+impl Default for Backoff {
+    /// Sensible database-specific delays.
+    fn default() -> Self {
+        Self::new(Duration::from_secs(1), Duration::from_secs(5))
+    }
+}
diff --git a/prover/crates/lib/prover_job_processor/src/executor.rs b/prover/crates/lib/prover_job_processor/src/executor.rs
new file mode 100644
index 000000000000..80b019960e3e
--- /dev/null
+++ b/prover/crates/lib/prover_job_processor/src/executor.rs
@@ -0,0 +1,11 @@
+/// Executor trait, responsible for defining what a job's execution will look like.
+/// The trait covers what it expects as input, what it'll offer as output and what metadata needs to travel together with the input.
+/// This is the backbone of the `prover_job_processor` from a user's point of view.
+pub trait Executor: Send + Sync + 'static {
+    type Input: Send;
+    type Output: Send;
+    type Metadata: Send + Clone;
+
+    fn execute(&self, input: Self::Input, metadata: Self::Metadata)
+        -> anyhow::Result<Self::Output>;
+}
diff --git a/prover/crates/lib/prover_job_processor/src/job_picker.rs b/prover/crates/lib/prover_job_processor/src/job_picker.rs
new file mode 100644
index 000000000000..74ecbcde5d74
--- /dev/null
+++ b/prover/crates/lib/prover_job_processor/src/job_picker.rs
@@ -0,0 +1,18 @@
+use async_trait::async_trait;
+
+use crate::Executor;
+
+/// Job Picker trait, in charge of getting a new job for the executor.
+/// NOTE: Job Pickers are tied to an executor, which ensures input/output/metadata types match.
+#[async_trait]
+pub trait JobPicker: Send + Sync + 'static {
+    type ExecutorType: Executor;
+    async fn pick_job(
+        &mut self,
+    ) -> anyhow::Result<
+        Option<(
+            <Self::ExecutorType as Executor>::Input,
+            <Self::ExecutorType as Executor>::Metadata,
+        )>,
+    >;
+}
diff --git a/prover/crates/lib/prover_job_processor/src/job_runner.rs b/prover/crates/lib/prover_job_processor/src/job_runner.rs
new file mode 100644
index 000000000000..2a2d803e206d
--- /dev/null
+++ b/prover/crates/lib/prover_job_processor/src/job_runner.rs
@@ -0,0 +1,69 @@
+use tokio::task::JoinHandle;
+
+use crate::{
+    task_wiring::{JobPickerTask, JobSaverTask, Task, WorkerPool},
+    BackoffAndCancellable, Executor, JobPicker, JobSaver,
+};
+
+/// It's preferred to have a minimal amount of jobs in flight at any given time.
+/// This ensures that memory usage is minimized, in case of failures, a small amount of jobs is lost and
+/// components can apply back pressure to each other in case of misconfiguration.
+const CHANNEL_SIZE: usize = 1;
+
+/// The "framework" wrapper that runs the entire machinery.
+/// Job Runner is responsible for tying together tasks (picker, executor, saver) and starting them.
+#[derive(Debug)]
+pub struct JobRunner<E, P, S>
+where
+    E: Executor,
+    P: JobPicker<ExecutorType = E>,
+    S: JobSaver<ExecutorType = E>,
+{
+    executor: E,
+    picker: P,
+    saver: S,
+    num_workers: usize,
+    picker_backoff_and_cancellable: Option<BackoffAndCancellable>,
+}
+
+impl<E, P, S> JobRunner<E, P, S>
+where
+    E: Executor,
+    P: JobPicker<ExecutorType = E>,
+    S: JobSaver<ExecutorType = E>,
+{
+    pub fn new(
+        executor: E,
+        picker: P,
+        saver: S,
+        num_workers: usize,
+        picker_backoff_and_cancellable: Option<BackoffAndCancellable>,
+    ) -> Self {
+        Self {
+            executor,
+            picker,
+            saver,
+            num_workers,
+            picker_backoff_and_cancellable,
+        }
+    }
+
+    /// Runs job runner tasks.
+    pub fn run(self) -> Vec<JoinHandle<anyhow::Result<()>>> {
+        let (input_tx, input_rx) =
+            tokio::sync::mpsc::channel::<(E::Input, E::Metadata)>(CHANNEL_SIZE);
+        let (result_tx, result_rx) =
+            tokio::sync::mpsc::channel::<(anyhow::Result<E::Output>, E::Metadata)>(CHANNEL_SIZE);
+
+        let picker_task =
+            JobPickerTask::new(self.picker, input_tx, self.picker_backoff_and_cancellable);
+        let worker_pool = WorkerPool::new(self.executor, self.num_workers, input_rx, result_tx);
+        let saver_task = JobSaverTask::new(self.saver, result_rx);
+
+        vec![
+            tokio::spawn(picker_task.run()),
+            tokio::spawn(worker_pool.run()),
+            tokio::spawn(saver_task.run()),
+        ]
+    }
+}
diff --git a/prover/crates/lib/prover_job_processor/src/job_saver.rs b/prover/crates/lib/prover_job_processor/src/job_saver.rs
new file mode 100644
index 000000000000..4c0833dd77a4
--- /dev/null
+++ b/prover/crates/lib/prover_job_processor/src/job_saver.rs
@@ -0,0 +1,19 @@
+use async_trait::async_trait;
+
+use crate::Executor;
+
+/// Job Saver trait, in charge of getting the result from the executor and dispatching it.
+/// Dispatch could be storing it, or sending it to a separate component.
+/// NOTE: Job Savers are tied to an executor, which ensures input/output/metadata types match.
+#[async_trait]
+pub trait JobSaver: Send + Sync + 'static {
+    type ExecutorType: Executor;
+
+    async fn save_job_result(
+        &self,
+        data: (
+            anyhow::Result<<Self::ExecutorType as Executor>::Output>,
+            <Self::ExecutorType as Executor>::Metadata,
+        ),
+    ) -> anyhow::Result<()>;
+}
diff --git a/prover/crates/lib/prover_job_processor/src/lib.rs b/prover/crates/lib/prover_job_processor/src/lib.rs
new file mode 100644
index 000000000000..02847be533ff
--- /dev/null
+++ b/prover/crates/lib/prover_job_processor/src/lib.rs
@@ -0,0 +1,19 @@
+pub use backoff_and_cancellable::{Backoff, BackoffAndCancellable};
+pub use executor::Executor;
+pub use job_picker::JobPicker;
+pub use job_runner::JobRunner;
+pub use job_saver::JobSaver;
+
+mod backoff_and_cancellable;
+mod executor;
+mod job_picker;
+mod job_runner;
+mod job_saver;
+mod task_wiring;
+
+// convenience aliases to simplify declarations
+type Input<P> = <<P as JobPicker>::ExecutorType as Executor>::Input;
+type PickerMetadata<P> = <<P as JobPicker>::ExecutorType as Executor>::Metadata;
+
+type Output<S> = <<S as JobSaver>::ExecutorType as Executor>::Output;
+type SaverMetadata<S> = <<S as JobSaver>::ExecutorType as Executor>::Metadata;
diff --git a/prover/crates/lib/prover_job_processor/src/task_wiring/job_picker_task.rs b/prover/crates/lib/prover_job_processor/src/task_wiring/job_picker_task.rs
new file mode 100644
index 000000000000..f3e5e3ea4686
--- /dev/null
+++ b/prover/crates/lib/prover_job_processor/src/task_wiring/job_picker_task.rs
@@ -0,0 +1,77 @@
+use anyhow::Context;
+use async_trait::async_trait;
+
+use crate::{task_wiring::task::Task, BackoffAndCancellable, Input, JobPicker, PickerMetadata};
+
+/// Wrapper over JobPicker. Makes it a continuous task, picking jobs until cancelled.
+#[derive(Debug)]
+pub struct JobPickerTask<P: JobPicker> {
+    picker: P,
+    input_tx: tokio::sync::mpsc::Sender<(Input<P>, PickerMetadata<P>)>,
+    backoff_and_cancellable: Option<BackoffAndCancellable>,
+}
+
+impl<P: JobPicker> JobPickerTask<P> {
+    pub fn new(
+        picker: P,
+        input_tx: tokio::sync::mpsc::Sender<(Input<P>, PickerMetadata<P>)>,
+        backoff_and_cancellable: Option<BackoffAndCancellable>,
+    ) -> Self {
+        Self {
+            picker,
+            input_tx,
+            backoff_and_cancellable,
+        }
+    }
+
+    /// Backs off for the specified amount of time or until cancel is received, if available.
+    async fn backoff(&mut self) {
+        if let Some(backoff_and_cancellable) = &mut self.backoff_and_cancellable {
+            let backoff_duration = backoff_and_cancellable.backoff.delay();
+            tracing::info!("Backing off for {:?}...", backoff_duration);
+            // Error here corresponds to a timeout w/o receiving a cancel; we're OK with this.
+            tokio::time::timeout(
+                backoff_duration,
+                backoff_and_cancellable.cancellation_token.cancelled(),
+            )
+            .await
+            .ok();
+        }
+    }
+
+    /// Resets backoff to initial state, if available.
+    fn reset_backoff(&mut self) {
+        if let Some(backoff_and_cancellable) = &mut self.backoff_and_cancellable {
+            backoff_and_cancellable.backoff.reset();
+        }
+    }
+
+    /// Checks if the task is cancelled, if available.
+    fn is_cancelled(&self) -> bool {
+        if let Some(backoff_and_cancellable) = &self.backoff_and_cancellable {
+            return backoff_and_cancellable.cancellation_token.is_cancelled();
+        }
+        false
+    }
+}
+
+#[async_trait]
+impl<P: JobPicker> Task for JobPickerTask<P> {
+    async fn run(mut self) -> anyhow::Result<()> {
+        while !self.is_cancelled() {
+            match self.picker.pick_job().await.context("failed to pick job")? {
+                Some((input, metadata)) => {
+                    self.input_tx.send((input, metadata)).await.map_err(|err| {
+                        anyhow::anyhow!("job picker failed to pass job to executor: {}", err)
+                    })?;
+                    self.reset_backoff();
+                }
+                None => {
+                    self.backoff().await;
+                }
+            }
+        }
+        tracing::info!("Stop signal received, shutting down JobPickerTask...");
+        Ok(())
+    }
+}
diff --git a/prover/crates/lib/prover_job_processor/src/task_wiring/job_saver_task.rs b/prover/crates/lib/prover_job_processor/src/task_wiring/job_saver_task.rs
new file mode 100644
index 000000000000..8573821bc902
--- /dev/null
+++ b/prover/crates/lib/prover_job_processor/src/task_wiring/job_saver_task.rs
@@ -0,0 +1,33 @@
+use anyhow::Context;
+use async_trait::async_trait;
+
+use crate::{task_wiring::task::Task, JobSaver, Output, SaverMetadata};
+
+/// Wrapper over JobSaver. Makes it a continuous task, saving results until the execution channel is closed.
+#[derive(Debug)]
+pub struct JobSaverTask<S: JobSaver> {
+    saver: S,
+    result_rx: tokio::sync::mpsc::Receiver<(anyhow::Result<Output<S>>, SaverMetadata<S>)>,
+}
+
+impl<S: JobSaver> JobSaverTask<S> {
+    pub fn new(
+        saver: S,
+        result_rx: tokio::sync::mpsc::Receiver<(anyhow::Result<Output<S>>, SaverMetadata<S>)>,
+    ) -> Self {
+        Self { saver, result_rx }
+    }
+}
+
+#[async_trait]
+impl<S: JobSaver> Task for JobSaverTask<S> {
+    async fn run(mut self) -> anyhow::Result<()> {
+        while let Some(data) = self.result_rx.recv().await {
+            self.saver
+                .save_job_result(data)
+                .await
+                .context("failed to save result")?;
+        }
+        Ok(())
+    }
+}
diff --git a/prover/crates/lib/prover_job_processor/src/task_wiring/mod.rs b/prover/crates/lib/prover_job_processor/src/task_wiring/mod.rs
new file mode 100644
index 000000000000..4b1ded605f50
--- /dev/null
+++ b/prover/crates/lib/prover_job_processor/src/task_wiring/mod.rs
@@ -0,0 +1,9 @@
+pub use job_picker_task::JobPickerTask;
+pub use job_saver_task::JobSaverTask;
+pub use task::Task;
+pub use worker_pool::WorkerPool;
+
+mod job_picker_task;
+mod job_saver_task;
+mod task;
+mod worker_pool;
diff --git a/prover/crates/lib/prover_job_processor/src/task_wiring/task.rs b/prover/crates/lib/prover_job_processor/src/task_wiring/task.rs
new file mode 100644
index 000000000000..68f8156b67c1
--- /dev/null
+++ b/prover/crates/lib/prover_job_processor/src/task_wiring/task.rs
@@ -0,0 +1,7 @@
+use async_trait::async_trait;
+
+/// Convenience trait to tie together all task wrappers.
+#[async_trait]
+pub trait Task {
+    async fn run(mut self) -> anyhow::Result<()>;
+}
diff --git a/prover/crates/lib/prover_job_processor/src/task_wiring/worker_pool.rs b/prover/crates/lib/prover_job_processor/src/task_wiring/worker_pool.rs
new file mode 100644
index 000000000000..2f788ae99746
--- /dev/null
+++ b/prover/crates/lib/prover_job_processor/src/task_wiring/worker_pool.rs
@@ -0,0 +1,64 @@
+use std::sync::Arc;
+
+use async_trait::async_trait;
+use futures::stream::StreamExt;
+use tokio_stream::wrappers::ReceiverStream;
+
+use crate::{executor::Executor, task_wiring::Task};
+
+/// Wrapper over Executor. Makes it a continuous task, executing jobs until the picker channel is closed.
+/// It can run multiple executions concurrently, up to a specified limit.
+#[derive(Debug)]
+pub struct WorkerPool<E>
+where
+    E: Executor,
+{
+    executor: E,
+    num_workers: usize,
+    input_rx: tokio::sync::mpsc::Receiver<(E::Input, E::Metadata)>,
+    result_tx: tokio::sync::mpsc::Sender<(anyhow::Result<E::Output>, E::Metadata)>,
+}
+
+impl<E: Executor> WorkerPool<E> {
+    pub fn new(
+        executor: E,
+        num_workers: usize,
+        input_rx: tokio::sync::mpsc::Receiver<(E::Input, E::Metadata)>,
+        result_tx: tokio::sync::mpsc::Sender<(anyhow::Result<E::Output>, E::Metadata)>,
+    ) -> Self {
+        Self {
+            executor,
+            num_workers,
+            input_rx,
+            result_tx,
+        }
+    }
+}
+
+#[async_trait]
+impl<E: Executor> Task for WorkerPool<E> {
+    async fn run(mut self) -> anyhow::Result<()> {
+        let executor = Arc::new(self.executor);
+        let num_workers = self.num_workers;
+        let stream = ReceiverStream::new(self.input_rx);
+
+        stream
+            .for_each_concurrent(num_workers, move |(input, metadata)| {
+                let executor = executor.clone();
+                let result_tx = self.result_tx.clone();
+                let exec_metadata = metadata.clone();
+                async move {
+                    let payload =
+                        tokio::task::spawn_blocking(move || executor.execute(input, exec_metadata))
+                            .await
+                            .expect("failed executing");
+                    result_tx
+                        .send((payload, metadata))
+                        .await
+                        .expect("job saver channel has been closed unexpectedly");
+                }
+            })
+            .await;
+        Ok(())
+    }
+}
diff --git a/prover/docs/.gitignore b/prover/docs/.gitignore
new file mode 100644
index 000000000000..7585238efedf
--- /dev/null
+++ b/prover/docs/.gitignore
@@ -0,0 +1 @@
+book
diff --git a/prover/docs/99_further_reading.md b/prover/docs/99_further_reading.md
deleted file mode 100644
index 64487a715d57..000000000000
--- a/prover/docs/99_further_reading.md
+++ /dev/null
@@ -1,13 +0,0 @@
-# Further reading
-
-The documentation in this section aimed to provide a practical overview of the prover workspace, e.g. help people to
-understand how to run provers and what they do.
-
-However, we have some documentation that is more focused on theory of proving in the [core workspace docs](../../docs/).
-
-You may find the following articles helpful for general understanding of ZK proofs:
-
-- [ZK intuition](../../docs/guides/advanced/13_zk_intuition.md).
-- [ZK deeper overview](../../docs/guides/advanced/14_zk_deeper_overview.md).
-- [Prover keys](../../docs/guides/advanced/15_prover_keys.md).
-- [Overview of our ZK proving system implementation](../../docs/specs/prover/).
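The `WorkerPool` above is the piece that actually buys parallelism: `for_each_concurrent` bounds the number of in-flight jobs, while `spawn_blocking` keeps the heavy, synchronous `execute` call off the async runtime's worker threads. Here is a self-contained sketch of that same pattern, independent of the crate; all names are invented for illustration.

```rust
use futures::stream::StreamExt;
use tokio_stream::wrappers::ReceiverStream;

#[tokio::main]
async fn main() {
    let (tx, rx) = tokio::sync::mpsc::channel::<u64>(1);

    // Producer: send a few "jobs", then drop the sender so the stream below terminates.
    tokio::spawn(async move {
        for job in 0..8u64 {
            tx.send(job).await.expect("receiver dropped");
        }
    });

    // At most 4 blocking computations are in flight at any time; the loop ends
    // once the channel is closed and drained.
    ReceiverStream::new(rx)
        .for_each_concurrent(4, |job| async move {
            let result = tokio::task::spawn_blocking(move || job * job)
                .await
                .expect("blocking task panicked");
            println!("job {job} -> {result}");
        })
        .await;
}
```

The channel capacity of 1 mirrors the crate's `CHANNEL_SIZE`: the producer is back-pressured as soon as the workers fall behind, which keeps memory bounded and failure blast radius small.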
diff --git a/prover/docs/book.toml b/prover/docs/book.toml new file mode 100644 index 000000000000..8e0a72942acd --- /dev/null +++ b/prover/docs/book.toml @@ -0,0 +1,32 @@ +[book] +authors = ["ZKsync team"] +language = "en" +multilingual = false +src = "src" +title = "ZKsync Prover Documentation" + +[output.html] +smart-punctuation = true +mathjax-support = true +git-repository-url = "https://github.com/matter-labs/zksync-era/tree/main/prover/docs" +edit-url-template = "https://github.com/matter-labs/zksync-era/tree/main/prover/docs/{path}" +additional-js = ["js/version-box.js", "js/mermaid-init.js"] +additional-css = ["css/version-box.css"] + +[output.html.playground] +editable = true +line-numbers = true + +[output.html.search] +limit-results = 20 +use-boolean-and = true +boost-title = 2 +boost-hierarchy = 2 +boost-paragraph = 1 +expand = true +heading-split-level = 2 + +[preprocessor] + +[preprocessor.mermaid] +command = "mdbook-mermaid" diff --git a/prover/docs/css/version-box.css b/prover/docs/css/version-box.css new file mode 100644 index 000000000000..4006ac7804b3 --- /dev/null +++ b/prover/docs/css/version-box.css @@ -0,0 +1,46 @@ +#version-box { + display: flex; + align-items: center; + margin-right: 15px; /* Space from the right side */ + background-color: transparent; /* Make the box background transparent */ +} + +/* Base styles for the version selector */ +#version-selector { + background-color: transparent; /* Remove background color */ + border: 1px solid #4a5568; /* Subtle border */ + border-radius: 4px; /* Rounded edges */ + padding: 5px 10px; /* Padding inside dropdown */ + font-size: 0.9em; + font-weight: normal; + outline: none; /* Removes default focus outline */ + cursor: pointer; +} + +/* Text color for dark themes */ +.theme-navy #version-selector, +.theme-coal #version-selector { + color: #f7fafc; /* Light text color for dark backgrounds */ +} + +/* Text color for light theme */ +.theme-light #version-selector { + color: #333333; /* Dark text color for light background */ +} + +/* Hover effect for better user feedback */ +#version-selector:hover { + background-color: rgba(255, 255, 255, 0.1); /* Light hover effect */ +} + +/* Optional: Style for when the selector is focused */ +#version-selector:focus { + border-color: #63b3ed; /* Accent color for focused state */ +} + +.right-buttons { + display: flex; + flex-direction: row; /* Aligns items in a row, left to right */ + align-items: center; /* Centers items vertically */ + gap: 10px; /* Adds space between items */ +} diff --git a/prover/docs/js/mermaid-init.js b/prover/docs/js/mermaid-init.js new file mode 100644 index 000000000000..15a7f4e57c60 --- /dev/null +++ b/prover/docs/js/mermaid-init.js @@ -0,0 +1,35 @@ +(() => { + const darkThemes = ['ayu', 'navy', 'coal']; + const lightThemes = ['light', 'rust']; + + const classList = document.getElementsByTagName('html')[0].classList; + + let lastThemeWasLight = true; + for (const cssClass of classList) { + if (darkThemes.includes(cssClass)) { + lastThemeWasLight = false; + break; + } + } + + const theme = lastThemeWasLight ? 
'default' : 'dark'; + mermaid.initialize({ startOnLoad: true, theme }); + + // Simplest way to make mermaid re-render the diagrams in the new theme is via refreshing the page + + for (const darkTheme of darkThemes) { + document.getElementById(darkTheme).addEventListener('click', () => { + if (lastThemeWasLight) { + window.location.reload(); + } + }); + } + + for (const lightTheme of lightThemes) { + document.getElementById(lightTheme).addEventListener('click', () => { + if (!lastThemeWasLight) { + window.location.reload(); + } + }); + } +})(); diff --git a/prover/docs/js/version-box.js b/prover/docs/js/version-box.js new file mode 100644 index 000000000000..a7d053e01b47 --- /dev/null +++ b/prover/docs/js/version-box.js @@ -0,0 +1,61 @@ +document.addEventListener('DOMContentLoaded', function () { + // Get the base URL from the mdBook configuration + const baseUrl = document.location.origin + '/zksync-era/prover'; + + // Function to create version selector + function createVersionSelector(versions) { + const versionSelector = document.createElement('select'); + versionSelector.id = 'version-selector'; + + // Get the current path + const currentPath = window.location.pathname; + + // Iterate over the versions object + for (const [versionName, versionUrl] of Object.entries(versions)) { + const option = document.createElement('option'); + option.value = versionUrl + '/'; + option.textContent = versionName; + + // Check if the current URL matches this option's value + if (currentPath.includes(option.value)) { + option.selected = true; // Set this option as selected + } + + versionSelector.appendChild(option); + } + + // Event listener to handle version change + versionSelector.addEventListener('change', function () { + const selectedVersion = versionSelector.value; + // Redirect to the selected version URL + window.location.href = '/zksync-era/prover' + selectedVersion; + }); + + return versionSelector; + } + + // Fetch versions from JSON file + fetch(baseUrl + '/versions.json') + .then((response) => { + if (!response.ok) { + throw new Error('Network response was not ok ' + response.statusText); + } + return response.json(); + }) + .then((data) => { + const versionSelector = createVersionSelector(data); + const nav = document.querySelector('.right-buttons'); + + if (nav) { + const versionBox = document.createElement('div'); + versionBox.id = 'version-box'; + versionBox.appendChild(versionSelector); + nav.appendChild(versionBox); // Append to the .right-buttons container + } else { + console.error('.right-buttons element not found.'); + } + }) + .catch((error) => { + console.error('There has been a problem with your fetch operation:', error); + }); +}); diff --git a/prover/docs/00_intro.md b/prover/docs/src/00_intro.md similarity index 73% rename from prover/docs/00_intro.md rename to prover/docs/src/00_intro.md index fb79cf5bed0e..cae59ae1a28f 100644 --- a/prover/docs/00_intro.md +++ b/prover/docs/src/00_intro.md @@ -30,19 +30,19 @@ prover subsystem as well. We'll cover how the components work further in documentation. 
-[pg]: ../crates/bin/prover_fri_gateway/ -[wg]: ../crates/bin/witness_generator/ -[wvg]: ../crates/bin/witness_vector_generator/ -[p]: ../crates/bin/prover_fri/ -[pc]: ../crates/bin/proof_fri_compressor/ -[pdh]: ../../core/node/proof_data_handler/ -[hk]: ../../core/node/house_keeper/ -[vkg]: ../crates/bin/prover_cli/ -[pcli]: ../crates/bin/vk_setup_data_generator_server_fri/ -[mc]: ../../core/node/metadata_calculator/ -[cg]: ../../core/node/commitment_generator/ -[bwip]: ../../core/node/vm_runner/src/impls/bwip.rs -[prw]: ../../core/node/vm_runner/src/impls/protective_reads.rs +[pg]: https://github.com/matter-labs/zksync-era/tree/main/prover/crates/bin/prover_fri_gateway +[wg]: https://github.com/matter-labs/zksync-era/tree/main/prover/crates/bin/witness_generator +[wvg]: https://github.com/matter-labs/zksync-era/tree/main/prover/crates/bin/witness_vector_generator +[p]: https://github.com/matter-labs/zksync-era/tree/main/prover/crates/bin/prover_fri +[pc]: https://github.com/matter-labs/zksync-era/tree/main/prover/crates/bin/proof_fri_compressor +[pdh]: https://github.com/matter-labs/zksync-era/tree/main/core/node/proof_data_handler +[hk]: https://github.com/matter-labs/zksync-era/tree/main/core/node/house_keeper +[vkg]: https://github.com/matter-labs/zksync-era/tree/main/prover/crates/bin/vk_setup_data_generator_server_fri +[pcli]: https://github.com/matter-labs/zksync-era/tree/main/prover/crates/bin/prover_cli +[mc]: https://github.com/matter-labs/zksync-era/tree/main/core/node/metadata_calculator +[cg]: https://github.com/matter-labs/zksync-era/tree/main/core/node/commitment_generator +[bwip]: https://github.com/matter-labs/zksync-era/blob/main/core/node/vm_runner/src/impls/bwip.rs +[prw]: https://github.com/matter-labs/zksync-era/blob/main/core/node/vm_runner/src/impls/protective_reads.rs ## How it runs diff --git a/prover/docs/01_gcp_vm.md b/prover/docs/src/01_gcp_vm.md similarity index 100% rename from prover/docs/01_gcp_vm.md rename to prover/docs/src/01_gcp_vm.md diff --git a/prover/docs/02_setup.md b/prover/docs/src/02_setup.md similarity index 94% rename from prover/docs/02_setup.md rename to prover/docs/src/02_setup.md index 67c2b0b945ff..615a71be291c 100644 --- a/prover/docs/02_setup.md +++ b/prover/docs/src/02_setup.md @@ -6,7 +6,7 @@ machine in place, e.g. a compatible local machine or a prepared GCP VM. ## ZKsync repo setup If you haven't already, you need to initialize the ZKsync repository first. Follow -[this guide](../../docs/guides/setup-dev.md) for that. +[this guide](https://matter-labs.github.io/zksync-era/core/latest/guides/setup-dev.html) for that. Before proceeding, make sure that you can run the server and integration tests pass. diff --git a/prover/docs/03_launch.md b/prover/docs/src/03_launch.md similarity index 60% rename from prover/docs/03_launch.md rename to prover/docs/src/03_launch.md index 0465d888f612..fcddf93174b9 100644 --- a/prover/docs/03_launch.md +++ b/prover/docs/src/03_launch.md @@ -2,37 +2,25 @@ ## Preparing -First, run the following command: +First, create a new chain with prover mode `GPU`: -``` -zk env prover-local +```bash +zkstack chain create --prover-mode gpu ``` -It will create a config similar to `dev`, but with: +It will create a config similar to `era`, but with: - Proof sending mode set to `OnlyRealProofs` - Prover mode set to `Local` instead of `GCS`. -You can always switch back to dev config via `zk env dev`. - -**Important:** If you change environments, you have to do `zk init` again. 
- -## Enter the prover workspace - -All the commands for binaries in the prover workspace must be done from the prover folder: - -``` -cd $ZKSYNC_HOME/prover -``` - ## Key generation This operation should only be done once; if you already generated keys, you can skip it. The following command will generate the required keys: -``` -zk f cargo run --features gpu --release --bin key_generator -- generate-sk-gpu all --recompute-if-missing +```bash +zkstack prover setup-keys ``` With that, you should be ready to run the prover. @@ -40,20 +28,20 @@ With that, you should be ready to run the prover. ## Running Important! Generating a proof takes a lot of time, so if you just want to see whether you can generate a proof, do it -against clean sequencer state (e.g. right after `zk init`). +against clean sequencer state (e.g. right after `zkstack chain init`). We will be running a bunch of binaries, it's recommended to run each in a separate terminal. ### Server -``` -zk server --components=api,tree,eth,state_keeper,housekeeper,commitment_generator,da_dispatcher,proof_data_handler,vm_runner_protective_reads,vm_runner_bwip +```bash +zkstack server --components=api,tree,eth,state_keeper,housekeeper,commitment_generator,da_dispatcher,proof_data_handler,vm_runner_protective_reads,vm_runner_bwip ``` -### Proof data handler +### Prover gateway -``` -zk f cargo run --release --bin zksync_prover_fri_gateway +```bash +zkstack prover run --component=gateway ``` Then wait until the first job is picked up. Prover gateway has to insert protocol information into the database, and @@ -63,8 +51,8 @@ until it happens, witness generators will panic and won't be able to start. Once a job is created, start witness generators: -``` -zk f cargo run --release --bin zksync_witness_generator -- --all_rounds +```bash +zkstack prover run --component=witness-generator --round=all-rounds ``` `--all_rounds` means that witness generator will produce witnesses of all kinds. You can run a witness generator for @@ -72,22 +60,47 @@ each round separately, but it's mostly useful in production environments. ### Witness vector generator -``` -zk f cargo run --release --bin zksync_witness_vector_generator -- --threads 10 +```bash +zkstack prover run --component=witness-vector-generator --threads 10 ``` WVG prepares inputs for prover, and it's a single-threaded time-consuming operation. You may run several jobs by changing the `threads` parameter. The exact amount of WVGs needed to "feed" one prover depends on CPU/GPU specs, but a ballpark estimate (useful for local development) is 10 WVGs per prover. +> NOTE: The WVG thread typically uses approximately 10GB of RAM. + ### Prover -``` -zk f cargo run --features "gpu" --release --bin zksync_prover_fri +```bash +zkstack prover run --component=prover ``` Prover can prove any kinds of circuits, so you only need a single instance. +### Prover job monitor + +You can start the prover job monitor by specifying its component as follows. 
+
+```bash
+zkstack prover run --component=prover-job-monitor
+```
+
+### Insert protocol version in prover database
+
+Before running the prover, you can insert the protocol version in the prover database by executing the following
+command:
+
+```bash
+zkstack dev prover insert-version --version <VERSION> --snark-wrapper=<SNARK_WRAPPER>
+```
+
+To query this information, use the following command:
+
+```bash
+zkstack dev prover info
+```
+
 ### Proof compressor
 
 ⚠️ Both prover and proof compressor require 24GB of VRAM, and currently it's not possible to make them use different
@@ -96,8 +109,8 @@ GPU. So unless you have a GPU with 48GB of VRAM, you won't be able to run both at the same time.
 You should wait until the proof is generated, and once you see in the server logs that it tries to find available
 compressor, you can shut the prover down, and run the proof compressor:
 
-```
-zk f cargo run --features "gpu" --release --bin zksync_proof_fri_compressor
+```bash
+zkstack prover run --component=compressor
 ```
 
 Once the proof is compressed, proof gateway will see that and will send the generated proof back to core.
diff --git a/prover/docs/04_flow.md b/prover/docs/src/04_flow.md
similarity index 100%
rename from prover/docs/04_flow.md
rename to prover/docs/src/04_flow.md
diff --git a/prover/docs/05_proving_batch.md b/prover/docs/src/05_proving_batch.md
similarity index 98%
rename from prover/docs/05_proving_batch.md
rename to prover/docs/src/05_proving_batch.md
index c35de975bf71..6bcf57a06a40 100644
--- a/prover/docs/05_proving_batch.md
+++ b/prover/docs/src/05_proving_batch.md
@@ -18,7 +18,7 @@ First of all, you need to install CUDA drivers, all other things will be dealt with further.
 For that, check the following [guide](./02_setup.md) (you can skip the bellman-cuda step).
 
 Install the prerequisites, which you can find
-[here](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/setup-dev.md). Note, that if you are not using
+[here](https://matter-labs.github.io/zksync-era/core/latest/guides/setup-dev.html). Note, that if you are not using
 Google VM instance, you also need to install [gcloud](https://cloud.google.com/sdk/docs/install#deb).
 
 Now, you can use `zkstack` and `prover_cli` tools for setting up the env and running prover subsystem.
diff --git a/prover/docs/src/99_further_reading.md b/prover/docs/src/99_further_reading.md
new file mode 100644
index 000000000000..98a2d6b4337f
--- /dev/null
+++ b/prover/docs/src/99_further_reading.md
@@ -0,0 +1,14 @@
+# Further reading
+
+The documentation in this section aims to provide a practical overview of the prover workspace, e.g. to help people
+understand how to run provers and what they do.
+
+However, we have some documentation that is more focused on the theory of proving in the
+[core workspace docs](https://matter-labs.github.io/zksync-era/core/latest).
+
+You may find the following articles helpful for a general understanding of ZK proofs:
+
+- [ZK intuition](https://matter-labs.github.io/zksync-era/core/latest/guides/advanced/13_zk_intuition.html).
+- [ZK deeper overview](https://matter-labs.github.io/zksync-era/core/latest/guides/advanced/14_zk_deeper_overview.html).
+- [Prover keys](https://matter-labs.github.io/zksync-era/core/latest/guides/advanced/15_prover_keys.html).
+- [Overview of our ZK proving system implementation](https://matter-labs.github.io/zksync-era/core/latest/specs/prover/overview.html).
diff --git a/prover/docs/src/README.md b/prover/docs/src/README.md new file mode 100644 index 000000000000..991c91219e99 --- /dev/null +++ b/prover/docs/src/README.md @@ -0,0 +1,16 @@ +# Prover subsystem documentation + +This is technical documentation for the prover subsystem. It aims to help developers to set up a development environment +for working with provers. This documentation assumes that you are already familiar with how ZKsync works, and you need +to be able to work with the prover code. + +It does not cover topics such as basics of ZK or production deployment for provers. + +## Table of contents + +- [Intro](00_intro.md) +- [Setting up a GCP VM](01_gcp_vm.md) +- [Workspace setup](02_setup.md) +- [Running prover subsystem](03_launch.md) +- [Proof generation flow](04_flow.md) +- [Further reading](99_further_reading.md) diff --git a/prover/docs/src/SUMMARY.md b/prover/docs/src/SUMMARY.md new file mode 100644 index 000000000000..d4a6fa15d778 --- /dev/null +++ b/prover/docs/src/SUMMARY.md @@ -0,0 +1,11 @@ +# Summary + +[Introduction](./00_intro.md) + +- [Creating a GCP VM](./01_gcp_vm.md) +- [Development environment setup](./02_setup.md) +- [Running provers](./03_launch.md) +- [Prover flow](./04_flow.md) +- [Proving a batch](./05_proving_batch.md) + +[Further reading](./99_further_reading.md) diff --git a/prover/docs/theme/head.hbs b/prover/docs/theme/head.hbs new file mode 100644 index 000000000000..66ee37538adf --- /dev/null +++ b/prover/docs/theme/head.hbs @@ -0,0 +1 @@ + diff --git a/zkstack_cli/Cargo.lock b/zkstack_cli/Cargo.lock index a9089719714d..2206a1052f59 100644 --- a/zkstack_cli/Cargo.lock +++ b/zkstack_cli/Cargo.lock @@ -297,12 +297,6 @@ dependencies = [ "windows-targets 0.52.6", ] -[[package]] -name = "base16ct" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" - [[package]] name = "base16ct" version = "0.2.0" @@ -659,7 +653,7 @@ dependencies = [ "coins-core", "digest", "hmac", - "k256 0.13.4", + "k256", "serde", "sha2", "thiserror", @@ -938,18 +932,6 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" -[[package]] -name = "crypto-bigint" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" -dependencies = [ - "generic-array", - "rand_core", - "subtle", - "zeroize", -] - [[package]] name = "crypto-bigint" version = "0.5.5" @@ -1069,16 +1051,6 @@ dependencies = [ "uuid 1.10.0", ] -[[package]] -name = "der" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de" -dependencies = [ - "const-oid", - "zeroize", -] - [[package]] name = "der" version = "0.7.9" @@ -1221,30 +1193,18 @@ version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" -[[package]] -name = "ecdsa" -version = "0.14.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" -dependencies = [ - "der 0.6.1", - "elliptic-curve 0.12.3", - "rfc6979 0.3.1", - "signature 1.6.4", -] - [[package]] name = "ecdsa" version = "0.16.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" dependencies = [ - "der 0.7.9", + "der", "digest", - "elliptic-curve 0.13.8", - "rfc6979 0.4.0", - "signature 2.2.0", - "spki 0.7.3", + "elliptic-curve", + "rfc6979", + "signature", + "spki", ] [[package]] @@ -1253,8 +1213,8 @@ version = "2.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" dependencies = [ - "pkcs8 0.10.2", - "signature 2.2.0", + "pkcs8", + "signature", ] [[package]] @@ -1281,41 +1241,21 @@ dependencies = [ "serde", ] -[[package]] -name = "elliptic-curve" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" -dependencies = [ - "base16ct 0.1.1", - "crypto-bigint 0.4.9", - "der 0.6.1", - "digest", - "ff 0.12.1", - "generic-array", - "group 0.12.1", - "pkcs8 0.9.0", - "rand_core", - "sec1 0.3.0", - "subtle", - "zeroize", -] - [[package]] name = "elliptic-curve" version = "0.13.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" dependencies = [ - "base16ct 0.2.0", - "crypto-bigint 0.5.5", + "base16ct", + "crypto-bigint", "digest", - "ff 0.13.0", + "ff", "generic-array", - "group 0.13.0", - "pkcs8 0.10.2", + "group", + "pkcs8", "rand_core", - "sec1 0.7.3", + "sec1", "subtle", "zeroize", ] @@ -1362,7 +1302,7 @@ dependencies = [ "base64 0.21.7", "bytes", "hex", - "k256 0.13.4", + "k256", "log", "rand", "rlp", @@ -1587,11 +1527,11 @@ dependencies = [ "cargo_metadata", "chrono", "const-hex", - "elliptic-curve 0.13.8", + "elliptic-curve", "ethabi", "generic-array", - "k256 0.13.4", - "num_enum 0.7.3", + "k256", + "num_enum", "once_cell", "open-fastrlp", "rand", @@ -1696,7 +1636,7 @@ dependencies = [ "coins-bip32", "coins-bip39", "const-hex", - "elliptic-curve 0.13.8", + "elliptic-curve", "eth-keystore", "ethers-core", "rand", @@ -1775,16 +1715,6 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" -[[package]] -name = "ff" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" -dependencies = [ - "rand_core", - "subtle", -] - [[package]] name = "ff" version = "0.13.0" @@ -2082,24 +2012,13 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "group" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" -dependencies = [ - "ff 0.12.1", - "rand_core", - "subtle", -] - [[package]] name = "group" version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ - "ff 0.13.0", + "ff", "rand_core", "subtle", ] @@ -2679,18 +2598,6 @@ dependencies = [ "simple_asn1", ] -[[package]] -name = "k256" -version = "0.11.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" -dependencies = [ - "cfg-if", - "ecdsa 0.14.8", - "elliptic-curve 0.12.3", - "sha2", -] - [[package]] name = "k256" version = "0.13.4" @@ -2698,11 +2605,11 @@ 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6e3919bbaa2945715f0bb6d3934a173d1e9a59ac23767fbaaef277265a7411b" dependencies = [ "cfg-if", - "ecdsa 0.16.9", - "elliptic-curve 0.13.8", + "ecdsa", + "elliptic-curve", "once_cell", "sha2", - "signature 2.2.0", + "signature", ] [[package]] @@ -3130,34 +3037,13 @@ dependencies = [ "libc", ] -[[package]] -name = "num_enum" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a015b430d3c108a207fd776d2e2196aaf8b1cf8cf93253e3a097ff3085076a1" -dependencies = [ - "num_enum_derive 0.6.1", -] - [[package]] name = "num_enum" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4e613fc340b2220f734a8595782c551f1250e969d87d3be1ae0579e8d4065179" dependencies = [ - "num_enum_derive 0.7.3", -] - -[[package]] -name = "num_enum_derive" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" -dependencies = [ - "proc-macro-crate 1.3.1", - "proc-macro2", - "quote", - "syn 2.0.79", + "num_enum_derive", ] [[package]] @@ -3166,7 +3052,7 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" dependencies = [ - "proc-macro-crate 3.2.0", + "proc-macro-crate", "proc-macro2", "quote", "syn 2.0.79", @@ -3412,7 +3298,7 @@ version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" dependencies = [ - "proc-macro-crate 3.2.0", + "proc-macro-crate", "proc-macro2", "quote", "syn 1.0.109", @@ -3627,19 +3513,9 @@ version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" dependencies = [ - "der 0.7.9", - "pkcs8 0.10.2", - "spki 0.7.3", -] - -[[package]] -name = "pkcs8" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba" -dependencies = [ - "der 0.6.1", - "spki 0.6.0", + "der", + "pkcs8", + "spki", ] [[package]] @@ -3648,8 +3524,8 @@ version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" dependencies = [ - "der 0.7.9", - "spki 0.7.3", + "der", + "spki", ] [[package]] @@ -3719,23 +3595,13 @@ dependencies = [ "uint", ] -[[package]] -name = "proc-macro-crate" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" -dependencies = [ - "once_cell", - "toml_edit 0.19.15", -] - [[package]] name = "proc-macro-crate" version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" dependencies = [ - "toml_edit 0.22.22", + "toml_edit", ] [[package]] @@ -4140,17 +4006,6 @@ dependencies = [ "windows-registry", ] -[[package]] -name = "rfc6979" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" -dependencies = [ - "crypto-bigint 0.4.9", - "hmac", - "zeroize", -] - [[package]] name = "rfc6979" version = "0.4.0" @@ -4234,10 +4089,10 @@ 
dependencies = [ "num-integer", "num-traits", "pkcs1", - "pkcs8 0.10.2", + "pkcs8", "rand_core", - "signature 2.2.0", - "spki 0.7.3", + "signature", + "spki", "subtle", "zeroize", ] @@ -4400,7 +4255,7 @@ version = "2.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d35494501194174bda522a32605929eefc9ecf7e0a326c26db1fdd85881eb62" dependencies = [ - "proc-macro-crate 3.2.0", + "proc-macro-crate", "proc-macro2", "quote", "syn 1.0.109", @@ -4443,30 +4298,16 @@ dependencies = [ "untrusted 0.9.0", ] -[[package]] -name = "sec1" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" -dependencies = [ - "base16ct 0.1.1", - "der 0.6.1", - "generic-array", - "pkcs8 0.9.0", - "subtle", - "zeroize", -] - [[package]] name = "sec1" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" dependencies = [ - "base16ct 0.2.0", - "der 0.7.9", + "base16ct", + "der", "generic-array", - "pkcs8 0.10.2", + "pkcs8", "subtle", "zeroize", ] @@ -4771,17 +4612,6 @@ dependencies = [ "digest", ] -[[package]] -name = "sha2_ce" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eca2daa77078f4ddff27e75c4bf59e4c2697525f56dbb3c842d34a5d1f2b04a2" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - [[package]] name = "sha3" version = "0.10.8" @@ -4792,16 +4622,6 @@ dependencies = [ "keccak", ] -[[package]] -name = "sha3_ce" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34c9a08202c50378d8a07a5f458193a5f542d2828ac6640263dbc0c2533ea25e" -dependencies = [ - "digest", - "keccak", -] - [[package]] name = "sharded-slab" version = "0.1.7" @@ -4826,16 +4646,6 @@ dependencies = [ "libc", ] -[[package]] -name = "signature" -version = "1.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" -dependencies = [ - "digest", - "rand_core", -] - [[package]] name = "signature" version = "2.2.0" @@ -4953,16 +4763,6 @@ dependencies = [ "lock_api", ] -[[package]] -name = "spki" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b" -dependencies = [ - "base64ct", - "der 0.6.1", -] - [[package]] name = "spki" version = "0.7.3" @@ -4970,7 +4770,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" dependencies = [ "base64ct", - "der 0.7.9", + "der", ] [[package]] @@ -5700,7 +5500,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.22", + "toml_edit", ] [[package]] @@ -5712,17 +5512,6 @@ dependencies = [ "serde", ] -[[package]] -name = "toml_edit" -version = "0.19.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" -dependencies = [ - "indexmap 2.6.0", - "toml_datetime", - "winnow 0.5.40", -] - [[package]] name = "toml_edit" version = "0.22.22" @@ -5733,7 +5522,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "winnow 0.6.20", + "winnow", ] [[package]] @@ -6530,15 +6319,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" -[[package]] -name = "winnow" -version = "0.5.40" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" -dependencies = [ - "memchr", -] - [[package]] name = "winnow" version = "0.6.20" @@ -6674,50 +6454,6 @@ dependencies = [ "zstd", ] -[[package]] -name = "zk_evm" -version = "0.133.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9af08e9284686a1b0c89ec4931eb915ac0729367f1247abd06164874fe738106" -dependencies = [ - "anyhow", - "lazy_static", - "num", - "serde", - "serde_json", - "static_assertions", - "zk_evm_abstractions", - "zkevm_opcode_defs", -] - -[[package]] -name = "zk_evm_abstractions" -version = "0.140.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be696258861eba4e6625a5665084b2266720bb67f4ba69819469700ac5c6a401" -dependencies = [ - "anyhow", - "num_enum 0.6.1", - "serde", - "static_assertions", - "zkevm_opcode_defs", -] - -[[package]] -name = "zkevm_opcode_defs" -version = "0.132.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0769f7b27d8fb06e715da3290c575cac5d04d10a557faef180e847afce50ac4" -dependencies = [ - "bitflags 2.6.0", - "blake2", - "ethereum-types", - "k256 0.11.6", - "lazy_static", - "sha2_ce", - "sha3_ce", -] - [[package]] name = "zkstack" version = "0.1.0" @@ -6767,13 +6503,15 @@ version = "0.1.0" dependencies = [ "anyhow", "chrono", + "const-decoder", "ethabi", "hex", - "num_enum 0.7.3", + "num_enum", "secrecy", "serde", "serde_json", "serde_with", + "sha2", "strum", "thiserror", "tiny-keccak", @@ -6822,9 +6560,9 @@ dependencies = [ "anyhow", "blst", "ed25519-dalek", - "elliptic-curve 0.13.8", + "elliptic-curve", "hex", - "k256 0.13.4", + "k256", "num-bigint", "num-traits", "rand", @@ -6873,11 +6611,11 @@ name = "zksync_contracts" version = "0.1.0" dependencies = [ "envy", - "ethabi", "hex", "once_cell", "serde", "serde_json", + "zksync_basic_types", "zksync_utils", ] @@ -6895,7 +6633,6 @@ dependencies = [ "sha2", "thiserror", "zksync_basic_types", - "zksync_utils", ] [[package]] @@ -6970,7 +6707,6 @@ version = "0.1.0" dependencies = [ "once_cell", "zksync_basic_types", - "zksync_utils", ] [[package]] @@ -6985,7 +6721,7 @@ dependencies = [ "hex", "itertools 0.10.5", "num", - "num_enum 0.7.3", + "num_enum", "once_cell", "prost 0.12.6", "rlp", @@ -7002,7 +6738,6 @@ dependencies = [ "zksync_protobuf", "zksync_protobuf_build", "zksync_system_constants", - "zksync_utils", ] [[package]] @@ -7010,20 +6745,12 @@ name = "zksync_utils" version = "0.1.0" dependencies = [ "anyhow", - "bigdecimal", - "const-decoder", "futures", - "hex", - "num", "once_cell", "reqwest 0.12.8", - "serde", "serde_json", - "thiserror", "tokio", "tracing", - "zk_evm", - "zksync_basic_types", "zksync_vlog", ] diff --git a/zkstack_cli/crates/common/src/contracts.rs b/zkstack_cli/crates/common/src/contracts.rs index 8f5ae8056029..4cef4467f382 100644 --- a/zkstack_cli/crates/common/src/contracts.rs +++ b/zkstack_cli/crates/common/src/contracts.rs @@ -4,12 +4,6 @@ use xshell::{cmd, Shell}; use crate::cmd::Cmd; -pub fn build_test_contracts(shell: Shell, link_to_code: PathBuf) -> anyhow::Result<()> { - let _dir_guard = shell.push_dir(link_to_code.join("etc/contracts-test-data")); - Cmd::new(cmd!(shell, "yarn install")).run()?; - Ok(Cmd::new(cmd!(shell, "yarn build")).run()?) 
-} - pub fn build_l1_contracts(shell: Shell, link_to_code: PathBuf) -> anyhow::Result<()> { let _dir_guard = shell.push_dir(link_to_code.join("contracts/l1-contracts")); Ok(Cmd::new(cmd!(shell, "forge build")).run()?) diff --git a/zkstack_cli/crates/config/src/consts.rs b/zkstack_cli/crates/config/src/consts.rs index f462ce33b8f8..c3efb4ac3e96 100644 --- a/zkstack_cli/crates/config/src/consts.rs +++ b/zkstack_cli/crates/config/src/consts.rs @@ -63,9 +63,10 @@ pub const DEFAULT_EXPLORER_API_PORT: u16 = 3002; /// Default port for the explorer data fetcher service pub const DEFAULT_EXPLORER_DATA_FETCHER_PORT: u16 = 3040; -pub const EXPLORER_API_DOCKER_IMAGE: &str = "matterlabs/block-explorer-api"; -pub const EXPLORER_DATA_FETCHER_DOCKER_IMAGE: &str = "matterlabs/block-explorer-data-fetcher"; -pub const EXPLORER_WORKER_DOCKER_IMAGE: &str = "matterlabs/block-explorer-worker"; +pub const EXPLORER_API_DOCKER_IMAGE: &str = "matterlabs/block-explorer-api:v2.50.8"; +pub const EXPLORER_DATA_FETCHER_DOCKER_IMAGE: &str = + "matterlabs/block-explorer-data-fetcher:v2.50.8"; +pub const EXPLORER_WORKER_DOCKER_IMAGE: &str = "matterlabs/block-explorer-worker:v2.50.8"; /// Interval (in milliseconds) for polling new batches to process in explorer app pub const EXPLORER_BATCHES_PROCESSING_POLLING_INTERVAL: u64 = 1000; diff --git a/zkstack_cli/crates/zkstack/completion/_zkstack.zsh b/zkstack_cli/crates/zkstack/completion/_zkstack.zsh index 825fc967e6d7..fc6f29851e66 100644 --- a/zkstack_cli/crates/zkstack/completion/_zkstack.zsh +++ b/zkstack_cli/crates/zkstack/completion/_zkstack.zsh @@ -535,6 +535,23 @@ _arguments "${_arguments_options[@]}" : \ '--help[Print help (see more with '\''--help'\'')]' \ && ret=0 ;; +(deploy-timestamp-asserter) +_arguments "${_arguments_options[@]}" : \ +'--verify=[Verify deployed contracts]' \ +'--verifier=[Verifier to use]:VERIFIER:(etherscan sourcify blockscout oklink)' \ +'--verifier-url=[Verifier URL, if using a custom provider]:VERIFIER_URL:_default' \ +'--verifier-api-key=[Verifier API key]:VERIFIER_API_KEY:_default' \ +'*-a+[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ +'*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ +'--chain=[Chain to use]:CHAIN:_default' \ +'--resume[]' \ +'-v[Verbose mode]' \ +'--verbose[Verbose mode]' \ +'--ignore-prerequisites[Ignores prerequisites checks]' \ +'-h[Print help (see more with '\''--help'\'')]' \ +'--help[Print help (see more with '\''--help'\'')]' \ +&& ret=0 +;; (deploy-upgrader) _arguments "${_arguments_options[@]}" : \ '--verify=[Verify deployed contracts]' \ @@ -674,6 +691,10 @@ _arguments "${_arguments_options[@]}" : \ _arguments "${_arguments_options[@]}" : \ && ret=0 ;; +(deploy-timestamp-asserter) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; (deploy-upgrader) _arguments "${_arguments_options[@]}" : \ && ret=0 @@ -1450,7 +1471,6 @@ _arguments "${_arguments_options[@]}" : \ '--l1-contracts=[Build L1 contracts]' \ '--l2-contracts=[Build L2 contracts]' \ '--system-contracts=[Build system contracts]' \ -'--test-contracts=[Build test contracts]' \ '--chain=[Chain to use]:CHAIN:_default' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ @@ -1901,7 +1921,11 @@ _arguments "${_arguments_options[@]}" : \ '--round=[]:ROUND:(all-rounds basic-circuits leaf-aggregation node-aggregation recursion-tip scheduler)' \ '--threads=[]:THREADS:_default' \ '--max-allocation=[Memory allocation limit in bytes (for prover 
component)]:MAX_ALLOCATION:_default' \ -'--witness-vector-generator-count=[]:WITNESS_VECTOR_GENERATOR_COUNT:_default' \ +'-l+[]:LIGHT_WVG_COUNT:_default' \ +'--light-wvg-count=[]:LIGHT_WVG_COUNT:_default' \ +'-h+[]:HEAVY_WVG_COUNT:_default' \ +'--heavy-wvg-count=[]:HEAVY_WVG_COUNT:_default' \ +'-m+[]:MAX_ALLOCATION:_default' \ '--max-allocation=[]:MAX_ALLOCATION:_default' \ '--docker=[]:DOCKER:(true false)' \ '--tag=[]:TAG:_default' \ @@ -1987,15 +2011,96 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[Additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--genesis[Run server in genesis mode]' \ -'--build[Build server but don'\''t run it]' \ '--uring[Enables uring support for RocksDB]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ '-h[Print help]' \ '--help[Print help]' \ +":: :_zkstack__server_commands" \ +"*::: :->server" \ +&& ret=0 + + case $state in + (server) + words=($line[1] "${words[@]}") + (( CURRENT += 1 )) + curcontext="${curcontext%:*:*}:zkstack-server-command-$line[1]:" + case $line[1] in + (build) +_arguments "${_arguments_options[@]}" : \ +'--chain=[Chain to use]:CHAIN:_default' \ +'-v[Verbose mode]' \ +'--verbose[Verbose mode]' \ +'--ignore-prerequisites[Ignores prerequisites checks]' \ +'-h[Print help]' \ +'--help[Print help]' \ && ret=0 ;; +(run) +_arguments "${_arguments_options[@]}" : \ +'*--components=[Components of server to run]:COMPONENTS:_default' \ +'*-a+[Additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ +'*--additional-args=[Additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ +'--chain=[Chain to use]:CHAIN:_default' \ +'--genesis[Run server in genesis mode]' \ +'--uring[Enables uring support for RocksDB]' \ +'-v[Verbose mode]' \ +'--verbose[Verbose mode]' \ +'--ignore-prerequisites[Ignores prerequisites checks]' \ +'-h[Print help]' \ +'--help[Print help]' \ +&& ret=0 +;; +(wait) +_arguments "${_arguments_options[@]}" : \ +'-t+[Wait timeout in seconds]:SECONDS:_default' \ +'--timeout=[Wait timeout in seconds]:SECONDS:_default' \ +'--poll-interval=[Poll interval in milliseconds]:MILLIS:_default' \ +'--chain=[Chain to use]:CHAIN:_default' \ +'-v[Verbose mode]' \ +'--verbose[Verbose mode]' \ +'--ignore-prerequisites[Ignores prerequisites checks]' \ +'-h[Print help]' \ +'--help[Print help]' \ +&& ret=0 +;; +(help) +_arguments "${_arguments_options[@]}" : \ +":: :_zkstack__server__help_commands" \ +"*::: :->help" \ +&& ret=0 + + case $state in + (help) + words=($line[1] "${words[@]}") + (( CURRENT += 1 )) + curcontext="${curcontext%:*:*}:zkstack-server-help-command-$line[1]:" + case $line[1] in + (build) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; +(run) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; +(wait) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; +(help) +_arguments "${_arguments_options[@]}" : \ +&& ret=0 +;; + esac + ;; +esac +;; + esac + ;; +esac +;; (external-node) _arguments "${_arguments_options[@]}" : \ '--chain=[Chain to use]:CHAIN:_default' \ @@ -2039,6 +2144,16 @@ _arguments "${_arguments_options[@]}" : \ '--help[Print help]' \ && ret=0 ;; +(build) +_arguments "${_arguments_options[@]}" : \ +'--chain=[Chain to use]:CHAIN:_default' \ +'-v[Verbose mode]' \ +'--verbose[Verbose mode]' \ +'--ignore-prerequisites[Ignores prerequisites checks]' \ +'-h[Print help]' \ +'--help[Print help]' \ +&& ret=0 +;; 
 (run)
 _arguments "${_arguments_options[@]}" : \
 '*--components=[Components of server to run]:COMPONENTS:_default' \
@@ -2054,6 +2169,19 @@ _arguments "${_arguments_options[@]}" : \
 '--help[Print help]' \
 && ret=0
 ;;
+(wait)
+_arguments "${_arguments_options[@]}" : \
+'-t+[Wait timeout in seconds]:SECONDS:_default' \
+'--timeout=[Wait timeout in seconds]:SECONDS:_default' \
+'--poll-interval=[Poll interval in milliseconds]:MILLIS:_default' \
+'--chain=[Chain to use]:CHAIN:_default' \
+'-v[Verbose mode]' \
+'--verbose[Verbose mode]' \
+'--ignore-prerequisites[Ignores prerequisites checks]' \
+'-h[Print help]' \
+'--help[Print help]' \
+&& ret=0
+;;
 (help)
 _arguments "${_arguments_options[@]}" : \
 ":: :_zkstack__external-node__help_commands" \
@@ -2074,10 +2202,18 @@ _arguments "${_arguments_options[@]}" : \
 _arguments "${_arguments_options[@]}" : \
 && ret=0
 ;;
+(build)
+_arguments "${_arguments_options[@]}" : \
+&& ret=0
+;;
 (run)
 _arguments "${_arguments_options[@]}" : \
 && ret=0
 ;;
+(wait)
+_arguments "${_arguments_options[@]}" : \
+&& ret=0
+;;
 (help)
 _arguments "${_arguments_options[@]}" : \
 && ret=0
@@ -2120,7 +2256,17 @@ _arguments "${_arguments_options[@]}" : \
         (( CURRENT += 1 ))
         curcontext="${curcontext%:*:*}:zkstack-contract-verifier-command-$line[1]:"
         case $line[1] in
-            (run)
+            (build)
+_arguments "${_arguments_options[@]}" : \
+'--chain=[Chain to use]:CHAIN:_default' \
+'-v[Verbose mode]' \
+'--verbose[Verbose mode]' \
+'--ignore-prerequisites[Ignores prerequisites checks]' \
+'-h[Print help]' \
+'--help[Print help]' \
+&& ret=0
+;;
+(run)
 _arguments "${_arguments_options[@]}" : \
 '--chain=[Chain to use]:CHAIN:_default' \
 '-v[Verbose mode]' \
@@ -2130,6 +2276,19 @@ _arguments "${_arguments_options[@]}" : \
 '--help[Print help]' \
 && ret=0
 ;;
+(wait)
+_arguments "${_arguments_options[@]}" : \
+'-t+[Wait timeout in seconds]:SECONDS:_default' \
+'--timeout=[Wait timeout in seconds]:SECONDS:_default' \
+'--poll-interval=[Poll interval in milliseconds]:MILLIS:_default' \
+'--chain=[Chain to use]:CHAIN:_default' \
+'-v[Verbose mode]' \
+'--verbose[Verbose mode]' \
+'--ignore-prerequisites[Ignores prerequisites checks]' \
+'-h[Print help]' \
+'--help[Print help]' \
+&& ret=0
+;;
 (init)
 _arguments "${_arguments_options[@]}" : \
 '--zksolc-version=[Version of zksolc to install]:ZKSOLC_VERSION:_default' \
@@ -2158,7 +2317,15 @@ _arguments "${_arguments_options[@]}" : \
         (( CURRENT += 1 ))
         curcontext="${curcontext%:*:*}:zkstack-contract-verifier-help-command-$line[1]:"
         case $line[1] in
-            (run)
+            (build)
+_arguments "${_arguments_options[@]}" : \
+&& ret=0
+;;
+(run)
+_arguments "${_arguments_options[@]}" : \
+&& ret=0
+;;
+(wait)
 _arguments "${_arguments_options[@]}" : \
 && ret=0
 ;;
@@ -2312,6 +2479,19 @@ _arguments "${_arguments_options[@]}" : \
 '--help[Print help]' \
 && ret=0
 ;;
+(wait-for-registry)
+_arguments "${_arguments_options[@]}" : \
+'-t+[Wait timeout in seconds]:SECONDS:_default' \
+'--timeout=[Wait timeout in seconds]:SECONDS:_default' \
+'--poll-interval=[Poll interval in milliseconds]:MILLIS:_default' \
+'--chain=[Chain to use]:CHAIN:_default' \
+'-v[Verbose mode]' \
+'--verbose[Verbose mode]' \
+'--ignore-prerequisites[Ignores prerequisites checks]' \
+'-h[Print help]' \
+'--help[Print help]' \
+&& ret=0
+;;
 (help)
 _arguments "${_arguments_options[@]}" : \
 ":: :_zkstack__consensus__help_commands" \
@@ -2332,6 +2512,10 @@ _arguments "${_arguments_options[@]}" : \
 _arguments "${_arguments_options[@]}" : \
 && ret=0
 ;;
+(wait-for-registry)
+_arguments "${_arguments_options[@]}" : \
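[Editor's note] Every `wait` variant added above takes `-t`/`--timeout` (seconds) and `--poll-interval` (milliseconds). A minimal, std-only sketch of the polling loop those flags imply; treating a successful TCP connect to a local health port as "started" is an assumption of this sketch, not necessarily what zkstack checks:

use std::net::TcpStream;
use std::time::{Duration, Instant};

/// Polls a local port until it accepts connections or the timeout elapses.
fn wait_for_port(port: u16, timeout_secs: u64, poll_interval_ms: u64) -> Result<(), String> {
    let deadline = Instant::now() + Duration::from_secs(timeout_secs);
    loop {
        if TcpStream::connect(("127.0.0.1", port)).is_ok() {
            return Ok(()); // the component is reachable
        }
        if Instant::now() >= deadline {
            return Err(format!("timed out waiting for port {port}"));
        }
        std::thread::sleep(Duration::from_millis(poll_interval_ms));
    }
}

fn main() {
    // Example: wait up to 30 s, probing every 500 ms (hypothetical port).
    wait_for_port(3070, 30, 500).unwrap();
}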
+&& ret=0
+;;
 (help)
 _arguments "${_arguments_options[@]}" : \
 && ret=0
@@ -2506,6 +2690,10 @@ _arguments "${_arguments_options[@]}" : \
 _arguments "${_arguments_options[@]}" : \
 && ret=0
 ;;
+(deploy-timestamp-asserter)
+_arguments "${_arguments_options[@]}" : \
+&& ret=0
+;;
 (deploy-upgrader)
 _arguments "${_arguments_options[@]}" : \
 && ret=0
@@ -2824,7 +3012,31 @@ esac
 ;;
 (server)
 _arguments "${_arguments_options[@]}" : \
+":: :_zkstack__help__server_commands" \
+"*::: :->server" \
+&& ret=0
+
+    case $state in
+    (server)
+        words=($line[1] "${words[@]}")
+        (( CURRENT += 1 ))
+        curcontext="${curcontext%:*:*}:zkstack-help-server-command-$line[1]:"
+        case $line[1] in
+            (build)
+_arguments "${_arguments_options[@]}" : \
+&& ret=0
+;;
+(run)
+_arguments "${_arguments_options[@]}" : \
+&& ret=0
+;;
+(wait)
+_arguments "${_arguments_options[@]}" : \
 && ret=0
+;;
+        esac
+    ;;
+esac
 ;;
 (external-node)
 _arguments "${_arguments_options[@]}" : \
@@ -2846,9 +3058,17 @@ _arguments "${_arguments_options[@]}" : \
 _arguments "${_arguments_options[@]}" : \
 && ret=0
 ;;
+(build)
+_arguments "${_arguments_options[@]}" : \
+&& ret=0
+;;
 (run)
 _arguments "${_arguments_options[@]}" : \
 && ret=0
+;;
+(wait)
+_arguments "${_arguments_options[@]}" : \
+&& ret=0
 ;;
 esac
 ;;
@@ -2870,7 +3090,15 @@ _arguments "${_arguments_options[@]}" : \
         (( CURRENT += 1 ))
         curcontext="${curcontext%:*:*}:zkstack-help-contract-verifier-command-$line[1]:"
         case $line[1] in
-            (run)
+            (build)
+_arguments "${_arguments_options[@]}" : \
+&& ret=0
+;;
+(run)
+_arguments "${_arguments_options[@]}" : \
+&& ret=0
+;;
+(wait)
 _arguments "${_arguments_options[@]}" : \
 && ret=0
 ;;
@@ -2933,6 +3161,10 @@ _arguments "${_arguments_options[@]}" : \
 (get-attester-committee)
 _arguments "${_arguments_options[@]}" : \
 && ret=0
+;;
+(wait-for-registry)
+_arguments "${_arguments_options[@]}" : \
+&& ret=0
 ;;
 esac
 ;;
@@ -2998,6 +3230,7 @@ _zkstack__chain_commands() {
 'initialize-bridges:Initialize bridges on L2' \
 'deploy-consensus-registry:Deploy L2 consensus registry' \
 'deploy-multicall3:Deploy L2 multicall3' \
+'deploy-timestamp-asserter:Deploy L2 TimestampAsserter' \
 'deploy-upgrader:Deploy Default Upgrader' \
 'deploy-paymaster:Deploy paymaster smart contract' \
 'update-token-multiplier-setter:Update Token Multiplier Setter address on L1' \
@@ -3040,6 +3273,11 @@ _zkstack__chain__deploy-paymaster_commands() {
     local commands; commands=()
     _describe -t commands 'zkstack chain deploy-paymaster commands' commands "$@"
 }
+(( $+functions[_zkstack__chain__deploy-timestamp-asserter_commands] )) ||
+_zkstack__chain__deploy-timestamp-asserter_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack chain deploy-timestamp-asserter commands' commands "$@"
+}
 (( $+functions[_zkstack__chain__deploy-upgrader_commands] )) ||
 _zkstack__chain__deploy-upgrader_commands() {
     local commands; commands=()
@@ -3101,6 +3339,7 @@ _zkstack__chain__help_commands() {
 'initialize-bridges:Initialize bridges on L2' \
 'deploy-consensus-registry:Deploy L2 consensus registry' \
 'deploy-multicall3:Deploy L2 multicall3' \
+'deploy-timestamp-asserter:Deploy L2 TimestampAsserter' \
 'deploy-upgrader:Deploy Default Upgrader' \
 'deploy-paymaster:Deploy paymaster smart contract' \
 'update-token-multiplier-setter:Update Token Multiplier Setter address on L1' \
@@ -3143,6 +3382,11 @@ _zkstack__chain__help__deploy-paymaster_commands() {
     local commands; commands=()
     _describe -t commands 'zkstack chain help deploy-paymaster commands' commands "$@"
 }
+(( $+functions[_zkstack__chain__help__deploy-timestamp-asserter_commands] )) ||
+_zkstack__chain__help__deploy-timestamp-asserter_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack chain help deploy-timestamp-asserter commands' commands "$@"
+}
 (( $+functions[_zkstack__chain__help__deploy-upgrader_commands] )) ||
 _zkstack__chain__help__deploy-upgrader_commands() {
     local commands; commands=()
@@ -3249,6 +3493,7 @@ _zkstack__consensus_commands() {
     local commands; commands=(
 'set-attester-committee:Sets the attester committee in the consensus registry contract to \`consensus.genesis_spec.attesters\` in general.yaml' \
 'get-attester-committee:Fetches the attester committee from the consensus registry contract' \
+'wait-for-registry:Wait until the consensus registry contract is deployed to L2' \
 'help:Print this message or the help of the given subcommand(s)' \
     )
     _describe -t commands 'zkstack consensus commands' commands "$@"
@@ -3263,6 +3508,7 @@ _zkstack__consensus__help_commands() {
     local commands; commands=(
 'set-attester-committee:Sets the attester committee in the consensus registry contract to \`consensus.genesis_spec.attesters\` in general.yaml' \
 'get-attester-committee:Fetches the attester committee from the consensus registry contract' \
+'wait-for-registry:Wait until the consensus registry contract is deployed to L2' \
 'help:Print this message or the help of the given subcommand(s)' \
     )
     _describe -t commands 'zkstack consensus help commands' commands "$@"
@@ -3282,11 +3528,21 @@ _zkstack__consensus__help__set-attester-committee_commands() {
     local commands; commands=()
     _describe -t commands 'zkstack consensus help set-attester-committee commands' commands "$@"
 }
+(( $+functions[_zkstack__consensus__help__wait-for-registry_commands] )) ||
+_zkstack__consensus__help__wait-for-registry_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack consensus help wait-for-registry commands' commands "$@"
+}
 (( $+functions[_zkstack__consensus__set-attester-committee_commands] )) ||
 _zkstack__consensus__set-attester-committee_commands() {
     local commands; commands=()
     _describe -t commands 'zkstack consensus set-attester-committee commands' commands "$@"
 }
+(( $+functions[_zkstack__consensus__wait-for-registry_commands] )) ||
+_zkstack__consensus__wait-for-registry_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack consensus wait-for-registry commands' commands "$@"
+}
 (( $+functions[_zkstack__containers_commands] )) ||
 _zkstack__containers_commands() {
     local commands; commands=()
@@ -3295,21 +3551,35 @@ _zkstack__containers_commands() {
 (( $+functions[_zkstack__contract-verifier_commands] )) ||
 _zkstack__contract-verifier_commands() {
     local commands; commands=(
+'build:Build contract verifier binary' \
 'run:Run contract verifier' \
+'wait:Wait for contract verifier to start' \
 'init:Download required binaries for contract verifier' \
 'help:Print this message or the help of the given subcommand(s)' \
     )
     _describe -t commands 'zkstack contract-verifier commands' commands "$@"
 }
+(( $+functions[_zkstack__contract-verifier__build_commands] )) ||
+_zkstack__contract-verifier__build_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack contract-verifier build commands' commands "$@"
+}
 (( $+functions[_zkstack__contract-verifier__help_commands] )) ||
 _zkstack__contract-verifier__help_commands() {
     local commands; commands=(
+'build:Build contract verifier binary' \
 'run:Run contract verifier' \
+'wait:Wait for contract verifier to start' \
 'init:Download required binaries for contract verifier' \
 'help:Print this message or the help of the given subcommand(s)' \
     )
     _describe -t commands 'zkstack contract-verifier help commands' commands "$@"
 }
+(( $+functions[_zkstack__contract-verifier__help__build_commands] )) ||
+_zkstack__contract-verifier__help__build_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack contract-verifier help build commands' commands "$@"
+}
 (( $+functions[_zkstack__contract-verifier__help__help_commands] )) ||
 _zkstack__contract-verifier__help__help_commands() {
     local commands; commands=()
@@ -3325,6 +3595,11 @@ _zkstack__contract-verifier__help__run_commands() {
     local commands; commands=()
     _describe -t commands 'zkstack contract-verifier help run commands' commands "$@"
 }
+(( $+functions[_zkstack__contract-verifier__help__wait_commands] )) ||
+_zkstack__contract-verifier__help__wait_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack contract-verifier help wait commands' commands "$@"
+}
 (( $+functions[_zkstack__contract-verifier__init_commands] )) ||
 _zkstack__contract-verifier__init_commands() {
     local commands; commands=()
@@ -3335,6 +3610,11 @@ _zkstack__contract-verifier__run_commands() {
     local commands; commands=()
     _describe -t commands 'zkstack contract-verifier run commands' commands "$@"
 }
+(( $+functions[_zkstack__contract-verifier__wait_commands] )) ||
+_zkstack__contract-verifier__wait_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack contract-verifier wait commands' commands "$@"
+}
 (( $+functions[_zkstack__dev_commands] )) ||
 _zkstack__dev_commands() {
     local commands; commands=(
@@ -4264,11 +4544,18 @@ _zkstack__external-node_commands() {
     local commands; commands=(
 'configs:Prepare configs for EN' \
 'init:Init databases' \
+'build:Build external node' \
 'run:Run external node' \
+'wait:Wait for external node to start' \
 'help:Print this message or the help of the given subcommand(s)' \
     )
     _describe -t commands 'zkstack external-node commands' commands "$@"
 }
+(( $+functions[_zkstack__external-node__build_commands] )) ||
+_zkstack__external-node__build_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack external-node build commands' commands "$@"
+}
 (( $+functions[_zkstack__external-node__configs_commands] )) ||
 _zkstack__external-node__configs_commands() {
     local commands; commands=()
@@ -4279,11 +4566,18 @@ _zkstack__external-node__help_commands() {
     local commands; commands=(
 'configs:Prepare configs for EN' \
 'init:Init databases' \
+'build:Build external node' \
 'run:Run external node' \
+'wait:Wait for external node to start' \
 'help:Print this message or the help of the given subcommand(s)' \
     )
     _describe -t commands 'zkstack external-node help commands' commands "$@"
 }
+(( $+functions[_zkstack__external-node__help__build_commands] )) ||
+_zkstack__external-node__help__build_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack external-node help build commands' commands "$@"
+}
 (( $+functions[_zkstack__external-node__help__configs_commands] )) ||
 _zkstack__external-node__help__configs_commands() {
     local commands; commands=()
@@ -4304,6 +4598,11 @@ _zkstack__external-node__help__run_commands() {
     local commands; commands=()
     _describe -t commands 'zkstack external-node help run commands' commands "$@"
 }
+(( $+functions[_zkstack__external-node__help__wait_commands] )) ||
+_zkstack__external-node__help__wait_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack external-node help wait commands' commands "$@"
+}
 (( $+functions[_zkstack__external-node__init_commands] )) ||
 _zkstack__external-node__init_commands() {
     local commands; commands=()
@@ -4314,6 +4613,11 @@ _zkstack__external-node__run_commands() {
     local commands; commands=()
     _describe -t commands 'zkstack external-node run commands' commands "$@"
 }
+(( $+functions[_zkstack__external-node__wait_commands] )) ||
+_zkstack__external-node__wait_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack external-node wait commands' commands "$@"
+}
 (( $+functions[_zkstack__help_commands] )) ||
 _zkstack__help_commands() {
     local commands; commands=(
@@ -4353,6 +4657,7 @@ _zkstack__help__chain_commands() {
 'initialize-bridges:Initialize bridges on L2' \
 'deploy-consensus-registry:Deploy L2 consensus registry' \
 'deploy-multicall3:Deploy L2 multicall3' \
+'deploy-timestamp-asserter:Deploy L2 TimestampAsserter' \
 'deploy-upgrader:Deploy Default Upgrader' \
 'deploy-paymaster:Deploy paymaster smart contract' \
 'update-token-multiplier-setter:Update Token Multiplier Setter address on L1' \
@@ -4394,6 +4699,11 @@ _zkstack__help__chain__deploy-paymaster_commands() {
     local commands; commands=()
     _describe -t commands 'zkstack help chain deploy-paymaster commands' commands "$@"
 }
+(( $+functions[_zkstack__help__chain__deploy-timestamp-asserter_commands] )) ||
+_zkstack__help__chain__deploy-timestamp-asserter_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack help chain deploy-timestamp-asserter commands' commands "$@"
+}
 (( $+functions[_zkstack__help__chain__deploy-upgrader_commands] )) ||
 _zkstack__help__chain__deploy-upgrader_commands() {
     local commands; commands=()
@@ -4449,6 +4759,7 @@ _zkstack__help__consensus_commands() {
     local commands; commands=(
 'set-attester-committee:Sets the attester committee in the consensus registry contract to \`consensus.genesis_spec.attesters\` in general.yaml' \
 'get-attester-committee:Fetches the attester committee from the consensus registry contract' \
+'wait-for-registry:Wait until the consensus registry contract is deployed to L2' \
     )
     _describe -t commands 'zkstack help consensus commands' commands "$@"
 }
@@ -4462,6 +4773,11 @@ _zkstack__help__consensus__set-attester-committee_commands() {
     local commands; commands=()
     _describe -t commands 'zkstack help consensus set-attester-committee commands' commands "$@"
 }
+(( $+functions[_zkstack__help__consensus__wait-for-registry_commands] )) ||
+_zkstack__help__consensus__wait-for-registry_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack help consensus wait-for-registry commands' commands "$@"
+}
 (( $+functions[_zkstack__help__containers_commands] )) ||
 _zkstack__help__containers_commands() {
     local commands; commands=()
@@ -4470,11 +4786,18 @@ _zkstack__help__containers_commands() {
 (( $+functions[_zkstack__help__contract-verifier_commands] )) ||
 _zkstack__help__contract-verifier_commands() {
     local commands; commands=(
+'build:Build contract verifier binary' \
 'run:Run contract verifier' \
+'wait:Wait for contract verifier to start' \
 'init:Download required binaries for contract verifier' \
     )
     _describe -t commands 'zkstack help contract-verifier commands' commands "$@"
 }
+(( $+functions[_zkstack__help__contract-verifier__build_commands] )) ||
+_zkstack__help__contract-verifier__build_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack help contract-verifier build commands' commands "$@"
+}
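[Editor's note] The paired `(( $+functions[...] )) || ..._commands()` stubs repeated through this file are machine-generated, so changes like the ones above are normally produced by regenerating the script rather than editing it by hand. A hedged sketch of how such a script could be emitted with the clap_complete crate (the empty `Cli` type is a stand-in for the real top-level parser, which this patch does not show):

use clap::{CommandFactory, Parser};
use clap_complete::{generate, shells::Zsh};
use std::io;

#[derive(Parser)]
#[command(name = "zkstack")]
struct Cli {} // stand-in; the real CLI defines all subcommands

fn main() {
    let mut cmd = Cli::command();
    // Writes the zsh completion script for `zkstack` to stdout.
    generate(Zsh, &mut cmd, "zkstack", &mut io::stdout());
}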
 (( $+functions[_zkstack__help__contract-verifier__init_commands] )) ||
 _zkstack__help__contract-verifier__init_commands() {
     local commands; commands=()
@@ -4485,6 +4808,11 @@ _zkstack__help__contract-verifier__run_commands() {
     local commands; commands=()
     _describe -t commands 'zkstack help contract-verifier run commands' commands "$@"
 }
+(( $+functions[_zkstack__help__contract-verifier__wait_commands] )) ||
+_zkstack__help__contract-verifier__wait_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack help contract-verifier wait commands' commands "$@"
+}
 (( $+functions[_zkstack__help__dev_commands] )) ||
 _zkstack__help__dev_commands() {
     local commands; commands=(
@@ -4809,10 +5137,17 @@ _zkstack__help__external-node_commands() {
     local commands; commands=(
 'configs:Prepare configs for EN' \
 'init:Init databases' \
+'build:Build external node' \
 'run:Run external node' \
+'wait:Wait for external node to start' \
     )
     _describe -t commands 'zkstack help external-node commands' commands "$@"
 }
+(( $+functions[_zkstack__help__external-node__build_commands] )) ||
+_zkstack__help__external-node__build_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack help external-node build commands' commands "$@"
+}
 (( $+functions[_zkstack__help__external-node__configs_commands] )) ||
 _zkstack__help__external-node__configs_commands() {
     local commands; commands=()
@@ -4828,6 +5163,11 @@ _zkstack__help__external-node__run_commands() {
     local commands; commands=()
     _describe -t commands 'zkstack help external-node run commands' commands "$@"
 }
+(( $+functions[_zkstack__help__external-node__wait_commands] )) ||
+_zkstack__help__external-node__wait_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack help external-node wait commands' commands "$@"
+}
 (( $+functions[_zkstack__help__help_commands] )) ||
 _zkstack__help__help_commands() {
     local commands; commands=()
@@ -4881,9 +5221,28 @@ _zkstack__help__prover__setup-keys_commands() {
 }
 (( $+functions[_zkstack__help__server_commands] )) ||
 _zkstack__help__server_commands() {
-    local commands; commands=()
+    local commands; commands=(
+'build:Builds server' \
+'run:Runs server' \
+'wait:Waits for server to start' \
+    )
     _describe -t commands 'zkstack help server commands' commands "$@"
 }
+(( $+functions[_zkstack__help__server__build_commands] )) ||
+_zkstack__help__server__build_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack help server build commands' commands "$@"
+}
+(( $+functions[_zkstack__help__server__run_commands] )) ||
+_zkstack__help__server__run_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack help server run commands' commands "$@"
+}
+(( $+functions[_zkstack__help__server__wait_commands] )) ||
+_zkstack__help__server__wait_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack help server wait commands' commands "$@"
+}
 (( $+functions[_zkstack__help__update_commands] )) ||
 _zkstack__help__update_commands() {
     local commands; commands=()
@@ -4980,9 +5339,59 @@ _zkstack__prover__setup-keys_commands() {
 }
 (( $+functions[_zkstack__server_commands] )) ||
 _zkstack__server_commands() {
-    local commands; commands=()
+    local commands; commands=(
+'build:Builds server' \
+'run:Runs server' \
+'wait:Waits for server to start' \
+'help:Print this message or the help of the given subcommand(s)' \
+    )
     _describe -t commands 'zkstack server commands' commands "$@"
 }
+(( $+functions[_zkstack__server__build_commands] )) ||
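[Editor's note] `consensus wait-for-registry` is described in this patch as "Wait until the consensus registry contract is deployed to L2", i.e. poll-until-predicate with the shared timeout/poll-interval flags. An illustrative, self-contained helper; `is_deployed` here is a hypothetical stand-in for checking that the registry address has non-empty code (eth_getCode):

use std::time::{Duration, Instant};

/// Repeatedly evaluates `is_deployed` until it returns true or `timeout` elapses.
fn wait_until(mut is_deployed: impl FnMut() -> bool, timeout: Duration, poll: Duration) -> bool {
    let deadline = Instant::now() + timeout;
    while Instant::now() < deadline {
        if is_deployed() {
            return true;
        }
        std::thread::sleep(poll);
    }
    false
}

fn main() {
    // Toy predicate that succeeds immediately; a real check would query the chain.
    assert!(wait_until(|| true, Duration::from_secs(30), Duration::from_millis(500)));
}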
+_zkstack__server__build_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack server build commands' commands "$@"
+}
+(( $+functions[_zkstack__server__help_commands] )) ||
+_zkstack__server__help_commands() {
+    local commands; commands=(
+'build:Builds server' \
+'run:Runs server' \
+'wait:Waits for server to start' \
+'help:Print this message or the help of the given subcommand(s)' \
+    )
+    _describe -t commands 'zkstack server help commands' commands "$@"
+}
+(( $+functions[_zkstack__server__help__build_commands] )) ||
+_zkstack__server__help__build_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack server help build commands' commands "$@"
+}
+(( $+functions[_zkstack__server__help__help_commands] )) ||
+_zkstack__server__help__help_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack server help help commands' commands "$@"
+}
+(( $+functions[_zkstack__server__help__run_commands] )) ||
+_zkstack__server__help__run_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack server help run commands' commands "$@"
+}
+(( $+functions[_zkstack__server__help__wait_commands] )) ||
+_zkstack__server__help__wait_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack server help wait commands' commands "$@"
+}
+(( $+functions[_zkstack__server__run_commands] )) ||
+_zkstack__server__run_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack server run commands' commands "$@"
+}
+(( $+functions[_zkstack__server__wait_commands] )) ||
+_zkstack__server__wait_commands() {
+    local commands; commands=()
+    _describe -t commands 'zkstack server wait commands' commands "$@"
+}
 (( $+functions[_zkstack__update_commands] )) ||
 _zkstack__update_commands() {
     local commands; commands=()
diff --git a/zkstack_cli/crates/zkstack/completion/zkstack.fish b/zkstack_cli/crates/zkstack/completion/zkstack.fish
index 7ad4e6959f90..8a5b338fcda2 100644
--- a/zkstack_cli/crates/zkstack/completion/zkstack.fish
+++ b/zkstack_cli/crates/zkstack/completion/zkstack.fish
@@ -129,24 +129,25 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_se
 complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from help" -f -a "change-default-chain" -d 'Change the default chain'
 complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from help" -f -a "setup-observability" -d 'Setup observability for the ecosystem, downloading Grafana dashboards from the era-observability repo'
 complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)'
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -l chain -d 'Chain to use' -r
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -s v -l verbose -d 'Verbose mode'
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -l ignore-prerequisites -d 'Ignores prerequisites checks'
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -s h -l help -d 'Print help'
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "create" -d 'Create a new chain, setting the necessary configurations for later initialization'
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "build-transactions" -d 'Create unsigned transactions for chain deployment'
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "init" -d 'Initialize chain, deploying necessary contracts and performing on-chain operations'
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "genesis" -d 'Run server genesis'
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "register-chain" -d 'Register a new chain on L1 (executed by L1 governor). This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. Note: After completion, L2 governor can accept ownership by running `accept-chain-ownership`'
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-l2-contracts" -d 'Deploy all L2 contracts (executed by L1 governor)'
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "accept-chain-ownership" -d 'Accept ownership of L2 chain (executed by L2 governor). This command should be run after `register-chain` to accept ownership of newly created DiamondProxy contract'
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "initialize-bridges" -d 'Initialize bridges on L2'
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-consensus-registry" -d 'Deploy L2 consensus registry'
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-multicall3" -d 'Deploy L2 multicall3'
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-upgrader" -d 'Deploy Default Upgrader'
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-paymaster" -d 'Deploy paymaster smart contract'
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "update-token-multiplier-setter" -d 'Update Token Multiplier Setter address on L1'
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -l chain -d 'Chain to use' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -s v -l verbose -d 'Verbose mode'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -l ignore-prerequisites -d 'Ignores prerequisites checks'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -s h -l help -d 'Print help'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "create" -d 'Create a new chain, setting the necessary configurations for later initialization'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "build-transactions" -d 'Create unsigned transactions for chain deployment'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "init" -d 'Initialize chain, deploying necessary contracts and performing on-chain operations'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "genesis" -d 'Run server genesis'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "register-chain" -d 'Register a new chain on L1 (executed by L1 governor). This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. Note: After completion, L2 governor can accept ownership by running `accept-chain-ownership`'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-l2-contracts" -d 'Deploy all L2 contracts (executed by L1 governor)'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "accept-chain-ownership" -d 'Accept ownership of L2 chain (executed by L2 governor). This command should be run after `register-chain` to accept ownership of newly created DiamondProxy contract'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "initialize-bridges" -d 'Initialize bridges on L2'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-consensus-registry" -d 'Deploy L2 consensus registry'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-multicall3" -d 'Deploy L2 multicall3'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-timestamp-asserter" -d 'Deploy L2 TimestampAsserter'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-upgrader" -d 'Deploy Default Upgrader'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "deploy-paymaster" -d 'Deploy paymaster smart contract'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "update-token-multiplier-setter" -d 'Update Token Multiplier Setter address on L1'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)'
 complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l chain-name -r
 complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l chain-id -d 'Chain ID' -r
 complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l prover-mode -d 'Prover options' -r -f -a "{no-proofs\t'',gpu\t''}"
@@ -265,6 +266,16 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_s
 complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -s v -l verbose -d 'Verbose mode'
 complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -l ignore-prerequisites -d 'Ignores prerequisites checks'
 complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -s h -l help -d 'Print help (see more with \'--help\')'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}"
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}"
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l verifier-url -d 'Verifier URL, if using a custom provider' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l verifier-api-key -d 'Verifier API key' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l chain -d 'Chain to use' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l resume
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -s v -l verbose -d 'Verbose mode'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l ignore-prerequisites -d 'Ignores prerequisites checks'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -s h -l help -d 'Print help (see more with \'--help\')'
 complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}"
 complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}"
 complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l verifier-url -d 'Verifier URL, if using a custom provider' -r
@@ -305,6 +316,7 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_s
 complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "initialize-bridges" -d 'Initialize bridges on L2'
 complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-consensus-registry" -d 'Deploy L2 consensus registry'
 complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-multicall3" -d 'Deploy L2 multicall3'
+complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-timestamp-asserter" -d 'Deploy L2 TimestampAsserter'
 complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-upgrader" -d 'Deploy Default Upgrader'
 complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-paymaster" -d 'Deploy paymaster smart contract'
 complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "update-token-multiplier-setter" -d 'Update Token Multiplier Setter address on L1'
@@ -394,7 +406,6 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_sub
 complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l l1-contracts -d 'Build L1 contracts' -r -f -a "{true\t'',false\t''}"
 complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l l2-contracts -d 'Build L2 contracts' -r -f -a "{true\t'',false\t''}"
 complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l system-contracts -d 'Build system contracts' -r -f -a "{true\t'',false\t''}"
-complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l test-contracts -d 'Build test contracts' -r -f -a "{true\t'',false\t''}"
 complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l chain -d 'Chain to use' -r
 complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -s v -l verbose -d 'Verbose mode'
 complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l ignore-prerequisites -d 'Ignores prerequisites checks'
@@ -488,8 +499,9 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_
 complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l round -r -f -a "{all-rounds\t'',basic-circuits\t'',leaf-aggregation\t'',node-aggregation\t'',recursion-tip\t'',scheduler\t''}"
 complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l threads -r
 complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l max-allocation -d 'Memory allocation limit in bytes (for prover component)' -r
-complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l witness-vector-generator-count -r
-complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l max-allocation -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -s l -l light-wvg-count -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -s h -l heavy-wvg-count -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -s m -l max-allocation -r
 complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l docker -r -f -a "{true\t'',false\t''}"
 complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l tag -r
 complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l chain -d 'Chain to use' -r
@@ -513,23 +525,50 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_
 complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from help" -f -a "init-bellman-cuda" -d 'Initialize bellman-cuda'
 complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from help" -f -a "compressor-keys" -d 'Download compressor keys'
 complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)'
-complete -c zkstack -n "__fish_zkstack_using_subcommand server" -l components -d 'Components of server to run' -r
-complete -c zkstack -n "__fish_zkstack_using_subcommand server" -s a -l additional-args -d 'Additional arguments that can be passed through the CLI' -r
-complete -c zkstack -n "__fish_zkstack_using_subcommand server" -l chain -d 'Chain to use' -r
-complete -c zkstack -n "__fish_zkstack_using_subcommand server" -l genesis -d 'Run server in genesis mode'
-complete -c zkstack -n "__fish_zkstack_using_subcommand server" -l build -d 'Build server but don\'t run it'
-complete -c zkstack -n "__fish_zkstack_using_subcommand server" -l uring -d 'Enables uring support for RocksDB'
-complete -c zkstack -n "__fish_zkstack_using_subcommand server" -s v -l verbose -d 'Verbose mode'
-complete -c zkstack -n "__fish_zkstack_using_subcommand server" -l ignore-prerequisites -d 'Ignores prerequisites checks'
-complete -c zkstack -n "__fish_zkstack_using_subcommand server" -s h -l help -d 'Print help'
-complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init run help" -l chain -d 'Chain to use' -r
-complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init run help" -s v -l verbose -d 'Verbose mode'
-complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init run help" -l ignore-prerequisites -d 'Ignores prerequisites checks'
-complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init run help" -s h -l help -d 'Print help'
-complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init run help" -f -a "configs" -d 'Prepare configs for EN'
-complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init run help" -f -a "init" -d 'Init databases'
-complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init run help" -f -a "run" -d 'Run external node'
-complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init run help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -l components -d 'Components of server to run' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -s a -l additional-args -d 'Additional arguments that can be passed through the CLI' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -l chain -d 'Chain to use' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -l genesis -d 'Run server in genesis mode'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -l uring -d 'Enables uring support for RocksDB'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -s v -l verbose -d 'Verbose mode'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -l ignore-prerequisites -d 'Ignores prerequisites checks'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -s h -l help -d 'Print help'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -f -a "build" -d 'Builds server'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -f -a "run" -d 'Runs server'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -f -a "wait" -d 'Waits for server to start'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and not __fish_seen_subcommand_from build run wait help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from build" -l chain -d 'Chain to use' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from build" -s v -l verbose -d 'Verbose mode'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from build" -l ignore-prerequisites -d 'Ignores prerequisites checks'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from build" -s h -l help -d 'Print help'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from run" -l components -d 'Components of server to run' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from run" -s a -l additional-args -d 'Additional arguments that can be passed through the CLI' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from run" -l chain -d 'Chain to use' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from run" -l genesis -d 'Run server in genesis mode'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from run" -l uring -d 'Enables uring support for RocksDB'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from run" -s v -l verbose -d 'Verbose mode'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from run" -l ignore-prerequisites -d 'Ignores prerequisites checks'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from run" -s h -l help -d 'Print help'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from wait" -s t -l timeout -d 'Wait timeout in seconds' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from wait" -l poll-interval -d 'Poll interval in milliseconds' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from wait" -l chain -d 'Chain to use' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from wait" -s v -l verbose -d 'Verbose mode'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from wait" -l ignore-prerequisites -d 'Ignores prerequisites checks'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from wait" -s h -l help -d 'Print help'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from help" -f -a "build" -d 'Builds server'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from help" -f -a "run" -d 'Runs server'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from help" -f -a "wait" -d 'Waits for server to start'
+complete -c zkstack -n "__fish_zkstack_using_subcommand server; and __fish_seen_subcommand_from help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)'
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -l chain -d 'Chain to use' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -s v -l verbose -d 'Verbose mode'
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -l ignore-prerequisites -d 'Ignores prerequisites checks'
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -s h -l help -d 'Print help'
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -f -a "configs" -d 'Prepare configs for EN'
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -f -a "init" -d 'Init databases'
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -f -a "build" -d 'Build external node'
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -f -a "run" -d 'Run external node'
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -f -a "wait" -d 'Wait for external node to start'
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and not __fish_seen_subcommand_from configs init build run wait help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)'
 complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from configs" -l db-url -r
 complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from configs" -l db-name -r
 complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from configs" -l l1-rpc-url -r
@@ -542,6 +581,10 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fis
 complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from init" -s v -l verbose -d 'Verbose mode'
 complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from init" -l ignore-prerequisites -d 'Ignores prerequisites checks'
 complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from init" -s h -l help -d 'Print help'
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from build" -l chain -d 'Chain to use' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from build" -s v -l verbose -d 'Verbose mode'
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from build" -l ignore-prerequisites -d 'Ignores prerequisites checks'
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from build" -s h -l help -d 'Print help'
 complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -l components -d 'Components of server to run' -r
 complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -l enable-consensus -d 'Enable consensus' -r -f -a "{true\t'',false\t''}"
 complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -s a -l additional-args -d 'Additional arguments that can be passed through the CLI' -r
@@ -550,26 +593,46 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fis
 complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -s v -l verbose -d 'Verbose mode'
 complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -l ignore-prerequisites -d 'Ignores prerequisites checks'
 complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -s h -l help -d 'Print help'
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from wait" -s t -l timeout -d 'Wait timeout in seconds' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from wait" -l poll-interval -d 'Poll interval in milliseconds' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from wait" -l chain -d 'Chain to use' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from wait" -s v -l verbose -d 'Verbose mode'
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from wait" -l ignore-prerequisites -d 'Ignores prerequisites checks'
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from wait" -s h -l help -d 'Print help'
 complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from help" -f -a "configs" -d 'Prepare configs for EN'
 complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from help" -f -a "init" -d 'Init databases'
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from help" -f -a "build" -d 'Build external node'
 complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from help" -f -a "run" -d 'Run external node'
+complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from help" -f -a "wait" -d 'Wait for external node to start'
 complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)'
 complete -c zkstack -n "__fish_zkstack_using_subcommand containers" -s o -l observability -d 'Enable Grafana' -r -f -a "{true\t'',false\t''}"
 complete -c zkstack -n "__fish_zkstack_using_subcommand containers" -l chain -d 'Chain to use' -r
 complete -c zkstack -n "__fish_zkstack_using_subcommand containers" -s v -l verbose -d 'Verbose mode'
 complete -c zkstack -n "__fish_zkstack_using_subcommand containers" -l ignore-prerequisites -d 'Ignores prerequisites checks'
 complete -c zkstack -n "__fish_zkstack_using_subcommand containers" -s h -l help -d 'Print help'
-complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from run init help" -l chain -d 'Chain to use' -r
-complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from run init help" -s v -l verbose -d 'Verbose mode'
-complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from run init help" -l ignore-prerequisites -d 'Ignores prerequisites checks'
-complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from run init help" -s h -l help -d 'Print help'
-complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from run init help" -f -a "run" -d 'Run contract verifier'
-complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from run init help" -f -a "init" -d 'Download required binaries for contract verifier'
-complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from run init help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)'
+complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -l chain -d 'Chain to use' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -s v -l verbose -d 'Verbose mode'
+complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -l ignore-prerequisites -d 'Ignores prerequisites checks'
+complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -s h -l help -d 'Print help'
+complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -f -a "build" -d 'Build contract verifier binary'
+complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -f -a "run" -d 'Run contract verifier'
+complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -f -a "wait" -d 'Wait for contract verifier to start'
+complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -f -a "init" -d 'Download required binaries for contract verifier'
+complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and not __fish_seen_subcommand_from build run wait init help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)'
+complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from build" -l chain -d 'Chain to use' -r
+complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from build" -s v -l verbose -d 'Verbose mode'
+complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from build" -l ignore-prerequisites -d 'Ignores prerequisites checks'
+complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from build" -s h -l help -d 'Print help'
 complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from run" -l chain -d 'Chain to use' -r
 complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from run" -s v -l verbose -d 'Verbose mode'
 complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from run" -l ignore-prerequisites -d 'Ignores prerequisites checks'
 complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from run" -s h -l help -d 'Print help'
+complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and 
__fish_seen_subcommand_from wait" -s t -l timeout -d 'Wait timeout in seconds' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from wait" -l poll-interval -d 'Poll interval in milliseconds' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from wait" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from wait" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from wait" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from wait" -s h -l help -d 'Print help' complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from init" -l zksolc-version -d 'Version of zksolc to install' -r complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from init" -l zkvyper-version -d 'Version of zkvyper to install' -r complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from init" -l solc-version -d 'Version of solc to install' -r @@ -580,7 +643,9 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and _ complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from init" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from init" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from init" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from help" -f -a "build" -d 'Build contract verifier binary' complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from help" -f -a "run" -d 'Run contract verifier' +complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from help" -f -a "wait" -d 'Wait for contract verifier to start' complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from help" -f -a "init" -d 'Download required binaries for contract verifier' complete -c zkstack -n "__fish_zkstack_using_subcommand contract-verifier; and __fish_seen_subcommand_from help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' complete -c zkstack -n "__fish_zkstack_using_subcommand portal" -l chain -d 'Chain to use' -r @@ -611,13 +676,14 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand explorer; and __fish_see complete -c zkstack -n "__fish_zkstack_using_subcommand explorer; and __fish_seen_subcommand_from help" -f -a "run-backend" -d 'Start explorer backend services (api, data_fetcher, worker) for a given chain. 
Uses default chain, unless --chain is passed' complete -c zkstack -n "__fish_zkstack_using_subcommand explorer; and __fish_seen_subcommand_from help" -f -a "run" -d 'Run explorer app' complete -c zkstack -n "__fish_zkstack_using_subcommand explorer; and __fish_seen_subcommand_from help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' -complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee help" -l chain -d 'Chain to use' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee help" -s v -l verbose -d 'Verbose mode' -complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee help" -l ignore-prerequisites -d 'Ignores prerequisites checks' -complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee help" -s h -l help -d 'Print help' -complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee help" -f -a "set-attester-committee" -d 'Sets the attester committee in the consensus registry contract to `consensus.genesis_spec.attesters` in general.yaml' -complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee help" -f -a "get-attester-committee" -d 'Fetches the attester committee from the consensus registry contract' -complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee wait-for-registry help" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee wait-for-registry help" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee wait-for-registry help" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee wait-for-registry help" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee wait-for-registry help" -f -a "set-attester-committee" -d 'Sets the attester committee in the consensus registry contract to `consensus.genesis_spec.attesters` in general.yaml' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee wait-for-registry help" -f -a "get-attester-committee" -d 'Fetches the attester committee from the consensus registry contract' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee 
wait-for-registry help" -f -a "wait-for-registry" -d 'Wait until the consensus registry contract is deployed to L2' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and not __fish_seen_subcommand_from set-attester-committee get-attester-committee wait-for-registry help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from set-attester-committee" -l from-file -d 'Sets the attester committee in the consensus registry contract to the committee in the yaml file. File format is definied in `commands/consensus/proto/mod.proto`' -r -F complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from set-attester-committee" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from set-attester-committee" -l from-genesis -d 'Sets the attester committee in the consensus registry contract to `consensus.genesis_spec.attesters` in general.yaml' @@ -628,8 +694,15 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_se complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from get-attester-committee" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from get-attester-committee" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from get-attester-committee" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from wait-for-registry" -s t -l timeout -d 'Wait timeout in seconds' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from wait-for-registry" -l poll-interval -d 'Poll interval in milliseconds' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from wait-for-registry" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from wait-for-registry" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from wait-for-registry" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from wait-for-registry" -s h -l help -d 'Print help' complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from help" -f -a "set-attester-committee" -d 'Sets the attester committee in the consensus registry contract to `consensus.genesis_spec.attesters` in general.yaml' complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from help" -f -a "get-attester-committee" -d 'Fetches the attester committee from the consensus registry contract' +complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from help" -f -a "wait-for-registry" -d 'Wait until the consensus registry contract is deployed to L2' complete -c zkstack -n "__fish_zkstack_using_subcommand consensus; and __fish_seen_subcommand_from help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' complete -c zkstack -n "__fish_zkstack_using_subcommand 
update" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand update" -s c -l only-config -d 'Update only the config files' @@ -670,6 +743,7 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_su complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "initialize-bridges" -d 'Initialize bridges on L2' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-consensus-registry" -d 'Deploy L2 consensus registry' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-multicall3" -d 'Deploy L2 multicall3' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-timestamp-asserter" -d 'Deploy L2 TimestampAsserter' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-upgrader" -d 'Deploy Default Upgrader' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-paymaster" -d 'Deploy paymaster smart contract' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "update-token-multiplier-setter" -d 'Update Token Multiplier Setter address on L1' @@ -690,13 +764,21 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_su complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from prover" -f -a "run" -d 'Run prover' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from prover" -f -a "init-bellman-cuda" -d 'Initialize bellman-cuda' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from prover" -f -a "compressor-keys" -d 'Download compressor keys' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from server" -f -a "build" -d 'Builds server' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from server" -f -a "run" -d 'Runs server' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from server" -f -a "wait" -d 'Waits for server to start' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from external-node" -f -a "configs" -d 'Prepare configs for EN' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from external-node" -f -a "init" -d 'Init databases' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from external-node" -f -a "build" -d 'Build external node' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from external-node" -f -a "run" -d 'Run external node' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from external-node" -f -a "wait" -d 'Wait for external node to start' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from contract-verifier" -f -a "build" -d 'Build contract verifier binary' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from contract-verifier" -f -a "run" -d 'Run contract verifier' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from contract-verifier" -f 
-a "wait" -d 'Wait for contract verifier to start' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from contract-verifier" -f -a "init" -d 'Download required binaries for contract verifier' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from explorer" -f -a "init" -d 'Initialize explorer (create database to store explorer data and generate docker compose file with explorer services). Runs for all chains, unless --chain is passed' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from explorer" -f -a "run-backend" -d 'Start explorer backend services (api, data_fetcher, worker) for a given chain. Uses default chain, unless --chain is passed' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from explorer" -f -a "run" -d 'Run explorer app' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from consensus" -f -a "set-attester-committee" -d 'Sets the attester committee in the consensus registry contract to `consensus.genesis_spec.attesters` in general.yaml' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from consensus" -f -a "get-attester-committee" -d 'Fetches the attester committee from the consensus registry contract' +complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from consensus" -f -a "wait-for-registry" -d 'Wait until the consensus registry contract is deployed to L2' diff --git a/zkstack_cli/crates/zkstack/completion/zkstack.sh b/zkstack_cli/crates/zkstack/completion/zkstack.sh index ff351ebd79ed..bb373c3f63eb 100644 --- a/zkstack_cli/crates/zkstack/completion/zkstack.sh +++ b/zkstack_cli/crates/zkstack/completion/zkstack.sh @@ -78,6 +78,9 @@ _zkstack() { zkstack__chain,deploy-paymaster) cmd="zkstack__chain__deploy__paymaster" ;; + zkstack__chain,deploy-timestamp-asserter) + cmd="zkstack__chain__deploy__timestamp__asserter" + ;; zkstack__chain,deploy-upgrader) cmd="zkstack__chain__deploy__upgrader" ;; @@ -138,6 +141,9 @@ _zkstack() { zkstack__chain__help,deploy-paymaster) cmd="zkstack__chain__help__deploy__paymaster" ;; + zkstack__chain__help,deploy-timestamp-asserter) + cmd="zkstack__chain__help__deploy__timestamp__asserter" + ;; zkstack__chain__help,deploy-upgrader) cmd="zkstack__chain__help__deploy__upgrader" ;; @@ -189,6 +195,9 @@ _zkstack() { zkstack__consensus,set-attester-committee) cmd="zkstack__consensus__set__attester__committee" ;; + zkstack__consensus,wait-for-registry) + cmd="zkstack__consensus__wait__for__registry" + ;; zkstack__consensus__help,get-attester-committee) cmd="zkstack__consensus__help__get__attester__committee" ;; @@ -198,6 +207,12 @@ _zkstack() { zkstack__consensus__help,set-attester-committee) cmd="zkstack__consensus__help__set__attester__committee" ;; + zkstack__consensus__help,wait-for-registry) + cmd="zkstack__consensus__help__wait__for__registry" + ;; + zkstack__contract__verifier,build) + cmd="zkstack__contract__verifier__build" + ;; zkstack__contract__verifier,help) cmd="zkstack__contract__verifier__help" ;; @@ -207,6 +222,12 @@ _zkstack() { zkstack__contract__verifier,run) cmd="zkstack__contract__verifier__run" ;; + zkstack__contract__verifier,wait) + cmd="zkstack__contract__verifier__wait" + ;; + zkstack__contract__verifier__help,build) + cmd="zkstack__contract__verifier__help__build" + ;; zkstack__contract__verifier__help,help) cmd="zkstack__contract__verifier__help__help" 
;; @@ -216,6 +237,9 @@ _zkstack() { zkstack__contract__verifier__help,run) cmd="zkstack__contract__verifier__help__run" ;; + zkstack__contract__verifier__help,wait) + cmd="zkstack__contract__verifier__help__wait" + ;; zkstack__dev,clean) cmd="zkstack__dev__clean" ;; @@ -657,6 +681,9 @@ _zkstack() { zkstack__explorer__help,run-backend) cmd="zkstack__explorer__help__run__backend" ;; + zkstack__external__node,build) + cmd="zkstack__external__node__build" + ;; zkstack__external__node,configs) cmd="zkstack__external__node__configs" ;; @@ -669,6 +696,12 @@ _zkstack() { zkstack__external__node,run) cmd="zkstack__external__node__run" ;; + zkstack__external__node,wait) + cmd="zkstack__external__node__wait" + ;; + zkstack__external__node__help,build) + cmd="zkstack__external__node__help__build" + ;; zkstack__external__node__help,configs) cmd="zkstack__external__node__help__configs" ;; @@ -681,6 +714,9 @@ _zkstack() { zkstack__external__node__help,run) cmd="zkstack__external__node__help__run" ;; + zkstack__external__node__help,wait) + cmd="zkstack__external__node__help__wait" + ;; zkstack__help,autocomplete) cmd="zkstack__help__autocomplete" ;; @@ -747,6 +783,9 @@ _zkstack() { zkstack__help__chain,deploy-paymaster) cmd="zkstack__help__chain__deploy__paymaster" ;; + zkstack__help__chain,deploy-timestamp-asserter) + cmd="zkstack__help__chain__deploy__timestamp__asserter" + ;; zkstack__help__chain,deploy-upgrader) cmd="zkstack__help__chain__deploy__upgrader" ;; @@ -780,12 +819,21 @@ _zkstack() { zkstack__help__consensus,set-attester-committee) cmd="zkstack__help__consensus__set__attester__committee" ;; + zkstack__help__consensus,wait-for-registry) + cmd="zkstack__help__consensus__wait__for__registry" + ;; + zkstack__help__contract__verifier,build) + cmd="zkstack__help__contract__verifier__build" + ;; zkstack__help__contract__verifier,init) cmd="zkstack__help__contract__verifier__init" ;; zkstack__help__contract__verifier,run) cmd="zkstack__help__contract__verifier__run" ;; + zkstack__help__contract__verifier,wait) + cmd="zkstack__help__contract__verifier__wait" + ;; zkstack__help__dev,clean) cmd="zkstack__help__dev__clean" ;; @@ -933,6 +981,9 @@ _zkstack() { zkstack__help__explorer,run-backend) cmd="zkstack__help__explorer__run__backend" ;; + zkstack__help__external__node,build) + cmd="zkstack__help__external__node__build" + ;; zkstack__help__external__node,configs) cmd="zkstack__help__external__node__configs" ;; @@ -942,6 +993,9 @@ _zkstack() { zkstack__help__external__node,run) cmd="zkstack__help__external__node__run" ;; + zkstack__help__external__node,wait) + cmd="zkstack__help__external__node__wait" + ;; zkstack__help__prover,compressor-keys) cmd="zkstack__help__prover__compressor__keys" ;; @@ -957,6 +1011,15 @@ _zkstack() { zkstack__help__prover,setup-keys) cmd="zkstack__help__prover__setup__keys" ;; + zkstack__help__server,build) + cmd="zkstack__help__server__build" + ;; + zkstack__help__server,run) + cmd="zkstack__help__server__run" + ;; + zkstack__help__server,wait) + cmd="zkstack__help__server__wait" + ;; zkstack__prover,compressor-keys) cmd="zkstack__prover__compressor__keys" ;; @@ -993,6 +1056,30 @@ _zkstack() { zkstack__prover__help,setup-keys) cmd="zkstack__prover__help__setup__keys" ;; + zkstack__server,build) + cmd="zkstack__server__build" + ;; + zkstack__server,help) + cmd="zkstack__server__help" + ;; + zkstack__server,run) + cmd="zkstack__server__run" + ;; + zkstack__server,wait) + cmd="zkstack__server__wait" + ;; + zkstack__server__help,build) + cmd="zkstack__server__help__build" + ;; 
+ zkstack__server__help,help) + cmd="zkstack__server__help__help" + ;; + zkstack__server__help,run) + cmd="zkstack__server__help__run" + ;; + zkstack__server__help,wait) + cmd="zkstack__server__help__wait" + ;; *) ;; esac @@ -1048,7 +1135,7 @@ _zkstack() { return 0 ;; zkstack__chain) - opts="-v -h --verbose --chain --ignore-prerequisites --help create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" + opts="-v -h --verbose --chain --ignore-prerequisites --help create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1402,6 +1489,48 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__chain__deploy__timestamp__asserter) + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --verify) + COMPREPLY=($(compgen -W "true false" -- "${cur}")) + return 0 + ;; + --verifier) + COMPREPLY=($(compgen -W "etherscan sourcify blockscout oklink" -- "${cur}")) + return 0 + ;; + --verifier-url) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --verifier-api-key) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --additional-args) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + -a) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__chain__deploy__upgrader) opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -1571,7 +1700,7 @@ _zkstack() { return 0 ;; zkstack__chain__help) - opts="create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter help" + opts="create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1682,6 +1811,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__chain__help__deploy__timestamp__asserter) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__chain__help__deploy__upgrader) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then @@ -2079,7 +2222,7 @@ _zkstack() { return 0 ;; 
zkstack__consensus) - opts="-v -h --verbose --chain --ignore-prerequisites --help set-attester-committee get-attester-committee help" + opts="-v -h --verbose --chain --ignore-prerequisites --help set-attester-committee get-attester-committee wait-for-registry help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -2115,7 +2258,7 @@ _zkstack() { return 0 ;; zkstack__consensus__help) - opts="set-attester-committee get-attester-committee help" + opts="set-attester-committee get-attester-committee wait-for-registry help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -2170,6 +2313,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__consensus__help__wait__for__registry) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__consensus__set__attester__committee) opts="-v -h --from-genesis --from-file --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -2192,6 +2349,36 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__consensus__wait__for__registry) + opts="-t -v -h --timeout --poll-interval --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --timeout) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + -t) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --poll-interval) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__containers) opts="-o -v -h --observability --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then @@ -2219,7 +2406,7 @@ _zkstack() { return 0 ;; zkstack__contract__verifier) - opts="-v -h --verbose --chain --ignore-prerequisites --help run init help" + opts="-v -h --verbose --chain --ignore-prerequisites --help build run wait init help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -2236,8 +2423,26 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__contract__verifier__build) + opts="-v -h --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__contract__verifier__help) - opts="run init help" + opts="build run wait init help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -2250,6 +2455,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__contract__verifier__help__build) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + 
esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__contract__verifier__help__help) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then @@ -2292,6 +2511,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__contract__verifier__help__wait) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__contract__verifier__init) opts="-v -h --zksolc-version --zkvyper-version --solc-version --era-vm-solc-version --vyper-version --only --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -2348,6 +2581,36 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__contract__verifier__wait) + opts="-t -v -h --timeout --poll-interval --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --timeout) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + -t) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --poll-interval) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__dev) opts="-v -h --verbose --chain --ignore-prerequisites --help database test clean snapshot lint fmt prover contracts config-writer send-transactions status generate-genesis help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then @@ -2535,7 +2798,7 @@ _zkstack() { return 0 ;; zkstack__dev__contracts) - opts="-v -h --l1-contracts --l2-contracts --system-contracts --test-contracts --verbose --chain --ignore-prerequisites --help" + opts="-v -h --l1-contracts --l2-contracts --system-contracts --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -2553,10 +2816,6 @@ _zkstack() { COMPREPLY=($(compgen -W "true false" -- "${cur}")) return 0 ;; - --test-contracts) - COMPREPLY=($(compgen -W "true false" -- "${cur}")) - return 0 - ;; --chain) COMPREPLY=($(compgen -f "${cur}")) return 0 @@ -5075,7 +5334,7 @@ _zkstack() { return 0 ;; zkstack__external__node) - opts="-v -h --verbose --chain --ignore-prerequisites --help configs init run help" + opts="-v -h --verbose --chain --ignore-prerequisites --help configs init build run wait help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -5092,6 +5351,24 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__external__node__build) + opts="-v -h --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__external__node__configs) opts="-u -v -h --db-url --db-name --l1-rpc-url --use-default --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -5123,7 +5400,7 @@ _zkstack() { return 0 
;; zkstack__external__node__help) - opts="configs init run help" + opts="configs init build run wait help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -5136,6 +5413,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__external__node__help__build) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__external__node__help__configs) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then @@ -5192,6 +5483,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__external__node__help__wait) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__external__node__init) opts="-v -h --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -5244,13 +5549,29 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; - zkstack__help) - opts="autocomplete ecosystem chain dev prover server external-node containers contract-verifier portal explorer consensus update markdown help" - if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + zkstack__external__node__wait) + opts="-t -v -h --timeout --poll-interval --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 fi case "${prev}" in + --timeout) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + -t) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --poll-interval) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; *) COMPREPLY=() ;; @@ -5258,7 +5579,21 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; - zkstack__help__autocomplete) + zkstack__help) + opts="autocomplete ecosystem chain dev prover server external-node containers contract-verifier portal explorer consensus update markdown help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__help__autocomplete) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) @@ -5273,7 +5608,7 @@ _zkstack() { return 0 ;; zkstack__help__chain) - opts="create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-upgrader deploy-paymaster update-token-multiplier-setter" + opts="create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -5384,6 +5719,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + 
zkstack__help__chain__deploy__timestamp__asserter) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__help__chain__deploy__upgrader) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then @@ -5511,7 +5860,7 @@ _zkstack() { return 0 ;; zkstack__help__consensus) - opts="set-attester-committee get-attester-committee" + opts="set-attester-committee get-attester-committee wait-for-registry" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -5552,6 +5901,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__help__consensus__wait__for__registry) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__help__containers) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -5567,7 +5930,7 @@ _zkstack() { return 0 ;; zkstack__help__contract__verifier) - opts="run init" + opts="build run wait init" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -5580,6 +5943,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__help__contract__verifier__build) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__help__contract__verifier__init) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then @@ -5608,6 +5985,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__help__contract__verifier__wait) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__help__dev) opts="database test clean snapshot lint fmt prover contracts config-writer send-transactions status generate-genesis" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -6337,7 +6728,7 @@ _zkstack() { return 0 ;; zkstack__help__external__node) - opts="configs init run" + opts="configs init build run wait" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -6350,6 +6741,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__help__external__node__build) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__help__external__node__configs) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then @@ -6392,6 +6797,20 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__help__external__node__wait) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) 
+ COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__help__help) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -6519,7 +6938,7 @@ _zkstack() { return 0 ;; zkstack__help__server) - opts="" + opts="build run wait" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -6532,6 +6951,48 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__help__server__build) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__help__server__run) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__help__server__wait) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__help__update) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then @@ -6873,7 +7334,7 @@ _zkstack() { return 0 ;; zkstack__prover__run) - opts="-v -h --component --round --threads --max-allocation --witness-vector-generator-count --max-allocation --docker --tag --verbose --chain --ignore-prerequisites --help" + opts="-l -h -m -v -h --component --round --threads --max-allocation --light-wvg-count --heavy-wvg-count --max-allocation --docker --tag --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -6895,7 +7356,19 @@ _zkstack() { COMPREPLY=($(compgen -f "${cur}")) return 0 ;; - --witness-vector-generator-count) + --light-wvg-count) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + -l) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --heavy-wvg-count) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + -h) COMPREPLY=($(compgen -f "${cur}")) return 0 ;; @@ -6903,6 +7376,10 @@ _zkstack() { COMPREPLY=($(compgen -f "${cur}")) return 0 ;; + -m) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; --docker) COMPREPLY=($(compgen -W "true false" -- "${cur}")) return 0 @@ -6949,7 +7426,7 @@ _zkstack() { return 0 ;; zkstack__server) - opts="-a -v -h --components --genesis --additional-args --build --uring --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --components --genesis --additional-args --uring --verbose --chain --ignore-prerequisites --help build run wait help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -6978,6 +7455,154 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; + zkstack__server__build) + opts="-v -h --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__server__help) + opts="build run wait help" + if [[ ${cur} == 
-* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__server__help__build) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__server__help__help) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__server__help__run) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__server__help__wait) + opts="" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__server__run) + opts="-a -v -h --components --genesis --additional-args --uring --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --components) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --additional-args) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + -a) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; + zkstack__server__wait) + opts="-t -v -h --timeout --poll-interval --verbose --chain --ignore-prerequisites --help" + if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + fi + case "${prev}" in + --timeout) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + -t) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --poll-interval) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + --chain) + COMPREPLY=($(compgen -f "${cur}")) + return 0 + ;; + *) + COMPREPLY=() + ;; + esac + COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) + return 0 + ;; zkstack__update) opts="-c -v -h --only-config --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then diff --git a/zkstack_cli/crates/zkstack/src/commands/args/mod.rs b/zkstack_cli/crates/zkstack/src/commands/args/mod.rs index 5fa83aadf51f..477f3a6ae9af 100644 --- a/zkstack_cli/crates/zkstack/src/commands/args/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/args/mod.rs @@ -1,9 +1,7 @@ -pub use autocomplete::*; -pub use containers::*; -pub use run_server::*; -pub use update::*; +pub use self::{autocomplete::*, containers::*, run_server::*, update::*, wait::*}; mod autocomplete; mod containers; mod run_server; mod update; +mod wait; diff --git a/zkstack_cli/crates/zkstack/src/commands/args/run_server.rs b/zkstack_cli/crates/zkstack/src/commands/args/run_server.rs index d090c0de03f9..40344c90ad05 100644 --- a/zkstack_cli/crates/zkstack/src/commands/args/run_server.rs +++ 
b/zkstack_cli/crates/zkstack/src/commands/args/run_server.rs @@ -1,22 +1,53 @@ -use clap::Parser; +use clap::{Parser, Subcommand}; use serde::{Deserialize, Serialize}; -use crate::messages::{ - MSG_SERVER_ADDITIONAL_ARGS_HELP, MSG_SERVER_BUILD_HELP, MSG_SERVER_COMPONENTS_HELP, - MSG_SERVER_GENESIS_HELP, MSG_SERVER_URING_HELP, +use crate::{ + commands::args::WaitArgs, + messages::{ + MSG_SERVER_ADDITIONAL_ARGS_HELP, MSG_SERVER_COMPONENTS_HELP, MSG_SERVER_GENESIS_HELP, + MSG_SERVER_URING_HELP, + }, }; +#[derive(Debug, Parser)] +#[command(args_conflicts_with_subcommands = true, flatten_help = true)] +pub struct ServerArgs { + #[command(subcommand)] + command: Option<ServerCommand>, + #[command(flatten)] + run: RunServerArgs, +} + +#[derive(Debug, Subcommand)] +pub enum ServerCommand { + /// Builds server + Build, + /// Runs server + Run(RunServerArgs), + /// Waits for server to start + Wait(WaitArgs), +} + +impl From<ServerArgs> for ServerCommand { + fn from(args: ServerArgs) -> Self { + args.command.unwrap_or(ServerCommand::Run(args.run)) + } +} + #[derive(Debug, Serialize, Deserialize, Parser)] pub struct RunServerArgs { - #[clap(long, help = MSG_SERVER_COMPONENTS_HELP)] + #[arg(long, help = MSG_SERVER_COMPONENTS_HELP)] pub components: Option<Vec<String>>, - #[clap(long, help = MSG_SERVER_GENESIS_HELP)] + #[arg(long, help = MSG_SERVER_GENESIS_HELP)] pub genesis: bool, - #[clap(long, short)] - #[arg(trailing_var_arg = true, allow_hyphen_values = true, hide = false, help = MSG_SERVER_ADDITIONAL_ARGS_HELP)] + #[arg( + long, short, + trailing_var_arg = true, + allow_hyphen_values = true, + hide = false, + help = MSG_SERVER_ADDITIONAL_ARGS_HELP + )] additional_args: Vec<String>, - #[clap(long, help = MSG_SERVER_BUILD_HELP)] - pub build: bool, - #[clap(help=MSG_SERVER_URING_HELP, long, default_missing_value = "true")] + #[clap(help = MSG_SERVER_URING_HELP, long, default_missing_value = "true")] pub uring: bool, } diff --git a/zkstack_cli/crates/zkstack/src/commands/args/wait.rs b/zkstack_cli/crates/zkstack/src/commands/args/wait.rs new file mode 100644 index 000000000000..a3a7e32ae8b4 --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/commands/args/wait.rs @@ -0,0 +1,130 @@ +use std::{fmt, future::Future, time::Duration}; + +use anyhow::Context as _; +use clap::Parser; +use common::logger; +use reqwest::StatusCode; +use serde::{Deserialize, Serialize}; +use tokio::time::MissedTickBehavior; + +use crate::messages::{ + msg_wait_connect_err, msg_wait_non_successful_response, msg_wait_not_healthy, + msg_wait_starting_polling, msg_wait_timeout, MSG_WAIT_POLL_INTERVAL_HELP, + MSG_WAIT_TIMEOUT_HELP, +}; + +#[derive(Debug, Clone, Copy)] +enum PolledComponent { + Prometheus, + HealthCheck, +} + +impl fmt::Display for PolledComponent { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + formatter.write_str(match self { + Self::Prometheus => "Prometheus", + Self::HealthCheck => "health check", + }) + } +} + +#[derive(Debug, Parser, Serialize, Deserialize)] +pub struct WaitArgs { + #[arg(long, short = 't', value_name = "SECONDS", help = MSG_WAIT_TIMEOUT_HELP)] + timeout: Option<u64>, + #[arg(long, value_name = "MILLIS", help = MSG_WAIT_POLL_INTERVAL_HELP, default_value_t = 100)] + poll_interval: u64, +} + +impl WaitArgs { + pub fn poll_interval(&self) -> Duration { + Duration::from_millis(self.poll_interval) + } + + pub async fn poll_prometheus(&self, port: u16, verbose: bool) -> anyhow::Result<()> { + let component = PolledComponent::Prometheus; + let url = format!("http://127.0.0.1:{port}/metrics"); + self.poll_with_timeout(component, 
self.poll_inner(component, &url, verbose)) .await } + + pub async fn poll_health_check(&self, port: u16, verbose: bool) -> anyhow::Result<()> { + let component = PolledComponent::HealthCheck; + let url = format!("http://127.0.0.1:{port}/health"); + self.poll_with_timeout(component, self.poll_inner(component, &url, verbose)) + .await + } + + pub async fn poll_with_timeout( + &self, + component: impl fmt::Display, + action: impl Future<Output = anyhow::Result<()>>, + ) -> anyhow::Result<()> { + match self.timeout { + None => action.await, + Some(timeout) => tokio::time::timeout(Duration::from_secs(timeout), action) + .await + .map_err(|_| anyhow::Error::msg(msg_wait_timeout(&component)))?, + } + } + + async fn poll_inner( + &self, + component: PolledComponent, + url: &str, + verbose: bool, + ) -> anyhow::Result<()> { + let poll_interval = Duration::from_millis(self.poll_interval); + let mut interval = tokio::time::interval(poll_interval); + interval.set_missed_tick_behavior(MissedTickBehavior::Skip); + + if verbose { + logger::debug(msg_wait_starting_polling(&component, url, poll_interval)); + } + + let client = reqwest::Client::builder() + .connect_timeout(poll_interval) + .build() + .context("failed to build reqwest::Client")?; + + loop { + interval.tick().await; + + let response = match client.get(url).send().await { + Ok(response) => response, + Err(err) if err.is_connect() || err.is_timeout() => { + continue; + } + Err(err) => { + return Err( + anyhow::Error::new(err).context(msg_wait_connect_err(&component, url)) + ) + } + }; + + match component { + PolledComponent::Prometheus => { + response + .error_for_status() + .with_context(|| msg_wait_non_successful_response(&component))?; + return Ok(()); + } + PolledComponent::HealthCheck => { + if response.status().is_success() { + return Ok(()); + } + + if response.status() == StatusCode::SERVICE_UNAVAILABLE { + if verbose { + logger::debug(msg_wait_not_healthy(url)); + } + } else { + response + .error_for_status() + .with_context(|| msg_wait_non_successful_response(&component))?; + } + } + } + } + } +} diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/args/create.rs b/zkstack_cli/crates/zkstack/src/commands/chain/args/create.rs index ae08d4712b34..ec37f9ba0304 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/args/create.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/args/create.rs @@ -1,22 +1,14 @@ -use std::{ - path::{Path, PathBuf}, - str::FromStr, -}; +use std::{path::PathBuf, str::FromStr}; use anyhow::{bail, Context}; use clap::{Parser, ValueEnum, ValueHint}; use common::{Prompt, PromptConfirm, PromptSelect}; -use config::{ - forge_interface::deploy_ecosystem::output::Erc20Token, traits::ReadConfigWithBasePath, - EcosystemConfig, -}; +use config::forge_interface::deploy_ecosystem::output::Erc20Token; use serde::{Deserialize, Serialize}; use slugify_rs::slugify; use strum::{Display, EnumIter, IntoEnumIterator}; use types::{BaseToken, L1BatchCommitmentMode, L1Network, ProverMode, WalletCreation}; -use xshell::Shell; use zksync_basic_types::H160; -use zksync_config::GenesisConfig; use crate::{ defaults::L2_CHAIN_ID, @@ -26,7 +18,7 @@ use crate::{ MSG_BASE_TOKEN_PRICE_DENOMINATOR_PROMPT, MSG_BASE_TOKEN_PRICE_NOMINATOR_HELP, MSG_BASE_TOKEN_PRICE_NOMINATOR_PROMPT, MSG_BASE_TOKEN_SELECTION_PROMPT, MSG_CHAIN_ID_HELP, MSG_CHAIN_ID_PROMPT, MSG_CHAIN_ID_VALIDATOR_ERR, MSG_CHAIN_NAME_PROMPT, - MSG_EVM_EMULATOR_HASH_MISSING_ERR, MSG_EVM_EMULATOR_HELP, MSG_EVM_EMULATOR_PROMPT, + MSG_EVM_EMULATOR_HELP, MSG_EVM_EMULATOR_PROMPT, 
MSG_L1_BATCH_COMMIT_DATA_GENERATOR_MODE_PROMPT, MSG_L1_COMMIT_DATA_GENERATOR_MODE_HELP, MSG_NUMBER_VALIDATOR_GREATHER_THAN_ZERO_ERR, MSG_NUMBER_VALIDATOR_NOT_ZERO_ERR, MSG_PROVER_MODE_HELP, MSG_PROVER_VERSION_PROMPT, MSG_SET_AS_DEFAULT_HELP, @@ -83,11 +75,10 @@ pub struct ChainCreateArgs { impl ChainCreateArgs { pub fn fill_values_with_prompt( self, - shell: &Shell, number_of_chains: u32, l1_network: &L1Network, possible_erc20: Vec<Erc20Token>, - link_to_code: &Path, + link_to_code: String, ) -> anyhow::Result<ChainCreateArgsFinal> { let mut chain_name = self .chain_name @@ -224,24 +215,11 @@ impl ChainCreateArgs { } }; - let default_genesis_config = GenesisConfig::read_with_base_path( - shell, - EcosystemConfig::default_configs_path(link_to_code), - ) - .context("failed reading genesis config")?; - let has_evm_emulation_support = default_genesis_config.evm_emulator_hash.is_some(); let evm_emulator = self.evm_emulator.unwrap_or_else(|| { - if !has_evm_emulation_support { - false - } else { - PromptConfirm::new(MSG_EVM_EMULATOR_PROMPT) - .default(false) - .ask() - } + PromptConfirm::new(MSG_EVM_EMULATOR_PROMPT) + .default(false) + .ask() }); - if !has_evm_emulation_support && evm_emulator { - bail!(MSG_EVM_EMULATOR_HASH_MISSING_ERR); - } let set_as_default = self.set_as_default.unwrap_or_else(|| { PromptConfirm::new(MSG_SET_AS_DEFAULT_PROMPT) @@ -260,6 +238,7 @@ impl ChainCreateArgs { set_as_default, legacy_bridge: self.legacy_bridge, evm_emulator, + link_to_code, }) } } @@ -276,6 +255,7 @@ pub struct ChainCreateArgsFinal { pub set_as_default: bool, pub legacy_bridge: bool, pub evm_emulator: bool, + pub link_to_code: String, } #[derive(Debug, Clone, EnumIter, Display, PartialEq, Eq)] diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/create.rs b/zkstack_cli/crates/zkstack/src/commands/chain/create.rs index bdf5711e3213..730c1df8d3f2 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/create.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/create.rs @@ -3,8 +3,9 @@ use std::cell::OnceCell; use anyhow::Context; use common::{logger, spinner::Spinner}; use config::{ - create_local_configs_dir, create_wallets, traits::SaveConfigWithBasePath, ChainConfig, - EcosystemConfig, + create_local_configs_dir, create_wallets, + traits::{ReadConfigWithBasePath, SaveConfigWithBasePath}, + ChainConfig, EcosystemConfig, GenesisConfig, }; use xshell::Shell; use zksync_basic_types::L2ChainId; @@ -13,8 +14,10 @@ use crate::{ commands::chain::args::create::{ChainCreateArgs, ChainCreateArgsFinal}, messages::{ MSG_ARGS_VALIDATOR_ERR, MSG_CHAIN_CREATED, MSG_CREATING_CHAIN, - MSG_CREATING_CHAIN_CONFIGURATIONS_SPINNER, MSG_SELECTED_CONFIG, + MSG_CREATING_CHAIN_CONFIGURATIONS_SPINNER, MSG_EVM_EMULATOR_HASH_MISSING_ERR, + MSG_SELECTED_CONFIG, }, + utils::link_to_code::resolve_link_to_code, }; pub fn run(args: ChainCreateArgs, shell: &Shell) -> anyhow::Result<()> { @@ -30,11 +33,10 @@ fn create( let tokens = ecosystem_config.get_erc20_tokens(); let args = args .fill_values_with_prompt( - shell, ecosystem_config.list_of_chains().len() as u32, &ecosystem_config.l1_network, tokens, - &ecosystem_config.link_to_code, + ecosystem_config.link_to_code.clone().display().to_string(), ) .context(MSG_ARGS_VALIDATOR_ERR)?; @@ -74,6 +76,15 @@ pub(crate) fn create_chain_inner( (L2ChainId::from(args.chain_id), None) }; let internal_id = ecosystem_config.list_of_chains().len() as u32; + let link_to_code = resolve_link_to_code(shell, chain_path.clone(), args.link_to_code.clone())?; + let default_genesis_config = GenesisConfig::read_with_base_path( +
shell, + EcosystemConfig::default_configs_path(&link_to_code), + )?; + let has_evm_emulation_support = default_genesis_config.evm_emulator_hash.is_some(); + if args.evm_emulator && !has_evm_emulation_support { + anyhow::bail!(MSG_EVM_EMULATOR_HASH_MISSING_ERR); + } let chain_config = ChainConfig { id: internal_id, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs index 091bef86d26d..31cfc7f83977 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs @@ -36,6 +36,7 @@ pub enum Deploy2ContractsOption { InitiailizeBridges, ConsensusRegistry, Multicall3, + TimestampAsserter, } pub async fn run( @@ -93,6 +94,16 @@ pub async fn run( ) .await?; } + Deploy2ContractsOption::TimestampAsserter => { + deploy_timestamp_asserter( + shell, + &chain_config, + &ecosystem_config, + &mut contracts, + args, + ) + .await?; + } Deploy2ContractsOption::InitiailizeBridges => { initialize_bridges( shell, @@ -213,6 +224,27 @@ pub async fn deploy_multicall3( .await } +pub async fn deploy_timestamp_asserter( + shell: &Shell, + chain_config: &ChainConfig, + ecosystem_config: &EcosystemConfig, + contracts_config: &mut ContractsConfig, + forge_args: ForgeScriptArgs, +) -> anyhow::Result<()> { + build_and_deploy( + shell, + chain_config, + ecosystem_config, + forge_args, + Some("runDeployTimestampAsserter"), + |shell, out| { + contracts_config + .set_timestamp_asserter_addr(&TimestampAsserterOutput::read(shell, out)?) + }, + ) + .await +} + pub async fn deploy_l2_contracts( shell: &Shell, chain_config: &ChainConfig, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs b/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs index c9a47616486d..82b8656154ab 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs @@ -56,6 +56,9 @@ pub enum ChainCommands { /// Deploy L2 multicall3 #[command(alias = "multicall3")] DeployMulticall3(ForgeScriptArgs), + /// Deploy L2 TimestampAsserter + #[command(alias = "timestamp-asserter")] + DeployTimestampAsserter(ForgeScriptArgs), /// Deploy Default Upgrader #[command(alias = "upgrader")] DeployUpgrader(ForgeScriptArgs), @@ -83,6 +86,9 @@ pub(crate) async fn run(shell: &Shell, args: ChainCommands) -> anyhow::Result<() ChainCommands::DeployMulticall3(args) => { deploy_l2_contracts::run(args, shell, Deploy2ContractsOption::Multicall3).await } + ChainCommands::DeployTimestampAsserter(args) => { + deploy_l2_contracts::run(args, shell, Deploy2ContractsOption::TimestampAsserter).await + } ChainCommands::DeployUpgrader(args) => { deploy_l2_contracts::run(args, shell, Deploy2ContractsOption::Upgrader).await } diff --git a/zkstack_cli/crates/zkstack/src/commands/consensus/mod.rs b/zkstack_cli/crates/zkstack/src/commands/consensus/mod.rs index 1855a5943dc7..7a998efedbf2 100644 --- a/zkstack_cli/crates/zkstack/src/commands/consensus/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/consensus/mod.rs @@ -3,22 +3,23 @@ use std::{borrow::Borrow, collections::HashMap, path::PathBuf, sync::Arc}; /// Consensus registry contract operations. /// Includes code duplicated from `zksync_node_consensus::registry::abi`. 
use anyhow::Context as _; -use common::{logger, wallets::Wallet}; +use common::{config::global_config, logger, wallets::Wallet}; use config::EcosystemConfig; use conv::*; use ethers::{ abi::Detokenize, contract::{FunctionCall, Multicall}, middleware::{Middleware, NonceManagerMiddleware, SignerMiddleware}, - providers::{Http, JsonRpcClient, PendingTransaction, Provider, RawCall as _}, + providers::{Http, JsonRpcClient, PendingTransaction, Provider, ProviderError, RawCall as _}, signers::{LocalWallet, Signer as _}, types::{Address, BlockId, H256}, }; +use tokio::time::MissedTickBehavior; use xshell::Shell; use zksync_consensus_crypto::ByteFmt; use zksync_consensus_roles::{attester, validator}; -use crate::{messages, utils::consensus::parse_attester_committee}; +use crate::{commands::args::WaitArgs, messages, utils::consensus::parse_attester_committee}; mod conv; mod proto; @@ -92,6 +93,8 @@ pub enum Command { SetAttesterCommittee(SetAttesterCommitteeCommand), /// Fetches the attester committee from the consensus registry contract. GetAttesterCommittee, + /// Wait until the consensus registry contract is deployed to L2. + WaitForRegistry(WaitArgs), } /// Collection of sent transactions. @@ -210,15 +213,18 @@ impl Setup { }) } + fn consensus_registry_addr(&self) -> anyhow::Result
<Address>
{ + self.contracts + .l2 + .consensus_registry + .context(messages::MSG_CONSENSUS_REGISTRY_ADDRESS_NOT_CONFIGURED) + } + fn consensus_registry<M: Middleware>( &self, m: Arc<M>, ) -> anyhow::Result<abi::ConsensusRegistry<M>> { - let addr = self - .contracts - .l2 - .consensus_registry - .context(messages::MSG_CONSENSUS_REGISTRY_ADDRESS_NOT_CONFIGURED)?; + let addr = self.consensus_registry_addr()?; Ok(abi::ConsensusRegistry::new(addr, m)) } @@ -276,6 +282,58 @@ impl Setup { parse_attester_committee(attesters).context("parse_attester_committee()") } + async fn wait_for_registry_contract_inner( + &self, + args: &WaitArgs, + verbose: bool, + ) -> anyhow::Result<()> { + let addr = self.consensus_registry_addr()?; + let provider = self.provider().context("provider()")?; + let mut interval = tokio::time::interval(args.poll_interval()); + interval.set_missed_tick_behavior(MissedTickBehavior::Skip); + + if verbose { + logger::debug(messages::msg_wait_consensus_registry_started_polling( + addr, + provider.url(), + )); + } + + loop { + interval.tick().await; + + let code = match provider.get_code(addr, None).await { + Ok(code) => code, + Err(ProviderError::HTTPError(err)) if err.is_connect() || err.is_timeout() => { + continue; + } + Err(err) => { + return Err(anyhow::Error::new(err) + .context(messages::MSG_CONSENSUS_REGISTRY_POLL_ERROR)) + } + }; + if !code.is_empty() { + logger::info(messages::msg_consensus_registry_wait_success( + addr, + code.len(), + )); + return Ok(()); + } + } + } + + async fn wait_for_registry_contract( + &self, + args: &WaitArgs, + verbose: bool, + ) -> anyhow::Result<()> { + args.poll_with_timeout( + messages::MSG_CONSENSUS_REGISTRY_WAIT_COMPONENT, + self.wait_for_registry_contract_inner(args, verbose), + ) + .await + } + async fn set_attester_committee(&self, want: &attester::Committee) -> anyhow::Result<()> { let provider = self.provider().context("provider()")?; let block_id = self.last_block(&provider).await.context("last_block()")?; @@ -410,6 +468,10 @@ impl Command { let got = setup.get_attester_committee().await?; print_attesters(&got); } + Self::WaitForRegistry(args) => { + let verbose = global_config().verbose; + setup.wait_for_registry_contract(&args, verbose).await?; + } } Ok(()) } diff --git a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/build.rs b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/build.rs new file mode 100644 index 000000000000..0ba72f6b2257 --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/build.rs @@ -0,0 +1,26 @@ +use anyhow::Context; +use common::{cmd::Cmd, logger}; +use config::EcosystemConfig; +use xshell::{cmd, Shell}; + +use crate::messages::{ + MSG_BUILDING_CONTRACT_VERIFIER, MSG_CHAIN_NOT_FOUND_ERR, + MSG_FAILED_TO_BUILD_CONTRACT_VERIFIER_ERR, +}; + +pub(crate) async fn build(shell: &Shell) -> anyhow::Result<()> { + let ecosystem = EcosystemConfig::from_file(shell)?; + let chain = ecosystem + .load_current_chain() + .context(MSG_CHAIN_NOT_FOUND_ERR)?; + let _dir_guard = shell.push_dir(&chain.link_to_code); + + logger::info(MSG_BUILDING_CONTRACT_VERIFIER); + + let mut cmd = Cmd::new(cmd!( + shell, + "cargo build --release --bin zksync_contract_verifier" + )); + cmd = cmd.with_force_run(); + cmd.run().context(MSG_FAILED_TO_BUILD_CONTRACT_VERIFIER_ERR) +} diff --git a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/mod.rs b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/mod.rs index 78bdc5fae7ec..e36e6ba62e7b 100644 --- a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/mod.rs +++
b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/mod.rs @@ -1,22 +1,32 @@ -use args::init::InitContractVerifierArgs; use clap::Subcommand; use xshell::Shell; -pub mod args; -pub mod init; -pub mod run; +use self::args::init::InitContractVerifierArgs; +use crate::commands::args::WaitArgs; + +mod args; +mod build; +mod init; +mod run; +mod wait; #[derive(Subcommand, Debug)] pub enum ContractVerifierCommands { + /// Build contract verifier binary + Build, /// Run contract verifier Run, + /// Wait for contract verifier to start + Wait(WaitArgs), /// Download required binaries for contract verifier Init(InitContractVerifierArgs), } pub(crate) async fn run(shell: &Shell, args: ContractVerifierCommands) -> anyhow::Result<()> { match args { + ContractVerifierCommands::Build => build::build(shell).await, ContractVerifierCommands::Run => run::run(shell).await, + ContractVerifierCommands::Wait(args) => wait::wait(shell, args).await, ContractVerifierCommands::Init(args) => init::run(shell, args).await, } } diff --git a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/run.rs b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/run.rs index 9913ec817e90..ebc33840bdea 100644 --- a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/run.rs +++ b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/run.rs @@ -22,7 +22,7 @@ pub(crate) async fn run(shell: &Shell) -> anyhow::Result<()> { let mut cmd = Cmd::new(cmd!( shell, - "cargo run --bin zksync_contract_verifier -- --config-path={config_path} --secrets-path={secrets_path}" + "cargo run --release --bin zksync_contract_verifier -- --config-path={config_path} --secrets-path={secrets_path}" )); cmd = cmd.with_force_run(); cmd.run().context(MSG_FAILED_TO_RUN_CONTRACT_VERIFIER_ERR) diff --git a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/wait.rs b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/wait.rs new file mode 100644 index 000000000000..011c888d3041 --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/wait.rs @@ -0,0 +1,27 @@ +use anyhow::Context as _; +use common::{config::global_config, logger}; +use config::EcosystemConfig; +use xshell::Shell; + +use crate::{commands::args::WaitArgs, messages::MSG_CHAIN_NOT_FOUND_ERR}; + +pub(crate) async fn wait(shell: &Shell, args: WaitArgs) -> anyhow::Result<()> { + let ecosystem = EcosystemConfig::from_file(shell)?; + let chain = ecosystem + .load_current_chain() + .context(MSG_CHAIN_NOT_FOUND_ERR)?; + let verbose = global_config().verbose; + + let prometheus_port = chain + .get_general_config()? + .contract_verifier + .as_ref() + .context("contract verifier config not specified")? 
+ .prometheus_port; + logger::info("Waiting for contract verifier to become alive"); + args.poll_prometheus(prometheus_port, verbose).await?; + logger::info(format!( + "Contract verifier is alive with Prometheus server bound to :{prometheus_port}" + )); + Ok(()) +} diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/contracts.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/contracts.rs index fbafaec09e6e..8e0384cbca99 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/contracts.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/contracts.rs @@ -2,9 +2,7 @@ use std::path::PathBuf; use clap::Parser; use common::{ - contracts::{ - build_l1_contracts, build_l2_contracts, build_system_contracts, build_test_contracts, - }, + contracts::{build_l1_contracts, build_l2_contracts, build_system_contracts}, logger, spinner::Spinner, }; @@ -14,8 +12,8 @@ use xshell::Shell; use crate::commands::dev::messages::{ MSG_BUILDING_CONTRACTS, MSG_BUILDING_CONTRACTS_SUCCESS, MSG_BUILDING_L1_CONTRACTS_SPINNER, MSG_BUILDING_L2_CONTRACTS_SPINNER, MSG_BUILDING_SYSTEM_CONTRACTS_SPINNER, - MSG_BUILDING_TEST_CONTRACTS_SPINNER, MSG_BUILD_L1_CONTRACTS_HELP, MSG_BUILD_L2_CONTRACTS_HELP, - MSG_BUILD_SYSTEM_CONTRACTS_HELP, MSG_BUILD_TEST_CONTRACTS_HELP, MSG_NOTHING_TO_BUILD_MSG, + MSG_BUILD_L1_CONTRACTS_HELP, MSG_BUILD_L2_CONTRACTS_HELP, MSG_BUILD_SYSTEM_CONTRACTS_HELP, + MSG_NOTHING_TO_BUILD_MSG, }; #[derive(Debug, Parser)] @@ -26,8 +24,6 @@ pub struct ContractsArgs { pub l2_contracts: Option<bool>, #[clap(long, alias = "sc", help = MSG_BUILD_SYSTEM_CONTRACTS_HELP, default_missing_value = "true", num_args = 0..=1)] pub system_contracts: Option<bool>, - #[clap(long, alias = "test", help = MSG_BUILD_TEST_CONTRACTS_HELP, default_missing_value = "true", num_args = 0..=1)] - pub test_contracts: Option<bool>, } impl ContractsArgs { @@ -35,18 +31,15 @@ impl ContractsArgs { if self.l1_contracts.is_none() && self.l2_contracts.is_none() && self.system_contracts.is_none() - && self.test_contracts.is_none() { return vec![ ContractType::L1, ContractType::L2, ContractType::SystemContracts, - ContractType::TestContracts, ]; } let mut contracts = vec![]; - if self.l1_contracts.unwrap_or(false) { contracts.push(ContractType::L1); } @@ -56,10 +49,6 @@ impl ContractsArgs { if self.system_contracts.unwrap_or(false) { contracts.push(ContractType::SystemContracts); } - if self.test_contracts.unwrap_or(false) { - contracts.push(ContractType::TestContracts); - } - contracts } } @@ -69,7 +58,6 @@ pub enum ContractType { L1, L2, SystemContracts, - TestContracts, } struct ContractBuilder { @@ -96,11 +84,6 @@ impl ContractBuilder { msg: MSG_BUILDING_SYSTEM_CONTRACTS_SPINNER.to_string(), link_to_code: ecosystem.link_to_code.clone(), }, - ContractType::TestContracts => Self { - cmd: Box::new(build_test_contracts), - msg: MSG_BUILDING_TEST_CONTRACTS_SPINNER.to_string(), - link_to_code: ecosystem.link_to_code.clone(), - }, } } diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/utils.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/utils.rs index bcd524bd2cb0..8435b437169d 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/utils.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/utils.rs @@ -16,9 +16,7 @@ use crate::commands::dev::messages::{ pub const TEST_WALLETS_PATH: &str = "etc/test_config/constant/eth.json"; const AMOUNT_FOR_DISTRIBUTION_TO_WALLETS: u128 = 1000000000000000000000; - pub const TS_INTEGRATION_PATH: &str = "core/tests/ts-integration";
-const CONTRACTS_TEST_DATA_PATH: &str = "etc/contracts-test-data"; #[derive(Deserialize)] pub struct TestWallets { @@ -90,9 +88,6 @@ pub fn build_contracts(shell: &Shell, ecosystem_config: &EcosystemConfig) -> any Cmd::new(cmd!(shell, "yarn build")).run()?; Cmd::new(cmd!(shell, "yarn build-yul")).run()?; - let _dir_guard = shell.push_dir(ecosystem_config.link_to_code.join(CONTRACTS_TEST_DATA_PATH)); - Cmd::new(cmd!(shell, "yarn build")).run()?; - spinner.finish(); Ok(()) } diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/messages.rs b/zkstack_cli/crates/zkstack/src/commands/dev/messages.rs index 3d31497b7ebc..235aa95ee492 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/messages.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/messages.rs @@ -111,12 +111,10 @@ pub(super) const MSG_BUILDING_CONTRACTS: &str = "Building contracts"; pub(super) const MSG_BUILDING_L2_CONTRACTS_SPINNER: &str = "Building L2 contracts.."; pub(super) const MSG_BUILDING_L1_CONTRACTS_SPINNER: &str = "Building L1 contracts.."; pub(super) const MSG_BUILDING_SYSTEM_CONTRACTS_SPINNER: &str = "Building system contracts.."; -pub(super) const MSG_BUILDING_TEST_CONTRACTS_SPINNER: &str = "Building test contracts.."; pub(super) const MSG_BUILDING_CONTRACTS_SUCCESS: &str = "Contracts built successfully"; pub(super) const MSG_BUILD_L1_CONTRACTS_HELP: &str = "Build L1 contracts"; pub(super) const MSG_BUILD_L2_CONTRACTS_HELP: &str = "Build L2 contracts"; pub(super) const MSG_BUILD_SYSTEM_CONTRACTS_HELP: &str = "Build system contracts"; -pub(super) const MSG_BUILD_TEST_CONTRACTS_HELP: &str = "Build test contracts"; // Integration tests related messages pub(super) fn msg_integration_tests_run(external_node: bool) -> String { diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/create.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/create.rs index 6b6c1236d363..53d9c27be60b 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/create.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/create.rs @@ -1,23 +1,20 @@ -use std::path::{Path, PathBuf}; +use std::path::PathBuf; -use anyhow::bail; use clap::{Parser, ValueHint}; -use common::{cmd::Cmd, logger, Prompt, PromptConfirm, PromptSelect}; +use common::{Prompt, PromptConfirm, PromptSelect}; use serde::{Deserialize, Serialize}; use slugify_rs::slugify; -use strum::{EnumIter, IntoEnumIterator}; +use strum::IntoEnumIterator; use types::{L1Network, WalletCreation}; -use xshell::{cmd, Shell}; +use xshell::Shell; use crate::{ commands::chain::{args::create::ChainCreateArgs, ChainCreateArgsFinal}, messages::{ - msg_path_to_zksync_does_not_exist_err, MSG_CONFIRM_STILL_USE_FOLDER, MSG_ECOSYSTEM_NAME_PROMPT, MSG_L1_NETWORK_HELP, MSG_L1_NETWORK_PROMPT, - MSG_LINK_TO_CODE_HELP, MSG_LINK_TO_CODE_PROMPT, MSG_LINK_TO_CODE_SELECTION_CLONE, - MSG_LINK_TO_CODE_SELECTION_PATH, MSG_NOT_MAIN_REPO_OR_FORK_ERR, - MSG_REPOSITORY_ORIGIN_PROMPT, MSG_START_CONTAINERS_HELP, MSG_START_CONTAINERS_PROMPT, + MSG_LINK_TO_CODE_HELP, MSG_START_CONTAINERS_HELP, MSG_START_CONTAINERS_PROMPT, }, + utils::link_to_code::get_link_to_code, }; #[derive(Debug, Serialize, Deserialize, Parser)] @@ -47,23 +44,7 @@ impl EcosystemCreateArgs { .unwrap_or_else(|| Prompt::new(MSG_ECOSYSTEM_NAME_PROMPT).ask()); ecosystem_name = slugify!(&ecosystem_name, separator = "_"); - let link_to_code = self.link_to_code.unwrap_or_else(|| { - let link_to_code_selection = - PromptSelect::new(MSG_REPOSITORY_ORIGIN_PROMPT, LinkToCodeSelection::iter()).ask(); - match 
link_to_code_selection { - LinkToCodeSelection::Clone => "".to_string(), - LinkToCodeSelection::Path => { - let mut path: String = Prompt::new(MSG_LINK_TO_CODE_PROMPT).ask(); - if let Err(err) = check_link_to_code(shell, &path) { - logger::warn(err); - if !PromptConfirm::new(MSG_CONFIRM_STILL_USE_FOLDER).ask() { - path = pick_new_link_to_code(shell); - } - } - path - } - } - }); + let link_to_code = self.link_to_code.unwrap_or_else(|| get_link_to_code(shell)); let l1_network = self .l1_network @@ -71,13 +52,9 @@ impl EcosystemCreateArgs { // Make the only chain as a default one self.chain.set_as_default = Some(true); - let chain = self.chain.fill_values_with_prompt( - shell, - 0, - &l1_network, - vec![], - Path::new(&link_to_code), - )?; + let chain = + self.chain + .fill_values_with_prompt(0, &l1_network, vec![], link_to_code.clone())?; let start_containers = self.start_containers.unwrap_or_else(|| { PromptConfirm::new(MSG_START_CONTAINERS_PROMPT) @@ -113,55 +90,3 @@ impl EcosystemCreateArgsFinal { self.chain_args.clone() } } - -#[derive(Debug, Clone, EnumIter, PartialEq, Eq)] -enum LinkToCodeSelection { - Clone, - Path, -} - -impl std::fmt::Display for LinkToCodeSelection { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - LinkToCodeSelection::Clone => write!(f, "{MSG_LINK_TO_CODE_SELECTION_CLONE}"), - LinkToCodeSelection::Path => write!(f, "{MSG_LINK_TO_CODE_SELECTION_PATH}"), - } - } -} - -fn check_link_to_code(shell: &Shell, path: &str) -> anyhow::Result<()> { - let path = Path::new(path); - if !shell.path_exists(path) { - bail!(msg_path_to_zksync_does_not_exist_err( - path.to_str().unwrap() - )); - } - - let _guard = shell.push_dir(path); - let out = String::from_utf8( - Cmd::new(cmd!(shell, "git remote -v")) - .run_with_output()? 
- .stdout, - )?; - - if !out.contains("matter-labs/zksync-era") { - bail!(MSG_NOT_MAIN_REPO_OR_FORK_ERR); - } - - Ok(()) -} - -fn pick_new_link_to_code(shell: &Shell) -> String { - let link_to_code: String = Prompt::new(MSG_LINK_TO_CODE_PROMPT).ask(); - match check_link_to_code(shell, &link_to_code) { - Ok(_) => link_to_code, - Err(err) => { - logger::warn(err); - if !PromptConfirm::new(MSG_CONFIRM_STILL_USE_FOLDER).ask() { - pick_new_link_to_code(shell) - } else { - link_to_code - } - } - } -} diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/create.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/create.rs index 356b5322980f..203c667ade65 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/create.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/create.rs @@ -1,11 +1,8 @@ -use std::{path::PathBuf, str::FromStr}; - use anyhow::{bail, Context}; -use common::{git, logger, spinner::Spinner}; +use common::{logger, spinner::Spinner}; use config::{ create_local_configs_dir, create_wallets, get_default_era_chain_id, traits::SaveConfigWithBasePath, EcosystemConfig, EcosystemConfigFromFileError, - ZKSYNC_ERA_GIT_REPO, }; use xshell::Shell; @@ -22,11 +19,12 @@ use crate::{ }, }, messages::{ - msg_created_ecosystem, MSG_ARGS_VALIDATOR_ERR, MSG_CLONING_ERA_REPO_SPINNER, - MSG_CREATING_DEFAULT_CHAIN_SPINNER, MSG_CREATING_ECOSYSTEM, - MSG_CREATING_INITIAL_CONFIGURATIONS_SPINNER, MSG_ECOSYSTEM_ALREADY_EXISTS_ERR, - MSG_ECOSYSTEM_CONFIG_INVALID_ERR, MSG_SELECTED_CONFIG, MSG_STARTING_CONTAINERS_SPINNER, + msg_created_ecosystem, MSG_ARGS_VALIDATOR_ERR, MSG_CREATING_DEFAULT_CHAIN_SPINNER, + MSG_CREATING_ECOSYSTEM, MSG_CREATING_INITIAL_CONFIGURATIONS_SPINNER, + MSG_ECOSYSTEM_ALREADY_EXISTS_ERR, MSG_ECOSYSTEM_CONFIG_INVALID_ERR, MSG_SELECTED_CONFIG, + MSG_STARTING_CONTAINERS_SPINNER, }, + utils::link_to_code::resolve_link_to_code, }; pub fn run(args: EcosystemCreateArgs, shell: &Shell) -> anyhow::Result<()> { @@ -55,21 +53,7 @@ fn create(args: EcosystemCreateArgs, shell: &Shell) -> anyhow::Result<()> { let configs_path = create_local_configs_dir(shell, ".")?; - let link_to_code = if args.link_to_code.is_empty() { - let spinner = Spinner::new(MSG_CLONING_ERA_REPO_SPINNER); - let link_to_code = git::clone( - shell, - shell.current_dir(), - ZKSYNC_ERA_GIT_REPO, - "zksync-era", - )?; - spinner.finish(); - link_to_code - } else { - let path = PathBuf::from_str(&args.link_to_code)?; - git::submodule_update(shell, path.clone())?; - path - }; + let link_to_code = resolve_link_to_code(shell, shell.current_dir(), args.link_to_code.clone())?; let spinner = Spinner::new(MSG_CREATING_INITIAL_CONFIGURATIONS_SPINNER); let chain_config = args.chain_config(); diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/build.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/build.rs new file mode 100644 index 000000000000..ff15c0c77f30 --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/build.rs @@ -0,0 +1,23 @@ +use anyhow::Context; +use common::{cmd::Cmd, logger}; +use config::EcosystemConfig; +use xshell::{cmd, Shell}; + +use crate::messages::{MSG_BUILDING_EN, MSG_CHAIN_NOT_FOUND_ERR, MSG_FAILED_TO_BUILD_EN_ERR}; + +pub(crate) async fn build(shell: &Shell) -> anyhow::Result<()> { + let ecosystem = EcosystemConfig::from_file(shell)?; + let chain = ecosystem + .load_current_chain() + .context(MSG_CHAIN_NOT_FOUND_ERR)?; + let _dir_guard = shell.push_dir(&chain.link_to_code); + + logger::info(MSG_BUILDING_EN); + + let mut cmd = Cmd::new(cmd!( + shell, 
+ "cargo build --release --bin zksync_external_node" + )); + cmd = cmd.with_force_run(); + cmd.run().context(MSG_FAILED_TO_BUILD_EN_ERR) +} diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/mod.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/mod.rs index 095566d24e87..7bd366d5871c 100644 --- a/zkstack_cli/crates/zkstack/src/commands/external_node/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/mod.rs @@ -1,12 +1,16 @@ -use args::{prepare_configs::PrepareConfigArgs, run::RunExternalNodeArgs}; use clap::Parser; use serde::{Deserialize, Serialize}; use xshell::Shell; +use self::args::{prepare_configs::PrepareConfigArgs, run::RunExternalNodeArgs}; +use crate::commands::args::WaitArgs; + mod args; +mod build; mod init; mod prepare_configs; mod run; +mod wait; #[derive(Debug, Serialize, Deserialize, Parser)] pub enum ExternalNodeCommands { @@ -14,14 +18,20 @@ pub enum ExternalNodeCommands { Configs(PrepareConfigArgs), /// Init databases Init, + /// Build external node + Build, /// Run external node Run(RunExternalNodeArgs), + /// Wait for external node to start + Wait(WaitArgs), } pub async fn run(shell: &Shell, commands: ExternalNodeCommands) -> anyhow::Result<()> { match commands { ExternalNodeCommands::Configs(args) => prepare_configs::run(shell, args), ExternalNodeCommands::Init => init::run(shell).await, + ExternalNodeCommands::Build => build::build(shell).await, ExternalNodeCommands::Run(args) => run::run(shell, args).await, + ExternalNodeCommands::Wait(args) => wait::wait(shell, args).await, } } diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/wait.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/wait.rs new file mode 100644 index 000000000000..72568c36f363 --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/wait.rs @@ -0,0 +1,35 @@ +use anyhow::Context as _; +use common::{config::global_config, logger}; +use config::{traits::ReadConfigWithBasePath, EcosystemConfig}; +use xshell::Shell; +use zksync_config::configs::GeneralConfig; + +use crate::{ + commands::args::WaitArgs, + messages::{msg_waiting_for_en_success, MSG_CHAIN_NOT_INITIALIZED, MSG_WAITING_FOR_EN}, +}; + +pub async fn wait(shell: &Shell, args: WaitArgs) -> anyhow::Result<()> { + let ecosystem_config = EcosystemConfig::from_file(shell)?; + let chain_config = ecosystem_config + .load_current_chain() + .context(MSG_CHAIN_NOT_INITIALIZED)?; + let verbose = global_config().verbose; + + let en_path = chain_config + .external_node_config_path + .clone() + .context("External node is not initialized")?; + let general_config = GeneralConfig::read_with_base_path(shell, &en_path)?; + let health_check_port = general_config + .api_config + .as_ref() + .context("no API config")? 
+ .healthcheck + .port; + + logger::info(MSG_WAITING_FOR_EN); + args.poll_health_check(health_check_port, verbose).await?; + logger::info(msg_waiting_for_en_success(health_check_port)); + Ok(()) +} diff --git a/zkstack_cli/crates/zkstack/src/commands/prover/args/run.rs b/zkstack_cli/crates/zkstack/src/commands/prover/args/run.rs index b79af777673c..4b3a16a38fca 100644 --- a/zkstack_cli/crates/zkstack/src/commands/prover/args/run.rs +++ b/zkstack_cli/crates/zkstack/src/commands/prover/args/run.rs @@ -176,16 +176,16 @@ impl ProverComponent { args.fri_prover_args.max_allocation.unwrap() )); }; - if args - .circuit_prover_args - .witness_vector_generator_count - .is_some() - { + if args.circuit_prover_args.light_wvg_count.is_some() { additional_args.push(format!( - "--witness-vector-generator-count={}", - args.circuit_prover_args - .witness_vector_generator_count - .unwrap() + "--light-wvg-count={}", + args.circuit_prover_args.light_wvg_count.unwrap() + )); + }; + if args.circuit_prover_args.heavy_wvg_count.is_some() { + additional_args.push(format!( + "--heavy-wvg-count={}", + args.circuit_prover_args.heavy_wvg_count.unwrap() )); }; } @@ -242,9 +242,11 @@ impl WitnessVectorGeneratorArgs { #[derive(Debug, Clone, Parser, Default)] pub struct CircuitProverArgs { - #[clap(long)] - pub witness_vector_generator_count: Option<usize>, - #[clap(long)] + #[clap(short = 'l', long)] + pub light_wvg_count: Option<usize>, + #[clap(short = 'h', long)] + pub heavy_wvg_count: Option<usize>, + #[clap(short = 'm', long)] pub max_allocation: Option<usize>, } @@ -257,15 +259,21 @@ impl CircuitProverArgs { return Ok(Self::default()); } - let witness_vector_generator_count = - self.witness_vector_generator_count.unwrap_or_else(|| { - Prompt::new("Number of WVG jobs to run in parallel") - .default("1") - .ask() - }); + let light_wvg_count = self.light_wvg_count.unwrap_or_else(|| { + Prompt::new("Number of light WVG jobs to run in parallel") + .default("8") + .ask() + }); + + let heavy_wvg_count = self.heavy_wvg_count.unwrap_or_else(|| { + Prompt::new("Number of heavy WVG jobs to run in parallel") + .default("2") + .ask() + }); Ok(CircuitProverArgs { - witness_vector_generator_count: Some(witness_vector_generator_count), + light_wvg_count: Some(light_wvg_count), + heavy_wvg_count: Some(heavy_wvg_count), max_allocation: self.max_allocation, }) } diff --git a/zkstack_cli/crates/zkstack/src/commands/server.rs b/zkstack_cli/crates/zkstack/src/commands/server.rs index be7a676a8252..10f267fb8526 100644 --- a/zkstack_cli/crates/zkstack/src/commands/server.rs +++ b/zkstack_cli/crates/zkstack/src/commands/server.rs @@ -1,5 +1,7 @@ use anyhow::Context; use common::{ + cmd::Cmd, + config::global_config, logger, server::{Server, ServerMode}, }; @@ -7,25 +9,38 @@ use config::{ traits::FileConfigWithDefaultName, ChainConfig, ContractsConfig, EcosystemConfig, GeneralConfig, GenesisConfig, SecretsConfig, WalletsConfig, }; -use xshell::Shell; +use xshell::{cmd, Shell}; use crate::{ - commands::args::RunServerArgs, - messages::{MSG_CHAIN_NOT_INITIALIZED, MSG_FAILED_TO_RUN_SERVER_ERR, MSG_STARTING_SERVER}, + commands::args::{RunServerArgs, ServerArgs, ServerCommand, WaitArgs}, + messages::{ + msg_waiting_for_server_success, MSG_BUILDING_SERVER, MSG_CHAIN_NOT_INITIALIZED, + MSG_FAILED_TO_BUILD_SERVER_ERR, MSG_FAILED_TO_RUN_SERVER_ERR, MSG_STARTING_SERVER, + MSG_WAITING_FOR_SERVER, + }, }; -pub fn run(shell: &Shell, args: RunServerArgs) -> anyhow::Result<()> { +pub async fn run(shell: &Shell, args: ServerArgs) -> anyhow::Result<()> { let ecosystem_config =
EcosystemConfig::from_file(shell)?; - let chain_config = ecosystem_config .load_current_chain() .context(MSG_CHAIN_NOT_INITIALIZED)?; - logger::info(MSG_STARTING_SERVER); + match ServerCommand::from(args) { + ServerCommand::Run(args) => run_server(args, &chain_config, shell), + ServerCommand::Build => build_server(&chain_config, shell), + ServerCommand::Wait(args) => wait_for_server(args, &chain_config).await, + } +} - run_server(args, &chain_config, shell)?; +fn build_server(chain_config: &ChainConfig, shell: &Shell) -> anyhow::Result<()> { + let _dir_guard = shell.push_dir(&chain_config.link_to_code); - Ok(()) + logger::info(MSG_BUILDING_SERVER); + + let mut cmd = Cmd::new(cmd!(shell, "cargo build --release --bin zksync_server")); + cmd = cmd.with_force_run(); + cmd.run().context(MSG_FAILED_TO_BUILD_SERVER_ERR) } fn run_server( @@ -33,17 +48,13 @@ fn run_server( chain_config: &ChainConfig, shell: &Shell, ) -> anyhow::Result<()> { + logger::info(MSG_STARTING_SERVER); let server = Server::new( args.components.clone(), chain_config.link_to_code.clone(), args.uring, ); - if args.build { - server.build(shell)?; - return Ok(()); - } - let mode = if args.genesis { ServerMode::Genesis } else { @@ -62,3 +73,20 @@ fn run_server( ) .context(MSG_FAILED_TO_RUN_SERVER_ERR) } + +async fn wait_for_server(args: WaitArgs, chain_config: &ChainConfig) -> anyhow::Result<()> { + let verbose = global_config().verbose; + + let health_check_port = chain_config + .get_general_config()? + .api_config + .as_ref() + .context("no API config")? + .healthcheck + .port; + + logger::info(MSG_WAITING_FOR_SERVER); + args.poll_health_check(health_check_port, verbose).await?; + logger::info(msg_waiting_for_server_success(health_check_port)); + Ok(()) +} diff --git a/zkstack_cli/crates/zkstack/src/consts.rs b/zkstack_cli/crates/zkstack/src/consts.rs index b7c4d2a20709..f5fbf0b0c9bb 100644 --- a/zkstack_cli/crates/zkstack/src/consts.rs +++ b/zkstack_cli/crates/zkstack/src/consts.rs @@ -12,7 +12,7 @@ pub const L2_BASE_TOKEN_ADDRESS: &str = "0x0000000000000000000000000000000000008 /// Path to the JS runtime config for the block-explorer-app docker container to be mounted to pub const EXPLORER_APP_DOCKER_CONFIG_PATH: &str = "/usr/src/app/packages/app/dist/config.js"; -pub const EXPLORER_APP_DOCKER_IMAGE: &str = "matterlabs/block-explorer-app"; +pub const EXPLORER_APP_DOCKER_IMAGE: &str = "matterlabs/block-explorer-app:v2.50.8"; /// Path to the JS runtime config for the dapp-portal docker container to be mounted to pub const PORTAL_DOCKER_CONFIG_PATH: &str = "/usr/src/app/dist/config.js"; pub const PORTAL_DOCKER_IMAGE: &str = "matterlabs/dapp-portal"; diff --git a/zkstack_cli/crates/zkstack/src/main.rs b/zkstack_cli/crates/zkstack/src/main.rs index 3ebe26a4fa21..8a115201fc81 100644 --- a/zkstack_cli/crates/zkstack/src/main.rs +++ b/zkstack_cli/crates/zkstack/src/main.rs @@ -15,7 +15,7 @@ use config::EcosystemConfig; use xshell::Shell; use crate::commands::{ - args::RunServerArgs, chain::ChainCommands, consensus, ecosystem::EcosystemCommands, + args::ServerArgs, chain::ChainCommands, consensus, ecosystem::EcosystemCommands, explorer::ExplorerCommands, external_node::ExternalNodeCommands, prover::ProverCommands, }; @@ -57,7 +57,7 @@ pub enum ZkStackSubcommands { #[command(subcommand, alias = "p")] Prover(ProverCommands), /// Run server - Server(RunServerArgs), + Server(ServerArgs), /// External Node related commands #[command(subcommand, alias = "en")] ExternalNode(ExternalNodeCommands), @@ -136,7 +136,7 @@ async fn 
run_subcommand(zkstack_args: ZkStack) -> anyhow::Result<()> { ZkStackSubcommands::Chain(args) => commands::chain::run(&shell, *args).await?, ZkStackSubcommands::Dev(args) => commands::dev::run(&shell, args).await?, ZkStackSubcommands::Prover(args) => commands::prover::run(&shell, args).await?, - ZkStackSubcommands::Server(args) => commands::server::run(&shell, args)?, + ZkStackSubcommands::Server(args) => commands::server::run(&shell, args).await?, ZkStackSubcommands::Containers(args) => commands::containers::run(&shell, args)?, ZkStackSubcommands::ExternalNode(args) => { commands::external_node::run(&shell, args).await? diff --git a/zkstack_cli/crates/zkstack/src/messages.rs b/zkstack_cli/crates/zkstack/src/messages.rs index 516194ef721e..bedcb233b19f 100644 --- a/zkstack_cli/crates/zkstack/src/messages.rs +++ b/zkstack_cli/crates/zkstack/src/messages.rs @@ -1,9 +1,10 @@ -use std::path::Path; +use std::{fmt, path::Path, time::Duration}; use ethers::{ - types::{H160, U256}, + types::{Address, H160, U256}, utils::format_ether, }; +use url::Url; use zksync_consensus_roles::attester; pub(super) const MSG_SETUP_KEYS_DOWNLOAD_SELECTION_PROMPT: &str = @@ -264,7 +265,6 @@ pub(super) const MSG_ENABLE_CONSENSUS_HELP: &str = "Enable consensus"; pub(super) const MSG_SERVER_GENESIS_HELP: &str = "Run server in genesis mode"; pub(super) const MSG_SERVER_ADDITIONAL_ARGS_HELP: &str = "Additional arguments that can be passed through the CLI"; -pub(super) const MSG_SERVER_BUILD_HELP: &str = "Build server but don't run it"; pub(super) const MSG_SERVER_URING_HELP: &str = "Enables uring support for RocksDB"; /// Accept ownership related messages @@ -284,6 +284,13 @@ pub(super) const MSG_OBSERVABILITY_RUN_PROMPT: &str = "Do you want to run observ pub(super) const MSG_STARTING_SERVER: &str = "Starting server"; pub(super) const MSG_FAILED_TO_RUN_SERVER_ERR: &str = "Failed to start server"; pub(super) const MSG_PREPARING_EN_CONFIGS: &str = "Preparing External Node config"; +pub(super) const MSG_BUILDING_SERVER: &str = "Building server"; +pub(super) const MSG_FAILED_TO_BUILD_SERVER_ERR: &str = "Failed to build server"; +pub(super) const MSG_WAITING_FOR_SERVER: &str = "Waiting for server to start"; + +pub(super) fn msg_waiting_for_server_success(health_check_port: u16) -> String { + format!("Server is alive with health check server on :{health_check_port}") +} /// Portal related messages pub(super) const MSG_PORTAL_FAILED_TO_FIND_ANY_CHAIN_ERR: &str = @@ -351,7 +358,14 @@ pub(super) const MSG_CONSENSUS_CONFIG_MISSING_ERR: &str = "Consensus config is m pub(super) const MSG_CONSENSUS_SECRETS_MISSING_ERR: &str = "Consensus secrets config is missing"; pub(super) const MSG_CONSENSUS_SECRETS_NODE_KEY_MISSING_ERR: &str = "Consensus node key is missing"; +pub(super) const MSG_BUILDING_EN: &str = "Building external node"; +pub(super) const MSG_FAILED_TO_BUILD_EN_ERR: &str = "Failed to build external node"; pub(super) const MSG_STARTING_EN: &str = "Starting external node"; +pub(super) const MSG_WAITING_FOR_EN: &str = "Waiting for external node to start"; + +pub(super) fn msg_waiting_for_en_success(health_check_port: u16) -> String { + format!("External node is alive with health check server on :{health_check_port}") +} /// Prover related messages pub(super) const MSG_GENERATING_SK_SPINNER: &str = "Generating setup keys..."; @@ -429,7 +443,10 @@ pub(super) fn msg_bucket_created(bucket_name: &str) -> String { } /// Contract verifier related messages +pub(super) const MSG_BUILDING_CONTRACT_VERIFIER: &str = "Building contract 
verifier"; pub(super) const MSG_RUNNING_CONTRACT_VERIFIER: &str = "Running contract verifier"; +pub(super) const MSG_FAILED_TO_BUILD_CONTRACT_VERIFIER_ERR: &str = + "Failed to build contract verifier"; pub(super) const MSG_FAILED_TO_RUN_CONTRACT_VERIFIER_ERR: &str = "Failed to run contract verifier"; pub(super) const MSG_INVALID_ARCH_ERR: &str = "Invalid arch"; pub(super) const MSG_GET_ZKSOLC_RELEASES_ERR: &str = "Failed to get zksolc releases"; @@ -478,6 +495,34 @@ pub(super) const MSG_DIFF_EN_GENERAL_CONFIG: &str = "Added the following fields to the external node generalconfig:"; pub(super) const MSG_UPDATING_ERA_OBSERVABILITY_SPINNER: &str = "Updating era observability..."; +/// Wait-related messages +pub(super) const MSG_WAIT_TIMEOUT_HELP: &str = "Wait timeout in seconds"; +pub(super) const MSG_WAIT_POLL_INTERVAL_HELP: &str = "Poll interval in milliseconds"; + +pub(super) fn msg_wait_starting_polling( + component: &impl fmt::Display, + url: &str, + poll_interval: Duration, +) -> String { + format!("Starting polling {component} at `{url}` each {poll_interval:?}") +} + +pub(super) fn msg_wait_timeout(component: &impl fmt::Display) -> String { + format!("timed out polling {component}") +} + +pub(super) fn msg_wait_connect_err(component: &impl fmt::Display, url: &str) -> String { + format!("failed to connect to {component} at `{url}`") +} + +pub(super) fn msg_wait_non_successful_response(component: &impl fmt::Display) -> String { + format!("non-successful {component} response") +} + +pub(super) fn msg_wait_not_healthy(url: &str) -> String { + format!("Node at `{url}` is not healthy") +} + pub(super) fn msg_diff_genesis_config(chain: &str) -> String { format!( "Found differences between chain {chain} and era genesis configs. Consider updating the chain {chain} genesis config and re-running genesis. 
Diff:" @@ -516,9 +561,20 @@ pub(super) const MSG_CONSENSUS_REGISTRY_ADDRESS_NOT_CONFIGURED: &str = "consensus registry address not configured"; pub(super) const MSG_CONSENSUS_GENESIS_SPEC_ATTESTERS_MISSING_IN_GENERAL_YAML: &str = "consensus.genesis_spec.attesters missing in general.yaml"; +pub(super) const MSG_CONSENSUS_REGISTRY_POLL_ERROR: &str = "failed querying L2 node"; +pub(super) const MSG_CONSENSUS_REGISTRY_WAIT_COMPONENT: &str = "main node HTTP RPC"; + pub(super) fn msg_setting_attester_committee_failed( got: &attester::Committee, want: &attester::Committee, ) -> String { format!("setting attester committee failed: got {got:?}, want {want:?}") } + +pub(super) fn msg_wait_consensus_registry_started_polling(addr: Address, url: &Url) -> String { + format!("Starting polling L2 HTTP RPC at {url} for code at {addr:?}") +} + +pub(super) fn msg_consensus_registry_wait_success(addr: Address, code_len: usize) -> String { + format!("Consensus registry is deployed at {addr:?}: {code_len} bytes") +} diff --git a/zkstack_cli/crates/zkstack/src/utils/link_to_code.rs b/zkstack_cli/crates/zkstack/src/utils/link_to_code.rs new file mode 100644 index 000000000000..1f2eb487849d --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/utils/link_to_code.rs @@ -0,0 +1,109 @@ +use std::{ + path::{Path, PathBuf}, + str::FromStr, +}; + +use anyhow::bail; +use common::{cmd::Cmd, git, logger, spinner::Spinner, Prompt, PromptConfirm, PromptSelect}; +use config::ZKSYNC_ERA_GIT_REPO; +use strum::{EnumIter, IntoEnumIterator}; +use xshell::{cmd, Shell}; + +use crate::messages::{ + msg_path_to_zksync_does_not_exist_err, MSG_CLONING_ERA_REPO_SPINNER, + MSG_CONFIRM_STILL_USE_FOLDER, MSG_LINK_TO_CODE_PROMPT, MSG_LINK_TO_CODE_SELECTION_CLONE, + MSG_LINK_TO_CODE_SELECTION_PATH, MSG_NOT_MAIN_REPO_OR_FORK_ERR, MSG_REPOSITORY_ORIGIN_PROMPT, +}; + +#[derive(Debug, Clone, EnumIter, PartialEq, Eq)] +enum LinkToCodeSelection { + Clone, + Path, +} + +impl std::fmt::Display for LinkToCodeSelection { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + LinkToCodeSelection::Clone => write!(f, "{MSG_LINK_TO_CODE_SELECTION_CLONE}"), + LinkToCodeSelection::Path => write!(f, "{MSG_LINK_TO_CODE_SELECTION_PATH}"), + } + } +} + +fn check_link_to_code(shell: &Shell, path: &str) -> anyhow::Result<()> { + let path = Path::new(path); + if !shell.path_exists(path) { + bail!(msg_path_to_zksync_does_not_exist_err( + path.to_str().unwrap() + )); + } + + let _guard = shell.push_dir(path); + let out = String::from_utf8( + Cmd::new(cmd!(shell, "git remote -v")) + .run_with_output()? 
+ .stdout, + )?; + + if !out.contains("matter-labs/zksync-era") { + bail!(MSG_NOT_MAIN_REPO_OR_FORK_ERR); + } + + Ok(()) +} + +fn pick_new_link_to_code(shell: &Shell) -> String { + let link_to_code: String = Prompt::new(MSG_LINK_TO_CODE_PROMPT).ask(); + match check_link_to_code(shell, &link_to_code) { + Ok(_) => link_to_code, + Err(err) => { + logger::warn(err); + if !PromptConfirm::new(MSG_CONFIRM_STILL_USE_FOLDER).ask() { + pick_new_link_to_code(shell) + } else { + link_to_code + } + } + } +} + +pub(crate) fn get_link_to_code(shell: &Shell) -> String { + let link_to_code_selection = + PromptSelect::new(MSG_REPOSITORY_ORIGIN_PROMPT, LinkToCodeSelection::iter()).ask(); + match link_to_code_selection { + LinkToCodeSelection::Clone => "".to_string(), + LinkToCodeSelection::Path => { + let mut path: String = Prompt::new(MSG_LINK_TO_CODE_PROMPT).ask(); + if let Err(err) = check_link_to_code(shell, &path) { + logger::warn(err); + if !PromptConfirm::new(MSG_CONFIRM_STILL_USE_FOLDER).ask() { + path = pick_new_link_to_code(shell); + } + } + path + } + } +} + +pub(crate) fn resolve_link_to_code( + shell: &Shell, + base_path: PathBuf, + link_to_code: String, +) -> anyhow::Result<PathBuf> { + if link_to_code.is_empty() { + if base_path.join("zksync-era").exists() { + return Ok(base_path.join("zksync-era")); + } + let spinner = Spinner::new(MSG_CLONING_ERA_REPO_SPINNER); + if !base_path.exists() { + shell.create_dir(&base_path)?; + } + let link_to_code = git::clone(shell, base_path, ZKSYNC_ERA_GIT_REPO, "zksync-era")?; + spinner.finish(); + Ok(link_to_code) + } else { + let path = PathBuf::from_str(&link_to_code)?; + git::submodule_update(shell, path.clone())?; + Ok(path) + } +} diff --git a/zkstack_cli/crates/zkstack/src/utils/mod.rs b/zkstack_cli/crates/zkstack/src/utils/mod.rs index cf7a7ef48182..a8bdc00d73fc 100644 --- a/zkstack_cli/crates/zkstack/src/utils/mod.rs +++ b/zkstack_cli/crates/zkstack/src/utils/mod.rs @@ -1,4 +1,5 @@ pub mod consensus; pub mod forge; +pub mod link_to_code; pub mod ports; pub mod rocks_db;
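The new `wait` machinery in `commands/args/wait.rs` boils down to one pattern: poll an HTTP endpoint on a fixed tokio interval until it answers, bounded by an optional overall timeout. Below is a condensed, self-contained sketch of that loop, assuming the same `tokio`, `reqwest`, and `anyhow` crates the diff uses; the URL, port, and timeout values are illustrative, and it is simplified relative to `wait.rs` (it keeps polling on any non-success status, whereas the real code fails fast on statuses other than 503).

    use std::time::Duration;

    use anyhow::Context as _;
    use tokio::time::MissedTickBehavior;

    async fn poll_health(url: &str, poll_interval: Duration) -> anyhow::Result<()> {
        let mut interval = tokio::time::interval(poll_interval);
        // If a tick is missed (e.g., due to a slow response), skip it instead of bursting.
        interval.set_missed_tick_behavior(MissedTickBehavior::Skip);
        let client = reqwest::Client::builder()
            .connect_timeout(poll_interval)
            .build()
            .context("failed to build reqwest::Client")?;
        loop {
            interval.tick().await;
            match client.get(url).send().await {
                // Treat connection errors as "not up yet" and keep polling.
                Err(err) if err.is_connect() || err.is_timeout() => continue,
                Err(err) => return Err(err.into()),
                Ok(response) if response.status().is_success() => return Ok(()),
                // Simplification: any non-success status (e.g., 503 "not ready") keeps polling.
                Ok(_) => continue,
            }
        }
    }

    #[tokio::main]
    async fn main() -> anyhow::Result<()> {
        // Placeholder URL/port; the CLI reads the health-check port from the chain config.
        let action = poll_health("http://127.0.0.1:3071/health", Duration::from_millis(100));
        // An overall timeout turns an indefinite wait into a hard error, like `WaitArgs --timeout`.
        tokio::time::timeout(Duration::from_secs(60), action)
            .await
            .context("timed out waiting for health check")?
    }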
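The `ServerArgs` change in `run_server.rs` relies on a clap idiom worth spelling out: with `args_conflicts_with_subcommands = true` and a flattened `RunServerArgs`, `zkstack server --genesis` still parses as a plain run, while `zkstack server build|run|wait` resolve to explicit subcommands via the `From` impl. A minimal standalone illustration under those assumptions (hypothetical binary, fields trimmed to one flag):

    use clap::{Parser, Subcommand};

    #[derive(Debug, Parser)]
    #[command(args_conflicts_with_subcommands = true, flatten_help = true)]
    struct ServerArgs {
        #[command(subcommand)]
        command: Option<ServerCommand>,
        // Flattened so `--genesis` works without naming the `run` subcommand.
        #[command(flatten)]
        run: RunServerArgs,
    }

    #[derive(Debug, Subcommand)]
    enum ServerCommand {
        Build,
        Run(RunServerArgs),
        Wait,
    }

    #[derive(Debug, Parser)]
    struct RunServerArgs {
        #[arg(long)]
        genesis: bool,
    }

    fn main() {
        let args = ServerArgs::parse();
        // Absent an explicit subcommand, fall back to `run` with the flattened args.
        let command = args.command.unwrap_or(ServerCommand::Run(args.run));
        println!("{command:?}");
    }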
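`consensus wait-for-registry` uses a different readiness probe: instead of an HTTP health endpoint, it polls `eth_getCode` until the registry address returns non-empty bytecode. A simplified `ethers` sketch of that check; the RPC URL and address are placeholders, and the error handling is reduced relative to `consensus/mod.rs` (the real loop tolerates transient connection errors and applies the `WaitArgs` timeout):

    use std::time::Duration;

    use ethers::providers::{Http, Middleware, Provider};
    use ethers::types::Address;

    async fn wait_for_code(provider: &Provider<Http>, addr: Address) -> anyhow::Result<()> {
        let mut interval = tokio::time::interval(Duration::from_millis(100));
        loop {
            interval.tick().await;
            // `eth_getCode` returns empty bytes until the deployment transaction lands.
            let code = provider.get_code(addr, None).await?;
            if !code.is_empty() {
                return Ok(());
            }
        }
    }

    #[tokio::main]
    async fn main() -> anyhow::Result<()> {
        // Placeholder L2 RPC URL and contract address.
        let provider = Provider::<Http>::try_from("http://127.0.0.1:3050")?;
        let addr: Address = "0x0000000000000000000000000000000000010000".parse()?;
        wait_for_code(&provider, addr).await
    }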