diff --git a/.github/workflows/build-contract-verifier-template.yml b/.github/workflows/build-contract-verifier-template.yml index 9c53ca1dc484..973ed9edcb4d 100644 --- a/.github/workflows/build-contract-verifier-template.yml +++ b/.github/workflows/build-contract-verifier-template.yml @@ -100,7 +100,7 @@ jobs: if: env.BUILD_CONTRACTS == 'true' run: | mkdir ./foundry-zksync - curl -LO https://github.com/matter-labs/foundry-zksync/releases/download/nightly-15bec2f861b3b4c71e58f85e2b2c9dd722585aa8/foundry_nightly_linux_amd64.tar.gz + curl -LO https://github.com/matter-labs/foundry-zksync/releases/download/nightly-27360d4c8d12beddbb730dae07ad33a206b38f4b/foundry_nightly_linux_amd64.tar.gz tar zxf foundry_nightly_linux_amd64.tar.gz -C ./foundry-zksync chmod +x ./foundry-zksync/forge ./foundry-zksync/cast echo "$PWD/foundry-zksync" >> $GITHUB_PATH diff --git a/.github/workflows/build-core-template.yml b/.github/workflows/build-core-template.yml index c76a75e9b3ea..122bbd747147 100644 --- a/.github/workflows/build-core-template.yml +++ b/.github/workflows/build-core-template.yml @@ -105,7 +105,7 @@ jobs: if: env.BUILD_CONTRACTS == 'true' run: | mkdir ./foundry-zksync - curl -LO https://github.com/matter-labs/foundry-zksync/releases/download/nightly-15bec2f861b3b4c71e58f85e2b2c9dd722585aa8/foundry_nightly_linux_amd64.tar.gz + curl -LO https://github.com/matter-labs/foundry-zksync/releases/download/nightly-27360d4c8d12beddbb730dae07ad33a206b38f4b/foundry_nightly_linux_amd64.tar.gz tar zxf foundry_nightly_linux_amd64.tar.gz -C ./foundry-zksync chmod +x ./foundry-zksync/forge ./foundry-zksync/cast echo "$PWD/foundry-zksync" >> $GITHUB_PATH diff --git a/.github/workflows/ci-core-reusable.yml b/.github/workflows/ci-core-reusable.yml index d76bb776968d..25a0da838f42 100644 --- a/.github/workflows/ci-core-reusable.yml +++ b/.github/workflows/ci-core-reusable.yml @@ -65,15 +65,12 @@ jobs: - name: Install zkstack run: | ci_run ./zkstack_cli/zkstackup/install -g --path ./zkstack_cli/zkstackup/zkstackup - ci_run zkstackup -g --local + ci_run zkstackup -g --local --cargo-features gateway - name: Build contracts run: | ci_run zkstack dev contracts - - name: Contracts unit tests - run: ci_run yarn l1-contracts test - - name: Download compilers for contract verifier tests run: ci_run zkstack contract-verifier init --zksolc-version=v1.5.3 --zkvyper-version=v1.5.4 --solc-version=0.8.26 --vyper-version=v0.3.10 --era-vm-solc-version=0.8.26-1.0.1 --only --chain era @@ -131,7 +128,7 @@ jobs: - name: Install zkstack run: | ci_run ./zkstack_cli/zkstackup/install -g --path ./zkstack_cli/zkstackup/zkstackup || true - ci_run zkstackup -g --local + ci_run zkstackup -g --local --cargo-features gateway - name: Create and initialize legacy chain @@ -150,7 +147,7 @@ jobs: --legacy-bridge \ --evm-emulator false - ci_run zkstack ecosystem init --dev --verbose + ci_run zkstack ecosystem init --dev --support-l2-legacy-shared-bridge-test true --verbose # `sleep 60` because we need to wait until server added all the tokens - name: Run server @@ -174,13 +171,22 @@ jobs: integration-tests: runs-on: [ matterlabs-ci-runner-ultra-performance ] + strategy: + # ---------------------------------------------- + # Note, that while the contracts do support gateway chain + # in reality it won't exist for quite some time and so + # we will test both cases here + # ---------------------------------------------- + matrix: + use_gateway_chain: [ "WITH_GATEWAY", "WITHOUT_GATEWAY" ] + # In some cases it's useful to continue one job even if another 
fails. + fail-fast: false steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 with: submodules: "recursive" fetch-depth: 0 - - name: Setup environment run: | echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV @@ -200,7 +206,7 @@ jobs: - name: Install zkstack run: | ci_run ./zkstack_cli/zkstackup/install -g --path ./zkstack_cli/zkstackup/zkstackup || true - ci_run zkstackup -g --local + ci_run zkstackup -g --local --cargo-features gateway - name: Create log directories run: | @@ -270,7 +276,8 @@ jobs: --l1-rpc-url=http://localhost:8545 \ --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ --server-db-name=zksync_server_localhost_validium \ - --chain validium + --chain validium \ + --validium-type no-da - name: Create and initialize chain with Custom Token run: | @@ -292,7 +299,8 @@ jobs: --l1-rpc-url=http://localhost:8545 \ --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ --server-db-name=zksync_server_localhost_custom_token \ - --chain custom_token + --chain custom_token \ + --validium-type no-da - name: Create and register chain with transactions signed "offline" run: | @@ -314,7 +322,7 @@ jobs: governor_pk=$(awk '/governor:/ {flag=1} flag && /private_key:/ {print $2; exit}' ./configs/wallets.yaml) ci_run zkstack dev send-transactions \ - --file ./transactions/chain/offline_chain/register-hyperchain-txns.json \ + --file ./transactions/chain/offline_chain/register-zk-chain-txns.json \ --l1-rpc-url http://127.0.0.1:8545 \ --private-key $governor_pk @@ -350,13 +358,67 @@ jobs: --l1-rpc-url=http://localhost:8545 \ --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ --server-db-name=zksync_server_localhost_consensus \ - --chain consensus + --chain consensus \ + --validium-type no-da - name: Export chain list to environment variable run: | CHAINS="era,validium,custom_token,consensus" echo "CHAINS=$CHAINS" >> $GITHUB_ENV + # ---------------------------------------------------------------- + # Only create/initialize the gateway chain *if* use_gateway_chain=WITH_GATEWAY + # ---------------------------------------------------------------- + - name: Initialize gateway chain + if: matrix.use_gateway_chain == 'WITH_GATEWAY' + run: | + ci_run zkstack chain create \ + --chain-name gateway \ + --chain-id 505 \ + --prover-mode no-proofs \ + --wallet-creation localhost \ + --l1-batch-commit-data-generator-mode rollup \ + --base-token-address 0x0000000000000000000000000000000000000001 \ + --base-token-price-nominator 1 \ + --base-token-price-denominator 1 \ + --set-as-default false \ + --ignore-prerequisites \ + --evm-emulator false + + ci_run zkstack chain init \ + --deploy-paymaster \ + --l1-rpc-url=http://localhost:8545 \ + --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ + --server-db-name=zksync_server_localhost_gateway \ + --chain gateway \ + --validium-type no-da + + ci_run zkstack chain convert-to-gateway --chain gateway --ignore-prerequisites + + - name: Run gateway + if: matrix.use_gateway_chain == 'WITH_GATEWAY' + run: | + ci_run zkstack server --ignore-prerequisites --chain gateway &> ${{ env.SERVER_LOGS_DIR }}/gateway.log & + ci_run zkstack server wait --ignore-prerequisites --verbose --chain gateway + + - name: Migrate chains to gateway + if: matrix.use_gateway_chain == 'WITH_GATEWAY' + run: | + ci_run zkstack chain migrate-to-gateway --chain era --gateway-chain-name gateway + ci_run zkstack chain migrate-to-gateway --chain validium --gateway-chain-name gateway + ci_run zkstack chain 
migrate-to-gateway --chain custom_token --gateway-chain-name gateway + ci_run zkstack chain migrate-to-gateway --chain consensus --gateway-chain-name gateway + + - name: Migrate back era + if: matrix.use_gateway_chain == 'WITH_GATEWAY' + run: | + ci_run zkstack chain migrate-from-gateway --chain era --gateway-chain-name gateway + + - name: Migrate to gateway again + if: matrix.use_gateway_chain == 'WITH_GATEWAY' + run: | + ci_run zkstack chain migrate-to-gateway --chain era --gateway-chain-name gateway + - name: Build test dependencies run: | ci_run zkstack dev test build @@ -402,20 +464,22 @@ jobs: - name: Init external nodes run: | + GATEWAY_RPC_URL="${{ matrix.use_gateway_chain == 'WITH_GATEWAY' && '--gateway-rpc-url=http://localhost:3550' || '' }}" + ci_run zkstack external-node configs --db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --db-name=zksync_en_localhost_era_rollup --l1-rpc-url=http://localhost:8545 --chain era + --db-name=zksync_en_localhost_era_rollup --l1-rpc-url=http://localhost:8545 $GATEWAY_RPC_URL --chain era ci_run zkstack external-node init --ignore-prerequisites --chain era ci_run zkstack external-node configs --db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --db-name=zksync_en_localhost_era_validium1 --l1-rpc-url=http://localhost:8545 --chain validium + --db-name=zksync_en_localhost_era_validium1 --l1-rpc-url=http://localhost:8545 $GATEWAY_RPC_URL --chain validium ci_run zkstack external-node init --ignore-prerequisites --chain validium ci_run zkstack external-node configs --db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --db-name=zksync_en_localhost_era_custom_token --l1-rpc-url=http://localhost:8545 --chain custom_token + --db-name=zksync_en_localhost_era_custom_token --l1-rpc-url=http://localhost:8545 $GATEWAY_RPC_URL --chain custom_token ci_run zkstack external-node init --ignore-prerequisites --chain custom_token ci_run zkstack external-node configs --db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --db-name=zksync_en_localhost_era_consensus --l1-rpc-url=http://localhost:8545 --chain consensus + --db-name=zksync_en_localhost_era_consensus --l1-rpc-url=http://localhost:8545 $GATEWAY_RPC_URL --chain consensus ci_run zkstack external-node init --ignore-prerequisites --chain consensus - name: Run recovery tests (from snapshot) @@ -432,7 +496,7 @@ jobs: ci_run zkstack external-node run --ignore-prerequisites --chain validium &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/validium.log & ci_run zkstack external-node run --ignore-prerequisites --chain custom_token &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/custom_token.log & ci_run zkstack external-node run --ignore-prerequisites --chain consensus --enable-consensus &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/consensus.log & - + ci_run zkstack external-node wait --ignore-prerequisites --verbose --chain era ci_run zkstack external-node wait --ignore-prerequisites --verbose --chain validium ci_run zkstack external-node wait --ignore-prerequisites --verbose --chain custom_token @@ -445,6 +509,14 @@ jobs: - name: Fee projection tests run: | ci_run killall -INT zksync_server || true + + # Only start & wait for the gateway server if use_gateway_chain == WITH_GATEWAY + if [ "${{ matrix.use_gateway_chain }}" == "WITH_GATEWAY" ]; then + ci_run zkstack server --ignore-prerequisites --chain gateway &> ${{ env.SERVER_LOGS_DIR }}/gateway.log & + ci_run zkstack server wait --ignore-prerequisites --verbose --chain gateway + fi + + # Always run the chain-specific fee tests ci_run 
./bin/run_on_all_chains.sh "zkstack dev test fees --no-deps --no-kill" ${{ env.CHAINS }} ${{ env.FEES_LOGS_DIR }} - name: Run revert tests @@ -452,6 +524,13 @@ jobs: ci_run killall -INT zksync_server || true ci_run killall -INT zksync_external_node || true + # Only start & wait for the gateway server if use_gateway_chain == WITH_GATEWAY + if [ "${{ matrix.use_gateway_chain }}" == "WITH_GATEWAY" ]; then + ci_run zkstack server --ignore-prerequisites --chain gateway &> ${{ env.SERVER_LOGS_DIR }}/gateway.log & + ci_run zkstack server wait --ignore-prerequisites --verbose --chain gateway + fi + + # Always run the chain-specific revert tests ci_run ./bin/run_on_all_chains.sh "zkstack dev test revert --no-deps --external-node --no-kill --ignore-prerequisites" ${{ env.CHAINS }} ${{ env.INTEGRATION_TESTS_LOGS_DIR }} # Upgrade tests should run last, because as soon as they @@ -459,12 +538,20 @@ jobs: # TODO make upgrade tests safe to run multiple times - name: Run upgrade test run: | - ci_run zkstack dev test upgrade --no-deps --chain era + ci_run killall -INT zksync_server || true + # Only start & wait for the gateway server if use_gateway_chain == WITH_GATEWAY + if [ "${{ matrix.use_gateway_chain }}" == "WITH_GATEWAY" ]; then + ci_run zkstack server --ignore-prerequisites --chain gateway &> ${{ env.SERVER_LOGS_DIR }}/gateway.log & + ci_run zkstack server wait --ignore-prerequisites --verbose --chain gateway + fi + + # Always run the upgrade test against era + ci_run zkstack dev test upgrade --no-deps --chain era - name: Upload logs uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 if: always() with: - name: logs + name: logs_${{matrix.use_gateway_chain}} path: logs diff --git a/.github/workflows/ci-prover-e2e.yml b/.github/workflows/ci-prover-e2e.yml index 77d3e2da5758..0a09aee51315 100644 --- a/.github/workflows/ci-prover-e2e.yml +++ b/.github/workflows/ci-prover-e2e.yml @@ -43,10 +43,19 @@ jobs: run: | git fetch # Checkout the commit with the DualVerifier contract to test FFLONK interface - git checkout b4d5b984 + git checkout bcdd1cb05e8f4d9ec2dd41e2cc668cdfe30ee535 git submodule update --init --recursive git rev-parse HEAD + - name: Set new genesis for fflonk + # Note, that while `Verifier` is not explicitly a part of the genensis state, + # it affects it indirectly as it is a part of the repo. 
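+ # The two `sed` overrides below pin `genesis_root` and `genesis_batch_commitment` to the
+ # hashes produced with the FFLONK `DualVerifier` build of the contracts. A minimal sanity
+ # check after this step (illustrative only; run from the step's working directory,
+ # ./etc/env/file_based):
+ #   grep -E '^genesis_(root|batch_commitment):' genesis.yaml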
+ working-directory: ./etc/env/file_based + if: matrix.compressor-mode == 'fflonk' + run: | + sudo sed -i 's/^genesis_root: .*/genesis_root: 0xc3fa60b6769a0c2f222053d7cbd1d6f63be7777e3c8d029cbd61cc075526ab81/' genesis.yaml + sudo sed -i "s/^genesis_batch_commitment: .*/genesis_batch_commitment: 0x17689e705b5749ed0bbd53c845988d17c419697c2cb29eabab8785f1cb775b4a/" genesis.yaml + - name: Init run: | ci_run chmod -R +x ./bin @@ -88,13 +97,13 @@ jobs: - name: Run server run: | ci_run zkstack server --uring --chain=proving_chain --components=api,tree,eth,state_keeper,commitment_generator,proof_data_handler,vm_runner_protective_reads,vm_runner_bwip &>prover_logs_${{matrix.compressor-mode}}/server.log & - - name: Run Gateway + - name: Run prover gateway run: | ci_run zkstack prover run --component=gateway --docker=false &>prover_logs_${{matrix.compressor-mode}}/gateway.log & - name: Run Prover Job Monitor run: | ci_run zkstack prover run --component=prover-job-monitor --docker=false &>prover_logs_${{matrix.compressor-mode}}/prover-job-monitor.log & - - name: Wait for batch to be passed through gateway + - name: Wait for batch to be passed through prover gateway env: DATABASE_URL: postgres://postgres:notsecurepassword@localhost:5432/zksync_prover_localhost_proving_chain BATCH_NUMBER: 1 @@ -126,10 +135,10 @@ jobs: - name: Wait for batch to be executed on L1 env: - DATABASE_URL: postgres://postgres:notsecurepassword@localhost:5432/zksync_prover_localhost_proving_chain - BATCH_NUMBER: 1 - INTERVAL: 30 - TIMEOUT: 1200 + DATABASE_URL: postgres://postgres:notsecurepassword@localhost:5432/zksync_prover_localhost_proving_chain + BATCH_NUMBER: 1 + INTERVAL: 30 + TIMEOUT: 1200 run: | PASSED_ENV_VARS="BATCH_NUMBER,DATABASE_URL,URL,INTERVAL,TIMEOUT" \ ci_run ./bin/prover_checkers/batch_l1_status_checker diff --git a/.github/workflows/ci-prover-reusable.yml b/.github/workflows/ci-prover-reusable.yml index 7f719b2240db..26679cb2232f 100644 --- a/.github/workflows/ci-prover-reusable.yml +++ b/.github/workflows/ci-prover-reusable.yml @@ -1,6 +1,7 @@ name: Workflow template for CI jobs for Prover Components on: workflow_call: + jobs: lint: runs-on: [ matterlabs-ci-runner-highmem-long ] diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c502a5f0c205..899eaea4b445 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,6 +23,7 @@ jobs: docs: ${{ steps.changed-files.outputs.docs_any_changed }} all: ${{ steps.changed-files.outputs.all_any_changed }} steps: + - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 with: fetch-depth: 2 diff --git a/.github/workflows/vm-perf-comparison.yml b/.github/workflows/vm-perf-comparison.yml index 3520419f1337..ac83485a2c12 100644 --- a/.github/workflows/vm-perf-comparison.yml +++ b/.github/workflows/vm-perf-comparison.yml @@ -47,25 +47,26 @@ jobs: run: | run_retried docker compose pull zk docker compose up -d zk - + - name: run benchmarks on base branch shell: bash run: | ci_run zkstackup -g --local - ci_run zkstack dev contracts --system-contracts + ci_run zkstack dev contracts ci_run cargo bench --package vm-benchmark --bench instructions -- --verbose || echo "Instructions benchmark is missing" ci_run cargo run --package vm-benchmark --release --bin instruction_counts | tee base-opcodes - name: checkout PR run: | git checkout --force FETCH_HEAD --recurse-submodules + git submodule update --init --recursive - name: run benchmarks on PR shell: bash id: comparison run: | ci_run zkstackup -g --local - ci_run zkstack dev 
contracts --system-contracts + ci_run zkstack dev contracts ci_run cargo bench --package vm-benchmark --bench instructions -- --verbose ci_run cargo bench --package vm-benchmark --bench instructions -- --print > instructions.log 2>/dev/null diff --git a/Cargo.lock b/Cargo.lock index d6e7503bd20f..4744b424cea0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11810,6 +11810,7 @@ dependencies = [ "tokio-stream", "tonic 0.11.0", "tracing", + "url", "zksync_basic_types", "zksync_config", "zksync_da_client", @@ -11931,7 +11932,6 @@ dependencies = [ "assert_matches", "async-trait", "chrono", - "once_cell", "serde", "test-casing", "test-log", diff --git a/contracts b/contracts index 46d75088e7dd..16dedf6d7769 160000 --- a/contracts +++ b/contracts @@ -1 +1 @@ -Subproject commit 46d75088e7ddb534101874c3ec15b877da1cb417 +Subproject commit 16dedf6d77695ce00f81fce35a3066381b97fca1 diff --git a/core/bin/external_node/src/config/mod.rs b/core/bin/external_node/src/config/mod.rs index db2c6eac9cf6..235802e1073b 100644 --- a/core/bin/external_node/src/config/mod.rs +++ b/core/bin/external_node/src/config/mod.rs @@ -128,6 +128,7 @@ pub(crate) struct RemoteENConfig { pub l2_weth_bridge_addr: Option
<Address>, pub l2_testnet_paymaster_addr: Option
<Address>, pub l2_timestamp_asserter_addr: Option
<Address>, + pub l1_wrapped_base_token_store: Option<Address>
, pub base_token_addr: Address, pub l1_batch_commit_data_generator_mode: L1BatchCommitmentMode, pub dummy_verifier: bool, @@ -195,6 +196,9 @@ impl RemoteENConfig { l1_bytecodes_supplier_addr: ecosystem_contracts .as_ref() .and_then(|a| a.l1_bytecodes_supplier_addr), + l1_wrapped_base_token_store: ecosystem_contracts + .as_ref() + .and_then(|a| a.l1_wrapped_base_token_store), l1_diamond_proxy_addr, l2_testnet_paymaster_addr, l1_erc20_bridge_proxy_addr: bridges.l1_erc20_default_bridge, @@ -235,6 +239,7 @@ impl RemoteENConfig { l2_shared_bridge_addr: Some(Address::repeat_byte(6)), l2_legacy_shared_bridge_addr: Some(Address::repeat_byte(7)), l1_batch_commit_data_generator_mode: L1BatchCommitmentMode::Rollup, + l1_wrapped_base_token_store: None, dummy_verifier: true, l2_timestamp_asserter_addr: None, } @@ -1477,6 +1482,7 @@ impl From<&ExternalNodeConfig> for InternalApiConfig { l2_weth_bridge: config.remote.l2_weth_bridge_addr, }, l1_bytecodes_supplier_addr: config.remote.l1_bytecodes_supplier_addr, + l1_wrapped_base_token_store: config.remote.l1_wrapped_base_token_store, l1_bridgehub_proxy_addr: config.remote.l1_bridgehub_proxy_addr, l1_state_transition_proxy_addr: config.remote.l1_state_transition_proxy_addr, l1_transparent_proxy_admin_addr: config.remote.l1_transparent_proxy_admin_addr, diff --git a/core/lib/basic_types/src/protocol_version.rs b/core/lib/basic_types/src/protocol_version.rs index 89251b3a2a40..5d896040f760 100644 --- a/core/lib/basic_types/src/protocol_version.rs +++ b/core/lib/basic_types/src/protocol_version.rs @@ -75,11 +75,11 @@ pub enum ProtocolVersionId { impl ProtocolVersionId { pub const fn latest() -> Self { - Self::Version25 + Self::Version26 } pub const fn next() -> Self { - Self::Version26 + Self::Version27 } pub fn try_from_packed_semver(packed_semver: U256) -> Result { @@ -123,7 +123,7 @@ impl ProtocolVersionId { ProtocolVersionId::Version23 => VmVersion::Vm1_5_0SmallBootloaderMemory, ProtocolVersionId::Version24 => VmVersion::Vm1_5_0IncreasedBootloaderMemory, ProtocolVersionId::Version25 => VmVersion::Vm1_5_0IncreasedBootloaderMemory, - ProtocolVersionId::Version26 => VmVersion::Vm1_5_0IncreasedBootloaderMemory, + ProtocolVersionId::Version26 => VmVersion::VmGateway, ProtocolVersionId::Version27 => VmVersion::VmGateway, ProtocolVersionId::Version28 => unreachable!("Version 28 is not yet supported"), } @@ -192,7 +192,7 @@ impl ProtocolVersionId { } pub const fn gateway_upgrade() -> Self { - ProtocolVersionId::Version27 + ProtocolVersionId::Version26 } } @@ -298,7 +298,7 @@ impl From for VmVersion { ProtocolVersionId::Version23 => VmVersion::Vm1_5_0SmallBootloaderMemory, ProtocolVersionId::Version24 => VmVersion::Vm1_5_0IncreasedBootloaderMemory, ProtocolVersionId::Version25 => VmVersion::Vm1_5_0IncreasedBootloaderMemory, - ProtocolVersionId::Version26 => VmVersion::Vm1_5_0IncreasedBootloaderMemory, + ProtocolVersionId::Version26 => VmVersion::VmGateway, ProtocolVersionId::Version27 => VmVersion::VmGateway, ProtocolVersionId::Version28 => unreachable!("Version 28 is not yet supported"), } diff --git a/core/lib/basic_types/src/vm.rs b/core/lib/basic_types/src/vm.rs index f11f98596f18..4469785c7411 100644 --- a/core/lib/basic_types/src/vm.rs +++ b/core/lib/basic_types/src/vm.rs @@ -22,7 +22,7 @@ pub enum VmVersion { impl VmVersion { /// Returns the latest supported VM version. 
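+ // Illustrative sketch of the intended mapping after this bump (mirrors the
+ // `ProtocolVersionId` changes above; `VmVersion::from` is the `From<ProtocolVersionId>`
+ // impl shown earlier):
+ //   assert_eq!(VmVersion::from(ProtocolVersionId::Version26), VmVersion::VmGateway);
+ //   assert_eq!(VmVersion::latest(), VmVersion::VmGateway);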
pub const fn latest() -> VmVersion { - Self::Vm1_5_0IncreasedBootloaderMemory + Self::VmGateway } } diff --git a/core/lib/config/src/configs/contracts.rs b/core/lib/config/src/configs/contracts.rs index f6bd02f2dfae..561e51fa5dd5 100644 --- a/core/lib/config/src/configs/contracts.rs +++ b/core/lib/config/src/configs/contracts.rs @@ -9,6 +9,10 @@ pub struct EcosystemContracts { pub state_transition_proxy_addr: Address, pub transparent_proxy_admin_addr: Address, pub l1_bytecodes_supplier_addr: Option
<Address>, + // Note that on the contract side this contract is called `L2WrappedBaseTokenStore`, + // while on the server side we call it `l1_wrapped_base_token_store`, for consistency with + // the convention that the prefix denotes where the contract lives. + pub l1_wrapped_base_token_store: Option<Address>
, } impl EcosystemContracts { @@ -18,6 +22,7 @@ impl EcosystemContracts { state_transition_proxy_addr: Address::repeat_byte(0x15), transparent_proxy_admin_addr: Address::repeat_byte(0x15), l1_bytecodes_supplier_addr: Some(Address::repeat_byte(0x16)), + l1_wrapped_base_token_store: Some(Address::repeat_byte(0x17)), } } } @@ -50,8 +55,6 @@ pub struct ContractsConfig { pub base_token_addr: Option
<Address>, pub l1_base_token_asset_id: Option<H256>, - pub l2_predeployed_wrapped_base_token_address: Option
<Address>, - pub chain_admin_addr: Option
<Address>, pub l2_da_validator_addr: Option<Address>
, } @@ -76,7 +79,6 @@ impl ContractsConfig { governance_addr: Address::repeat_byte(0x13), base_token_addr: Some(Address::repeat_byte(0x14)), l1_base_token_asset_id: Some(H256::repeat_byte(0x15)), - l2_predeployed_wrapped_base_token_address: Some(Address::repeat_byte(0x1b)), ecosystem_contracts: Some(EcosystemContracts::for_tests()), chain_admin_addr: Some(Address::repeat_byte(0x18)), l2_da_validator_addr: Some(Address::repeat_byte(0x1a)), diff --git a/core/lib/config/src/testonly.rs b/core/lib/config/src/testonly.rs index 3472cf4e7d0a..431fa406d109 100644 --- a/core/lib/config/src/testonly.rs +++ b/core/lib/config/src/testonly.rs @@ -268,7 +268,6 @@ impl Distribution for EncodeDist { ecosystem_contracts: self.sample(rng), base_token_addr: self.sample_opt(|| rng.gen()), l1_base_token_asset_id: self.sample_opt(|| rng.gen()), - l2_predeployed_wrapped_base_token_address: self.sample_opt(|| rng.gen()), chain_admin_addr: self.sample_opt(|| rng.gen()), l2_da_validator_addr: self.sample_opt(|| rng.gen()), } @@ -763,6 +762,7 @@ impl Distribution for EncodeDist { state_transition_proxy_addr: rng.gen(), transparent_proxy_admin_addr: rng.gen(), l1_bytecodes_supplier_addr: rng.gen(), + l1_wrapped_base_token_store: rng.gen(), } } } diff --git a/core/lib/contracts/src/lib.rs b/core/lib/contracts/src/lib.rs index 9ca679fef899..87f102be39d6 100644 --- a/core/lib/contracts/src/lib.rs +++ b/core/lib/contracts/src/lib.rs @@ -37,24 +37,39 @@ const FORGE_PATH_PREFIX: &str = "contracts/l1-contracts/out"; const BRIDGEHUB_CONTRACT_FILE: (&str, &str) = ("bridgehub", "IBridgehub.sol/IBridgehub.json"); const STATE_TRANSITION_CONTRACT_FILE: (&str, &str) = ( "state-transition", - "StateTransitionManager.sol/StateTransitionManager.json", + "ChainTypeManager.sol/ChainTypeManager.json", ); +const BYTECODE_SUPPLIER_CONTRACT_FILE: (&str, &str) = + ("upgrades", "BytecodesSupplier.sol/BytecodesSupplier.json"); const ZKSYNC_HYPERCHAIN_CONTRACT_FILE: (&str, &str) = ( "state-transition/chain-interfaces", - "IZkSyncHyperchain.sol/IZkSyncHyperchain.json", + "IZKChain.sol/IZKChain.json", ); const DIAMOND_INIT_CONTRACT_FILE: (&str, &str) = ( "state-transition", "chain-interfaces/IDiamondInit.sol/IDiamondInit.json", ); const GOVERNANCE_CONTRACT_FILE: (&str, &str) = ("governance", "IGovernance.sol/IGovernance.json"); -const CHAIN_ADMIN_CONTRACT_FILE: (&str, &str) = ("governance", "IChainAdmin.sol/IChainAdmin.json"); +// TODO(EVM-924): We currently only support the "Ownable" chain admin. 
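+ // The ABI path moves from `IChainAdmin.sol/IChainAdmin.json` to the `IChainAdminOwnable`
+ // artifact, matching the updated `contracts` submodule, in the same spirit as the
+ // `ChainTypeManager` and `IZKChain` renames above.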
+const CHAIN_ADMIN_CONTRACT_FILE: (&str, &str) = ( + "governance", + "IChainAdminOwnable.sol/IChainAdminOwnable.json", +); const GETTERS_FACET_CONTRACT_FILE: (&str, &str) = ( "state-transition/chain-interfaces", "IGetters.sol/IGetters.json", ); const MULTICALL3_CONTRACT_FILE: (&str, &str) = ("dev-contracts", "Multicall3.sol/Multicall3.json"); +const L1_ASSET_ROUTER_FILE: (&str, &str) = ( + "bridge/asset-router", + "L1AssetRouter.sol/L1AssetRouter.json", +); +const L2_WRAPPED_BASE_TOKEN_STORE: (&str, &str) = ( + "bridge", + "L2WrappedBaseTokenStore.sol/L2WrappedBaseTokenStore.json", +); + const VERIFIER_CONTRACT_FILE: (&str, &str) = ("state-transition", "Verifier.sol/Verifier.json"); const DUAL_VERIFIER_CONTRACT_FILE: (&str, &str) = ( "state-transition/verifiers", @@ -158,6 +173,10 @@ pub fn state_transition_manager_contract() -> Contract { load_contract_for_both_compilers(STATE_TRANSITION_CONTRACT_FILE) } +pub fn bytecode_supplier_contract() -> Contract { + load_contract_for_both_compilers(BYTECODE_SUPPLIER_CONTRACT_FILE) +} + pub fn hyperchain_contract() -> Contract { load_contract_for_both_compilers(ZKSYNC_HYPERCHAIN_CONTRACT_FILE) } @@ -170,6 +189,14 @@ pub fn multicall_contract() -> Contract { load_contract_for_both_compilers(MULTICALL3_CONTRACT_FILE) } +pub fn l1_asset_router_contract() -> Contract { + load_contract_for_both_compilers(L1_ASSET_ROUTER_FILE) +} + +pub fn wrapped_base_token_store_contract() -> Contract { + load_contract_for_both_compilers(L2_WRAPPED_BASE_TOKEN_STORE) +} + pub fn verifier_contract() -> Contract { let path = format!("{}/{}", FORGE_PATH_PREFIX, DUAL_VERIFIER_CONTRACT_FILE.1); let zksync_home = home_path(); @@ -190,6 +217,14 @@ pub fn l1_messenger_contract() -> Contract { load_sys_contract("L1Messenger") } +pub fn l2_message_root() -> Contract { + load_contract("contracts/l1-contracts/out/MessageRoot.sol/MessageRoot.json") +} + +pub fn l2_rollup_da_validator_bytecode() -> Vec { + read_bytecode("contracts/l2-contracts/zkout/RollupL2DAValidator.sol/RollupL2DAValidator.json") +} + /// Reads bytecode from the path RELATIVE to the Cargo workspace location. pub fn read_bytecode(relative_path: impl AsRef + std::fmt::Debug) -> Vec { read_bytecode_from_path(relative_path).expect("Failed to open file") @@ -719,14 +754,14 @@ pub static PRE_BOOJUM_COMMIT_FUNCTION: Lazy = Lazy::new(|| { serde_json::from_str(abi).unwrap() }); -pub static SET_CHAIN_ID_EVENT: Lazy = Lazy::new(|| { +pub static GENESIS_UPGRADE_EVENT: Lazy = Lazy::new(|| { let abi = r#" { "anonymous": false, "inputs": [ { "indexed": true, - "name": "_stateTransitionChain", + "name": "_hyperchain", "type": "address" }, { @@ -804,9 +839,14 @@ pub static SET_CHAIN_ID_EVENT: Lazy = Lazy::new(|| { "indexed": true, "name": "_protocolVersion", "type": "uint256" + }, + { + "indexed": false, + "name": "_factoryDeps", + "type": "bytes[]" } ], - "name": "SetChainIdUpgrade", + "name": "GenesisUpgrade", "type": "event" }"#; serde_json::from_str(abi).unwrap() @@ -1422,28 +1462,3 @@ pub static POST_SHARED_BRIDGE_EXECUTE_FUNCTION: Lazy = Lazy::new(|| { }"#; serde_json::from_str(abi).unwrap() }); - -// Temporary thing, should be removed when new contracts are merged. 
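+ // The hand-written `getChainRoot` ABI below is superseded by `l2_message_root()`, which loads
+ // the full MessageRoot artifact from `contracts/l1-contracts/out/MessageRoot.sol/MessageRoot.json`.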
-pub static MESSAGE_ROOT_CONTRACT: Lazy = Lazy::new(|| { - let abi = r#" - [{ - "inputs": [ - { - "internalType": "uint256", - "name": "_chainId", - "type": "uint256" - } - ], - "name": "getChainRoot", - "outputs": [ - { - "internalType": "bytes32", - "name": "", - "type": "bytes32" - } - ], - "stateMutability": "view", - "type": "function" - }]"#; - serde_json::from_str(abi).unwrap() -}); diff --git a/core/lib/dal/.sqlx/query-33d49ec6028974fa8b46d7bf1f79e41923477ed8dc179ca0e1fe64b4700e6572.json b/core/lib/dal/.sqlx/query-33d49ec6028974fa8b46d7bf1f79e41923477ed8dc179ca0e1fe64b4700e6572.json new file mode 100644 index 000000000000..703a57ae0597 --- /dev/null +++ b/core/lib/dal/.sqlx/query-33d49ec6028974fa8b46d7bf1f79e41923477ed8dc179ca0e1fe64b4700e6572.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n COUNT(*)\n FROM\n eth_txs\n WHERE\n confirmed_eth_tx_history_id IS NULL\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + null + ] + }, + "hash": "33d49ec6028974fa8b46d7bf1f79e41923477ed8dc179ca0e1fe64b4700e6572" +} diff --git a/core/lib/dal/src/eth_sender_dal.rs b/core/lib/dal/src/eth_sender_dal.rs index 191ea3231d1c..eecd102f395e 100644 --- a/core/lib/dal/src/eth_sender_dal.rs +++ b/core/lib/dal/src/eth_sender_dal.rs @@ -86,6 +86,25 @@ impl EthSenderDal<'_, '_> { Ok(count.try_into().unwrap()) } + pub async fn get_unconfirmed_txs_count(&mut self) -> DalResult { + let count = sqlx::query!( + r#" + SELECT + COUNT(*) + FROM + eth_txs + WHERE + confirmed_eth_tx_history_id IS NULL + "# + ) + .instrument("get_unconfirmed_txs_count") + .fetch_one(self.storage) + .await? + .count + .unwrap(); + Ok(count.try_into().unwrap()) + } + pub async fn get_eth_l1_batches(&mut self) -> sqlx::Result { struct EthTxRow { number: i64, diff --git a/core/lib/env_config/src/contracts.rs b/core/lib/env_config/src/contracts.rs index 457a946d9831..4cd0b021ff20 100644 --- a/core/lib/env_config/src/contracts.rs +++ b/core/lib/env_config/src/contracts.rs @@ -10,8 +10,14 @@ impl FromEnv for EcosystemContracts { .parse()?, transparent_proxy_admin_addr: std::env::var("CONTRACTS_TRANSPARENT_PROXY_ADMIN_ADDR")? .parse()?, - // Not supported yet - l1_bytecodes_supplier_addr: None, + l1_bytecodes_supplier_addr: std::env::var("CONTRACTS_L1_BYTECODE_SUPPLIER_ADDR")? + .parse() + .ok(), + l1_wrapped_base_token_store: std::env::var( + "CONTRACTS_L1_WRAPPED_BASE_TOKEN_STORE_ADDR", + )? 
+ .parse() + .ok(), }) } } @@ -44,6 +50,9 @@ impl FromEnv for ContractsConfig { #[cfg(test)] mod tests { + use std::str::FromStr; + + use zksync_basic_types::H256; use zksync_config::configs::EcosystemContracts; use zksync_system_constants::SHARED_BRIDGE_ETHER_TOKEN_ADDRESS; @@ -72,11 +81,20 @@ mod tests { bridgehub_proxy_addr: addr("0x35ea7f92f4c5f433efe15284e99c040110cf6297"), state_transition_proxy_addr: addr("0xd90f1c081c6117241624e97cb6147257c3cb2097"), transparent_proxy_admin_addr: addr("0xdd6fa5c14e7550b4caf2aa2818d24c69cbc347e5"), - l1_bytecodes_supplier_addr: None, + l1_bytecodes_supplier_addr: Some(addr( + "0x36ea7f92f4c5f433efe15284e99c040110cf6297", + )), + l1_wrapped_base_token_store: Some(addr( + "0x36ea7f92f4c5f433efe15284e99c040110cf6298", + )), }), base_token_addr: Some(SHARED_BRIDGE_ETHER_TOKEN_ADDRESS), - l1_base_token_asset_id: None, - l2_predeployed_wrapped_base_token_address: None, + l1_base_token_asset_id: Some( + H256::from_str( + "0x0000000000000000000000000000000000000001000000000000000000000000", + ) + .unwrap(), + ), chain_admin_addr: Some(addr("0xdd6fa5c14e7550b4caf2aa2818d24c69cbc347ff")), l2_da_validator_addr: Some(addr("0xed6fa5c14e7550b4caf2aa2818d24c69cbc347ff")), l2_timestamp_asserter_addr: Some(addr("0x0000000000000000000000000000000000000002")), @@ -101,11 +119,16 @@ CONTRACTS_L2_CONSENSUS_REGISTRY_ADDR="D64e136566a9E04eb05B30184fF577F52682D182" CONTRACTS_L1_MULTICALL3_ADDR="0xcA11bde05977b3631167028862bE2a173976CA11" CONTRACTS_L1_SHARED_BRIDGE_PROXY_ADDR="0x8656770FA78c830456B00B4fFCeE6b1De0e1b888" CONTRACTS_L2_SHARED_BRIDGE_ADDR="0x8656770FA78c830456B00B4fFCeE6b1De0e1b888" +CONTRACTS_L1_BYTECODE_SUPPLIER_ADDR="0x36ea7f92f4c5f433efe15284e99c040110cf6297" CONTRACTS_L2_LEGACY_SHARED_BRIDGE_ADDR="0x8656770FA78c830456B00B4fFCeE6b1De0e1b888" CONTRACTS_BRIDGEHUB_PROXY_ADDR="0x35ea7f92f4c5f433efe15284e99c040110cf6297" CONTRACTS_STATE_TRANSITION_PROXY_ADDR="0xd90f1c081c6117241624e97cb6147257c3cb2097" CONTRACTS_TRANSPARENT_PROXY_ADMIN_ADDR="0xdd6fa5c14e7550b4caf2aa2818d24c69cbc347e5" CONTRACTS_BASE_TOKEN_ADDR="0x0000000000000000000000000000000000000001" +CONTRACTS_L1_BASE_TOKEN_ASSET_ID="0x0000000000000000000000000000000000000001000000000000000000000000" +CONTRACTS_L1_WRAPPED_BASE_TOKEN_STORE_ADDR="0x36ea7f92f4c5f433efe15284e99c040110cf6298" +CONTRACTS_L2_NATIVE_TOKEN_VAULT_PROXY_ADDR="0xfc073319977e314f251eae6ae6be76b0b3baeecf" +CONTRACTS_PREDEPLOYED_L2_WRAPPED_BASE_TOKEN_ADDRESS="0x35ea7f92f4c5f433efe15284e99c040110cf6299" CONTRACTS_CHAIN_ADMIN_ADDR="0xdd6fa5c14e7550b4caf2aa2818d24c69cbc347ff" CONTRACTS_L2_DA_VALIDATOR_ADDR="0xed6fa5c14e7550b4caf2aa2818d24c69cbc347ff" CONTRACTS_L2_TIMESTAMP_ASSERTER_ADDR="0x0000000000000000000000000000000000000002" diff --git a/core/lib/env_config/src/eth_sender.rs b/core/lib/env_config/src/eth_sender.rs index b15a153c30c3..3030d4206812 100644 --- a/core/lib/env_config/src/eth_sender.rs +++ b/core/lib/env_config/src/eth_sender.rs @@ -61,7 +61,6 @@ mod tests { aggregated_block_execute_deadline: 4_000, max_aggregated_tx_gas: 4_000_000, max_eth_tx_data_size: 120_000, - timestamp_criteria_max_allowed_lag: 30, max_aggregated_blocks_to_commit: 3, max_aggregated_blocks_to_execute: 4, diff --git a/core/lib/env_config/src/wallets.rs b/core/lib/env_config/src/wallets.rs index 3518d56f7b45..e9574be4456f 100644 --- a/core/lib/env_config/src/wallets.rs +++ b/core/lib/env_config/src/wallets.rs @@ -33,6 +33,7 @@ impl FromEnv for Wallets { } else { None }; + Some(EthSender { operator, blob_operator, diff --git 
a/core/lib/l1_contract_interface/src/i_executor/methods/commit_batches.rs b/core/lib/l1_contract_interface/src/i_executor/methods/commit_batches.rs index 01e362fb7d65..3487ad81a840 100644 --- a/core/lib/l1_contract_interface/src/i_executor/methods/commit_batches.rs +++ b/core/lib/l1_contract_interface/src/i_executor/methods/commit_batches.rs @@ -18,7 +18,7 @@ pub struct CommitBatches<'a> { pub mode: L1BatchCommitmentMode, } -impl Tokenize for CommitBatches<'_> { +impl Tokenize for &CommitBatches<'_> { fn into_tokens(self) -> Vec { let protocol_version = self.l1_batches[0].header.protocol_version.unwrap(); let stored_batch_info = StoredBatchInfo::from(self.last_committed_l1_batch).into_token(); @@ -27,6 +27,7 @@ impl Tokenize for CommitBatches<'_> { .iter() .map(|batch| CommitBatchInfo::new(self.mode, batch, self.pubdata_da).into_token()) .collect(); + if protocol_version.is_pre_gateway() { vec![stored_batch_info, Token::Array(l1_batches_to_commit)] } else { diff --git a/core/lib/l1_contract_interface/src/i_executor/methods/prove_batches.rs b/core/lib/l1_contract_interface/src/i_executor/methods/prove_batches.rs index 2d02bd5a1764..817448cc1b62 100644 --- a/core/lib/l1_contract_interface/src/i_executor/methods/prove_batches.rs +++ b/core/lib/l1_contract_interface/src/i_executor/methods/prove_batches.rs @@ -58,8 +58,10 @@ impl ProveBatches { } }; + let should_use_fflonk = !is_verifier_pre_fflonk || !protocol_version.is_pre_fflonk(); + if protocol_version.is_pre_gateway() { - let proof_input = if !is_verifier_pre_fflonk || !protocol_version.is_pre_fflonk() { + let proof_input = if should_use_fflonk { Token::Tuple(vec![ Token::Array(vec![verifier_type.into_token()]), Token::Array(proof.into_iter().map(Token::Uint).collect()), @@ -73,7 +75,17 @@ impl ProveBatches { vec![prev_l1_batch_info, batches_arg, proof_input] } else { - let proof_input = Token::Array(proof.into_iter().map(Token::Uint).collect()); + let proof_input = if should_use_fflonk { + Token::Array( + vec![verifier_type] + .into_iter() + .chain(proof) + .map(Token::Uint) + .collect(), + ) + } else { + Token::Array(proof.into_iter().map(Token::Uint).collect()) + }; let encoded_data = encode(&[prev_l1_batch_info, batches_arg, proof_input]); let prove_data = [[SUPPORTED_ENCODING_VERSION].to_vec(), encoded_data] diff --git a/core/lib/l1_contract_interface/src/i_executor/structures/mod.rs b/core/lib/l1_contract_interface/src/i_executor/structures/mod.rs index 9583e0204f75..5035abf6af60 100644 --- a/core/lib/l1_contract_interface/src/i_executor/structures/mod.rs +++ b/core/lib/l1_contract_interface/src/i_executor/structures/mod.rs @@ -3,6 +3,8 @@ mod commit_batch_info; mod stored_batch_info; +pub const SUPPORTED_ENCODING_VERSION: u8 = 0; + #[cfg(test)] mod tests; @@ -13,5 +15,3 @@ pub use self::{ }, stored_batch_info::StoredBatchInfo, }; - -pub const SUPPORTED_ENCODING_VERSION: u8 = 0; diff --git a/core/lib/multivm/src/versions/shadow/tests.rs b/core/lib/multivm/src/versions/shadow/tests.rs index 354459853f11..2d3dd5d3ae30 100644 --- a/core/lib/multivm/src/versions/shadow/tests.rs +++ b/core/lib/multivm/src/versions/shadow/tests.rs @@ -308,7 +308,6 @@ mod l1_messenger { use crate::versions::testonly::l1_messenger::*; #[test] - #[ignore] // Requires post-gateway system contracts fn rollup_da_output_hash_match() { test_rollup_da_output_hash_match::(); } diff --git a/core/lib/multivm/src/versions/testonly/circuits.rs b/core/lib/multivm/src/versions/testonly/circuits.rs index de987a8912db..c379372bc970 100644 --- 
a/core/lib/multivm/src/versions/testonly/circuits.rs +++ b/core/lib/multivm/src/versions/testonly/circuits.rs @@ -34,8 +34,19 @@ pub(crate) fn test_circuits() { let s = res.statistics.circuit_statistic; // Check `circuit_statistic`. const EXPECTED: [f32; 13] = [ - 1.34935, 0.15026, 1.66666, 0.00315, 1.0594, 0.00058, 0.00348, 0.00076, 0.11945, 0.14285, - 0.0, 0.0, 0.0, + 1.258627, + 0.13982475, + 1.6666666, + 0.003154238, + 0.9247803, + 0.00058723404, + 0.0034893616, + 0.00076709175, + 0.11945392, + 0.14285715, + 0.0, + 0.0, + 0.0, ]; let actual = [ (s.main_vm, "main_vm"), diff --git a/core/lib/multivm/src/versions/testonly/l1_messenger.rs b/core/lib/multivm/src/versions/testonly/l1_messenger.rs index dcbc432aafd0..c8b7b6bd8ed0 100644 --- a/core/lib/multivm/src/versions/testonly/l1_messenger.rs +++ b/core/lib/multivm/src/versions/testonly/l1_messenger.rs @@ -1,15 +1,14 @@ use std::rc::Rc; use ethabi::Token; -use zksync_contracts::l1_messenger_contract; +use zksync_contracts::{l1_messenger_contract, l2_rollup_da_validator_bytecode}; use zksync_test_contracts::{TestContract, TxType}; use zksync_types::{ address_to_h256, u256_to_h256, web3::keccak256, Address, Execute, ProtocolVersionId, L1_MESSENGER_ADDRESS, U256, }; -use zksync_vm_interface::SystemEnv; -use super::{default_system_env, ContractToDeploy, TestedVm, VmTesterBuilder}; +use super::{ContractToDeploy, TestedVm, VmTesterBuilder}; use crate::{ interface::{ pubdata::{PubdataBuilder, PubdataInput}, @@ -22,11 +21,6 @@ use crate::{ const L2_DA_VALIDATOR_OUTPUT_HASH_KEY: usize = 5; const USED_L2_DA_VALIDATOR_ADDRESS_KEY: usize = 6; -// Bytecode is temporary hardcoded, should be removed after contracts are merged. -fn l2_rollup_da_validator_bytecode() -> Vec { - hex::decode("0012000000000002000a000000000002000000000301001900000060043002700000012703400197000100000031035500020000003103550003000000310355000400000031035500050000003103550006000000310355000700000031035500080000003103550009000000310355000a000000310355000b000000310355000c000000310355000d000000310355000e000000310355000f00000031035500100000003103550011000000010355000001270040019d0000008004000039000000400040043f00000001002001900000005d0000c13d000000040030008c000000fe0000413d000000000201043b00000129022001970000012a0020009c000000fe0000c13d000000a40030008c000000fe0000413d0000000002000416000000000002004b000000fe0000c13d0000008402100370000000000202043b000300000002001d0000012b0020009c000000fe0000213d00000003020000290000002302200039000000000032004b000000fe0000813d00000003020000290000000402200039000000000421034f000000000604043b0000012b0060009c000000fe0000213d0000000304000029000700240040003d0000000704600029000000000034004b000000fe0000213d0000004403100370000000000303043b000400000003001d0000006403100370000000000303043b000200000003001d000000040060008c000000fe0000413d0000002002200039000000000221034f000000000202043b000000e00220027000000058022000c90000000804200039000000000064004b000000fe0000213d00000003022000290000002802200039000000000121034f000000000101043b000500e00010027a000600000006001d000000650000c13d00000000090000190000000403000029000000000039004b000000f10000c13d0000014e0040009c000000fb0000a13d0000014001000041000000000010043f0000001101000039000000040010043f00000138010000410000049a000104300000000001000416000000000001004b000000fe0000c13d0000002001000039000001000010044300000120000004430000012801000041000004990001042e000000000800001900000000090000190000014f0040009c000000570000813d0000000403400039000000000063004b000000fe0000213d00000007024000290000001101000367000000000221034f000000000502043b000000e00
4500270000000000034001a000000570000413d0000000007340019000000000067004b000000fe0000213d00000000020004140000012c0050009c0000007b0000813d0000000003000031000000840000013d000000070330002900000127053001970001000000510355000000000034001a000000570000413d0000000003340019000000000330007b000000570000413d000000000151034f000a00000009001d000800000008001d000900000007001d000001270330019700010000003103e50000012d0020009c000003c20000813d00000000013103df000000c0022002100000012e022001970000012c022001c700010000002103b500000000012103af0000801002000039049804930000040f0000000003010019000000600330027000000127033001970000000100200190000002450000613d0000001f0230003900000130052001970000003f025000390000013104200197000000400200043d0000000004420019000000000024004b000000000600003900000001060040390000012b0040009c0000023f0000213d00000001006001900000023f0000c13d000000400040043f0000000004320436000000000005004b000000b10000613d0000000005540019000000000600003100000011066003670000000007040019000000006806043c0000000007870436000000000057004b000000ad0000c13d0000012f063001980000000005640019000000ba0000613d000000000701034f0000000008040019000000007907043c0000000008980436000000000058004b000000b60000c13d0000001f03300190000000c70000613d000000000161034f0000000303300210000000000605043300000000063601cf000000000636022f000000000101043b0000010003300089000000000131022f00000000013101cf000000000161019f00000000001504350000000001020433000000200010008c0000000a05000029000004210000c13d0000000002040433000000400100043d000000400310003900000000002304350000002002100039000000000052043500000040030000390000000000310435000001320010009c0000023f0000213d0000006003100039000000400030043f000001270020009c000001270200804100000040022002100000000001010433000001270010009c00000127010080410000006001100210000000000121019f0000000002000414000001270020009c0000012702008041000000c002200210000000000112019f00000133011001c700008010020000390498048e0000040f0000000100200190000000060600002900000009040000290000000808000029000000fe0000613d000000000901043b0000000108800039000000050080006c000000670000413d000000520000013d000000400100043d000000440210003900000000009204350000002402100039000000000032043500000134020000410000000000210435000000040210003900000000000204350000042d0000013d0000000403400039000000000063004b000001000000a13d00000000010000190000049a0001043000000007014000290000001101100367000000000101043b000400e00010027a0000025d0000c13d000000000900001900000000050300190000000003090019000000020090006c000002f20000c13d000000060050006c000002fd0000813d00000007015000290000001102000367000000000112034f000000000101043b000000f801100270000000010010008c000003030000c13d00000000060500190000014e0060009c0000000604000029000000570000213d0000000403600039000000000043004b000000fe0000213d00000003016000290000002501100039000000000112034f000000000101043b000000000043004b000002fd0000813d000000e8011002700000000703300029000000000432034f0000000503500039000000000404043b000000000031001a0000000607000029000000570000413d000a00000031001d0000000a0070006b000000fe0000213d000000050600008a0000000a0060006b000000570000213d0000000a050000290000000405500039000000000075004b000000fe0000213d0000000a08000029000300070080002d0000000306200360000000000606043b000400000006001d000000e006600272000500000006001d00090110006000cd0000013f0000613d000000090800002900000005068000fa000001100060008c000000570000c13d000000090050002a000000570000413d000200090050002d000000020070006c000000fe0000413d000000f804400270000000400a00043d0000004406a00039000000800700003900000000007604350000002406a000390000000000460435000001410400004100000000004a043500000007055000290000008404a0
0039000000090900002900000000009404350000000404a0003900000005060000290000000000640435000000000752034f0000001f0890018f00080000000a001d000000a405a0003900000142099001980000000006950019000001610000613d000000000a07034f000000000b05001900000000ac0a043c000000000bcb043600000000006b004b0000015d0000c13d0000000703300029000000000008004b0000016f0000613d000000000797034f0000000308800210000000000906043300000000098901cf000000000989022f000000000707043b0000010008800089000000000787022f00000000078701cf000000000797019f00000000007604350000000907000029000000000675001900000000000604350000001f06700039000001430660019700000000066500190000000004460049000000080500002900000064055000390000000000450435000000000432034f0000001f0510018f000000000216043600000144061001980000000003620019000001850000613d000000000704034f0000000008020019000000007907043c0000000008980436000000000038004b000001810000c13d000000000005004b000001920000613d000000000464034f0000000305500210000000000603043300000000065601cf000000000656022f000000000404043b0000010005500089000000000454022f00000000045401cf000000000464019f0000000000430435000000000312001900000000000304350000001f011000390000014501100197000000080300002900000000013100490000000001210019000001270010009c00000127010080410000006001100210000001270030009c000001270200004100000000020340190000004002200210000000000121019f0000000002000414000001270020009c0000012702008041000000c002200210000000000112019f0000800e02000039049804890000040f000000000301001900000060033002700000012703300197000000200030008c000000200400003900000000040340190000001f0640018f00000020074001900000000805700029000001b80000613d000000000801034f0000000809000029000000008a08043c0000000009a90436000000000059004b000001b40000c13d000000000006004b000001c50000613d000000000771034f0000000306600210000000000805043300000000086801cf000000000868022f000000000707043b0000010006600089000000000767022f00000000066701cf000000000686019f00000000006504350000000100200190000003480000613d0000001f01400039000000600110018f0000000802100029000000000012004b00000000010000390000000101004039000100000002001d0000012b0020009c0000023f0000213d00000001001001900000023f0000c13d0000000101000029000000400010043f000000200030008c0000000604000029000000fe0000413d00000008010000290000000001010433000800000001001d00000004010000290000012c0010009c000001e10000413d000000090200002900000005012000fa000001100010008c000000570000c13d0000000103000029000000440130003900000024023000390000000403300039000000020440006c000003660000c13d000001460400004100000001050000290000000000450435000000200400003900000000004304350000000a04000029000000000042043500000150034001980000001f0440018f000000000231001900000007050000290000001105500367000001fa0000613d000000000605034f0000000007010019000000006806043c0000000007870436000000000027004b000001f60000c13d000000000004004b000002070000613d000000000335034f0000000304400210000000000502043300000000054501cf000000000545022f000000000303043b0000010004400089000000000343022f00000000034301cf000000000353019f00000000003204350000000a030000290000001f023000390000015002200197000000000131001900000000000104350000004401200039000001270010009c000001270100804100000060011002100000000102000029000001270020009c00000127020080410000004002200210000000000112019f0000000002000414000001270020009c0000012702008041000000c002200210000000000112019f00008011020000390498048e0000040f000000000301001900000060033002700000001f0430018f0000012f0530019700000127033001970000000100200190000003720000613d0000000102500029000000000005004b0000022c0000613d000000000601034f0000000107000029000000006806043c0000000007870436000000000027004b000002280000c13d000000000004004
b000002390000613d000000000151034f0000000304400210000000000502043300000000054501cf000000000545022f000000000101043b0000010004400089000000000141022f00000000014101cf000000000151019f00000000001204350000001f0130003900000130011001970000000101100029000900000001001d0000012b0010009c0000038a0000a13d0000014001000041000000000010043f0000004101000039000000040010043f00000138010000410000049a000104300000001f0430018f0000012f023001980000024e0000613d000000000501034f0000000006000019000000005705043c0000000006760436000000000026004b0000024a0000c13d000000000004004b0000025b0000613d000000000121034f0000000304400210000000000502043300000000054501cf000000000545022f000000000101043b0000010004400089000000000141022f00000000014101cf000000000151019f000000000012043500000060013002100000049a00010430000000000800001900000000090000190000014e0030009c000000570000213d0000000402300039000000000062004b000000fe0000213d00000007033000290000001101000367000000000331034f000000000303043b000000e00a30027000000000002a001a000000570000413d00000000072a0019000000000067004b000000fe0000213d0000013600300198000003130000c13d000001390030009c000003190000813d0000013a003001980000031f0000613d000000070420002900000127034001970000000002000414000100000031035500000000004a001a000000570000413d00000000044a0019000000000440007b000000570000413d00090000000a001d000a00000009001d000500000008001d000800000007001d000000000131034f000001270340019700010000003103e5000001270020009c000003c20000213d00000000013103df000000c0022002100000012e022001970000012c022001c700010000002103b500000000012103af0000000202000039049804930000040f00000000030100190000006003300270000001270330019700000001002001900000032a0000613d0000001f0230003900000130052001970000003f025000390000013104200197000000400200043d0000000004420019000000000024004b000000000600003900000001060040390000012b0040009c0000023f0000213d00000001006001900000023f0000c13d000000400040043f0000000004320436000000000005004b000000090a000029000002ad0000613d0000000005540019000000000600003100000011066003670000000007040019000000006806043c0000000007870436000000000057004b000002a90000c13d0000012f063001980000000005640019000002b60000613d000000000701034f0000000008040019000000007907043c0000000008980436000000000058004b000002b20000c13d0000001f03300190000002c30000613d000000000161034f0000000303300210000000000605043300000000063601cf000000000636022f000000000101043b0000010003300089000000000131022f00000000013101cf000000000161019f0000000000150435000000400100043d0000000002020433000000200020008c0000000a05000029000003420000c13d00000000020404330000013d02200197000000db03a002100000013e03300197000000000223019f0000013f022001c7000000400310003900000000002304350000002002100039000000000052043500000040030000390000000000310435000001320010009c0000023f0000213d0000006003100039000000400030043f000001270020009c000001270200804100000040022002100000000001010433000001270010009c00000127010080410000006001100210000000000121019f0000000002000414000001270020009c0000012702008041000000c002200210000000000112019f00000133011001c700008010020000390498048e0000040f0000000100200190000000060600002900000008030000290000000508000029000000fe0000613d000000000901043b0000000108800039000000040080006c0000025f0000413d000001060000013d000000400100043d0000004402100039000000000032043500000024021000390000000203000029000000000032043500000134020000410000000000210435000000040210003900000001030000390000042c0000013d0000014001000041000000000010043f0000003201000039000000040010043f00000138010000410000049a00010430000000400200043d000000440320003900000000001304350000002401200039000000010300003900000000003104350000013401000041000000000012043500
0000040120003900000002030000390000000000310435000001270020009c0000012702008041000000400120021000000135011001c70000049a00010430000000400100043d0000013702000041000000000021043500000004021000390000000203000039000003240000013d000000400100043d0000013702000041000000000021043500000004021000390000000103000039000003240000013d000000400100043d00000137020000410000000000210435000000040210003900000003030000390000000000320435000001270010009c0000012701008041000000400110021000000138011001c70000049a000104300000001f0430018f0000012f02300198000003330000613d000000000501034f0000000006000019000000005705043c0000000006760436000000000026004b0000032f0000c13d000000000004004b000003400000613d000000000121034f0000000304400210000000000502043300000000054501cf000000000545022f000000000101043b0000010004400089000000000141022f00000000014101cf000000000151019f000000000012043500000060013002100000049a0001043000000044021000390000013b03000041000000000032043500000024021000390000001903000039000004270000013d0000001f0530018f0000012f06300198000000400200043d0000000004620019000003530000613d000000000701034f0000000008020019000000007907043c0000000008980436000000000048004b0000034f0000c13d000000000005004b000003600000613d000000000161034f0000000305500210000000000604043300000000065601cf000000000656022f000000000101043b0000010005500089000000000151022f00000000015101cf000000000161019f00000000001404350000006001300210000001270020009c00000127020080410000004002200210000000000112019f0000049a000104300000013405000041000000010600002900000000005604350000000305000039000000000053043500000000000204350000000000410435000001270060009c0000012706008041000000400160021000000135011001c70000049a00010430000000400200043d0000000006520019000000000005004b0000037c0000613d000000000701034f0000000008020019000000007907043c0000000008980436000000000068004b000003780000c13d000000000004004b000003600000613d000000000151034f0000000304400210000000000506043300000000054501cf000000000545022f000000000101043b0000010004400089000000000141022f00000000014101cf000000000151019f0000000000160435000003600000013d0000000901000029000000400010043f000000200030008c000000fe0000413d000000010100002900000000010104330000012b0010009c000000fe0000213d000000010230002900000001011000290000001f03100039000000000023004b000000fe0000813d00000000140104340000012b0040009c0000023f0000213d00000005034002100000003f05300039000001470550019700000009055000290000012b0050009c0000023f0000213d000000400050043f000000090500002900000000004504350000000003130019000000000023004b000000fe0000213d000000000004004b000003ae0000613d0000000902000029000000200220003900000000140104340000000000420435000000000031004b000003a90000413d000000000100041400000011020003670000000a0000006b000003b40000c13d0000000003000031000003be0000013d00000007030000290000012704300197000100000042035500000003050000290000000a0050006c000000570000413d0000000305000029000000000350007b000000570000413d000000000242034f000001270330019700010000003203e5000001270010009c000003c90000a13d000000400100043d00000044021000390000014d03000041000000000032043500000024021000390000000803000039000004270000013d00000000023203df000000c0011002100000012e011001970000012c011001c700010000001203b500000000011203af0000801002000039049804930000040f0000000003010019000000600330027000000127033001970000000100200190000004320000613d0000001f0230003900000130052001970000003f025000390000013104200197000000400200043d0000000004420019000000000024004b000000000600003900000001060040390000012b0040009c0000023f0000213d00000001006001900000023f0000c13d000000400040043f0000000004320436000000000005004b000003ef0000613d0000000005540019000000000600003100000
011066003670000000007040019000000006806043c0000000007870436000000000057004b000003eb0000c13d0000001f0530018f0000012f063001980000000003640019000003f90000613d000000000701034f0000000008040019000000007907043c0000000008980436000000000038004b000003f50000c13d000000000005004b000004060000613d000000000161034f0000000305500210000000000603043300000000065601cf000000000656022f000000000101043b0000010005500089000000000151022f00000000015101cf000000000161019f00000000001304350000000001020433000000200010008c000004210000c13d000000400100043d00000009020000290000000002020433000001000020008c0000044a0000413d00000064021000390000014a03000041000000000032043500000044021000390000014b0300004100000000003204350000002402100039000000250300003900000000003204350000013c020000410000000000210435000000040210003900000020030000390000000000320435000001270010009c000001270100804100000040011002100000014c011001c70000049a00010430000000400100043d00000044021000390000014803000041000000000032043500000024021000390000001f0300003900000000003204350000013c020000410000000000210435000000040210003900000020030000390000000000320435000001270010009c0000012701008041000000400110021000000135011001c70000049a000104300000001f0430018f0000012f023001980000043b0000613d000000000501034f0000000006000019000000005705043c0000000006760436000000000026004b000004370000c13d000000000004004b000004480000613d000000000121034f0000000304400210000000000502043300000000054501cf000000000545022f000000000101043b0000010004400089000000000141022f00000000014101cf000000000151019f000000000012043500000060013002100000049a000104300000000003040433000000f8022002100000006004100039000000000024043500000040021000390000000000320435000000200210003900000008030000290000000000320435000000610310003900000009040000290000000004040433000000000004004b000004610000613d000000000500001900000009060000290000002006600039000900000006001d000000000606043300000000036304360000000105500039000000000045004b000004590000413d0000000003130049000000200430008a00000000004104350000001f0330003900000150043001970000000003140019000000000043004b000000000400003900000001040040390000012b0030009c0000023f0000213d00000001004001900000023f0000c13d000000400030043f000001270020009c000001270200804100000040022002100000000001010433000001270010009c00000127010080410000006001100210000000000121019f0000000002000414000001270020009c0000012702008041000000c002200210000000000112019f00000133011001c700008010020000390498048e0000040f0000000100200190000000fe0000613d000000000101043b000000400200043d0000000000120435000001270020009c0000012702008041000000400120021000000149011001c7000004990001042e0000048c002104210000000102000039000000000001042d0000000002000019000000000001042d00000491002104230000000102000039000000000001042d0000000002000019000000000001042d00000496002104230000000102000039000000000001042d0000000002000019000000000001042d0000049800000432000004990001042e0000049a00010430000000000000000000000000000000000000000000000000000000000000000000000000ffffffff0000000200000000000000000000000000000040000001000000000000000000ffffffff0000000000000000000000000000000000000000000000000000000089f9a07200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffff0000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000ffffffff00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffe000000000000000000000000000000000000000000000000000000001ffffffe000000000000000000000000000000000000000000000000000000003
ffffffe0000000000000000000000000000000000000000000000000ffffffffffffff9f02000000000000000000000000000000000000000000000000000000000000007f7b0cf70000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000001f0000000000000000000000000000000000000000000000000000000043e266b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000007368612072657475726e656420696e76616c696420646174610000000000000008c379a00000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffff06ffffff0000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000004e487b71000000000000000000000000000000000000000000000000000000006006d8b500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffffe0000000000000000000000000000000000000000000000000000003ffffffffe00000000000000000000000000000000000000000000000000000000000ffffe00000000000000000000000000000000000000000000000000000000001ffffe018876a04000000000000000000000000000000000000000000000000000000007fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe06b656363616b3235362072657475726e656420696e76616c69642064617461000000000000000000000000000000000000000020000000000000000000000000206269747300000000000000000000000000000000000000000000000000000053616665436173743a2076616c756520646f65736e27742066697420696e203800000000000000000000000000000000000000840000000000000000000000004f766572666c6f77000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffbfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffcffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe00000000000000000000000000000000000000000000000000000000000000000e901f5bd8811df26e614332e2110b9bc002e2cbadd82065c67e102f858079d5a").unwrap() -} - fn encoded_uncompressed_state_diffs(input: &PubdataInput) -> Vec { let mut result = vec![]; for state_diff in input.state_diffs.iter() { @@ -78,15 +72,10 @@ pub(crate) fn test_rollup_da_output_hash_match() { // In this test, we check whether the L2 DA output hash is as expected. let l2_da_validator_address = Address::repeat_byte(0x12); - let system_env = SystemEnv { - version: ProtocolVersionId::Version27, - ..default_system_env() - }; let mut vm = VmTesterBuilder::new() .with_empty_in_memory_storage() .with_execution_mode(TxExecutionMode::VerifyExecute) .with_rich_accounts(1) - .with_system_env(system_env) .with_custom_contracts(vec![ContractToDeploy { bytecode: l2_rollup_da_validator_bytecode(), address: l2_da_validator_address, diff --git a/core/lib/multivm/src/versions/testonly/l2_blocks.rs b/core/lib/multivm/src/versions/testonly/l2_blocks.rs index f8813231c9e1..ad14aeb60670 100644 --- a/core/lib/multivm/src/versions/testonly/l2_blocks.rs +++ b/core/lib/multivm/src/versions/testonly/l2_blocks.rs @@ -4,6 +4,7 @@ //! 
use assert_matches::assert_matches; +use ethabi::{ParamType, Token}; use zksync_system_constants::REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_BYTE; use zksync_types::{ block::{pack_block_info, L2BlockHasher}, @@ -13,6 +14,7 @@ use zksync_types::{ SYSTEM_CONTEXT_CURRENT_L2_BLOCK_INFO_POSITION, SYSTEM_CONTEXT_CURRENT_TX_ROLLING_HASH_POSITION, U256, }; +use zksync_vm_interface::VmRevertReason; use super::{default_l1_batch, get_empty_storage, tester::VmTesterBuilder, TestedVm}; use crate::{ @@ -27,6 +29,29 @@ use crate::{ }, }; +/// Encodes a Solidity function call with parameters into a Vec. +fn encode_function_call( + name: &str, + types: &[ParamType], + params: &[Token], +) -> Result { + let short_sig = ethabi::short_signature(name, types); + + // Check if the provided number of parameters matches the function's expected inputs + if types.len() != params.len() { + return Err(ethabi::Error::InvalidData); + } + + // Encode the function call with the provided parameters + let encoded_data = ethabi::encode(params); + + Ok(VmRevertReason::Unknown { + function_selector: short_sig.to_vec(), + data: [short_sig.to_vec(), encoded_data].concat(), + } + .to_string()) +} + fn get_l1_noop() -> Transaction { Transaction { common_data: ExecuteTransactionCommon::L1(L1TxCommonData { @@ -72,7 +97,7 @@ pub(crate) fn test_l2_block_initialization_timestamp() { assert_matches!( res.result, ExecutionResult::Halt { reason: Halt::FailedToSetL2Block(msg) } - if msg.contains("timestamp") + if msg.contains("0x5e9ad9b0") ); } @@ -107,7 +132,7 @@ pub(crate) fn test_l2_block_initialization_number_non_zero() { res.result, ExecutionResult::Halt { reason: Halt::FailedToSetL2Block( - "L2 block number is never expected to be zero".to_string() + encode_function_call("L2BlockNumberZero", &[], &[]).unwrap() ) } ); @@ -163,7 +188,15 @@ pub(crate) fn test_l2_block_same_l2_block() { // Case 1: Incorrect timestamp test_same_l2_block::( Some(Halt::FailedToSetL2Block( - "The timestamp of the same L2 block must be same".to_string(), + encode_function_call( + "IncorrectSameL2BlockTimestamp", + &[ParamType::Uint(128), ParamType::Uint(128)], + &[ + Token::Uint(U256::zero()), + Token::Uint(U256::from(1_700_000_001)), + ], + ) + .unwrap(), )), Some(0), None, @@ -172,7 +205,20 @@ pub(crate) fn test_l2_block_same_l2_block() { // Case 2: Incorrect previous block hash test_same_l2_block::( Some(Halt::FailedToSetL2Block( - "The previous hash of the same L2 block must be same".to_string(), + encode_function_call( + "IncorrectSameL2BlockPrevBlockHash", + &[ParamType::FixedBytes(32), ParamType::FixedBytes(32)], + &[ + Token::FixedBytes(H256::zero().0.to_vec()), + Token::FixedBytes( + hex::decode( + "e8e77626586f73b955364c7b4bbf0bb7f7685ebd40e852b164633a4acbd3244c", + ) + .unwrap(), + ), + ], + ) + .unwrap(), )), None, Some(H256::zero()), @@ -249,7 +295,12 @@ pub(crate) fn test_l2_block_new_l2_block() { None, None, Some(Halt::FailedToSetL2Block( - "Invalid new L2 block number".to_string(), + encode_function_call( + "InvalidNewL2BlockNumber", + &[ParamType::Uint(256)], + &[Token::Uint(U256::from(3u32))], + ) + .unwrap(), )), ); @@ -259,7 +310,14 @@ pub(crate) fn test_l2_block_new_l2_block() { None, Some(1), None, - Some(Halt::FailedToSetL2Block("The timestamp of the new L2 block must be greater than the timestamp of the previous L2 block".to_string())), + Some(Halt::FailedToSetL2Block( + encode_function_call( + "NonMonotonicL2BlockTimestamp", + &[ParamType::Uint(128), ParamType::Uint(128)], + &[Token::Uint(U256::from(1)), Token::Uint(U256::from(1))], + ) + 
.unwrap(), + )), ); // Case 3: Incorrect previous block hash @@ -269,7 +327,20 @@ pub(crate) fn test_l2_block_new_l2_block() { None, Some(H256::zero()), Some(Halt::FailedToSetL2Block( - "The current L2 block hash is incorrect".to_string(), + encode_function_call( + "IncorrectL2BlockHash", + &[ParamType::FixedBytes(32), ParamType::FixedBytes(32)], + &[ + Token::FixedBytes(H256::zero().0.to_vec()), + Token::FixedBytes( + hex::decode( + "de4c551714ad02a0a4f51252f966ef90c13376ea4c8a463eedfb242b97551c43", + ) + .unwrap(), + ), + ], + ) + .unwrap(), )), ); @@ -395,7 +466,14 @@ pub(crate) fn test_l2_block_first_in_batch() { prev_block_hash, max_virtual_blocks_to_create: 1, }, - Some(Halt::FailedToSetL2Block("The timestamp of the L2 block must be greater than or equal to the timestamp of the current batch".to_string())), + Some(Halt::FailedToSetL2Block( + encode_function_call( + "L2BlockAndBatchTimestampMismatch", + &[ParamType::Uint(128), ParamType::Uint(128)], + &[Token::Uint(U256::from(9)), Token::Uint(U256::from(12))], + ) + .unwrap(), + )), ); } diff --git a/core/lib/multivm/src/versions/testonly/refunds.rs b/core/lib/multivm/src/versions/testonly/refunds.rs index 384a3edb7dbd..4d549f5a9be0 100644 --- a/core/lib/multivm/src/versions/testonly/refunds.rs +++ b/core/lib/multivm/src/versions/testonly/refunds.rs @@ -140,7 +140,7 @@ pub(crate) fn test_predetermined_refunded_gas() { current_state_without_predefined_refunds.user_l2_to_l1_logs ); - assert_ne!( + assert_eq!( current_state_with_changed_predefined_refunds.system_logs, current_state_without_predefined_refunds.system_logs ); diff --git a/core/lib/multivm/src/versions/vm_fast/tests/l1_messenger.rs b/core/lib/multivm/src/versions/vm_fast/tests/l1_messenger.rs index c7d4594d7692..0bd01c7de134 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/l1_messenger.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/l1_messenger.rs @@ -1,7 +1,6 @@ use crate::{versions::testonly::l1_messenger::test_rollup_da_output_hash_match, vm_fast::Vm}; #[test] -#[ignore] // Requires post-gateway system contracts fn rollup_da_output_hash_match() { test_rollup_da_output_hash_match::>(); } diff --git a/core/lib/multivm/src/versions/vm_latest/tests/l1_messenger.rs b/core/lib/multivm/src/versions/vm_latest/tests/l1_messenger.rs index 7d301f33a131..f1dade9dd8e6 100644 --- a/core/lib/multivm/src/versions/vm_latest/tests/l1_messenger.rs +++ b/core/lib/multivm/src/versions/vm_latest/tests/l1_messenger.rs @@ -4,7 +4,6 @@ use crate::{ }; #[test] -#[ignore] // Requires post-gateway system contracts fn rollup_da_output_hash_match() { test_rollup_da_output_hash_match::>(); } diff --git a/core/lib/multivm/src/versions/vm_latest/vm.rs b/core/lib/multivm/src/versions/vm_latest/vm.rs index 1db369d4ae20..3914bfca17a2 100644 --- a/core/lib/multivm/src/versions/vm_latest/vm.rs +++ b/core/lib/multivm/src/versions/vm_latest/vm.rs @@ -47,7 +47,7 @@ pub(crate) enum MultiVmSubversion { impl MultiVmSubversion { #[cfg(test)] pub(crate) fn latest() -> Self { - Self::IncreasedBootloaderMemory + Self::Gateway } } diff --git a/core/lib/protobuf_config/src/contracts.rs b/core/lib/protobuf_config/src/contracts.rs index 12cbf996697b..1c9711ef62d2 100644 --- a/core/lib/protobuf_config/src/contracts.rs +++ b/core/lib/protobuf_config/src/contracts.rs @@ -34,6 +34,10 @@ impl ProtoRepr for proto::Contracts { .l1_bytecodes_supplier_addr .as_ref() .map(|x| parse_h160(x).expect("Invalid address")), + l1_wrapped_base_token_store: ecosystem_contracts + .l1_wrapped_base_token_store + .as_ref() + 
.map(|x| parse_h160(x).expect("Invalid address")), }) } else { None @@ -123,12 +127,6 @@ impl ProtoRepr for proto::Contracts { .map(|x| parse_h256(x)) .transpose() .context("base_token_asset_id")?, - l2_predeployed_wrapped_base_token_address: l2 - .predeployed_wrapped_base_token_address - .as_ref() - .map(|x| parse_h160(x)) - .transpose() - .context("l2 predeployed_wrapped_base_token_address")?, chain_admin_addr: l1 .chain_admin_addr .as_ref() @@ -164,6 +162,9 @@ impl ProtoRepr for proto::Contracts { l1_bytecodes_supplier_addr: ecosystem_contracts .l1_bytecodes_supplier_addr .map(|x| format!("{:?}", x)), + l1_wrapped_base_token_store: ecosystem_contracts + .l1_wrapped_base_token_store + .map(|x| format!("{:?}", x)), }); Self { ecosystem_contracts, @@ -184,9 +185,6 @@ impl ProtoRepr for proto::Contracts { legacy_shared_bridge_addr: this .l2_legacy_shared_bridge_addr .map(|a| format!("{:?}", a)), - predeployed_wrapped_base_token_address: this - .l2_predeployed_wrapped_base_token_address - .map(|x| format!("{:?}", x)), timestamp_asserter_addr: this .l2_timestamp_asserter_addr .map(|a| format!("{:?}", a)), diff --git a/core/lib/protobuf_config/src/proto/config/contracts.proto b/core/lib/protobuf_config/src/proto/config/contracts.proto index febbc981478b..538f415ff408 100644 --- a/core/lib/protobuf_config/src/proto/config/contracts.proto +++ b/core/lib/protobuf_config/src/proto/config/contracts.proto @@ -7,6 +7,7 @@ message EcosystemContracts { optional string state_transition_proxy_addr = 2; // optional; h160 optional string transparent_proxy_admin_addr = 3; // optional; h160 optional string l1_bytecodes_supplier_addr = 4; // optional; h160 + optional string l1_wrapped_base_token_store = 5; // optional; h160 } message L1 { @@ -26,7 +27,7 @@ message L2 { optional string da_validator_addr = 2; // optional; H160 optional string legacy_shared_bridge_addr = 3; // optional; H160 optional string timestamp_asserter_addr = 4; // optional; H160 - optional string predeployed_wrapped_base_token_address = 5; // optional; H160 + reserved 5; reserved "predeployed_wrapped_base_token_address"; } message Bridge { diff --git a/core/lib/types/src/abi.rs b/core/lib/types/src/abi.rs index b40aaaf882e2..da51c6297d4d 100644 --- a/core/lib/types/src/abi.rs +++ b/core/lib/types/src/abi.rs @@ -531,7 +531,7 @@ impl GatewayUpgradeEncodedInput { pub struct ZkChainSpecificUpgradeData { pub base_token_asset_id: H256, pub l2_legacy_shared_bridge: Address, - pub predeployed_l2_weth_address: Address, + pub l2_predeployed_wrapped_base_token: Address, pub base_token_l1_address: Address, pub base_token_name: String, pub base_token_symbol: String, @@ -551,7 +551,7 @@ impl ZkChainSpecificUpgradeData { l2_legacy_shared_bridge: l2_legacy_shared_bridge?, // Note, that some chains may not contain previous deployment of L2 wrapped base // token. For those, zero address is used. 
- predeployed_l2_weth_address: predeployed_l2_weth_address.unwrap_or_default(), + l2_predeployed_wrapped_base_token: predeployed_l2_weth_address.unwrap_or_default(), base_token_l1_address: base_token_l1_address?, base_token_name: base_token_name?, base_token_symbol: base_token_symbol?, @@ -572,7 +572,7 @@ impl ZkChainSpecificUpgradeData { Token::Tuple(vec![ Token::FixedBytes(self.base_token_asset_id.0.to_vec()), Token::Address(self.l2_legacy_shared_bridge), - Token::Address(self.predeployed_l2_weth_address), + Token::Address(self.l2_predeployed_wrapped_base_token), Token::Address(self.base_token_l1_address), Token::String(self.base_token_name.clone()), Token::String(self.base_token_symbol.clone()), diff --git a/core/lib/types/src/commitment/mod.rs b/core/lib/types/src/commitment/mod.rs index 6d73ad048774..9aef6b14a0f2 100644 --- a/core/lib/types/src/commitment/mod.rs +++ b/core/lib/types/src/commitment/mod.rs @@ -112,6 +112,7 @@ pub struct L1BatchMetadata { pub aux_data_hash: H256, pub meta_parameters_hash: H256, pub pass_through_data_hash: H256, + /// The commitment to the final events queue state after the batch is committed. /// Practically, it is a commitment to all events that happened on L2 during the batch execution. pub events_queue_commitment: Option, diff --git a/core/lib/types/src/protocol_upgrade.rs b/core/lib/types/src/protocol_upgrade.rs index 3bd9e696ce1f..324650c97e21 100644 --- a/core/lib/types/src/protocol_upgrade.rs +++ b/core/lib/types/src/protocol_upgrade.rs @@ -255,12 +255,30 @@ impl ProtocolUpgrade { } } -pub fn decode_set_chain_id_event( +pub fn decode_genesis_upgrade_event( event: Log, ) -> Result<(ProtocolVersionId, ProtocolUpgradeTx), ethabi::Error> { - let tx = ethabi::decode(&[abi::L2CanonicalTransaction::schema()], &event.data.0)?; - let tx = abi::L2CanonicalTransaction::decode(tx.into_iter().next().unwrap()).unwrap(); - + let tokens = ethabi::decode( + &[ + abi::L2CanonicalTransaction::schema(), + ParamType::Array(Box::new(ParamType::Bytes)), + ], + &event.data.0, + )?; + let mut t: std::vec::IntoIter = tokens.into_iter(); + let mut next = || t.next().unwrap(); + + let tx = abi::L2CanonicalTransaction::decode(next()).unwrap(); + let factory_deps = next() + .into_array() + .context("factory_deps") + .map_err(|_| ethabi::Error::InvalidData)? 
+ .into_iter() + .enumerate() + .map(|(i, t)| t.into_bytes().context(i)) + .collect::>, _>>() + .context("factory_deps") + .map_err(|_| ethabi::Error::InvalidData)?; let full_version_id = h256_to_u256(event.topics[2]); let protocol_version = ProtocolVersionId::try_from_packed_semver(full_version_id) .unwrap_or_else(|_| panic!("Version is not supported, packed version: {full_version_id}")); @@ -269,8 +287,11 @@ pub fn decode_set_chain_id_event( Transaction::from_abi( abi::Transaction::L1 { tx: tx.into(), - eth_block: 0, - factory_deps: vec![], + eth_block: event + .block_number + .expect("Event block number is missing") + .as_u64(), + factory_deps, }, true, ) diff --git a/core/lib/types/src/system_contracts.rs b/core/lib/types/src/system_contracts.rs index 4d1ff9b554ea..28d8def59277 100644 --- a/core/lib/types/src/system_contracts.rs +++ b/core/lib/types/src/system_contracts.rs @@ -4,8 +4,10 @@ use zksync_basic_types::{AccountTreeId, Address, U256}; use zksync_contracts::{read_sys_contract_bytecode, ContractLanguage, SystemContractsRepo}; use zksync_system_constants::{ BOOTLOADER_UTILITIES_ADDRESS, CODE_ORACLE_ADDRESS, COMPRESSOR_ADDRESS, CREATE2_FACTORY_ADDRESS, - EVENT_WRITER_ADDRESS, EVM_GAS_MANAGER_ADDRESS, P256VERIFY_PRECOMPILE_ADDRESS, - PUBDATA_CHUNK_PUBLISHER_ADDRESS, + EVENT_WRITER_ADDRESS, EVM_GAS_MANAGER_ADDRESS, L2_ASSET_ROUTER_ADDRESS, L2_BRIDGEHUB_ADDRESS, + L2_GENESIS_UPGRADE_ADDRESS, L2_MESSAGE_ROOT_ADDRESS, L2_NATIVE_TOKEN_VAULT_ADDRESS, + L2_WRAPPED_BASE_TOKEN_IMPL, P256VERIFY_PRECOMPILE_ADDRESS, PUBDATA_CHUNK_PUBLISHER_ADDRESS, + SLOAD_CONTRACT_ADDRESS, }; use crate::{ @@ -25,7 +27,7 @@ use crate::{ pub const TX_NONCE_INCREMENT: U256 = U256([1, 0, 0, 0]); // 1 pub const DEPLOYMENT_NONCE_INCREMENT: U256 = U256([0, 0, 1, 0]); // 2^128 -static SYSTEM_CONTRACT_LIST: [(&str, &str, Address, ContractLanguage); 26] = [ +static SYSTEM_CONTRACT_LIST: [(&str, &str, Address, ContractLanguage); 33] = [ ( "", "AccountCodeStorage", @@ -174,6 +176,48 @@ static SYSTEM_CONTRACT_LIST: [(&str, &str, Address, ContractLanguage); 26] = [ CREATE2_FACTORY_ADDRESS, ContractLanguage::Sol, ), + ( + "", + "L2GenesisUpgrade", + L2_GENESIS_UPGRADE_ADDRESS, + ContractLanguage::Sol, + ), + ( + "../../l1-contracts/zkout/", + "Bridgehub", + L2_BRIDGEHUB_ADDRESS, + ContractLanguage::Sol, + ), + ( + "../../l1-contracts/zkout/", + "MessageRoot", + L2_MESSAGE_ROOT_ADDRESS, + ContractLanguage::Sol, + ), + ( + "../../l1-contracts/zkout/", + "L2AssetRouter", + L2_ASSET_ROUTER_ADDRESS, + ContractLanguage::Sol, + ), + ( + "../../l1-contracts/zkout/", + "L2NativeTokenVault", + L2_NATIVE_TOKEN_VAULT_ADDRESS, + ContractLanguage::Sol, + ), + ( + "", + "SloadContract", + SLOAD_CONTRACT_ADDRESS, + ContractLanguage::Sol, + ), + ( + "../../l1-contracts/zkout/", + "L2WrappedBaseToken", + L2_WRAPPED_BASE_TOKEN_IMPL, + ContractLanguage::Sol, + ), ]; /// Gets default set of system contracts, based on Cargo workspace location. 
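For context on the l2_blocks.rs test changes above: the bootloader now reports L2-block violations as ABI-encoded custom errors (a 4-byte selector followed by the ABI-encoded arguments) instead of human-readable strings, which is what the added encode_function_call helper reconstructs. The sketch below, assuming the ethabi crate and using the hypothetical helper name custom_error_data, illustrates how such revert data is formed; it is an illustrative sketch, not code from this patch.

use ethabi::{encode, short_signature, ParamType, Token};

/// Builds custom-error revert data: 4-byte selector + ABI-encoded arguments.
/// (`custom_error_data` is an illustrative name, not part of the patch.)
fn custom_error_data(name: &str, types: &[ParamType], args: &[Token]) -> Vec<u8> {
    // Selector: first 4 bytes of keccak256 over the canonical signature,
    // e.g. "IncorrectSameL2BlockTimestamp(uint128,uint128)".
    let selector = short_signature(name, types);
    // Revert data: selector followed by the ABI encoding of the arguments.
    [selector.to_vec(), encode(args)].concat()
}

fn main() {
    // Zero-argument error: the data is just the selector.
    assert_eq!(custom_error_data("L2BlockNumberZero", &[], &[]).len(), 4);

    // Error with arguments, mirroring one of the cases asserted in the tests:
    // each uint128 argument still occupies a full 32-byte ABI slot.
    let data = custom_error_data(
        "IncorrectSameL2BlockTimestamp",
        &[ParamType::Uint(128), ParamType::Uint(128)],
        &[Token::Uint(0u64.into()), Token::Uint(1_700_000_001u64.into())],
    );
    assert_eq!(data.len(), 4 + 2 * 32);
}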
diff --git a/core/lib/vm_executor/src/batch/factory.rs b/core/lib/vm_executor/src/batch/factory.rs index 9797e1681032..59b610426669 100644 --- a/core/lib/vm_executor/src/batch/factory.rs +++ b/core/lib/vm_executor/src/batch/factory.rs @@ -238,6 +238,7 @@ impl BatchVm { .expect("failed extracting call traces") .take() .unwrap_or_default(); + BatchTransactionExecutionResult { tx_result: Box::new(tx_result), compressed_bytecodes, diff --git a/core/lib/vm_executor/src/oneshot/contracts.rs b/core/lib/vm_executor/src/oneshot/contracts.rs index 257ede5a7c7c..d67d1dfbc662 100644 --- a/core/lib/vm_executor/src/oneshot/contracts.rs +++ b/core/lib/vm_executor/src/oneshot/contracts.rs @@ -106,10 +106,8 @@ impl MultiVmBaseSystemContracts { ProtocolVersionId::Version21 | ProtocolVersionId::Version22 => &self.post_1_4_2, ProtocolVersionId::Version23 => &self.vm_1_5_0_small_memory, ProtocolVersionId::Version24 => &self.vm_1_5_0_increased_memory, - ProtocolVersionId::Version25 | ProtocolVersionId::Version26 => { - &self.vm_protocol_defense - } - ProtocolVersionId::Version27 => &self.gateway, + ProtocolVersionId::Version25 => &self.vm_protocol_defense, + ProtocolVersionId::Version26 | ProtocolVersionId::Version27 => &self.gateway, ProtocolVersionId::Version28 => unreachable!("Version 28 is not supported yet"), }; let base = base.clone(); diff --git a/core/lib/web3_decl/src/namespaces/unstable.rs b/core/lib/web3_decl/src/namespaces/unstable.rs index f666f02f2811..da18806d126c 100644 --- a/core/lib/web3_decl/src/namespaces/unstable.rs +++ b/core/lib/web3_decl/src/namespaces/unstable.rs @@ -38,4 +38,7 @@ pub trait UnstableNamespace { l1_batch_number: L1BatchNumber, chain_id: L2ChainId, ) -> RpcResult>; + + #[method(name = "unconfirmedTxsCount")] + async fn get_unconfirmed_txs_count(&self) -> RpcResult; } diff --git a/core/lib/zksync_core_leftovers/src/lib.rs b/core/lib/zksync_core_leftovers/src/lib.rs index 87fb7ea28f71..2bdc8094d142 100644 --- a/core/lib/zksync_core_leftovers/src/lib.rs +++ b/core/lib/zksync_core_leftovers/src/lib.rs @@ -3,7 +3,6 @@ use std::str::FromStr; use tokio::sync::oneshot; - pub mod temp_config_store; /// Sets up an interrupt handler and returns a future that resolves once an interrupt signal diff --git a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs index cfa8c84b05b0..214e34241cf9 100644 --- a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs +++ b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs @@ -40,4 +40,10 @@ impl UnstableNamespaceServer for UnstableNamespace { .await .map_err(|err| self.current_method().map_err(err)) } + + async fn get_unconfirmed_txs_count(&self) -> RpcResult { + self.get_unconfirmed_txs_count_impl() + .await + .map_err(|err| self.current_method().map_err(err)) + } } diff --git a/core/node/api_server/src/web3/namespaces/en.rs b/core/node/api_server/src/web3/namespaces/en.rs index b2baa8497c98..9ccecf9001b5 100644 --- a/core/node/api_server/src/web3/namespaces/en.rs +++ b/core/node/api_server/src/web3/namespaces/en.rs @@ -165,6 +165,7 @@ impl EnNamespace { .l1_transparent_proxy_admin_addr .unwrap(), l1_bytecodes_supplier_addr: self.state.api_config.l1_bytecodes_supplier_addr, + l1_wrapped_base_token_store: self.state.api_config.l1_wrapped_base_token_store, }) .context("Shared bridge doesn't supported")?) 
} diff --git a/core/node/api_server/src/web3/namespaces/unstable/mod.rs b/core/node/api_server/src/web3/namespaces/unstable/mod.rs index 47e43f10282b..c70cb6f6e0fe 100644 --- a/core/node/api_server/src/web3/namespaces/unstable/mod.rs +++ b/core/node/api_server/src/web3/namespaces/unstable/mod.rs @@ -139,4 +139,16 @@ impl UnstableNamespace { chain_id_leaf_proof_mask: chain_id_leaf_proof_mask as u64, })) } + + pub async fn get_unconfirmed_txs_count_impl(&self) -> Result { + let mut connection = self.state.acquire_connection().await?; + + let result = connection + .eth_sender_dal() + .get_unconfirmed_txs_count() + .await + .map_err(DalError::generalize)?; + + Ok(result) + } } diff --git a/core/node/api_server/src/web3/state.rs b/core/node/api_server/src/web3/state.rs index a50b9d062321..97ffd933c801 100644 --- a/core/node/api_server/src/web3/state.rs +++ b/core/node/api_server/src/web3/state.rs @@ -108,6 +108,7 @@ pub struct InternalApiConfig { pub estimate_gas_optimize_search: bool, pub bridge_addresses: api::BridgeAddresses, pub l1_bytecodes_supplier_addr: Option
<Address>,
+ pub l1_wrapped_base_token_store: Option<Address>,
pub l1_bridgehub_proxy_addr: Option<Address>,
pub l1_state_transition_proxy_addr: Option<Address>,
pub l1_transparent_proxy_admin_addr: Option<Address>
, @@ -169,6 +170,10 @@ impl InternalApiConfig { .ecosystem_contracts .as_ref() .and_then(|a| a.l1_bytecodes_supplier_addr), + l1_wrapped_base_token_store: contracts_config + .ecosystem_contracts + .as_ref() + .and_then(|a| a.l1_wrapped_base_token_store), l1_diamond_proxy_addr: contracts_config.diamond_proxy_addr, l2_testnet_paymaster_addr: contracts_config.l2_testnet_paymaster_addr, req_entities_limit: web3_config.req_entities_limit(), @@ -236,6 +241,10 @@ impl BridgeAddressesHandle { self.0.write().await.l1_shared_default_bridge = Some(l1_shared_bridge); } + pub async fn update_l2_shared_bridge(&self, l2_shared_bridge: Address) { + self.0.write().await.l2_shared_default_bridge = Some(l2_shared_bridge); + } + pub async fn read(&self) -> api::BridgeAddresses { self.0.read().await.clone() } diff --git a/core/node/base_token_adjuster/src/base_token_l1_behaviour.rs b/core/node/base_token_adjuster/src/base_token_l1_behaviour.rs index 599aba36f3e9..580083cd1682 100644 --- a/core/node/base_token_adjuster/src/base_token_l1_behaviour.rs +++ b/core/node/base_token_adjuster/src/base_token_l1_behaviour.rs @@ -151,6 +151,7 @@ impl BaseTokenL1Behaviour { }; } + // TODO(EVM-924): this logic supports only `ChainAdminOwnable`. async fn do_update_l1( &self, l1_params: &UpdateOnL1Params, diff --git a/core/node/commitment_generator/src/lib.rs b/core/node/commitment_generator/src/lib.rs index 71b019e230a7..3c7dadb5fd30 100644 --- a/core/node/commitment_generator/src/lib.rs +++ b/core/node/commitment_generator/src/lib.rs @@ -6,6 +6,7 @@ use tokio::{sync::watch, task::JoinHandle}; use zksync_dal::{ConnectionPool, Core, CoreDal}; use zksync_health_check::{Health, HealthStatus, HealthUpdater, ReactiveHealthCheck}; use zksync_l1_contract_interface::i_executor::commit::kzg::pubdata_to_blob_commitments; +use zksync_multivm::zk_evm_latest::ethereum_types::U256; use zksync_types::{ blob::num_blobs_required, commitment::{ @@ -14,7 +15,7 @@ use zksync_types::{ }, h256_to_u256, writes::{InitialStorageWrite, RepeatedStorageWrite, StateDiffRecord}, - L1BatchNumber, ProtocolVersionId, StorageKey, H256, U256, + L1BatchNumber, ProtocolVersionId, StorageKey, H256, }; use crate::{ diff --git a/core/node/consistency_checker/src/lib.rs b/core/node/consistency_checker/src/lib.rs index f7d904955789..c1735a54fd7f 100644 --- a/core/node/consistency_checker/src/lib.rs +++ b/core/node/consistency_checker/src/lib.rs @@ -396,15 +396,9 @@ impl ConsistencyChecker { }; let gateway_chain_data = if let Some(client) = gateway_client { - let contract = bridgehub_contract(); - let function_name = if contract.function("getZKChain").is_ok() { - "getZKChain" - } else { - "getHyperchain" - }; let gateway_diamond_proxy = - CallFunctionArgs::new(function_name, Token::Uint(l2_chain_id.as_u64().into())) - .for_contract(L2_BRIDGEHUB_ADDRESS, &contract) + CallFunctionArgs::new("getZKChain", Token::Uint(l2_chain_id.as_u64().into())) + .for_contract(L2_BRIDGEHUB_ADDRESS, &bridgehub_contract()) .call(&client) .await?; let chain_id = client.fetch_chain_id().await?; diff --git a/core/node/consistency_checker/src/tests/mod.rs b/core/node/consistency_checker/src/tests/mod.rs index 1635bddffb83..57511fbb69c7 100644 --- a/core/node/consistency_checker/src/tests/mod.rs +++ b/core/node/consistency_checker/src/tests/mod.rs @@ -134,13 +134,8 @@ fn create_mock_sl(chain_id: u64, with_get_zk_chain: bool) -> MockSettlementLayer } Some(addr) if with_get_zk_chain && addr == L2_BRIDGEHUB_ADDRESS => { let contract = zksync_contracts::bridgehub_contract(); - let function_name = 
if contract.function("getZKChain").is_ok() { - "getZKChain" - } else { - "getHyperchain" - }; let expected_input = contract - .function(function_name) + .function("getZKChain") .unwrap() .encode_input(&[Token::Uint(ERA_CHAIN_ID.into())]) .unwrap(); diff --git a/core/node/da_clients/Cargo.toml b/core/node/da_clients/Cargo.toml index e0c85b3030ab..6fcd94225a8b 100644 --- a/core/node/da_clients/Cargo.toml +++ b/core/node/da_clients/Cargo.toml @@ -40,6 +40,7 @@ jsonrpsee = { workspace = true, features = ["ws-client"] } reqwest = { workspace = true } bytes = { workspace = true } backon.workspace = true +url.workspace = true # Celestia dependencies http.workspace = true diff --git a/core/node/da_clients/src/avail/client.rs b/core/node/da_clients/src/avail/client.rs index 115ad77bf44e..411a0354d632 100644 --- a/core/node/da_clients/src/avail/client.rs +++ b/core/node/da_clients/src/avail/client.rs @@ -2,9 +2,11 @@ use std::{fmt::Debug, sync::Arc, time::Duration}; use anyhow::anyhow; use async_trait::async_trait; +use http::StatusCode; use jsonrpsee::ws_client::WsClientBuilder; use serde::{Deserialize, Serialize}; use subxt_signer::ExposeSecret; +use url::Url; use zksync_config::configs::da_client::avail::{AvailClientConfig, AvailConfig, AvailSecrets}; use zksync_da_client::{ types::{DAError, DispatchResponse, InclusionData}, @@ -40,10 +42,10 @@ pub struct AvailClient { pub struct BridgeAPIResponse { blob_root: Option, bridge_root: Option, - data_root_index: Option, + data_root_index: Option, data_root_proof: Option>, leaf: Option, - leaf_index: Option, + leaf_index: Option, leaf_proof: Option>, range_hash: Option, error: Option, @@ -191,19 +193,30 @@ impl DataAvailabilityClient for AvailClient { error: anyhow!("Invalid blob ID format"), is_retriable: false, })?; - let url = format!( - "{}/eth/proof/{}?index={}", - self.config.bridge_api_url, block_hash, tx_idx - ); + let url = Url::parse(&self.config.bridge_api_url) + .map_err(|_| DAError { + error: anyhow!("Invalid URL"), + is_retriable: false, + })? 
+ .join(format!("/eth/proof/{}?index={}", block_hash, tx_idx).as_str()) + .map_err(|_| DAError { + error: anyhow!("Unable to join to URL"), + is_retriable: false, + })?; let response = self .api_client - .get(&url) + .get(url) .timeout(Duration::from_millis(self.config.timeout_ms as u64)) .send() .await .map_err(to_retriable_da_error)?; + // 404 means that the blob is not included in the bridge yet + if response.status() == StatusCode::NOT_FOUND { + return Ok(None); + } + let bridge_api_data = response .json::() .await @@ -213,12 +226,13 @@ impl DataAvailabilityClient for AvailClient { data_root_proof: bridge_api_data.data_root_proof.unwrap(), leaf_proof: bridge_api_data.leaf_proof.unwrap(), range_hash: bridge_api_data.range_hash.unwrap(), - data_root_index: bridge_api_data.data_root_index.unwrap(), + data_root_index: bridge_api_data.data_root_index.unwrap().into(), blob_root: bridge_api_data.blob_root.unwrap(), bridge_root: bridge_api_data.bridge_root.unwrap(), leaf: bridge_api_data.leaf.unwrap(), - leaf_index: bridge_api_data.leaf_index.unwrap(), + leaf_index: bridge_api_data.leaf_index.unwrap().into(), }; + Ok(Some(InclusionData { data: ethabi::encode(&attestation_data.into_tokens()), })) diff --git a/core/node/eth_sender/Cargo.toml b/core/node/eth_sender/Cargo.toml index f578743dcea9..2f95bf54e176 100644 --- a/core/node/eth_sender/Cargo.toml +++ b/core/node/eth_sender/Cargo.toml @@ -25,7 +25,6 @@ zksync_prover_interface.workspace = true zksync_shared_metrics.workspace = true zksync_node_fee_model.workspace = true zksync_mini_merkle_tree.workspace = true -once_cell.workspace = true tokio = { workspace = true, features = ["time"] } anyhow.workspace = true diff --git a/core/node/eth_watch/src/client.rs b/core/node/eth_watch/src/client.rs index a86f760dc398..03b22df55995 100644 --- a/core/node/eth_watch/src/client.rs +++ b/core/node/eth_watch/src/client.rs @@ -2,8 +2,8 @@ use std::{collections::HashMap, fmt, sync::Arc}; use anyhow::Context; use zksync_contracts::{ - getters_facet_contract, state_transition_manager_contract, verifier_contract, - MESSAGE_ROOT_CONTRACT, + bytecode_supplier_contract, getters_facet_contract, l1_asset_router_contract, l2_message_root, + state_transition_manager_contract, verifier_contract, wrapped_base_token_store_contract, }; use zksync_eth_client::{ clients::{DynClient, L1}, @@ -12,12 +12,12 @@ use zksync_eth_client::{ }; use zksync_system_constants::L2_MESSAGE_ROOT_ADDRESS; use zksync_types::{ + abi::ZkChainSpecificUpgradeData, api::{ChainAggProof, Log}, - ethabi::{self, decode, Contract, ParamType}, - tokens::TokenMetadata, - web3::{BlockId, BlockNumber, CallRequest, Filter, FilterBuilder}, - Address, L1BatchNumber, L2ChainId, SLChainId, H256, SHARED_BRIDGE_ETHER_TOKEN_ADDRESS, U256, - U64, + ethabi::{self, decode, encode, Contract, ParamType}, + web3::{keccak256, BlockId, BlockNumber, CallRequest, Filter, FilterBuilder}, + Address, L1BatchNumber, L2ChainId, SLChainId, H256, L2_NATIVE_TOKEN_VAULT_ADDRESS, + SHARED_BRIDGE_ETHER_TOKEN_ADDRESS, U256, U64, }; use zksync_web3_decl::{ client::{Network, L2}, @@ -64,7 +64,9 @@ pub trait EthClient: 'static + fmt::Debug + Send + Sync { hashes: Vec, ) -> EnrichedClientResult>>>; - async fn get_base_token_metadata(&self) -> Result; + async fn get_chain_gateway_upgrade_info( + &self, + ) -> Result, ContractCallError>; /// Returns ID of the chain. 
async fn chain_id(&self) -> EnrichedClientResult<SLChainId>; @@ -97,27 +99,36 @@ pub struct EthHttpQueryClient { new_upgrade_cut_data_signature: H256, bytecode_published_signature: H256, bytecode_supplier_addr: Option<Address>,
+ wrapped_base_token_store: Option<Address>,
+ l1_shared_bridge_addr: Option<Address>, // Only present for post-shared bridge chains.
state_transition_manager_address: Option<Address>,
chain_admin_address: Option<Address>
, verifier_contract_abi: Contract, getters_facet_contract_abi: Contract, message_root_abi: Contract, + l1_asset_router_abi: Contract, + wrapped_base_token_store_abi: Contract, confirmations_for_eth_event: Option, + l2_chain_id: L2ChainId, } impl EthHttpQueryClient where Box>: GetLogsClient, { + #[allow(clippy::too_many_arguments)] pub fn new( client: Box>, diamond_proxy_addr: Address, bytecode_supplier_addr: Option
<Address>,
+ wrapped_base_token_store: Option<Address>,
+ l1_shared_bridge_addr: Option<Address>,
state_transition_manager_address: Option<Address>,
chain_admin_address: Option<Address>
, governance_address: Address, confirmations_for_eth_event: Option, + l2_chain_id: L2ChainId, ) -> Self { tracing::debug!( "New eth client, ZKsync addr: {:x}, governance addr: {:?}", @@ -136,14 +147,20 @@ where .context("NewUpgradeCutData event is missing in ABI") .unwrap() .signature(), - bytecode_published_signature: ethabi::long_signature( - "BytecodePublished", - &[ParamType::FixedBytes(32), ParamType::Bytes], - ), + bytecode_published_signature: bytecode_supplier_contract() + .event("BytecodePublished") + .context("BytecodePublished event is missing in ABI") + .unwrap() + .signature(), verifier_contract_abi: verifier_contract(), getters_facet_contract_abi: getters_facet_contract(), - message_root_abi: MESSAGE_ROOT_CONTRACT.clone(), + message_root_abi: l2_message_root(), + l1_asset_router_abi: l1_asset_router_contract(), + wrapped_base_token_store_abi: wrapped_base_token_store_contract(), confirmations_for_eth_event, + wrapped_base_token_store, + l1_shared_bridge_addr, + l2_chain_id, } } @@ -439,51 +456,100 @@ where .await } - async fn get_base_token_metadata(&self) -> Result { - let base_token_addr: Address = CallFunctionArgs::new("getBaseToken", ()) + async fn get_chain_gateway_upgrade_info( + &self, + ) -> Result, ContractCallError> { + let Some(l1_shared_bridge_addr) = self.l1_shared_bridge_addr else { + tracing::warn!("l1 shared bridge is not provided!"); + return Ok(None); + }; + + let Some(l1_wrapped_base_token_store) = self.wrapped_base_token_store else { + tracing::warn!("l1 wrapped base token store is not provided!"); + return Ok(None); + }; + + let l2_chain_id = U256::from(self.l2_chain_id.as_u64()); + + // It does not matter whether the l1 shared bridge is an L1AssetRouter or L1Nullifier, + // either way it supports the "l2BridgeAddress" method. + let l2_legacy_shared_bridge: Address = + CallFunctionArgs::new("l2BridgeAddress", l2_chain_id) + .for_contract(l1_shared_bridge_addr, &self.l1_asset_router_abi) + .call(&self.client) + .await?; + + if l2_legacy_shared_bridge == Address::zero() { + // This state is not completely impossible, but somewhat undesirable. + // Contracts will still allow the upgrade to go through without + // the shared bridge, so we will allow it here as well. + tracing::error!("L2 shared bridge from L1 is empty"); + } + + let l2_predeployed_wrapped_base_token: Address = + CallFunctionArgs::new("l2WBaseTokenAddress", l2_chain_id) + .for_contract( + l1_wrapped_base_token_store, + &self.wrapped_base_token_store_abi, + ) + .call(&self.client) + .await?; + + if l2_predeployed_wrapped_base_token == Address::zero() { + // This state is not completely impossible, but somewhat undesirable. + // Contracts will still allow the upgrade to go through without + // the l2 predeployed wrapped base token, so we will allow it here as well. + tracing::error!("L2 predeployed wrapped base token is empty"); + } + + let base_token_l1_address: Address = CallFunctionArgs::new("getBaseToken", ()) .for_contract(self.diamond_proxy_addr, &self.getters_facet_contract_abi) .call(&self.client) .await?; - if base_token_addr == SHARED_BRIDGE_ETHER_TOKEN_ADDRESS { - return Ok(TokenMetadata { - name: String::from("Ether"), - symbol: String::from("ETH"), - decimals: 18, - }); - } + let (base_token_name, base_token_symbol) = + if base_token_l1_address == SHARED_BRIDGE_ETHER_TOKEN_ADDRESS { + (String::from("Ether"), String::from("ETH")) + } else { + // TODO(EVM-934): support non-standard tokens. 
+ let selectors: [[u8; 4]; 2] = [ + ethabi::short_signature("name", &[]), + ethabi::short_signature("symbol", &[]), + ]; + let types: [ParamType; 2] = [ParamType::String, ParamType::String]; + + let mut decoded_result = vec![]; + for (selector, param_type) in selectors.into_iter().zip(types.into_iter()) { + let request = CallRequest { + to: Some(base_token_l1_address), + data: Some(selector.into()), + ..Default::default() + }; + let result = self.client.call_contract_function(request, None).await?; + // Base tokens are expected to support erc20 metadata + let mut token = ethabi::decode(&[param_type], &result.0) + .expect("base token does not support erc20 metadata"); + decoded_result.push(token.pop().unwrap()); + } - // TODO(EVM-934): support non-standard tokens. - let selectors: [[u8; 4]; 3] = [ - zksync_types::ethabi::short_signature("name", &[]), - zksync_types::ethabi::short_signature("symbol", &[]), - zksync_types::ethabi::short_signature("decimals", &[]), - ]; - let types: [ParamType; 3] = [ParamType::String, ParamType::String, ParamType::Uint(32)]; - - let mut decoded_result = vec![]; - for (selector, param_type) in selectors.into_iter().zip(types.into_iter()) { - let request = CallRequest { - to: Some(base_token_addr), - data: Some(selector.into()), - ..Default::default() + (decoded_result[0].to_string(), decoded_result[1].to_string()) }; - let result = self.client.call_contract_function(request, None).await?; - // Base tokens are expected to support erc20 metadata - let mut token = zksync_types::ethabi::decode(&[param_type], &result.0) - .expect("base token does not support erc20 metadata"); - decoded_result.push(token.pop().unwrap()); - } - Ok(TokenMetadata { - name: decoded_result[0].to_string(), - symbol: decoded_result[1].to_string(), - decimals: decoded_result[2] - .clone() - .into_uint() - .expect("decimals not supported") - .as_u32() as u8, - }) + let base_token_asset_id = encode_ntv_asset_id( + // Note, that this is correct only for tokens that are being upgraded to the gateway protocol version. + // The chains that were deployed after it may have tokens with non-L1 base tokens. 
+ U256::from(self.chain_id().await?.0), + base_token_l1_address, + ); + + Ok(Some(ZkChainSpecificUpgradeData { + base_token_asset_id, + l2_legacy_shared_bridge, + l2_predeployed_wrapped_base_token, + base_token_l1_address, + base_token_name, + base_token_symbol, + })) } } @@ -632,8 +698,10 @@ impl EthClient for L2EthClientW { self.0.get_chain_root(block_number, l2_chain_id).await } - async fn get_base_token_metadata(&self) -> Result { - self.0.get_base_token_metadata().await + async fn get_chain_gateway_upgrade_info( + &self, + ) -> Result, ContractCallError> { + self.0.get_chain_gateway_upgrade_info().await } async fn get_published_preimages( @@ -643,3 +711,13 @@ impl EthClient for L2EthClientW { self.0.get_published_preimages(hashes).await } } + +pub(crate) fn encode_ntv_asset_id(l1_chain_id: U256, addr: Address) -> H256 { + let encoded_data = encode(&[ + ethabi::Token::Uint(l1_chain_id), + ethabi::Token::Address(L2_NATIVE_TOKEN_VAULT_ADDRESS), + ethabi::Token::Address(addr), + ]); + + H256(keccak256(&encoded_data)) +} diff --git a/core/node/eth_watch/src/event_processors/decentralized_upgrades.rs b/core/node/eth_watch/src/event_processors/decentralized_upgrades.rs index c2f499b8ce64..2892d6ca718f 100644 --- a/core/node/eth_watch/src/event_processors/decentralized_upgrades.rs +++ b/core/node/eth_watch/src/event_processors/decentralized_upgrades.rs @@ -3,9 +3,8 @@ use std::sync::Arc; use anyhow::Context as _; use zksync_dal::{eth_watcher_dal::EventType, Connection, Core, CoreDal, DalError}; use zksync_types::{ - abi::ZkChainSpecificUpgradeData, api::Log, ethabi::Contract, - protocol_upgrade::ProtocolUpgradePreimageOracle, protocol_version::ProtocolSemanticVersion, - ProtocolUpgrade, H256, U256, + api::Log, ethabi::Contract, protocol_upgrade::ProtocolUpgradePreimageOracle, + protocol_version::ProtocolSemanticVersion, ProtocolUpgrade, H256, U256, }; use crate::{ @@ -20,7 +19,6 @@ pub struct DecentralizedUpgradesEventProcessor { /// Last protocol version seen. Used to skip events for already known upgrade proposals. last_seen_protocol_version: ProtocolSemanticVersion, update_upgrade_timestamp_signature: H256, - chain_specific_data: Option, sl_client: Arc, l1_client: Arc, } @@ -29,7 +27,6 @@ impl DecentralizedUpgradesEventProcessor { pub fn new( last_seen_protocol_version: ProtocolSemanticVersion, chain_admin_contract: &Contract, - chain_specific_data: Option, sl_client: Arc, l1_client: Arc, ) -> Self { @@ -40,7 +37,6 @@ impl DecentralizedUpgradesEventProcessor { .context("UpdateUpgradeTimestamp event is missing in ABI") .unwrap() .signature(), - chain_specific_data, sl_client, l1_client, } @@ -58,7 +54,10 @@ impl ProtocolUpgradePreimageOracle for &dyn EthClient { let mut result = vec![]; for (i, preimage) in preimages.into_iter().enumerate() { let preimage = preimage.with_context(|| { - format!("Protocol upgrade preimage for {:#?} is missing", hashes[i]) + format!( + "Protocol upgrade preimage under id {i} for {:#?} is missing", + hashes[i] + ) })?; result.push(preimage); } @@ -93,7 +92,7 @@ impl EventProcessor for DecentralizedUpgradesEventProcessor { ..ProtocolUpgrade::try_from_diamond_cut( &diamond_cut, self.l1_client.as_ref(), - self.chain_specific_data.clone(), + self.l1_client.get_chain_gateway_upgrade_info().await?, ) .await? 
}; diff --git a/core/node/eth_watch/src/lib.rs b/core/node/eth_watch/src/lib.rs index 59f441457139..f866c8e627c5 100644 --- a/core/node/eth_watch/src/lib.rs +++ b/core/node/eth_watch/src/lib.rs @@ -6,14 +6,12 @@ use std::{sync::Arc, time::Duration}; use anyhow::Context as _; use tokio::sync::watch; -use zksync_config::ContractsConfig; use zksync_dal::{Connection, ConnectionPool, Core, CoreDal, DalError}; use zksync_mini_merkle_tree::MiniMerkleTree; use zksync_system_constants::PRIORITY_EXPIRATION; use zksync_types::{ - abi::ZkChainSpecificUpgradeData, ethabi::Contract, protocol_version::ProtocolSemanticVersion, - tokens::TokenMetadata, web3::BlockNumber as Web3BlockNumber, L1BatchNumber, L2ChainId, - PriorityOpId, + ethabi::Contract, protocol_version::ProtocolSemanticVersion, + web3::BlockNumber as Web3BlockNumber, L1BatchNumber, L2ChainId, PriorityOpId, }; pub use self::client::{EthClient, EthHttpQueryClient, L2EthClient}; @@ -58,7 +56,6 @@ impl EthWatch { sl_l2_client: Option>, pool: ConnectionPool, poll_interval: Duration, - contracts_config: &ContractsConfig, chain_id: L2ChainId, ) -> anyhow::Result { let mut storage = pool.connection_tagged("eth_watch").await?; @@ -79,7 +76,6 @@ impl EthWatch { let decentralized_upgrades_processor = DecentralizedUpgradesEventProcessor::new( state.last_seen_protocol_version, chain_admin_contract, - get_chain_specific_upgrade_params(&l1_client, contracts_config).await?, sl_client.clone(), l1_client.clone(), ); @@ -246,19 +242,3 @@ impl EthWatch { Ok(()) } } - -async fn get_chain_specific_upgrade_params( - l1_client: &Arc, - contracts_config: &ContractsConfig, -) -> anyhow::Result> { - let TokenMetadata { name, symbol, .. } = l1_client.get_base_token_metadata().await?; - - Ok(ZkChainSpecificUpgradeData::from_partial_components( - contracts_config.l1_base_token_asset_id, - contracts_config.l2_legacy_shared_bridge_addr, - contracts_config.l2_predeployed_wrapped_base_token_address, - contracts_config.base_token_addr, - Some(name), - Some(symbol), - )) -} diff --git a/core/node/eth_watch/src/tests/client.rs b/core/node/eth_watch/src/tests/client.rs index f242488949b1..cec297435225 100644 --- a/core/node/eth_watch/src/tests/client.rs +++ b/core/node/eth_watch/src/tests/client.rs @@ -6,19 +6,19 @@ use zksync_contracts::{ }; use zksync_eth_client::{ContractCallError, EnrichedClientResult}; use zksync_types::{ - abi::{self, ProposedUpgrade}, + abi::{self, ProposedUpgrade, ZkChainSpecificUpgradeData}, api::{ChainAggProof, Log}, bytecode::BytecodeHash, ethabi::{self, Token}, l1::L1Tx, protocol_upgrade::ProtocolUpgradeTx, - tokens::TokenMetadata, u256_to_h256, web3::{contract::Tokenizable, BlockNumber}, - Address, L1BatchNumber, L2ChainId, ProtocolUpgrade, SLChainId, Transaction, H256, U256, U64, + Address, L1BatchNumber, L2ChainId, ProtocolUpgrade, SLChainId, Transaction, H256, + SHARED_BRIDGE_ETHER_TOKEN_ADDRESS, U256, U64, }; -use crate::client::{EthClient, L2EthClient, RETRY_LIMIT}; +use crate::client::{encode_ntv_asset_id, EthClient, L2EthClient, RETRY_LIMIT}; #[derive(Debug)] pub struct FakeEthClientData { @@ -306,12 +306,20 @@ impl EthClient for MockEthClient { Ok(result) } - async fn get_base_token_metadata(&self) -> Result { - Ok(TokenMetadata { - name: "ETH".to_string(), - symbol: "Ether".to_string(), - decimals: 18, - }) + async fn get_chain_gateway_upgrade_info( + &self, + ) -> Result, ContractCallError> { + Ok(Some(ZkChainSpecificUpgradeData { + base_token_asset_id: encode_ntv_asset_id( + self.chain_id().await?.0.into(), + 
SHARED_BRIDGE_ETHER_TOKEN_ADDRESS, + ), + l2_legacy_shared_bridge: Address::repeat_byte(0x01), + l2_predeployed_wrapped_base_token: Address::repeat_byte(0x02), + base_token_l1_address: SHARED_BRIDGE_ETHER_TOKEN_ADDRESS, + base_token_name: String::from("Ether"), + base_token_symbol: String::from("ETH"), + })) } async fn fflonk_scheduler_vk_hash( diff --git a/core/node/eth_watch/src/tests/mod.rs b/core/node/eth_watch/src/tests/mod.rs index e6c7945b7d93..36833eb0f2dc 100644 --- a/core/node/eth_watch/src/tests/mod.rs +++ b/core/node/eth_watch/src/tests/mod.rs @@ -1,6 +1,5 @@ use std::convert::TryInto; -use zksync_config::ContractsConfig; use zksync_contracts::chain_admin_contract; use zksync_dal::{Connection, ConnectionPool, Core, CoreDal}; use zksync_types::{ @@ -72,6 +71,7 @@ fn build_upgrade_tx(id: ProtocolVersionId) -> ProtocolUpgradeTx { common_data: ProtocolUpgradeTxCommonData { upgrade_id: id, sender: [1u8; 20].into(), + // Note, that the field is deprecated eth_block: 0, gas_limit: Default::default(), max_fee_per_gas: Default::default(), @@ -109,7 +109,6 @@ async fn create_test_watcher( sl_l2_client, connection_pool, std::time::Duration::from_nanos(1), - &ContractsConfig::for_tests(), L2ChainId::default(), ) .await @@ -216,13 +215,12 @@ async fn test_normal_operation_upgrade_timestamp() { None, connection_pool.clone(), std::time::Duration::from_nanos(1), - &ContractsConfig::for_tests(), L2ChainId::default(), ) .await .unwrap(); - let expected_upgrade_tx = build_upgrade_tx(ProtocolVersionId::Version28); + let expected_upgrade_tx = build_upgrade_tx(ProtocolVersionId::next()); let mut storage = connection_pool.connection().await.unwrap(); client @@ -237,7 +235,7 @@ async fn test_normal_operation_upgrade_timestamp() { ( ProtocolUpgrade { version: ProtocolSemanticVersion { - minor: ProtocolVersionId::Version28, + minor: ProtocolVersionId::next(), patch: 0.into(), }, tx: Some(expected_upgrade_tx.clone()), @@ -248,7 +246,7 @@ async fn test_normal_operation_upgrade_timestamp() { ( ProtocolUpgrade { version: ProtocolSemanticVersion { - minor: ProtocolVersionId::Version28, + minor: ProtocolVersionId::next(), patch: 1.into(), }, tx: None, @@ -272,7 +270,7 @@ async fn test_normal_operation_upgrade_timestamp() { watcher.loop_iteration(&mut storage).await.unwrap(); let db_versions = storage.protocol_versions_dal().all_versions().await; let mut expected_version = ProtocolSemanticVersion { - minor: ProtocolVersionId::Version28, + minor: ProtocolVersionId::next(), patch: 0.into(), }; assert_eq!(db_versions.len(), 4); @@ -283,7 +281,7 @@ async fn test_normal_operation_upgrade_timestamp() { // Check that tx was saved with the second upgrade. 
let tx = storage .protocol_versions_dal() - .get_protocol_upgrade_tx(ProtocolVersionId::Version28) + .get_protocol_upgrade_tx(ProtocolVersionId::next()) .await .unwrap() .expect("no protocol upgrade transaction"); diff --git a/core/node/genesis/src/lib.rs b/core/node/genesis/src/lib.rs index e68aa59b7696..0425a475c5bc 100644 --- a/core/node/genesis/src/lib.rs +++ b/core/node/genesis/src/lib.rs @@ -8,7 +8,7 @@ use anyhow::Context as _; use zksync_config::GenesisConfig; use zksync_contracts::{ hyperchain_contract, verifier_contract, BaseSystemContracts, BaseSystemContractsHashes, - SET_CHAIN_ID_EVENT, + GENESIS_UPGRADE_EVENT, }; use zksync_dal::{custom_genesis_export_dal::GenesisState, Connection, Core, CoreDal, DalError}; use zksync_eth_client::{CallFunctionArgs, EthInterface}; @@ -20,7 +20,7 @@ use zksync_types::{ bytecode::BytecodeHash, commitment::{CommitmentInput, L1BatchCommitment}, fee_model::BatchFeeInput, - protocol_upgrade::decode_set_chain_id_event, + protocol_upgrade::decode_genesis_upgrade_event, protocol_version::{L1VerifierConfig, ProtocolSemanticVersion}, system_contracts::get_system_smart_contracts, u256_to_h256, @@ -589,14 +589,14 @@ pub async fn save_set_chain_id_tx( storage: &mut Connection<'_, Core>, query_client: &dyn EthInterface, diamond_proxy_address: Address, - state_transition_manager_address: Address, ) -> anyhow::Result<()> { let to = query_client.block_number().await?.as_u64(); let from = to.saturating_sub(PRIORITY_EXPIRATION); + let filter = FilterBuilder::default() - .address(vec![state_transition_manager_address]) + .address(vec![diamond_proxy_address]) .topics( - Some(vec![SET_CHAIN_ID_EVENT.signature()]), + Some(vec![GENESIS_UPGRADE_EVENT.signature()]), Some(vec![diamond_proxy_address.into()]), None, None, @@ -612,7 +612,7 @@ pub async fn save_set_chain_id_tx( logs ); let (version_id, upgrade_tx) = - decode_set_chain_id_event(logs.remove(0)).context("Chain id event is incorrect")?; + decode_genesis_upgrade_event(logs.remove(0)).context("Chain id event is incorrect")?; tracing::info!("New version id {:?}", version_id); storage diff --git a/core/node/node_framework/src/implementations/layers/eth_sender/manager.rs b/core/node/node_framework/src/implementations/layers/eth_sender/manager.rs index e9ce4cc19e1a..b8951c2a91ca 100644 --- a/core/node/node_framework/src/implementations/layers/eth_sender/manager.rs +++ b/core/node/node_framework/src/implementations/layers/eth_sender/manager.rs @@ -6,7 +6,10 @@ use zksync_eth_sender::EthTxManager; use crate::{ implementations::resources::{ circuit_breakers::CircuitBreakersResource, - eth_interface::{BoundEthInterfaceForBlobsResource, BoundEthInterfaceResource}, + eth_interface::{ + BoundEthInterfaceForBlobsResource, BoundEthInterfaceForL2Resource, + BoundEthInterfaceResource, + }, gas_adjuster::GasAdjusterResource, healthcheck::AppHealthCheckResource, pools::{MasterPool, PoolResource, ReplicaPool}, @@ -46,6 +49,7 @@ pub struct Input { pub replica_pool: PoolResource, pub eth_client: BoundEthInterfaceResource, pub eth_client_blobs: Option, + pub eth_client_gateway: Option, pub gas_adjuster: GasAdjusterResource, #[context(default)] pub circuit_breakers: CircuitBreakersResource, @@ -80,10 +84,9 @@ impl WiringLayer for EthTxManagerLayer { let master_pool = input.master_pool.get().await.unwrap(); let replica_pool = input.replica_pool.get().await.unwrap(); - let settlement_mode = self.eth_sender_config.gas_adjuster.unwrap().settlement_mode; let eth_client = input.eth_client.0.clone(); let eth_client_blobs = 
input.eth_client_blobs.map(|c| c.0); - let l2_client = input.eth_client.0; + let l2_client = input.eth_client_gateway.map(|c| c.0); let config = self.eth_sender_config.sender.context("sender")?; @@ -93,21 +96,9 @@ impl WiringLayer for EthTxManagerLayer { master_pool, config, gas_adjuster, - if !settlement_mode.is_gateway() { - Some(eth_client) - } else { - None - }, - if !settlement_mode.is_gateway() { - eth_client_blobs - } else { - None - }, - if settlement_mode.is_gateway() { - Some(l2_client) - } else { - None - }, + Some(eth_client), + eth_client_blobs, + l2_client, ); // Insert circuit breaker. diff --git a/core/node/node_framework/src/implementations/layers/eth_watch.rs b/core/node/node_framework/src/implementations/layers/eth_watch.rs index da0e26355cf2..92356e770c40 100644 --- a/core/node/node_framework/src/implementations/layers/eth_watch.rs +++ b/core/node/node_framework/src/implementations/layers/eth_watch.rs @@ -98,6 +98,11 @@ impl WiringLayer for EthWatchLayer { .ecosystem_contracts .as_ref() .and_then(|a| a.l1_bytecodes_supplier_addr), + self.contracts_config + .ecosystem_contracts + .as_ref() + .and_then(|a| a.l1_wrapped_base_token_store), + self.contracts_config.l1_shared_bridge_proxy_addr, self.contracts_config .ecosystem_contracts .as_ref() @@ -105,6 +110,7 @@ impl WiringLayer for EthWatchLayer { self.contracts_config.chain_admin_addr, self.contracts_config.governance_addr, self.eth_watch_config.confirmations_for_eth_event, + self.chain_id, ); let sl_l2_client: Option> = @@ -113,12 +119,17 @@ impl WiringLayer for EthWatchLayer { Some(Box::new(EthHttpQueryClient::new( gateway_client.0, contracts_config.diamond_proxy_addr, - // Bytecode supplier is only present on L1 + // Only present on L1. + None, + // Only present on L1. + None, + // Only present on L1. 
None, Some(contracts_config.state_transition_proxy_addr), contracts_config.chain_admin_addr, contracts_config.governance_addr, self.eth_watch_config.confirmations_for_eth_event, + self.chain_id, ))) } else { None @@ -130,7 +141,6 @@ impl WiringLayer for EthWatchLayer { sl_l2_client, main_pool, self.eth_watch_config.poll_interval(), - &self.contracts_config, self.chain_id, ) .await?; diff --git a/core/node/node_framework/src/implementations/layers/web3_api/server/bridge_addresses.rs b/core/node/node_framework/src/implementations/layers/web3_api/server/bridge_addresses.rs index 785c19846a60..a515e4cc1db9 100644 --- a/core/node/node_framework/src/implementations/layers/web3_api/server/bridge_addresses.rs +++ b/core/node/node_framework/src/implementations/layers/web3_api/server/bridge_addresses.rs @@ -1,8 +1,8 @@ use std::time::Duration; -use zksync_eth_client::CallFunctionArgs; +use zksync_eth_client::{CallFunctionArgs, ContractCallError}; use zksync_node_api_server::web3::state::BridgeAddressesHandle; -use zksync_types::{ethabi::Contract, Address}; +use zksync_types::{ethabi::Contract, Address, L2_ASSET_ROUTER_ADDRESS}; use zksync_web3_decl::{ client::{DynClient, L1, L2}, namespaces::ZksNamespaceClient, @@ -37,20 +37,52 @@ pub struct L1UpdaterInner { pub bridgehub_addr: Address, pub update_interval: Option, pub bridgehub_abi: Contract, + pub l1_asset_router_abi: Contract, +} + +struct L1SharedBridgeInfo { + l1_shared_bridge_addr: Address, + should_use_l2_asset_router: bool, } impl L1UpdaterInner { - async fn loop_iteration(&self) { - let call_result = CallFunctionArgs::new("sharedBridge", ()) + async fn get_shared_bridge_info(&self) -> Result { + let l1_shared_bridge_addr: Address = CallFunctionArgs::new("sharedBridge", ()) .for_contract(self.bridgehub_addr, &self.bridgehub_abi) .call(&self.l1_eth_client) - .await; + .await?; + + let l1_nullifier_addr: Result = + CallFunctionArgs::new("L1_NULLIFIER", ()) + .for_contract(l1_shared_bridge_addr, &self.l1_asset_router_abi) + .call(&self.l1_eth_client) + .await; - match call_result { - Ok(shared_bridge_address) => { + // In case we can successfully retrieve the l1 nullifier, this is definitely the new l1 asset router. + // The contrary is not necessarily true: the query can fail either due to network issues or + // due to the contract being outdated. To be conservative, we just always treat such cases as `false`. 
+ let should_use_l2_asset_router = l1_nullifier_addr.is_ok(); + + Ok(L1SharedBridgeInfo { + l1_shared_bridge_addr, + should_use_l2_asset_router, + }) + } + + async fn loop_iteration(&self) { + match self.get_shared_bridge_info().await { + Ok(info) => { self.bridge_address_updater - .update_l1_shared_bridge(shared_bridge_address) + .update_l1_shared_bridge(info.l1_shared_bridge_addr) .await; + // We only update one way: + // - Once the L2 asset router should be used, there is never a need to go back + // - To not undo the previous change in case of a network error + if info.should_use_l2_asset_router { + self.bridge_address_updater + .update_l2_shared_bridge(L2_ASSET_ROUTER_ADDRESS) + .await; + } } Err(err) => { tracing::error!("Failed to query shared bridge address, error: {err:?}"); diff --git a/core/node/node_framework/src/implementations/layers/web3_api/server/mod.rs b/core/node/node_framework/src/implementations/layers/web3_api/server/mod.rs index c4c18b6ecb3f..b1d9ca79979e 100644 --- a/core/node/node_framework/src/implementations/layers/web3_api/server/mod.rs +++ b/core/node/node_framework/src/implementations/layers/web3_api/server/mod.rs @@ -5,7 +5,7 @@ use bridge_addresses::{L1UpdaterInner, MainNodeUpdaterInner}; use tokio::{sync::oneshot, task::JoinHandle}; use zksync_circuit_breaker::replication_lag::ReplicationLagChecker; use zksync_config::configs::api::MaxResponseSize; -use zksync_contracts::bridgehub_contract; +use zksync_contracts::{bridgehub_contract, l1_asset_router_contract}; use zksync_node_api_server::web3::{ state::{BridgeAddressesHandle, InternalApiConfig, SealedL2BlockNumber}, ApiBuilder, ApiServer, Namespace, @@ -209,25 +209,26 @@ impl WiringLayer for Web3ServerLayer { // In case it is an EN, the bridge addresses should be updated by fetching values from the main node. // It is the main node, the bridge addresses need to be updated by querying the L1. - - let bridge_addresses_updater_task = if let Some(main_node_client) = input.main_node_client { - BridgeAddressesUpdaterTask::MainNodeUpdater(MainNodeUpdaterInner { - bridge_address_updater: bridge_addresses_handle.clone(), - main_node_client: main_node_client.0, - update_interval: self.optional_config.bridge_addresses_refresh_interval, - }) - } else { - BridgeAddressesUpdaterTask::L1Updater(L1UpdaterInner { - bridge_address_updater: bridge_addresses_handle.clone(), - l1_eth_client: input.l1_eth_client.0, - bridgehub_addr: self - .internal_api_config - .l1_bridgehub_proxy_addr - .context("Lacking l1 bridgehub proxy address")?, - update_interval: self.optional_config.bridge_addresses_refresh_interval, - bridgehub_abi: bridgehub_contract(), - }) - }; + let bridge_addresses_updater_task = + if let Some(main_node_client) = input.main_node_client.clone() { + BridgeAddressesUpdaterTask::MainNodeUpdater(MainNodeUpdaterInner { + bridge_address_updater: bridge_addresses_handle.clone(), + main_node_client: main_node_client.0, + update_interval: self.optional_config.bridge_addresses_refresh_interval, + }) + } else { + BridgeAddressesUpdaterTask::L1Updater(L1UpdaterInner { + bridge_address_updater: bridge_addresses_handle.clone(), + l1_eth_client: input.l1_eth_client.0, + bridgehub_addr: self + .internal_api_config + .l1_bridgehub_proxy_addr + .context("Lacking l1 bridgehub proxy address")?, + update_interval: self.optional_config.bridge_addresses_refresh_interval, + bridgehub_abi: bridgehub_contract(), + l1_asset_router_abi: l1_asset_router_contract(), + }) + }; // Build server. 
let mut api_builder = @@ -251,6 +252,9 @@ impl WiringLayer for Web3ServerLayer { if let Some(sync_state) = sync_state { api_builder = api_builder.with_sync_state(sync_state); } + if let Some(main_node_client) = input.main_node_client { + api_builder = api_builder.with_l2_l1_log_proof_handler(main_node_client.0) + } let replication_lag_limit = self.optional_config.replication_lag_limit; api_builder = self.optional_config.apply(api_builder); diff --git a/core/node/node_storage_init/src/main_node/genesis.rs b/core/node/node_storage_init/src/main_node/genesis.rs index a5d6c0e628ac..cef25e87ba7c 100644 --- a/core/node/node_storage_init/src/main_node/genesis.rs +++ b/core/node/node_storage_init/src/main_node/genesis.rs @@ -55,16 +55,13 @@ impl InitializeStorage for MainNodeGenesis { ) .await?; - if let Some(ecosystem_contracts) = &self.contracts.ecosystem_contracts { - zksync_node_genesis::save_set_chain_id_tx( - &mut storage, - &self.l1_client, - self.contracts.diamond_proxy_addr, - ecosystem_contracts.state_transition_proxy_addr, - ) - .await - .context("Failed to save SetChainId upgrade transaction")?; - } + zksync_node_genesis::save_set_chain_id_tx( + &mut storage, + &self.l1_client, + self.contracts.diamond_proxy_addr, + ) + .await + .context("Failed to save SetChainId upgrade transaction")?; Ok(()) } diff --git a/core/node/node_sync/src/tree_data_fetcher/provider/mod.rs b/core/node/node_sync/src/tree_data_fetcher/provider/mod.rs index 432808422632..c627006f70e7 100644 --- a/core/node/node_sync/src/tree_data_fetcher/provider/mod.rs +++ b/core/node/node_sync/src/tree_data_fetcher/provider/mod.rs @@ -144,17 +144,11 @@ impl L1DataProvider { diamond_proxy_addr: l1_diamond_proxy_addr, }; let gateway_chain_data = if let Some(client) = gateway_client { - let contract = bridgehub_contract(); - let function_name = if contract.function("getZKChain").is_ok() { - "getZKChain" - } else { - "getHyperchain" - }; let gateway_diamond_proxy = CallFunctionArgs::new( - function_name, + "getZKChain", zksync_types::ethabi::Token::Uint(l2_chain_id.as_u64().into()), ) - .for_contract(L2_BRIDGEHUB_ADDRESS, &contract) + .for_contract(L2_BRIDGEHUB_ADDRESS, &bridgehub_contract()) .call(&client) .await?; let chain_id = client.fetch_chain_id().await?; diff --git a/core/node/node_sync/src/tree_data_fetcher/provider/tests.rs b/core/node/node_sync/src/tree_data_fetcher/provider/tests.rs index e8c855359390..14ab34bab10d 100644 --- a/core/node/node_sync/src/tree_data_fetcher/provider/tests.rs +++ b/core/node/node_sync/src/tree_data_fetcher/provider/tests.rs @@ -256,13 +256,8 @@ fn mock_l1_client(block_number: U64, logs: Vec, chain_id: SLChainId) .method("eth_chainId", move || Ok(U64::from(chain_id.0))) .method("eth_call", move |req: CallRequest, _block_id: BlockId| { let contract = bridgehub_contract(); - let function_name = if contract.function("getZKChain").is_ok() { - "getZKChain" - } else { - "getHyperchain" - }; let expected_input = contract - .function(function_name) + .function("getZKChain") .unwrap() .encode_input(&[ethabi::Token::Uint(ERA_CHAIN_ID.into())]) .unwrap(); diff --git a/core/node/state_keeper/src/executor/tests/mod.rs b/core/node/state_keeper/src/executor/tests/mod.rs index eade0233d0e0..219cacc60c85 100644 --- a/core/node/state_keeper/src/executor/tests/mod.rs +++ b/core/node/state_keeper/src/executor/tests/mod.rs @@ -3,11 +3,13 @@ use assert_matches::assert_matches; use rand::{thread_rng, Rng}; use test_casing::{test_casing, Product}; +use zksync_contracts::l2_message_root; use 
zksync_dal::{ConnectionPool, Core}; use zksync_multivm::interface::{BatchTransactionExecutionResult, ExecutionResult, Halt}; use zksync_test_contracts::{Account, TestContract}; use zksync_types::{ - get_nonce_key, utils::storage_key_for_eth_balance, vm::FastVmMode, web3, PriorityOpId, H256, + get_nonce_key, utils::storage_key_for_eth_balance, vm::FastVmMode, web3, Execute, PriorityOpId, + H256, L2_MESSAGE_ROOT_ADDRESS, U256, }; use self::tester::{AccountExt, StorageSnapshot, TestConfig, Tester}; @@ -63,7 +65,30 @@ async fn execute_l2_tx(storage_type: StorageType, vm_mode: FastVmMode) { let mut tester = Tester::new(connection_pool, vm_mode); tester.genesis().await; tester.fund(&[alice.address()]).await; - let mut executor = tester.create_batch_executor(storage_type).await; + + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + + let mut executor = tester + .create_batch_executor_with_init_transactions( + storage_type, + &[message_root_init_txn.clone()], + ) + .await; let res = executor.execute_tx(alice.execute()).await.unwrap(); assert_executed(&res); @@ -106,7 +131,25 @@ async fn execute_l2_tx_after_snapshot_recovery( let mut alice = Account::random(); let connection_pool = ConnectionPool::::constrained_test_pool(1).await; - let mut storage_snapshot = StorageSnapshot::new(&connection_pool, &mut alice, 10).await; + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + + let mut storage_snapshot = + StorageSnapshot::new(&connection_pool, &mut alice, 10, &[message_root_init_txn]).await; assert!(storage_snapshot.storage_logs.len() > 10); // sanity check assert!(!storage_snapshot.factory_deps.is_empty()); if let Some(mutation) = mutation { @@ -138,8 +181,29 @@ async fn execute_l1_tx(vm_mode: FastVmMode) { tester.genesis().await; tester.fund(&[alice.address()]).await; + + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + let mut executor = tester - .create_batch_executor(StorageType::AsyncRocksdbCache) + .create_batch_executor_with_init_transactions( + StorageType::AsyncRocksdbCache, + &[message_root_init_txn.clone()], + ) .await; let res = executor @@ -160,8 +224,29 @@ async fn execute_l2_and_l1_txs(vm_mode: FastVmMode) { let mut tester = Tester::new(connection_pool, vm_mode); tester.genesis().await; tester.fund(&[alice.address()]).await; + + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + 
factory_deps: vec![], + }, + None, + ); + let mut executor = tester - .create_batch_executor(StorageType::AsyncRocksdbCache) + .create_batch_executor_with_init_transactions( + StorageType::AsyncRocksdbCache, + &[message_root_init_txn.clone()], + ) .await; let res = executor.execute_tx(alice.execute()).await.unwrap(); @@ -243,8 +328,29 @@ async fn rollback(vm_mode: FastVmMode) { tester.genesis().await; tester.fund(&[alice.address()]).await; + + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + let mut executor = tester - .create_batch_executor(StorageType::AsyncRocksdbCache) + .create_batch_executor_with_init_transactions( + StorageType::AsyncRocksdbCache, + &[message_root_init_txn.clone()], + ) .await; let tx = alice.execute(); @@ -297,8 +403,29 @@ async fn too_big_gas_limit(vm_mode: FastVmMode) { let mut tester = Tester::new(connection_pool, vm_mode); tester.genesis().await; tester.fund(&[alice.address()]).await; + + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + let mut executor = tester - .create_batch_executor(StorageType::AsyncRocksdbCache) + .create_batch_executor_with_init_transactions( + StorageType::AsyncRocksdbCache, + &[message_root_init_txn.clone()], + ) .await; let big_gas_limit_tx = alice.execute_with_gas_limit(u32::MAX); @@ -341,8 +468,29 @@ async fn deploy_and_call_loadtest(vm_mode: FastVmMode) { let mut tester = Tester::new(connection_pool, vm_mode); tester.genesis().await; tester.fund(&[alice.address()]).await; + + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + let mut executor = tester - .create_batch_executor(StorageType::AsyncRocksdbCache) + .create_batch_executor_with_init_transactions( + StorageType::AsyncRocksdbCache, + &[message_root_init_txn.clone()], + ) .await; let tx = alice.deploy_loadnext_tx(); @@ -389,13 +537,35 @@ async fn deploy_failedcall(vm_mode: FastVmMode) { async fn execute_reverted_tx(vm_mode: FastVmMode) { let connection_pool = ConnectionPool::::constrained_test_pool(1).await; let mut alice = Account::random(); + let mut bob = Account::random(); let mut tester = Tester::new(connection_pool, vm_mode); tester.genesis().await; - tester.fund(&[alice.address()]).await; + tester.fund(&[alice.address(), bob.address()]).await; + + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = bob.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + let mut executor = tester - 
.create_batch_executor(StorageType::AsyncRocksdbCache) + .create_batch_executor_with_init_transactions( + StorageType::AsyncRocksdbCache, + &[message_root_init_txn.clone()], + ) .await; let tx = alice.deploy_loadnext_tx(); @@ -427,8 +597,29 @@ async fn execute_realistic_scenario(vm_mode: FastVmMode) { tester.genesis().await; tester.fund(&[alice.address()]).await; tester.fund(&[bob.address()]).await; + + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + let mut executor = tester - .create_batch_executor(StorageType::AsyncRocksdbCache) + .create_batch_executor_with_init_transactions( + StorageType::AsyncRocksdbCache, + &[message_root_init_txn.clone()], + ) .await; // A good tx should be executed successfully. @@ -567,8 +758,30 @@ async fn catchup_rocksdb_cache() { tester.genesis().await; tester.fund(&[alice.address(), bob.address()]).await; + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + // Execute a bunch of transactions to populate Postgres-based storage (note that RocksDB stays empty) - let mut executor = tester.create_batch_executor(StorageType::Postgres).await; + let mut executor = tester + .create_batch_executor_with_init_transactions( + StorageType::Postgres, + &[message_root_init_txn.clone()], + ) + .await; for _ in 0..10 { let res = executor.execute_tx(alice.execute()).await.unwrap(); assert_executed(&res); @@ -582,7 +795,10 @@ async fn catchup_rocksdb_cache() { // Async RocksDB cache should be aware of the tx and should reject it let mut executor = tester - .create_batch_executor(StorageType::AsyncRocksdbCache) + .create_batch_executor_with_init_transactions( + StorageType::AsyncRocksdbCache, + &[message_root_init_txn.clone()], + ) .await; let res = executor.execute_tx(tx.clone()).await.unwrap(); assert_rejected(&res); @@ -595,7 +811,12 @@ async fn catchup_rocksdb_cache() { tester.wait_for_tasks().await; // Sync RocksDB storage should be aware of the tx and should reject it - let mut executor = tester.create_batch_executor(StorageType::Rocksdb).await; + let mut executor = tester + .create_batch_executor_with_init_transactions( + StorageType::Rocksdb, + &[message_root_init_txn.clone()], + ) + .await; let res = executor.execute_tx(tx).await.unwrap(); assert_rejected(&res); } diff --git a/core/node/state_keeper/src/executor/tests/tester.rs b/core/node/state_keeper/src/executor/tests/tester.rs index 3727d9c16bfb..8b6df7f04840 100644 --- a/core/node/state_keeper/src/executor/tests/tester.rs +++ b/core/node/state_keeper/src/executor/tests/tester.rs @@ -1,16 +1,18 @@ //! Testing harness for the batch executor. //! Contains helper functionality to initialize test context and perform tests without too much boilerplate. 
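//! (Editorial sketch, not part of the patch.) The executor tests patched above all
//! bootstrap the batch with the same "initialize the L2 message root" transaction
//! before running their scenario. The repeated construction could be consolidated
//! into a small helper; a minimal sketch using only items that already appear in
//! this diff:
//!
//!     fn message_root_init_txn(account: &mut Account) -> Transaction {
//!         let calldata = l2_message_root()
//!             .function("initialize")
//!             .unwrap()
//!             .encode_input(&[])
//!             .unwrap();
//!         account.get_l2_tx_for_execute(
//!             Execute {
//!                 contract_address: Some(L2_MESSAGE_ROOT_ADDRESS),
//!                 calldata,
//!                 value: U256::zero(),
//!                 factory_deps: vec![],
//!             },
//!             None,
//!         )
//!     }
//!
//! with call sites then reduced to
//!
//!     let txn = message_root_init_txn(&mut alice);
//!     let mut executor = tester
//!         .create_batch_executor_with_init_transactions(StorageType::AsyncRocksdbCache, &[txn.clone()])
//!         .await;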
-use std::{collections::HashMap, fmt::Debug, sync::Arc}; +use std::{collections::HashMap, fmt::Debug, str::FromStr, sync::Arc}; +use assert_matches::assert_matches; use tempfile::TempDir; use tokio::{sync::watch, task::JoinHandle}; use zksync_config::configs::chain::StateKeeperConfig; -use zksync_dal::{ConnectionPool, Core, CoreDal}; +use zksync_contracts::l2_rollup_da_validator_bytecode; +use zksync_dal::{Connection, ConnectionPool, Core, CoreDal}; use zksync_multivm::{ interface::{ executor::{BatchExecutor, BatchExecutorFactory}, - L1BatchEnv, L2BlockEnv, SystemEnv, + ExecutionResult, L1BatchEnv, L2BlockEnv, SystemEnv, }, utils::StorageWritesDeduplicator, vm_latest::constants::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, @@ -23,8 +25,10 @@ use zksync_test_contracts::{ }; use zksync_types::{ block::L2BlockHasher, + bytecode::BytecodeHash, commitment::PubdataParams, ethabi::Token, + get_code_key, get_known_code_key, protocol_version::ProtocolSemanticVersion, snapshots::{SnapshotRecoveryStatus, SnapshotStorageLog}, system_contracts::get_system_smart_contracts, @@ -38,12 +42,15 @@ use zksync_vm_executor::batch::{MainBatchExecutorFactory, TraceCalls}; use super::{read_storage_factory::RocksdbStorageFactory, StorageType}; use crate::{ - testonly, - testonly::BASE_SYSTEM_CONTRACTS, + testonly::{self, BASE_SYSTEM_CONTRACTS}, tests::{default_l1_batch_env, default_system_env}, AsyncRocksdbCache, }; +fn get_da_contract_address() -> Address { + Address::from_str("7726827caac94a7f9e1b160f7ea819f172f7b6f9").unwrap() +} + /// Representation of configuration parameters used by the state keeper. /// Has sensible defaults for most tests, each of which can be overridden. #[derive(Debug)] @@ -97,6 +104,22 @@ impl Tester { self.config = config; } + /// Extension of `create_batch_executor` that allows us to run some initial transactions to bootstrap the state. + pub(super) async fn create_batch_executor_with_init_transactions( + &mut self, + storage_type: StorageType, + transactions: &[Transaction], + ) -> Box> { + let mut executor = self.create_batch_executor(storage_type).await; + + for txn in transactions { + let res = executor.execute_tx(txn.clone()).await.unwrap(); + assert_matches!(res.tx_result.result, ExecutionResult::Success { .. }); + } + + executor + } + /// Creates a batch executor instance with the specified storage type. /// This function intentionally uses sensible defaults to not introduce boilerplate. 
pub(super) async fn create_batch_executor( @@ -270,6 +293,9 @@ impl Tester { ) .await .unwrap(); + + // Also setting up the da for tests + Self::setup_da(&mut storage).await; } } @@ -308,6 +334,42 @@ impl Tester { } } + pub async fn setup_contract<'a>( + con: &mut Connection<'a, Core>, + address: Address, + code: Vec, + ) { + let hash: H256 = BytecodeHash::for_bytecode(&code).value(); + let known_code_key = get_known_code_key(&hash); + let code_key = get_code_key(&address); + + let logs = vec![ + StorageLog::new_write_log(known_code_key, H256::from_low_u64_be(1u64)), + StorageLog::new_write_log(code_key, hash), + ]; + + for log in logs { + apply_genesis_log(con, log).await; + } + + let mut factory_deps = HashMap::new(); + factory_deps.insert(hash, code); + + con.factory_deps_dal() + .insert_factory_deps(L2BlockNumber(0), &factory_deps) + .await + .unwrap(); + } + + async fn setup_da<'a>(con: &mut Connection<'a, Core>) { + Self::setup_contract( + con, + get_da_contract_address(), + l2_rollup_da_validator_bytecode(), + ) + .await; + } + pub(super) async fn wait_for_tasks(&mut self) { for task in self.tasks.drain(..) { task.await.expect("Failed to join a task"); @@ -557,6 +619,7 @@ impl StorageSnapshot { connection_pool: &ConnectionPool, alice: &mut Account, transaction_count: u32, + transactions: &[Transaction], ) -> Self { let mut tester = Tester::new(connection_pool.clone(), FastVmMode::Old); tester.genesis().await; @@ -594,6 +657,30 @@ impl StorageSnapshot { }; let mut storage_writes_deduplicator = StorageWritesDeduplicator::new(); + for transaction in transactions { + let tx_hash = transaction.hash(); // probably incorrect + let res = executor.execute_tx(transaction.clone()).await.unwrap(); + if !res.tx_result.result.is_failed() { + let storage_logs = &res.tx_result.logs.storage_logs; + storage_writes_deduplicator + .apply(storage_logs.iter().filter(|log| log.log.is_write())); + } else { + panic!("Unexpected tx execution result: {res:?}"); + }; + + let mut hasher = L2BlockHasher::new( + L2BlockNumber(l2_block_env.number), + l2_block_env.timestamp, + l2_block_env.prev_block_hash, + ); + hasher.push_tx_hash(tx_hash); + + l2_block_env.number += 1; + l2_block_env.timestamp += 1; + l2_block_env.prev_block_hash = hasher.finalize(ProtocolVersionId::latest()); + executor.start_next_l2_block(l2_block_env).await.unwrap(); + } + for _ in 0..transaction_count { let tx = alice.execute(); let tx_hash = tx.hash(); // probably incorrect @@ -679,3 +766,25 @@ impl StorageSnapshot { snapshot } } + +async fn apply_genesis_log<'a>(storage: &mut Connection<'a, Core>, log: StorageLog) { + storage + .storage_logs_dal() + .append_storage_logs(L2BlockNumber(0), &[log]) + .await + .unwrap(); + + if storage + .storage_logs_dedup_dal() + .filter_written_slots(&[log.key.hashed_key()]) + .await + .unwrap() + .is_empty() + { + storage + .storage_logs_dedup_dal() + .insert_initial_writes(L1BatchNumber(0), &[log.key.hashed_key()]) + .await + .unwrap(); + } +} diff --git a/core/node/state_keeper/src/keeper.rs b/core/node/state_keeper/src/keeper.rs index c892fd8534ec..401150a3fccd 100644 --- a/core/node/state_keeper/src/keeper.rs +++ b/core/node/state_keeper/src/keeper.rs @@ -259,7 +259,7 @@ impl ZkSyncStateKeeper { } /// This function is meant to be called only once during the state-keeper initialization. - /// It will check if we should load a protocol upgrade or a `setChainId` transaction, + /// It will check if we should load a protocol upgrade or a `GenesisUpgrade` transaction, /// perform some checks and return it. 
pub(super) async fn load_protocol_upgrade_tx( &mut self, @@ -268,9 +268,9 @@ impl ZkSyncStateKeeper { l1_batch_number: L1BatchNumber, ) -> Result, Error> { // After the Shared Bridge is integrated, - // there has to be a setChainId upgrade transaction after the chain genesis. + // there has to be a GenesisUpgrade upgrade transaction after the chain genesis. // It has to be the first transaction of the first batch. - // The setChainId upgrade does not bump the protocol version, but attaches an upgrade + // The GenesisUpgrade upgrade does not bump the protocol version, but attaches an upgrade // transaction to the genesis protocol version. let first_batch_in_shared_bridge = l1_batch_number == L1BatchNumber(1) && !protocol_version.is_pre_shared_bridge(); diff --git a/core/node/state_keeper/src/testonly/mod.rs b/core/node/state_keeper/src/testonly/mod.rs index 3da666628b1b..c0f3707f9455 100644 --- a/core/node/state_keeper/src/testonly/mod.rs +++ b/core/node/state_keeper/src/testonly/mod.rs @@ -1,10 +1,12 @@ //! Test utilities that can be used for testing sequencer that may //! be useful outside of this crate. +use std::collections::HashMap; + use async_trait::async_trait; use once_cell::sync::Lazy; use zksync_contracts::BaseSystemContracts; -use zksync_dal::{ConnectionPool, Core, CoreDal as _}; +use zksync_dal::{Connection, ConnectionPool, Core, CoreDal as _}; use zksync_multivm::interface::{ executor::{BatchExecutor, BatchExecutorFactory}, storage::{InMemoryStorage, StorageView}, @@ -13,10 +15,10 @@ use zksync_multivm::interface::{ }; use zksync_state::OwnedStorage; use zksync_types::{ - commitment::PubdataParams, fee::Fee, u256_to_h256, - utils::storage_key_for_standard_token_balance, AccountTreeId, Address, L1BatchNumber, - L2BlockNumber, StorageLog, Transaction, L2_BASE_TOKEN_ADDRESS, SYSTEM_CONTEXT_MINIMAL_BASE_FEE, - U256, + bytecode::BytecodeHash, commitment::PubdataParams, fee::Fee, get_code_key, get_known_code_key, + u256_to_h256, utils::storage_key_for_standard_token_balance, AccountTreeId, Address, + L1BatchNumber, L2BlockNumber, StorageLog, Transaction, H256, L2_BASE_TOKEN_ADDRESS, + SYSTEM_CONTEXT_MINIMAL_BASE_FEE, U256, }; pub mod test_batch_executor; @@ -74,6 +76,27 @@ impl BatchExecutor for MockBatchExecutor { } } +async fn apply_genesis_log<'a>(storage: &mut Connection<'a, Core>, log: StorageLog) { + storage + .storage_logs_dal() + .append_storage_logs(L2BlockNumber(0), &[log]) + .await + .unwrap(); + if storage + .storage_logs_dedup_dal() + .filter_written_slots(&[log.key.hashed_key()]) + .await + .unwrap() + .is_empty() + { + storage + .storage_logs_dedup_dal() + .insert_initial_writes(L1BatchNumber(0), &[log.key.hashed_key()]) + .await + .unwrap(); + } +} + /// Adds funds for specified account list. /// Expects genesis to be performed (i.e. `setup_storage` called beforehand). 
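// (Editorial note, summarizing the helpers above.) "Setting up" state at genesis in
// these test utilities boils down to a few kinds of writes, all routed through
// `apply_genesis_log`:
//
//     // mark the bytecode hash as known
//     StorageLog::new_write_log(get_known_code_key(&hash), H256::from_low_u64_be(1));
//     // point the target address at that bytecode
//     StorageLog::new_write_log(get_code_key(&address), hash);
//     // plus the factory dependency so the VM can actually load the code
//     factory_deps_dal().insert_factory_deps(L2BlockNumber(0), &factory_deps);
//
// `fund` below uses the same mechanism with a single base-token balance slot write.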
pub async fn fund(pool: &ConnectionPool, addresses: &[Address]) { @@ -89,27 +112,36 @@ pub async fn fund(pool: &ConnectionPool, addresses: &[Address]) { let value = u256_to_h256(eth_amount); let storage_log = StorageLog::new_write_log(key, value); - storage - .storage_logs_dal() - .append_storage_logs(L2BlockNumber(0), &[storage_log]) - .await - .unwrap(); - if storage - .storage_logs_dedup_dal() - .filter_written_slots(&[storage_log.key.hashed_key()]) - .await - .unwrap() - .is_empty() - { - storage - .storage_logs_dedup_dal() - .insert_initial_writes(L1BatchNumber(0), &[storage_log.key.hashed_key()]) - .await - .unwrap(); - } + apply_genesis_log(&mut storage, storage_log).await; } } +pub async fn setup_contract(pool: &ConnectionPool, address: Address, code: Vec) { + let mut storage = pool.connection().await.unwrap(); + + let hash: H256 = BytecodeHash::for_bytecode(&code).value(); + let known_code_key = get_known_code_key(&hash); + let code_key = get_code_key(&address); + + let logs = vec![ + StorageLog::new_write_log(known_code_key, H256::from_low_u64_be(1u64)), + StorageLog::new_write_log(code_key, hash), + ]; + + for log in logs { + apply_genesis_log(&mut storage, log).await; + } + + let mut factory_deps = HashMap::new(); + factory_deps.insert(hash, code); + + storage + .factory_deps_dal() + .insert_factory_deps(L2BlockNumber(0), &factory_deps) + .await + .unwrap(); +} + pub(crate) const DEFAULT_GAS_PER_PUBDATA: u32 = 10000; pub fn fee(gas_limit: u32) -> Fee { diff --git a/core/node/state_keeper/src/tests/mod.rs b/core/node/state_keeper/src/tests/mod.rs index b73741998a03..e235cddf8423 100644 --- a/core/node/state_keeper/src/tests/mod.rs +++ b/core/node/state_keeper/src/tests/mod.rs @@ -365,7 +365,7 @@ async fn load_upgrade_tx() { // TODO: add one more test case for the shared bridge after it's integrated. // If we are processing the 1st batch while using the shared bridge, - // we should load the upgrade transaction -- that's the `SetChainIdUpgrade`. + // we should load the upgrade transaction -- that's the `GenesisUpgrade`. } /// Unconditionally seal the batch without triggering specific criteria. diff --git a/core/tests/loadnext/src/executor.rs b/core/tests/loadnext/src/executor.rs index 43a1be164b64..d0fbb696b607 100644 --- a/core/tests/loadnext/src/executor.rs +++ b/core/tests/loadnext/src/executor.rs @@ -400,6 +400,7 @@ impl Executor { ) .await .unwrap(); + eth_nonce += U256::one(); eth_txs.push(res); } @@ -428,6 +429,19 @@ impl Executor { } } + let balance = self + .pool + .master_wallet + .get_balance(BlockNumber::Latest, self.l2_main_token) + .await?; + let necessary_balance = + U256::from(self.erc20_transfer_amount() * self.config.accounts_amount as u128); + + tracing::info!( + "Master account token balance on l2: {balance:?}, necessary balance \ + for initial transfers {necessary_balance:?}" + ); + // And then we will prepare an L2 transaction to send ERC20 token (for transfers and fees). 
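        // (Editorial sketch, not part of the patch.) The balance log added above is
        // informational only; if a hard precondition is preferred, and assuming the
        // enclosing function returns an `anyhow::Result`, it could become:
        //
        //     anyhow::ensure!(
        //         balance >= necessary_balance,
        //         "master wallet L2 token balance {balance:?} is below {necessary_balance:?} needed for initial transfers"
        //     );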
let mut builder = master_wallet .start_transfer() @@ -441,10 +455,8 @@ impl Executor { self.l2_main_token, MIN_ALLOWANCE_FOR_PAYMASTER_ESTIMATE.into(), ); - let fee = builder.estimate_fee(Some(paymaster_params)).await?; builder = builder.fee(fee.clone()); - let paymaster_params = get_approval_based_paymaster_input( paymaster_address, self.l2_main_token, diff --git a/core/tests/loadnext/src/sdk/abi/update-abi.sh b/core/tests/loadnext/src/sdk/abi/update-abi.sh index 3fdcd4d58028..34b7e759c6cf 100755 --- a/core/tests/loadnext/src/sdk/abi/update-abi.sh +++ b/core/tests/loadnext/src/sdk/abi/update-abi.sh @@ -7,7 +7,7 @@ cat $ZKSYNC_HOME/contracts/l1-contracts/artifacts/contracts/bridgehub/IBridgehub cat $ZKSYNC_HOME/contracts/l1-contracts/artifacts/contracts/state-transition/IStateTransitionManager.sol/IStateTransitionManager.json | jq '{ abi: .abi}' > IStateTransitionManager.json cat $ZKSYNC_HOME/contracts/l1-contracts/artifacts/contracts/state-transition/chain-interfaces/IZkSyncHyperchain.sol/IZkSyncHyperchain.json | jq '{ abi: .abi}' > IZkSyncHyperchain.json # Default L1 bridge -cat $ZKSYNC_HOME/contracts/l1-contracts/artifacts/contracts/bridge/interfaces/IL1SharedBridge.sol/IL1SharedBridge.json | jq '{ abi: .abi}' > IL1SharedBridge.json +cat $ZKSYNC_HOME/contracts/l1-contracts/artifacts/contracts/bridge/interfaces/IL1AssetRouter.sol/IL1AssetRouter.json | jq '{ abi: .abi}' > IL1AssetRouter.json cat $ZKSYNC_HOME/contracts/l1-contracts/artifacts/contracts/bridge/interfaces/IL1ERC20Bridge.sol/IL1ERC20Bridge.json | jq '{ abi: .abi}' > IL1ERC20Bridge.json # Paymaster interface cat $ZKSYNC_HOME/contracts/l2-contracts/artifacts-zk/contracts/interfaces/IPaymasterFlow.sol/IPaymasterFlow.json | jq '{ abi: .abi}' > IPaymasterFlow.json diff --git a/core/tests/loadnext/src/sdk/ethereum/mod.rs b/core/tests/loadnext/src/sdk/ethereum/mod.rs index 4557c2c43200..bbb3514e2a0d 100644 --- a/core/tests/loadnext/src/sdk/ethereum/mod.rs +++ b/core/tests/loadnext/src/sdk/ethereum/mod.rs @@ -475,7 +475,7 @@ impl EthereumProvider { .as_u64() .ok_or(ClientError::Other)? } else { - 600000u64 + 800000u64 } }; diff --git a/core/tests/revert-test/tests/utils.ts b/core/tests/revert-test/tests/utils.ts index dac19f228ffc..8290598a1feb 100644 --- a/core/tests/revert-test/tests/utils.ts +++ b/core/tests/revert-test/tests/utils.ts @@ -144,6 +144,7 @@ async function runBlockReverter( --secrets-path=${configPaths['secrets.yaml']} --wallets-path=${configPaths['wallets.yaml']} --genesis-path=${configPaths['genesis.yaml']} + --gateway-chain-path=${configPaths['gateway_chain.yaml']} `; } @@ -312,14 +313,14 @@ export class NodeSpawner { public async spawnMainNode(enableExecute: boolean): Promise> { const env = this.env ?? process.env; - env.ETH_SENDER_SENDER_AGGREGATED_BLOCK_EXECUTE_DEADLINE = enableExecute ? '1' : '10000'; + env.ETH_SENDER_SENDER_L1_BATCH_MIN_AGE_BEFORE_EXECUTE_SECONDS = enableExecute ? '0' : '10000'; // Set full mode for the Merkle tree as it is required to get blocks committed. env.DATABASE_MERKLE_TREE_MODE = 'full'; const { fileConfig, pathToHome, options, logs } = this; if (fileConfig.loadFromFile) { - replaceL1BatchMinAgeBeforeExecuteSeconds(pathToHome, fileConfig, enableExecute ? 1 : 10000); + replaceL1BatchMinAgeBeforeExecuteSeconds(pathToHome, fileConfig, enableExecute ? 
0 : 10000); } let components = 'api,tree,eth,state_keeper,commitment_generator,da_dispatcher,vm_runner_protective_reads'; diff --git a/core/tests/ts-integration/package.json b/core/tests/ts-integration/package.json index ee0fa9c99848..3362b9d6a89e 100644 --- a/core/tests/ts-integration/package.json +++ b/core/tests/ts-integration/package.json @@ -4,7 +4,7 @@ "license": "MIT", "private": true, "scripts": { - "test": "zk f jest --forceExit --verbose --testTimeout 120000", + "test": "zk f jest --forceExit --verbose --testTimeout 150000", "long-running-test": "zk f jest", "fee-test": "RUN_FEE_TEST=1 zk f jest -- fees.test.ts", "api-test": "zk f jest -- api/web3.test.ts api/debug.test.ts", @@ -22,6 +22,7 @@ "@types/node": "^18.19.15", "@types/node-fetch": "^2.5.7", "chalk": "^4.0.0", + "elliptic": "^6.5.5", "ethereumjs-abi": "^0.6.8", "ethers": "^6.7.1", "hardhat": "=2.22.2", @@ -32,8 +33,7 @@ "ts-jest": "^29.0.1", "ts-node": "^10.1.0", "typescript": "^4.3.5", - "zksync-ethers": "^6.9.0", - "elliptic": "^6.5.5", - "yaml": "^2.4.2" + "yaml": "^2.4.2", + "zksync-ethers": "https://github.com/zksync-sdk/zksync-ethers#sb-use-new-encoding-in-sdk" } } diff --git a/core/tests/ts-integration/src/context-owner.ts b/core/tests/ts-integration/src/context-owner.ts index 6b9c4d0541b2..57ca54da7b27 100644 --- a/core/tests/ts-integration/src/context-owner.ts +++ b/core/tests/ts-integration/src/context-owner.ts @@ -604,7 +604,7 @@ export class TestContextOwner { // Reset the reporter context. this.reporter = new Reporter(); try { - if (this.env.nodeMode == NodeMode.Main && isLocalHost(this.env.network.toLowerCase())) { + if (this.env.nodeMode == NodeMode.Main && isLocalHost(this.env.network)) { // Check that the VM execution hasn't diverged using the VM playground. The component and thus the main node // will crash on divergence, so we just need to make sure that the test doesn't exit before the VM playground // processes all batches on the node. diff --git a/core/tests/ts-integration/src/env.ts b/core/tests/ts-integration/src/env.ts index 58dc5b08a8d9..1c0725acc13a 100644 --- a/core/tests/ts-integration/src/env.ts +++ b/core/tests/ts-integration/src/env.ts @@ -168,6 +168,7 @@ async function loadTestEnvironmentFromFile(fileConfig: FileConfig): Promise { l2Address: baseTokenAddressL2 }, timestampAsserterAddress, - timestampAsserterMinTimeTillEndSec + timestampAsserterMinTimeTillEndSec, + l2WETHAddress: undefined }; } diff --git a/core/tests/ts-integration/src/helpers.ts b/core/tests/ts-integration/src/helpers.ts index d1d84d54a545..88819c669655 100644 --- a/core/tests/ts-integration/src/helpers.ts +++ b/core/tests/ts-integration/src/helpers.ts @@ -30,6 +30,10 @@ export function getContractSource(relativePath: string): string { return source; } +export function readContract(path: string, fileName: string) { + return JSON.parse(fs.readFileSync(`${path}/${fileName}.sol/${fileName}.json`, { encoding: 'utf-8' })); +} + /** * Performs a contract deployment * @@ -86,7 +90,7 @@ export async function waitForNewL1Batch(wallet: zksync.Wallet): Promise { return; } - const EIP1559_TX_TYPE = 2; const amount = 1; const erc20ABI = ['function transfer(address to, uint256 amount)']; const erc20contract = new ethers.Contract(l2Token, erc20ABI, alice); @@ -230,8 +229,9 @@ describe('web3 API compatibility tests', () => { expect(tx1.l1BatchNumber).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. expect(tx1.l1BatchTxIndex).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. 
expect(tx1.chainId).toEqual(chainId); - expect(tx1.type).toEqual(EIP1559_TX_TYPE); + expect(tx1.type).toEqual(EIP712_TX_TYPE); + const EIP1559_TX_TYPE = 2; expect(receipt!.l1BatchNumber).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. expect(receipt!.l1BatchTxIndex).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. expect(receipt!.logs[0].l1BatchNumber).toEqual(receipt!.l1BatchNumber); @@ -240,6 +240,7 @@ describe('web3 API compatibility tests', () => { expect(block.l1BatchTimestamp).toEqual(expect.anything()); expect(blockWithTransactions.l1BatchNumber).toEqual(receipt!.l1BatchNumber); expect(blockWithTransactions.l1BatchTimestamp).toEqual(expect.anything()); + for (const tx of blockWithTransactions.prefetchedTransactions) { expect(tx.l1BatchNumber).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. expect(tx.l1BatchTxIndex).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. diff --git a/core/tests/ts-integration/tests/base-token.test.ts b/core/tests/ts-integration/tests/base-token.test.ts index 432ce70ae17f..a22014751035 100644 --- a/core/tests/ts-integration/tests/base-token.test.ts +++ b/core/tests/ts-integration/tests/base-token.test.ts @@ -7,7 +7,7 @@ import { Token } from '../src/types'; import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; -import { scaledGasPrice } from '../src/helpers'; +import { scaledGasPrice, waitForL2ToL1LogProof } from '../src/helpers'; const SECONDS = 2000; jest.setTimeout(100 * SECONDS); @@ -78,7 +78,7 @@ describe('base ERC20 contract checks', () => { // TODO: should all the following tests use strict equality? const finalEthBalance = await alice.getBalanceL1(); - expect(initialEthBalance).toBeGreaterThan(finalEthBalance + fee); // Fee should be taken from the ETH balance on L1. + expect(initialEthBalance).toBeGreaterThanOrEqual(finalEthBalance + fee); // Fee should be taken from the ETH balance on L1. 
const finalL1Balance = await alice.getBalanceL1(baseTokenDetails.l1Address); expect(initialL1Balance).toBeGreaterThanOrEqual(finalL1Balance + amount); @@ -167,7 +167,8 @@ describe('base ERC20 contract checks', () => { const withdrawalPromise = alice.withdraw({ token: baseTokenDetails.l2Address, amount }); await expect(withdrawalPromise).toBeAccepted([]); const withdrawalTx = await withdrawalPromise; - await withdrawalTx.waitFinalize(); + const l2Receipt = await withdrawalTx.wait(); + await waitForL2ToL1LogProof(alice, l2Receipt!.blockNumber, withdrawalTx.hash); await expect(alice.finalizeWithdrawal(withdrawalTx.hash)).toBeAccepted([]); const receipt = await alice._providerL2().getTransactionReceipt(withdrawalTx.hash); diff --git a/core/tests/ts-integration/tests/contracts.test.ts b/core/tests/ts-integration/tests/contracts.test.ts index de1c632ab9cc..aa9dbe6e1a89 100644 --- a/core/tests/ts-integration/tests/contracts.test.ts +++ b/core/tests/ts-integration/tests/contracts.test.ts @@ -7,7 +7,7 @@ */ import { TestMaster } from '../src'; -import { deployContract, getTestContract, waitForNewL1Batch } from '../src/helpers'; +import { deployContract, getTestContract, scaledGasPrice, waitForNewL1Batch } from '../src/helpers'; import { shouldOnlyTakeFee } from '../src/modifiers/balance-checker'; import * as ethers from 'ethers'; @@ -99,22 +99,24 @@ describe('Smart contract behavior checks', () => { return; } + const gasPrice = await scaledGasPrice(alice); const infiniteLoop = await deployContract(alice, contracts.infinite, []); // Test eth_call first // TODO: provide a proper error for transactions that consume too much gas. // await expect(infiniteLoop.callStatic.infiniteLoop()).toBeRejected('cannot estimate transaction: out of gas'); // ...and then an actual transaction - await expect(infiniteLoop.infiniteLoop({ gasLimit: 1_000_000 })).toBeReverted([]); + await expect(infiniteLoop.infiniteLoop({ gasLimit: 1_000_000, gasPrice })).toBeReverted([]); }); test('Should test reverting storage logs', async () => { // In this test we check that if transaction reverts, it rolls back the storage slots. const prevValue = await counterContract.get(); + const gasPrice = await scaledGasPrice(alice); - // We manually provide a constant, since otherwise the exception would be thrown - // while estimating gas - await expect(counterContract.incrementWithRevert(5, true, { gasLimit: 5000000 })).toBeReverted([]); + // We manually provide a gas limit and gas price, since otherwise the exception would be thrown + // while querying zks_estimateFee. 
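+        // (Editorial note.) The same pattern recurs in the ERC-20 tests later in this
+        // patch: estimation for a reverting call would itself revert, so the gas limit
+        // is taken from a similar non-failing call and the gas price is pinned via
+        // scaledGasPrice, e.g.
+        //
+        //     const gasPrice = await scaledGasPrice(alice);
+        //     const gasLimit = await aliceErc20.transfer.estimateGas(bob.address, 1); // similar, non-failing tx
+        //     await expect(aliceErc20.transfer(bob.address, value, { gasLimit, gasPrice })).toBeReverted([...]);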
+ await expect(counterContract.incrementWithRevert(5, true, { gasLimit: 5000000, gasPrice })).toBeReverted(); // The tx has been reverted, so the value Should not have been changed: const newValue = await counterContract.get(); diff --git a/core/tests/ts-integration/tests/erc20.test.ts b/core/tests/ts-integration/tests/erc20.test.ts index 9173989ea98b..a0345fb71ab1 100644 --- a/core/tests/ts-integration/tests/erc20.test.ts +++ b/core/tests/ts-integration/tests/erc20.test.ts @@ -8,10 +8,10 @@ import { shouldChangeTokenBalances, shouldOnlyTakeFee } from '../src/modifiers/b import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; -import { scaledGasPrice, waitUntilBlockFinalized } from '../src/helpers'; +import { scaledGasPrice, waitForL2ToL1LogProof } from '../src/helpers'; import { L2_DEFAULT_ETH_PER_ACCOUNT } from '../src/context-owner'; -describe('ERC20 contract checks', () => { +describe('L1 ERC20 contract checks', () => { let testMaster: TestMaster; let alice: zksync.Wallet; let bob: zksync.Wallet; @@ -96,6 +96,7 @@ describe('ERC20 contract checks', () => { test('Incorrect transfer should revert', async () => { const value = ethers.parseEther('1000000.0'); + const gasPrice = await scaledGasPrice(alice); // Since gas estimation is expected to fail, we request gas limit for similar non-failing tx. const gasLimit = await aliceErc20.transfer.estimateGas(bob.address, 1); @@ -109,12 +110,16 @@ describe('ERC20 contract checks', () => { const feeTaken = await shouldOnlyTakeFee(alice); // Send transfer, it should revert due to lack of balance. - await expect(aliceErc20.transfer(bob.address, value, { gasLimit })).toBeReverted([noBalanceChange, feeTaken]); + await expect(aliceErc20.transfer(bob.address, value, { gasLimit, gasPrice })).toBeReverted([ + noBalanceChange, + feeTaken + ]); }); test('Transfer to zero address should revert', async () => { const zeroAddress = ethers.ZeroAddress; const value = 200n; + const gasPrice = await scaledGasPrice(alice); // Since gas estimation is expected to fail, we request gas limit for similar non-failing tx. const gasLimit = await aliceErc20.transfer.estimateGas(bob.address, 1); @@ -127,7 +132,10 @@ describe('ERC20 contract checks', () => { const feeTaken = await shouldOnlyTakeFee(alice); // Send transfer, it should revert because transfers to zero address are not allowed. - await expect(aliceErc20.transfer(zeroAddress, value, { gasLimit })).toBeReverted([noBalanceChange, feeTaken]); + await expect(aliceErc20.transfer(zeroAddress, value, { gasLimit, gasPrice })).toBeReverted([ + noBalanceChange, + feeTaken + ]); }); test('Approve and transferFrom should work', async () => { @@ -166,7 +174,8 @@ describe('ERC20 contract checks', () => { }); await expect(withdrawalPromise).toBeAccepted([l2BalanceChange, feeCheck]); const withdrawalTx = await withdrawalPromise; - await withdrawalTx.waitFinalize(); + const l2TxReceipt = await alice.provider.getTransactionReceipt(withdrawalTx.hash); + await waitForL2ToL1LogProof(alice, l2TxReceipt!.blockNumber, withdrawalTx.hash); // Note: For L1 we should use L1 token address. const l1BalanceChange = await shouldChangeTokenBalances( @@ -176,6 +185,7 @@ describe('ERC20 contract checks', () => { l1: true } ); + await expect(alice.finalizeWithdrawal(withdrawalTx.hash)).toBeAccepted([l1BalanceChange]); }); @@ -206,7 +216,7 @@ describe('ERC20 contract checks', () => { // It throws once it gets status == 0 in the receipt and doesn't wait for the finalization. 
const l2Hash = zksync.utils.getL2HashFromPriorityOp(l1Receipt, await alice.provider.getMainContractAddress()); const l2TxReceipt = await alice.provider.getTransactionReceipt(l2Hash); - await waitUntilBlockFinalized(alice, l2TxReceipt!.blockNumber); + await waitForL2ToL1LogProof(alice, l2TxReceipt!.blockNumber, l2Hash); // Claim failed deposit. await expect(alice.claimFailedDeposit(l2Hash)).toBeAccepted(); await expect(alice.getBalanceL1(tokenDetails.l1Address)).resolves.toEqual(initialBalance); diff --git a/core/tests/ts-integration/tests/ether.test.ts b/core/tests/ts-integration/tests/ether.test.ts index b3f4b6ee14a9..099cf2de8c68 100644 --- a/core/tests/ts-integration/tests/ether.test.ts +++ b/core/tests/ts-integration/tests/ether.test.ts @@ -11,7 +11,7 @@ import { import { checkReceipt } from '../src/modifiers/receipt-check'; import * as zksync from 'zksync-ethers'; -import { scaledGasPrice } from '../src/helpers'; +import { scaledGasPrice, waitForL2ToL1LogProof } from '../src/helpers'; import { ethers } from 'ethers'; describe('ETH token checks', () => { @@ -59,10 +59,9 @@ describe('ETH token checks', () => { const gasPerPubdataByte = zksync.utils.REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_LIMIT; - const l2GasLimit = await zksync.utils.estimateDefaultBridgeDepositL2Gas( + const l2GasLimit = await alice.provider.estimateDefaultBridgeDepositL2Gas( alice.providerL1!, - alice.provider, - zksync.utils.ETH_ADDRESS, + zksync.utils.ETH_ADDRESS_IN_CONTRACTS, amount, alice.address, alice.address, @@ -203,7 +202,10 @@ describe('ETH token checks', () => { const EIP_1559_TX_TYPE = 0x02; const value = 200n; - await expect(alice.sendTransaction({ type: EIP_2930_TX_TYPE, to: bob.address, value })).toBeRejected( + // SDK sets maxFeePerGas to the type 1 transactions, causing issues on the SDK level + const gasPrice = await scaledGasPrice(alice); + + await expect(alice.sendTransaction({ type: EIP_2930_TX_TYPE, to: bob.address, value, gasPrice })).toBeRejected( 'access lists are not supported' ); @@ -258,7 +260,8 @@ describe('ETH token checks', () => { }); await expect(withdrawalPromise).toBeAccepted([l2ethBalanceChange]); const withdrawalTx = await withdrawalPromise; - await withdrawalTx.waitFinalize(); + const l2TxReceipt = await alice.provider.getTransactionReceipt(withdrawalTx.hash); + await waitForL2ToL1LogProof(alice, l2TxReceipt!.blockNumber, withdrawalTx.hash); // TODO (SMA-1374): Enable L1 ETH checks as soon as they're supported. 
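        // (Editorial note.) The withdrawal flow above follows the pattern this patch
        // applies across the integration tests: wait for the receipt, wait until a
        // log proof is available instead of calling waitFinalize(), and only then
        // finalize. Generic sketch (wallet/token/amount are placeholder names):
        //
        //     const tx = await wallet.withdraw({ token, amount });
        //     const receipt = await tx.wait();
        //     await waitForL2ToL1LogProof(wallet, receipt!.blockNumber, tx.hash);
        //     await wallet.finalizeWithdrawal(tx.hash);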
await expect(alice.finalizeWithdrawal(withdrawalTx.hash)).toBeAccepted(); diff --git a/core/tests/ts-integration/tests/fees.test.ts b/core/tests/ts-integration/tests/fees.test.ts index 628a17febd76..a111f5804852 100644 --- a/core/tests/ts-integration/tests/fees.test.ts +++ b/core/tests/ts-integration/tests/fees.test.ts @@ -190,14 +190,16 @@ testFees('Test fees', function () { await ( await alice.sendTransaction({ to: receiver, - value: BigInt(1) + value: BigInt(1), + type: 2 }) ).wait(); await ( await alice.sendTransaction({ data: aliceErc20.interface.encodeFunctionData('transfer', [receiver, 1n]), - to: tokenDetails.l2Address + to: tokenDetails.l2Address, + type: 2 }) ).wait(); @@ -221,22 +223,26 @@ testFees('Test fees', function () { [ { to: ethers.Wallet.createRandom().address, - value: 1n + value: 1n, + type: 2 }, { to: receiver, - value: 1n + value: 1n, + type: 2 }, { data: aliceErc20.interface.encodeFunctionData('transfer', [ ethers.Wallet.createRandom().address, 1n ]), - to: tokenDetails.l2Address + to: tokenDetails.l2Address, + type: 2 }, { data: aliceErc20.interface.encodeFunctionData('transfer', [receiver, 1n]), - to: tokenDetails.l2Address + to: tokenDetails.l2Address, + type: 2 } ], gasPrice, @@ -444,8 +450,9 @@ async function updateReport( oldReport: string ): Promise { const expectedL1Price = +ethers.formatEther(l1Receipt.gasUsed * newL1GasPrice); - - const estimatedL2GasPrice = await sender.provider.getGasPrice(); + // This is flaky without multiplying by 3. + const estimatedL2GasPrice = ethers.getBigInt(await sender.provider.send('eth_gasPrice', [])) * 3n; + transactionRequest.maxFeePerGas = estimatedL2GasPrice; const estimatedL2GasLimit = await sender.estimateGas(transactionRequest); const estimatedPrice = estimatedL2GasPrice * estimatedL2GasLimit; diff --git a/core/tests/ts-integration/tests/l1.test.ts b/core/tests/ts-integration/tests/l1.test.ts index 2d9b9fd78d69..2e3cddb29f9e 100644 --- a/core/tests/ts-integration/tests/l1.test.ts +++ b/core/tests/ts-integration/tests/l1.test.ts @@ -8,7 +8,14 @@ import { TestMaster } from '../src'; import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; -import { bigIntMax, deployContract, getTestContract, scaledGasPrice, waitForNewL1Batch } from '../src/helpers'; +import { + bigIntMax, + deployContract, + getTestContract, + scaledGasPrice, + waitForL2ToL1LogProof, + waitForNewL1Batch +} from '../src/helpers'; import { L1_MESSENGER, L1_MESSENGER_ADDRESS, REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_LIMIT } from 'zksync-ethers/build/utils'; const contracts = { @@ -135,6 +142,7 @@ describe('Tests for L1 behavior', () => { const l2ToL1LogIndex = receipt.l2ToL1Logs.findIndex( (log: zksync.types.L2ToL1Log) => log.sender == L1_MESSENGER_ADDRESS ); + await waitForL2ToL1LogProof(alice, receipt.blockNumber, tx.hash); const msgProof = await alice.provider.getLogProof(tx.hash, l2ToL1LogIndex); expect(msgProof).toBeTruthy(); diff --git a/core/tests/ts-integration/tests/l2-erc20.test.ts b/core/tests/ts-integration/tests/l2-erc20.test.ts new file mode 100644 index 000000000000..16b55b648993 --- /dev/null +++ b/core/tests/ts-integration/tests/l2-erc20.test.ts @@ -0,0 +1,262 @@ +/** + * This suite contains tests checking default ERC-20 contract behavior. 
+ */ + +import { TestMaster } from '../src'; +import { Token } from '../src/types'; +import { shouldChangeTokenBalances, shouldOnlyTakeFee } from '../src/modifiers/balance-checker'; + +import * as zksync from 'zksync-ethers'; +import * as ethers from 'ethers'; +import { Provider, Wallet } from 'ethers'; +import { scaledGasPrice, deployContract, readContract, waitForL2ToL1LogProof } from '../src/helpers'; +import { encodeNTVAssetId } from 'zksync-ethers/build/utils'; + +describe('L2 native ERC20 contract checks', () => { + let testMaster: TestMaster; + let alice: zksync.Wallet; + let isETHBasedChain: boolean; + let baseTokenAddress: string; + let zkTokenAssetId: string; + let tokenDetails: Token; + let aliceErc20: zksync.Contract; + let l1NativeTokenVault: ethers.Contract; + let l1Wallet: Wallet; + let l2Wallet: Wallet; + let l1Provider: Provider; + let l2Provider: Provider; + let l2NativeTokenVault: zksync.Contract; + + beforeAll(async () => { + testMaster = TestMaster.getInstance(__filename); + alice = testMaster.mainAccount(); + const bridgeContracts = await alice.getL1BridgeContracts(); + const assetRouter = bridgeContracts.shared; + l2Provider = alice._providerL2(); + l1Provider = alice._providerL1(); + l2Wallet = new Wallet(alice.privateKey, l2Provider); + l1Wallet = new Wallet(alice.privateKey, l1Provider); + const L2_NATIVE_TOKEN_VAULT_ADDRESS = '0x0000000000000000000000000000000000010004'; + const ARTIFACTS_PATH = '../../../contracts/l1-contracts/out'; + const l2NtvInterface = readContract(`${ARTIFACTS_PATH}`, 'L2NativeTokenVault').abi; + l2NativeTokenVault = new zksync.Contract(L2_NATIVE_TOKEN_VAULT_ADDRESS, l2NtvInterface, l2Wallet); + const l1AssetRouterInterface = readContract(`${ARTIFACTS_PATH}`, 'L1AssetRouter').abi; + const l1NativeTokenVaultInterface = readContract(`${ARTIFACTS_PATH}`, 'L1NativeTokenVault').abi; + const l1AssetRouter = new ethers.Contract(await assetRouter.getAddress(), l1AssetRouterInterface, l1Wallet); + l1NativeTokenVault = new ethers.Contract( + await l1AssetRouter.nativeTokenVault(), + l1NativeTokenVaultInterface, + l1Wallet + ); + + // Get the information about base token address directly from the L2. 
+ baseTokenAddress = await alice._providerL2().getBaseTokenContractAddress(); + isETHBasedChain = baseTokenAddress == zksync.utils.ETH_ADDRESS_IN_CONTRACTS; + + const ZkSyncERC20 = await readContract('../../../contracts/l1-contracts/zkout', 'TestnetERC20Token'); + + aliceErc20 = await deployContract(alice, ZkSyncERC20, ['ZKsync', 'ZK', 18]); + const l2TokenAddress = await aliceErc20.getAddress(); + tokenDetails = { + name: 'ZKsync', + symbol: 'ZK', + decimals: 18n, + l1Address: ethers.ZeroAddress, + l2Address: l2TokenAddress + }; + const mintTx = await aliceErc20.mint(alice.address, 1000n); + await mintTx.wait(); + + // We will test that the token can be withdrawn and work with without explicit registration + const l2ChainId = (await l2Provider.getNetwork()).chainId; + zkTokenAssetId = encodeNTVAssetId(l2ChainId, l2TokenAddress); + + const tokenApprovalTx = await aliceErc20.approve(L2_NATIVE_TOKEN_VAULT_ADDRESS, 100n); + await tokenApprovalTx.wait(); + }); + + test('check weth', async () => { + const weth = testMaster.environment().l2WETHAddress; + if (!weth) { + console.log('skip weth'); + return; + } + const wethabi = await readContract('../../../contracts/l2-contracts/zkout', 'L2WETH').abi; + const wethContract = new zksync.Contract(weth, wethabi, alice); + + const name = await wethContract.name(); + expect(name).toEqual('Wrapped ETH'); + + const addressFromNTV = await l2NativeTokenVault.WETH_TOKEN(); + expect(addressFromNTV.toLowerCase()).toEqual(weth.toLowerCase()); + + const wrapTx = await wethContract.deposit({ value: 1 }); + await expect(wrapTx).toBeAccepted(); + + const balance = await wethContract.balanceOf(alice.address); + expect(balance).toEqual(1n); + + const withdrawTx = alice.withdraw({ + token: weth, + amount: 1 + }); + let thrown = false; + try { + await withdrawTx; + } catch (err: any) { + thrown = true; + // TokenNotSupported(weth) + expect(err.toString()).toContain(ethers.concat(['0x06439c6b', ethers.zeroPadBytes('0x', 12), weth])); + } + expect(thrown).toBeTruthy(); + }); + + test('Token properties are correct', async () => { + await expect(aliceErc20.name()).resolves.toBe(tokenDetails.name); + await expect(aliceErc20.decimals()).resolves.toBe(tokenDetails.decimals); + await expect(aliceErc20.symbol()).resolves.toBe(tokenDetails.symbol); + await expect(aliceErc20.balanceOf(alice.address)).resolves.toBeGreaterThan(0n); // 'Alice should have non-zero balance' + }); + + test('Can perform a withdrawal', async () => { + if (testMaster.isFastMode()) { + return; + } + const amount = 10n; + + const l2BalanceChange = await shouldChangeTokenBalances(tokenDetails.l2Address, [ + { wallet: alice, change: -amount } + ]); + const feeCheck = await shouldOnlyTakeFee(alice); + const withdrawalPromise = alice.withdraw({ + token: tokenDetails.l2Address, + amount + }); + await expect(withdrawalPromise).toBeAccepted([l2BalanceChange, feeCheck]); + const withdrawalTx = await withdrawalPromise; + const l2TxReceipt = await alice.provider.getTransactionReceipt(withdrawalTx.hash); + await withdrawalTx.waitFinalize(); + await waitForL2ToL1LogProof(alice, l2TxReceipt!.blockNumber, withdrawalTx.hash); + + await expect(alice.finalizeWithdrawal(withdrawalTx.hash)).toBeAccepted(); + + tokenDetails.l1Address = await l1NativeTokenVault.tokenAddress(zkTokenAssetId); + const balanceAfterBridging = await alice.getBalanceL1(tokenDetails.l1Address); + expect(balanceAfterBridging).toEqual(10n); + }); + + test('Can perform a deposit', async () => { + const amount = 1n; // 1 wei is enough. 
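+        // (Editorial note.) This deposit test relies on the withdrawal test above
+        // having run first: that is where the L1 twin of the L2-native token is
+        // resolved and stored for later use,
+        //
+        //     tokenDetails.l1Address = await l1NativeTokenVault.tokenAddress(zkTokenAssetId);
+        //
+        // so the tests in this file are order-dependent.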
+ const gasPrice = await scaledGasPrice(alice); + + // Note: for L1 we should use L1 token address. + const l1BalanceChange = await shouldChangeTokenBalances( + tokenDetails.l1Address, + [{ wallet: alice, change: -amount }], + { + l1: true + } + ); + const l2BalanceChange = await shouldChangeTokenBalances(tokenDetails.l2Address, [ + { wallet: alice, change: amount } + ]); + const feeCheck = await shouldOnlyTakeFee(alice, true); + + await expect( + alice.deposit({ + token: tokenDetails.l1Address, + amount, + approveERC20: true, + approveBaseERC20: true, + approveOverrides: { + gasPrice + }, + overrides: { + gasPrice + } + }) + ).toBeAccepted([l1BalanceChange, l2BalanceChange, feeCheck]); + }); + + test('Should claim failed deposit', async () => { + if (testMaster.isFastMode()) { + return; + } + + const amount = 1n; + const initialBalance = await alice.getBalanceL1(tokenDetails.l1Address); + // Deposit to the zero address is forbidden and should fail with the current implementation. + const depositHandle = await alice.deposit({ + token: tokenDetails.l1Address, + to: ethers.ZeroAddress, + amount, + approveERC20: true, + approveBaseERC20: true, + l2GasLimit: 5_000_000 // Setting the limit manually to avoid estimation for L1->L2 transaction + }); + const l1Receipt = await depositHandle.waitL1Commit(); + + // L1 balance should change, but tx should fail in L2. + await expect(alice.getBalanceL1(tokenDetails.l1Address)).resolves.toEqual(initialBalance - amount); + await expect(depositHandle).toBeReverted(); + + // Wait for tx to be finalized. + // `waitFinalize` is not used because it doesn't work as expected for failed transactions. + // It throws once it gets status == 0 in the receipt and doesn't wait for the finalization. + const l2Hash = zksync.utils.getL2HashFromPriorityOp(l1Receipt, await alice.provider.getMainContractAddress()); + const l2TxReceipt = await alice.provider.getTransactionReceipt(l2Hash); + await waitForL2ToL1LogProof(alice, l2TxReceipt!.blockNumber, l2Hash); + + // Claim failed deposit. + await expect(alice.claimFailedDeposit(l2Hash)).toBeAccepted(); + await expect(alice.getBalanceL1(tokenDetails.l1Address)).resolves.toEqual(initialBalance); + }); + + test('Can perform a deposit with precalculated max value', async () => { + if (!isETHBasedChain) { + // approving whole base token balance + const baseTokenDetails = testMaster.environment().baseToken; + const baseTokenMaxAmount = await alice.getBalanceL1(baseTokenDetails.l1Address); + await (await alice.approveERC20(baseTokenDetails.l1Address, baseTokenMaxAmount)).wait(); + } + + // depositing the max amount: the whole balance of the token + const tokenDepositAmount = await alice.getBalanceL1(tokenDetails.l1Address); + + // approving the needed allowance for the deposit + await (await alice.approveERC20(tokenDetails.l1Address, tokenDepositAmount)).wait(); + + // fee of the deposit in ether + const depositFee = await alice.getFullRequiredDepositFee({ + token: tokenDetails.l1Address + }); + + // checking if alice has enough funds to pay the fee + const l1Fee = depositFee.l1GasLimit * (depositFee.maxFeePerGas! 
|| depositFee.gasPrice!); + const l2Fee = depositFee.baseCost; + const aliceBalance = await alice.getBalanceL1(); + if (aliceBalance < l1Fee + l2Fee) { + throw new Error('Not enough balance to pay the fee'); + } + + // deposit handle with the precalculated max amount + const depositHandle = await alice.deposit({ + token: tokenDetails.l1Address, + amount: tokenDepositAmount, + l2GasLimit: depositFee.l2GasLimit, + approveBaseERC20: true, + approveERC20: true, + overrides: depositFee + }); + + // checking the l2 balance change + const l2TokenBalanceChange = await shouldChangeTokenBalances(tokenDetails.l2Address, [ + { wallet: alice, change: tokenDepositAmount } + ]); + await expect(depositHandle).toBeAccepted([l2TokenBalanceChange]); + }); + + afterAll(async () => { + await testMaster.deinitialize(); + }); +}); diff --git a/core/tests/ts-integration/tests/system.test.ts b/core/tests/ts-integration/tests/system.test.ts index 38b21c5839ae..fd833578e86c 100644 --- a/core/tests/ts-integration/tests/system.test.ts +++ b/core/tests/ts-integration/tests/system.test.ts @@ -11,7 +11,7 @@ import { L2_DEFAULT_ETH_PER_ACCOUNT } from '../src/context-owner'; import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; -import { SYSTEM_CONTEXT_ADDRESS, getTestContract } from '../src/helpers'; +import { SYSTEM_CONTEXT_ADDRESS, getTestContract, waitForL2ToL1LogProof } from '../src/helpers'; import { DataAvailabityMode } from '../src/types'; import { BigNumberish } from 'ethers'; @@ -251,6 +251,9 @@ describe('System behavior checks', () => { testMaster.reporter.debug( `Obtained withdrawal receipt for Bob: blockNumber=${bobReceipt.blockNumber}, l1BatchNumber=${bobReceipt.l1BatchNumber}, status=${bobReceipt.status}` ); + + await waitForL2ToL1LogProof(alice, aliceReceipt.blockNumber, aliceReceipt.hash); + await waitForL2ToL1LogProof(bob, bobReceipt.blockNumber, bobReceipt.hash); await expect(alice.finalizeWithdrawal(aliceReceipt.hash)).toBeAccepted([aliceChange]); testMaster.reporter.debug('Finalized withdrawal for Alice'); await expect(alice.finalizeWithdrawal(bobReceipt.hash)).toBeAccepted([bobChange]); @@ -295,6 +298,9 @@ describe('System behavior checks', () => { testMaster.reporter.debug( `Obtained withdrawal receipt #2: blockNumber=${receipt2.blockNumber}, l1BatchNumber=${receipt2.l1BatchNumber}, status=${receipt2.status}` ); + + await waitForL2ToL1LogProof(alice, receipt1.blockNumber, receipt1.hash); + await waitForL2ToL1LogProof(alice, receipt2.blockNumber, receipt2.hash); await expect(alice.finalizeWithdrawal(receipt1.hash)).toBeAccepted([change1]); testMaster.reporter.debug('Finalized withdrawal #1'); await expect(alice.finalizeWithdrawal(receipt2.hash)).toBeAccepted([change2]); diff --git a/core/tests/upgrade-test/tests/upgrade.test.ts b/core/tests/upgrade-test/tests/upgrade.test.ts index b4b950028e1e..5a1902ec8671 100644 --- a/core/tests/upgrade-test/tests/upgrade.test.ts +++ b/core/tests/upgrade-test/tests/upgrade.test.ts @@ -24,13 +24,14 @@ async function logsPath(name: string): Promise { return await logsTestPath(fileConfig.chain, 'logs/upgrade/', name); } +const L2_BRIDGEHUB_ADDRESS = '0x0000000000000000000000000000000000010002'; const pathToHome = path.join(__dirname, '../../../..'); const fileConfig = shouldLoadConfigFromFile(); const contracts: Contracts = initContracts(pathToHome, fileConfig.loadFromFile); const ZK_CHAIN_INTERFACE = JSON.parse( - readFileSync(pathToHome + '/contracts/l1-contracts/out/IZkSyncHyperchain.sol/IZkSyncHyperchain.json').toString() + 
readFileSync(pathToHome + '/contracts/l1-contracts/out/IZKChain.sol/IZKChain.json').toString() ).abi; const depositAmount = ethers.parseEther('0.001'); @@ -70,6 +71,8 @@ describe('Upgrade test', function () { let slMainContract: ethers.Contract; let bootloaderHash: string; + let defaultAccountHash: string; + let bytecodeSupplier: string; let executeOperation: string; let forceDeployAddress: string; let forceDeployBytecode: string; @@ -118,6 +121,7 @@ describe('Upgrade test', function () { ethProviderAddress = secretsConfig.l1.l1_rpc_url; web3JsonRpc = generalConfig.api.web3_json_rpc.http_url; contractsL2DefaultUpgradeAddr = contractsConfig.l2.default_l2_upgrader; + bytecodeSupplier = contractsConfig.ecosystem_contracts.l1_bytecodes_supplier_addr; contractsPriorityTxMaxGasLimit = '72000000'; gatewayInfo = getGatewayInfo(pathToHome, fileConfig.chain); @@ -176,7 +180,7 @@ describe('Upgrade test', function () { const l1CtmContract = new ethers.Contract( contractsConfig.ecosystem_contracts.state_transition_proxy_addr, - contracts.stateTransitionManager, + contracts.chainTypeManager, tester.syncWallet.providerL1 ); ecosystemGovernance = await l1CtmContract.owner(); @@ -262,10 +266,11 @@ describe('Upgrade test', function () { ); bootloaderHash = ethers.hexlify(zksync.utils.hashBytecode(bootloaderCode)); + defaultAccountHash = ethers.hexlify(zksync.utils.hashBytecode(defaultAACode)); - await publishBytecode(tester.syncWallet, bootloaderCode); - await publishBytecode(tester.syncWallet, defaultAACode); - await publishBytecode(tester.syncWallet, forceDeployBytecode); + await publishBytecode(tester.ethWallet, bytecodeSupplier, bootloaderCode); + await publishBytecode(tester.ethWallet, bytecodeSupplier, defaultAACode); + await publishBytecode(tester.ethWallet, bytecodeSupplier, forceDeployBytecode); }); step('Schedule governance call', async () => { @@ -303,11 +308,14 @@ describe('Upgrade test', function () { reserved: [0, 0, 0, 0], data, signature: '0x', - factoryDeps: [ethers.hexlify(zksync.utils.hashBytecode(forceDeployBytecode))], + factoryDeps: [ + bootloaderHash, + defaultAccountHash, + ethers.hexlify(zksync.utils.hashBytecode(forceDeployBytecode)) + ], paymasterInput: '0x', reservedDynamic: '0x' }, - factoryDeps: [forceDeployBytecode], bootloaderHash, upgradeTimestamp: 0 }, @@ -315,6 +323,21 @@ describe('Upgrade test', function () { ); executeOperation = chainUpgradeCalldata; + const pauseMigrationCalldata = await pauseMigrationsCalldata( + alice._providerL1(), + alice._providerL2(), + gatewayInfo + ); + console.log('Scheduling pause migration'); + await sendGovernanceOperation(pauseMigrationCalldata.scheduleTransparentOperation, 0, null); + + console.log('Sending pause migration'); + await sendGovernanceOperation( + pauseMigrationCalldata.executeOperation, + pauseMigrationCalldata.executeOperationValue, + gatewayInfo ? gatewayInfo.gatewayProvider : null + ); + console.log('Sending scheduleTransparentOperation'); await sendGovernanceOperation(stmUpgradeData.scheduleTransparentOperation, 0, null); @@ -326,12 +349,18 @@ describe('Upgrade test', function () { ); console.log('Sending chain admin operation'); - await ( - await slAdminGovWallet.sendTransaction({ - to: await slChainAdminContract.getAddress(), - data: setTimestampCalldata - }) - ).wait(); + // Different chain admin impls are used depending on whether gateway is used. + if (gatewayInfo) { + // ChainAdmin.sol: `setUpgradeTimestamp` has onlySelf so we do multicall. 
+ await sendChainAdminOperation({ + target: await slChainAdminContract.getAddress(), + data: setTimestampCalldata, + value: 0 + }); + } else { + // ChainAdminOwnable.sol: `setUpgradeTimestamp` has onlyOwner so we call it directly. + await chainAdminSetTimestamp(setTimestampCalldata); + } // Wait for server to process L1 event. await utils.sleep(2); @@ -436,6 +465,17 @@ describe('Upgrade test', function () { console.log('Transaction complete!'); } + async function chainAdminSetTimestamp(data: string) { + const transaction = await slAdminGovWallet.sendTransaction({ + to: await slChainAdminContract.getAddress(), + data, + type: 0 + }); + console.log(`Sent chain admin operation, tx_hash=${transaction.hash}, nonce=${transaction.nonce}`); + await transaction.wait(); + console.log(`Chain admin operation succeeded, tx_hash=${transaction.hash}`); + } + async function sendChainAdminOperation(call: Call) { const executeMulticallData = slChainAdminContract.interface.encodeFunctionData('multicall', [[call], true]); @@ -483,18 +523,18 @@ function readCode(newPath: string, legacyPath: string): string { } } -async function publishBytecode(wallet: zksync.Wallet, bytecode: string) { - const txHandle = await wallet.requestExecute({ - contractAddress: ethers.ZeroAddress, - calldata: '0x', - l2GasLimit: 20000000, - factoryDeps: [bytecode], - overrides: { - gasLimit: 3000000 - } - }); - await txHandle.wait(); - await waitForNewL1Batch(wallet); +async function publishBytecode(wallet: ethers.Wallet, bytecodeSupplierAddr: string, bytecode: string) { + const hash = zksync.utils.hashBytecode(bytecode); + const abi = [ + 'function publishBytecode(bytes calldata _bytecode) public', + 'function publishingBlock(bytes32 _hash) public view returns (uint256)' + ]; + + const contract = new ethers.Contract(bytecodeSupplierAddr, abi, wallet); + const block = await contract.publishingBlock(hash); + if (block == BigInt(0)) { + await (await contract.publishBytecode(bytecode)).wait(); + } } async function checkedRandomTransfer(sender: zksync.Wallet, amount: bigint): Promise { @@ -578,7 +618,6 @@ async function prepareUpgradeCalldata( paymasterInput: BytesLike; reservedDynamic: BytesLike; }; - factoryDeps: BytesLike[]; bootloaderHash?: BytesLike; defaultAAHash?: BytesLike; verifier?: string; @@ -604,7 +643,7 @@ async function prepareUpgradeCalldata( const zksyncAddress = await l2Provider.getMainContractAddress(); settlementLayerDiamondProxy = new ethers.Contract(zksyncAddress, ZK_CHAIN_INTERFACE, l1Provider); } - const settlementLayerCTMAddress = await settlementLayerDiamondProxy.getStateTransitionManager(); + const settlementLayerCTMAddress = await settlementLayerDiamondProxy.getChainTypeManager(); const oldProtocolVersion = Number(await settlementLayerDiamondProxy.getProtocolVersion()); const newProtocolVersion = addToProtocolVersion(oldProtocolVersion, 1, 1); @@ -613,7 +652,6 @@ async function prepareUpgradeCalldata( const upgradeInitData = contracts.l1DefaultUpgradeAbi.encodeFunctionData('upgrade', [ [ params.l2ProtocolUpgradeTx, - params.factoryDeps, params.bootloaderHash ?? ethers.ZeroHash, params.defaultAAHash ?? ethers.ZeroHash, params.verifier ?? 
ethers.ZeroAddress, @@ -633,7 +671,7 @@ async function prepareUpgradeCalldata( }; // Prepare calldata for upgrading STM - const stmUpgradeCalldata = contracts.stateTransitionManager.encodeFunctionData('setNewVersionUpgrade', [ + const stmUpgradeCalldata = contracts.chainTypeManager.encodeFunctionData('setNewVersionUpgrade', [ upgradeParam, oldProtocolVersion, // The protocol version will not have any deadline in this upgrade @@ -670,6 +708,25 @@ async function prepareUpgradeCalldata( }; } +async function pauseMigrationsCalldata( + l1Provider: ethers.Provider, + l2Provider: zksync.Provider, + gatewayInfo: GatewayInfo | null +) { + const l1BridgehubAddr = await l2Provider.getBridgehubContractAddress(); + const to = gatewayInfo ? L2_BRIDGEHUB_ADDRESS : l1BridgehubAddr; + + const iface = new ethers.Interface(['function pauseMigration() external']); + + return prepareGovernanceCalldata( + to, + iface.encodeFunctionData('pauseMigration', []), + l1BridgehubAddr, + l1Provider, + gatewayInfo + ); +} + interface UpgradeCalldata { scheduleTransparentOperation: string; executeOperation: string; diff --git a/core/tests/upgrade-test/tests/utils.ts b/core/tests/upgrade-test/tests/utils.ts index 7ea7efb88cb8..9d29bcda4045 100644 --- a/core/tests/upgrade-test/tests/utils.ts +++ b/core/tests/upgrade-test/tests/utils.ts @@ -40,7 +40,7 @@ export interface Contracts { l2ForceDeployUpgraderAbi: any; complexUpgraderAbi: any; counterBytecode: any; - stateTransitionManager: any; + chainTypeManager: any; } export function initContracts(pathToHome: string, zkStack: boolean): Contracts { @@ -68,10 +68,8 @@ export function initContracts(pathToHome: string, zkStack: boolean): Contracts { counterBytecode: require( `${pathToHome}/core/tests/ts-integration/artifacts-zk/contracts/counter/counter.sol/Counter.json` ).deployedBytecode, - stateTransitionManager: new ethers.Interface( - require( - `${CONTRACTS_FOLDER}/l1-contracts/out/StateTransitionManager.sol/StateTransitionManager.json` - ).abi + chainTypeManager: new ethers.Interface( + require(`${CONTRACTS_FOLDER}/l1-contracts/out/ChainTypeManager.sol/ChainTypeManager.json`).abi ) }; } else { @@ -99,10 +97,8 @@ export function initContracts(pathToHome: string, zkStack: boolean): Contracts { ), counterBytecode: require(`${pathToHome}/core/tests/ts-integration/zkout/counter.sol/Counter.json`) .deployedBytecode, - stateTransitionManager: new ethers.Interface( - require( - `${L1_CONTRACTS_FOLDER}/state-transition/StateTransitionManager.sol/StateTransitionManager.json` - ).abi + chainTypeManager: new ethers.Interface( + require(`${L1_CONTRACTS_FOLDER}/state-transition/ChainTypeManager.sol/ChainTypeManager.json`).abi ) }; } diff --git a/docs/src/specs/l1_smart_contracts.md b/docs/src/specs/l1_smart_contracts.md index 65c408714ba3..23fede090124 100644 --- a/docs/src/specs/l1_smart_contracts.md +++ b/docs/src/specs/l1_smart_contracts.md @@ -184,7 +184,7 @@ fee-on-transfer tokens or other custom logic for handling user balances. The owner of the L1ERC20Bridge is the Governance contract. -### L1SharedBridge +### L1AssetRouter The main bridge implementation handles transfers Ether, ERC20 tokens and of WETH tokens between the two domains. 
It is designed to streamline and enhance the user experience for bridging WETH tokens by minimizing the number of transactions diff --git a/etc/env/base/chain.toml b/etc/env/base/chain.toml index 6d1fdae53cee..7b632c3ae3a4 100644 --- a/etc/env/base/chain.toml +++ b/etc/env/base/chain.toml @@ -90,8 +90,8 @@ fee_model_version = "V2" validation_computational_gas_limit = 300000 save_call_traces = true -bootloader_hash = "0x010008c3be57ae5800e077b6c2056d9d75ad1a7b4f0ce583407961cc6fe0b678" -default_aa_hash = "0x0100055dba11508480be023137563caec69debc85f826cb3a4b68246a7cabe30" +bootloader_hash = "0x010008c753336bc8d1ddca235602b9f31d346412b2d463cd342899f7bfb73baf" +default_aa_hash = "0x0100055d760f11a3d737e7fd1816e600a4cd874a9f17f7a225d1f1c537c51a1e" protective_reads_persistence_enabled = false diff --git a/etc/env/base/contracts.toml b/etc/env/base/contracts.toml index ef52ed4c711b..1cb22440e33c 100644 --- a/etc/env/base/contracts.toml +++ b/etc/env/base/contracts.toml @@ -15,7 +15,6 @@ DIAMOND_PROXY_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" L1_MULTICALL3_ADDR = "0xcA11bde05977b3631167028862bE2a173976CA11" L1_ERC20_BRIDGE_PROXY_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" L1_ERC20_BRIDGE_IMPL_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" -L2_ERC20_BRIDGE_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" L2_TESTNET_PAYMASTER_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" L1_ALLOW_LIST_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" CREATE2_FACTORY_ADDR = "0xce0042B868300000d44A59004Da54A005ffdcf9f" @@ -26,13 +25,10 @@ RECURSION_NODE_LEVEL_VK_HASH = "0x1186ec268d49f1905f8d9c1e9d39fc33e98c74f91d91a2 RECURSION_LEAF_LEVEL_VK_HASH = "0x101e08b00193e529145ee09823378ef51a3bc8966504064f1f6ba3f1ba863210" RECURSION_CIRCUITS_SET_VKS_HASH = "0x18c1639094f58177409186e8c48d9f577c9410901d2f1d486b3e7d6cf553ae4c" GENESIS_TX_HASH = "0xb99ebfea46cbe05a21cd80fe5597d97b204befc52a16303f579c607dc1ac2e2e" -GENESIS_ROOT = "0x7275936e5a0063b159d5d22734931fea07871e8d57e564d61ef56e4a6ee23e5c" -GENESIS_BATCH_COMMITMENT = "0xf5f9a5abe62e8a6e0cb2d34d27435c3e5a8fbd7e2e54ca1d108fc58cb86c708a" PRIORITY_TX_MAX_GAS_LIMIT = 72000000 DEPLOY_L2_BRIDGE_COUNTERPART_GAS_LIMIT = 10000000 -GENESIS_ROLLUP_LEAF_INDEX = "54" -GENESIS_PROTOCOL_VERSION = "25" -GENESIS_PROTOCOL_SEMANTIC_VERSION = "0.25.0" +GENESIS_PROTOCOL_VERSION = "26" +GENESIS_PROTOCOL_SEMANTIC_VERSION = "0.26.0" L1_WETH_BRIDGE_IMPL_ADDR = "0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" L1_WETH_BRIDGE_PROXY_ADDR = "0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" L1_WETH_TOKEN_ADDR = "0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" @@ -41,6 +37,19 @@ L2_WETH_TOKEN_IMPL_ADDR = "0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" L2_WETH_TOKEN_PROXY_ADDR = "0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" BLOB_VERSIONED_HASH_RETRIEVER_ADDR = "0x0000000000000000000000000000000000000000" +GENESIS_ROOT = "0x09e68951458b18c24ae5f4100160b53c4888c9b3c3c1859cc674bc02236675ad" +GENESIS_BATCH_COMMITMENT = "0x7238eab6a0e9f5bb84421feae6b6b9ae80816d490c875d29ff3ded375a3e078f" +GENESIS_ROLLUP_LEAF_INDEX = "64" + +# Ecosystem-wide params +L1_ROLLUP_DA_VALIDATOR = "0x0000000000000000000000000000000000000000" +L1_VALIDIUM_DA_VALIDATOR = "0x0000000000000000000000000000000000000000" + +# Chain-specific params +L1_DA_VALIDATOR_ADDR = "0x0000000000000000000000000000000000000000" +L2_DA_VALIDATOR_ADDR = "0x0000000000000000000000000000000000000000" +L1_RELAYED_SL_DA_VALIDATOR = "0x0000000000000000000000000000000000000000" + L1_SHARED_BRIDGE_IMPL_ADDR = 
"0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" # These are currently not used, but will be used once the shared bridge is up BRIDGEHUB_PROXY_ADDR = "0x0000000000000000000000000000000000000000" @@ -48,13 +57,29 @@ BRIDGEHUB_IMPL_ADDR = "0x0000000000000000000000000000000000000000" STATE_TRANSITION_PROXY_ADDR = "0x0000000000000000000000000000000000000000" STATE_TRANSITION_IMPL_ADDR = "0x0000000000000000000000000000000000000000" TRANSPARENT_PROXY_ADMIN_ADDR = "0x0000000000000000000000000000000000000000" +L2_PROXY_ADMIN_ADDR = "0x0000000000000000000000000000000000000000" BASE_TOKEN_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" BASE_TOKEN_BRIDGE_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" GENESIS_UPGRADE_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" -MAX_NUMBER_OF_HYPERCHAINS = 100 +MAX_NUMBER_OF_ZK_CHAINS = 100 L1_SHARED_BRIDGE_PROXY_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" -L2_SHARED_BRIDGE_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" -L2_SHARED_BRIDGE_IMPL_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L1_NATIVE_TOKEN_VAULT_IMPL_ADDR ="0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L1_NATIVE_TOKEN_VAULT_PROXY_ADDR ="0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L2_NATIVE_TOKEN_VAULT_IMPL_ADDR = "0x0000000000000000000000000000000000010004" +L2_NATIVE_TOKEN_VAULT_PROXY_ADDR = "0x0000000000000000000000000000000000010004" +L2_SHARED_BRIDGE_IMPL_ADDR = "0x0000000000000000000000000000000000010003" +L2_SHARED_BRIDGE_ADDR = "0x0000000000000000000000000000000000010003" +L2_ERC20_BRIDGE_ADDR = "0x0000000000000000000000000000000000010003" +CTM_DEPLOYMENT_TRACKER_IMPL_ADDR ="0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +CTM_DEPLOYMENT_TRACKER_PROXY_ADDR ="0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +MESSAGE_ROOT_IMPL_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +MESSAGE_ROOT_PROXY_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L1_NULLIFIER_IMPL_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L1_NULLIFIER_PROXY_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L1_BRIDGED_STANDARD_ERC20_IMPL_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L1_BRIDGED_TOKEN_BEACON_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L2_LEGACY_SHARED_BRIDGE_IMPL_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L2_LEGACY_SHARED_BRIDGE_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" FRI_RECURSION_LEAF_LEVEL_VK_HASH = "0xf9664f4324c1400fa5c3822d667f30e873f53f1b8033180cd15fe41c1e2355c6" FRI_RECURSION_NODE_LEVEL_VK_HASH = "0xf520cd5b37e74e19fdb369c8d676a04dce8a19457497ac6686d2bb95d94109c8" FRI_RECURSION_SCHEDULER_LEVEL_VK_HASH = "0x14f97b81e54b35fe673d8708cc1a19e1ea5b5e348e12d31e39824ed4f42bbca2" @@ -64,6 +89,9 @@ SHARED_BRIDGE_UPGRADE_STORAGE_SWITCH = 0 ERA_CHAIN_ID = 9 ERA_DIAMOND_PROXY_ADDR = "0x0000000000000000000000000000000000000000" CHAIN_ADMIN_ADDR = "0x0000000000000000000000000000000000000000" +CTM_ASSET_INFO = "0xf9664f4324c1400fa5c3822d667f30e873f53f1b8033180cd15fe41c1e2355c6" + +L1_CHAIN_ID = 9 [contracts.test] dummy_verifier = true easy_priority_mode = false diff --git a/etc/env/file_based/general.yaml b/etc/env/file_based/general.yaml index 31dd1a0ed742..f15b63a757f7 100644 --- a/etc/env/file_based/general.yaml +++ b/etc/env/file_based/general.yaml @@ -41,7 +41,7 @@ api: estimate_gas_scale_factor: 1.3 estimate_gas_acceptable_overestimation: 5000 max_tx_size: 1000000 - api_namespaces: [ en,eth,net,web3,zks,pubsub,debug ] + api_namespaces: [ en,eth,net,web3,zks,pubsub,debug,unstable ] state_keeper: transaction_slots: 8192 
max_allowed_l2_tx_gas_limit: 15000000000 diff --git a/etc/env/file_based/genesis.yaml b/etc/env/file_based/genesis.yaml index 9f94dd0c04b6..0cd9959baeaf 100644 --- a/etc/env/file_based/genesis.yaml +++ b/etc/env/file_based/genesis.yaml @@ -1,9 +1,9 @@ -genesis_root: 0x9b30c35100835c0d811c9d385cc9804816dbceb4461b8fe4cbb8d0d5ecdacdec -genesis_rollup_leaf_index: 54 -genesis_batch_commitment: 0x043d432c1b668e54ada198d683516109e45e4f7f81f216ff4c4f469117732e50 -genesis_protocol_version: 25 -default_aa_hash: 0x01000523eadd3061f8e701acda503defb7ac3734ae3371e4daf7494651d8b523 -bootloader_hash: 0x010008e15394cd83a8d463d61e00b4361afbc27c932b07a9d2100861b7d05e78 +genesis_root: 0xd8c9be7efb705e7dcf529c14fce7048ea99dea9eab6a6b4e5f8de1ebf4f2ebf2 +genesis_rollup_leaf_index: 68 +genesis_batch_commitment: 0xf6e873e8894b90f157511a133d941fb6f0892f83147e3d0d2cafa71af8c838e5 +genesis_protocol_version: 26 +default_aa_hash: 0x010004dbf8be36c421254d005352f8245146906919be0099e8a50d0e78df85e0 +bootloader_hash: 0x0100088580465d88420e6369230ee94a32ff356dbcdd407a4be49fc8009b2a81 l1_chain_id: 9 l2_chain_id: 270 fee_account: '0x0000000000000000000000000000000000000001' @@ -11,7 +11,7 @@ prover: fflonk_snark_wrapper_vk_hash: 0x560b19cfd6bcf1049c6409c18d81db288ab7639db080ed3b48df17ddfbcc4666 dummy_verifier: true snark_wrapper_vk_hash: 0x14f97b81e54b35fe673d8708cc1a19e1ea5b5e348e12d31e39824ed4f42bbca2 -genesis_protocol_semantic_version: 0.25.0 +genesis_protocol_semantic_version: 0.26.0 l1_batch_commit_data_generator_mode: Rollup # TODO: uncomment once EVM emulator is present in the `contracts` submodule # evm_emulator_hash: 0x01000e53aa35d9d19fa99341c2e2901cf93b3668f01569dd5c6ca409c7696b91 diff --git a/etc/multivm_bootloaders/vm_gateway/commit b/etc/multivm_bootloaders/vm_gateway/commit index a3547f577034..b6352645c93a 100644 --- a/etc/multivm_bootloaders/vm_gateway/commit +++ b/etc/multivm_bootloaders/vm_gateway/commit @@ -1 +1 @@ -a8bf0ca28d43899882a2e123e2fdf1379f0fd656 +16dedf6d77695ce00f81fce35a3066381b97fca1 diff --git a/etc/multivm_bootloaders/vm_gateway/fee_estimate.yul/fee_estimate.yul.zbin b/etc/multivm_bootloaders/vm_gateway/fee_estimate.yul/fee_estimate.yul.zbin index fb6017f69cf0..9f8ed5b9d676 100644 Binary files a/etc/multivm_bootloaders/vm_gateway/fee_estimate.yul/fee_estimate.yul.zbin and b/etc/multivm_bootloaders/vm_gateway/fee_estimate.yul/fee_estimate.yul.zbin differ diff --git a/etc/multivm_bootloaders/vm_gateway/gas_test.yul/gas_test.yul.zbin b/etc/multivm_bootloaders/vm_gateway/gas_test.yul/gas_test.yul.zbin index c1726d8301ff..3268a37a313c 100644 Binary files a/etc/multivm_bootloaders/vm_gateway/gas_test.yul/gas_test.yul.zbin and b/etc/multivm_bootloaders/vm_gateway/gas_test.yul/gas_test.yul.zbin differ diff --git a/etc/multivm_bootloaders/vm_gateway/playground_batch.yul/playground_batch.yul.zbin b/etc/multivm_bootloaders/vm_gateway/playground_batch.yul/playground_batch.yul.zbin index b154276bd611..ef3354d54659 100644 Binary files a/etc/multivm_bootloaders/vm_gateway/playground_batch.yul/playground_batch.yul.zbin and b/etc/multivm_bootloaders/vm_gateway/playground_batch.yul/playground_batch.yul.zbin differ diff --git a/etc/multivm_bootloaders/vm_gateway/proved_batch.yul/proved_batch.yul.zbin b/etc/multivm_bootloaders/vm_gateway/proved_batch.yul/proved_batch.yul.zbin index 2506ce065d74..e877b81cc2fe 100644 Binary files a/etc/multivm_bootloaders/vm_gateway/proved_batch.yul/proved_batch.yul.zbin and b/etc/multivm_bootloaders/vm_gateway/proved_batch.yul/proved_batch.yul.zbin differ diff --git 
a/infrastructure/local-gateway-upgrade-testing/README.md b/infrastructure/local-gateway-upgrade-testing/README.md new file mode 100644 index 000000000000..d878055f5ff4 --- /dev/null +++ b/infrastructure/local-gateway-upgrade-testing/README.md @@ -0,0 +1,118 @@ +# Local upgrade testing + +While it is theoretically possible to do this in a CI-like style, it generally leads to needless recompilations, especially of Rust +programs. + +Here we contain the files/instructions needed to test the gateway upgrade locally. + +## Step 0 + +- pull zksync-era to ~/zksync-era +- pull zksync-era-private to ~/zksync-era-private + +## Step 1: Preparation + +The easiest way to avoid needless recompilation is caching. There are two ways to set it up: + +- Cache target/etc in a separate directory +- Have two folders of zksync-era and switch between those + +We use the second approach for robustness and simplicity. + +### Enabling `era-cacher` + +Copy `era-cacher` to some other folder (as the zksync-era one will change) and add it to your PATH, so it can be invoked. + +You should download a clone of zksync-era and put it into the `zksync-era-old` directory. It should point to the commit of +`main` we will upgrade from. + +## Step 2: Spawning the old chain + +Run `use-old-era.sh`. The old zksync-era contents will be moved to the `zksync-era-current` working folder, while the new (gateway) +version stays in `zksync-era-private`. + +## Step 3: Move to new chain and upgrade it + +Use the upgrade scripts as in the example below. + +## Full flow + +``` +# make sure that there are 2 folders: zksync-era with the old era and zksync-era-private with the new era +# if the test was run previously you probably need to move the folder +mv ~/zksync-era-current ~/zksync-era-private + +cd ~ && use-old-era.sh && cd ./zksync-era-current + +zkstackup --local && zkstack dev clean all && zkstack up --observability false + +zkstack ecosystem init --deploy-paymaster --deploy-erc20 \ + --deploy-ecosystem --l1-rpc-url=http://127.0.0.1:8545 \ + --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ + --server-db-name=zksync_server_localhost_era \ + --ignore-prerequisites --verbose \ + --observability=false + +cd ~ && use-new-era.sh && cd ./zksync-era-current + +zkstackup --local +zkstack dev contracts +zkstack dev database migrate + +zkstack chain gateway-upgrade -- adapt-config + +# Server should be started in a different window for consistency +zkstack server --ignore-prerequisites --chain era + +zkstack e gateway-upgrade --ecosystem-upgrade-stage no-governance-prepare + +# only if the chain has weth deployed before the upgrade. +# i.e. you must run it iff `predeployed_l2_wrapped_base_token_address` is set in config. +zkstack chain gateway-upgrade -- set-l2weth-for-chain + +zkstack e gateway-upgrade --ecosystem-upgrade-stage governance-stage1 + +zkstack chain gateway-upgrade -- prepare-stage1 + +# restart the server. wait for all L1 txs to execute!!!! + +zkstack chain gateway-upgrade -- schedule-stage1 + +# turn off the server => we need this because we have to somehow update the validator timelock + # also getPriorityTreeStartIndex needs to be updated.
+ +zkstack chain gateway-upgrade -- finalize-stage1 + +# restart the server + +cd ~/zksync-era +zkstack dev test integration --no-deps --ignore-prerequisites --chain era +cd ~/zksync-era-current + +zkstack ecosystem gateway-upgrade --ecosystem-upgrade-stage governance-stage2 +zkstack ecosystem gateway-upgrade --ecosystem-upgrade-stage no-governance-stage2 + +# turn off the server + +zkstack chain gateway-upgrade -- finalize-stage2 + +# turn on the server + +zkstack dev test integration --no-deps --ignore-prerequisites --chain era + + + +zkstack ecosystem gateway-upgrade --ecosystem-upgrade-stage governance-stage3 +zkstack ecosystem gateway-upgrade --ecosystem-upgrade-stage no-governance-stage3 + +# in separate window +zkstack server --ignore-prerequisites --chain gateway + +# wait for era server to finalize all L1 txs +# stop era server! + +zkstack chain migrate-to-gateway --chain era --gateway-chain-name gateway + +# restart era server! +zkstack dev test integration --no-deps --ignore-prerequisites --chain era +``` diff --git a/infrastructure/local-gateway-upgrade-testing/era-cacher/use-new-era.sh b/infrastructure/local-gateway-upgrade-testing/era-cacher/use-new-era.sh new file mode 100755 index 000000000000..7b2bc9ad495a --- /dev/null +++ b/infrastructure/local-gateway-upgrade-testing/era-cacher/use-new-era.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +OLD_REPO=~/zksync-era +NEW_REPO=~/zksync-era-private + +WORKING_DIRECTORY=~/zksync-era-current + +# Check if the folder exists +if [ ! -d "$NEW_REPO" ]; then + echo "Error: The folder '$NEW_REPO' does not exist." + exit 1 +else + echo "Updating to use new era" +fi + +rm -rf $NEW_REPO/chains +mkdir $NEW_REPO/chains +cp -rf $WORKING_DIRECTORY/chains $NEW_REPO + + +rm -rf $NEW_REPO/configs +mkdir $NEW_REPO/configs +cp -rf $WORKING_DIRECTORY/configs $NEW_REPO + + +mv $WORKING_DIRECTORY $OLD_REPO +mv $NEW_REPO $WORKING_DIRECTORY diff --git a/infrastructure/local-gateway-upgrade-testing/era-cacher/use-old-era.sh b/infrastructure/local-gateway-upgrade-testing/era-cacher/use-old-era.sh new file mode 100755 index 000000000000..e52d6ad278b7 --- /dev/null +++ b/infrastructure/local-gateway-upgrade-testing/era-cacher/use-old-era.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +OLD_REPO=~/zksync-era +NEW_REPO=~/zksync-era-private + +WORKING_DIRECTORY=~/zksync-era-current + +# Check if the folder exists +if [ ! -d "$OLD_REPO" ]; then + echo "Error: The folder '$OLD_REPO' does not exist." + exit 1 +else + echo "Updating to use old era." 
+fi + +mv $OLD_REPO $WORKING_DIRECTORY diff --git a/package.json b/package.json index b293bedd8f69..6c7457ba29c0 100644 --- a/package.json +++ b/package.json @@ -7,6 +7,7 @@ "packages": [ "contracts", "contracts/l1-contracts", + "contracts/da-contracts", "contracts/l2-contracts", "contracts/system-contracts", "etc/ERC20", @@ -28,6 +29,7 @@ "local-prep": "yarn workspace local-setup-preparation", "l1-contracts": "yarn workspace l1-contracts", "l2-contracts": "yarn workspace l2-contracts", + "da-contracts": "yarn workspace da-contracts", "revert-test": "yarn workspace revert-test", "upgrade-test": "yarn workspace upgrade-test", "recovery-test": "yarn workspace recovery-test", diff --git a/prover/crates/lib/prover_fri_types/src/lib.rs b/prover/crates/lib/prover_fri_types/src/lib.rs index 44c54da578ee..9d7c32c21d73 100644 --- a/prover/crates/lib/prover_fri_types/src/lib.rs +++ b/prover/crates/lib/prover_fri_types/src/lib.rs @@ -30,7 +30,7 @@ pub mod queue; pub const MAX_COMPRESSION_CIRCUITS: u8 = 5; // THESE VALUES SHOULD BE UPDATED ON ANY PROTOCOL UPGRADE OF PROVERS -pub const PROVER_PROTOCOL_VERSION: ProtocolVersionId = ProtocolVersionId::Version25; +pub const PROVER_PROTOCOL_VERSION: ProtocolVersionId = ProtocolVersionId::Version26; pub const PROVER_PROTOCOL_PATCH: VersionPatch = VersionPatch(0); pub const PROVER_PROTOCOL_SEMANTIC_VERSION: ProtocolSemanticVersion = ProtocolSemanticVersion { minor: PROVER_PROTOCOL_VERSION, diff --git a/yarn.lock b/yarn.lock index 5df8cb570e0f..732577daeb68 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1737,15 +1737,23 @@ sinon-chai "^3.7.0" ts-morph "^22.0.0" -"@matterlabs/hardhat-zksync-node@^0.0.1-beta.7": - version "0.0.1" - resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-node/-/hardhat-zksync-node-0.0.1.tgz#d44bda3c0069b149e2a67c9697eb81166b169ea6" - integrity sha512-rMabl+I813lzXINqTq5OvujQ30wsfO9mTLMPDXuYzEEhEzvnXlaVxuqynKBXrgXAxjmr+G79rqvcWgeKygtwBA== +"@matterlabs/hardhat-zksync-node@^1.2.0": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-node/-/hardhat-zksync-node-1.2.1.tgz#786d51b28ad3aa5b8b973831e016151326d844e4" + integrity sha512-BZDJyEB9iu54D6sOKTGeJrN5TRFLrg6k9E1x3lEwpOfewPwg1eTfb9e/LKGSCePbSremZIHzK3eDRr80hVdDjA== dependencies: - "@matterlabs/hardhat-zksync-solc" "^1.0.5" - axios "^1.4.0" - chalk "4.1.2" - fs-extra "^11.1.1" + "@matterlabs/hardhat-zksync-solc" "^1.2.5" + axios "^1.7.2" + chai "^4.3.4" + chalk "^4.1.2" + debug "^4.3.5" + fs-extra "^11.2.0" + proxyquire "^2.1.3" + semver "^7.6.2" + sinon "^18.0.0" + sinon-chai "^3.7.0" + source-map-support "^0.5.21" + undici "^6.18.2" "@matterlabs/hardhat-zksync-solc@0.4.2": version "0.4.2" @@ -1771,16 +1779,7 @@ proper-lockfile "^4.1.2" semver "^7.5.1" -"@matterlabs/hardhat-zksync-solc@^0.3.15": - version "0.3.17" - resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-0.3.17.tgz#72f199544dc89b268d7bfc06d022a311042752fd" - integrity sha512-aZgQ0yfXW5xPkfuEH1d44ncWV4T2LzKZd0VVPo4PL5cUrYs2/II1FaEDp5zsf3FxOR1xT3mBsjuSrtJkk4AL8Q== - dependencies: - "@nomiclabs/hardhat-docker" "^2.0.0" - chalk "4.1.2" - dockerode "^3.3.4" - -"@matterlabs/hardhat-zksync-solc@^1.0.5", "@matterlabs/hardhat-zksync-solc@^1.1.4": +"@matterlabs/hardhat-zksync-solc@=1.1.4", "@matterlabs/hardhat-zksync-solc@^1.0.5": version "1.1.4" resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-1.1.4.tgz#04a2fad6fb6b6944c64ad969080ee65b9af3f617" integrity 
sha512-4/usbogh9neewR2/v8Dn2OzqVblZMUuT/iH2MyPZgPRZYQlL4SlZtMvokU9UQjZT6iSoaKCbbdWESHDHSzfUjA== @@ -1797,6 +1796,15 @@ sinon-chai "^3.7.0" undici "^5.14.0" +"@matterlabs/hardhat-zksync-solc@^0.3.15": + version "0.3.17" + resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-0.3.17.tgz#72f199544dc89b268d7bfc06d022a311042752fd" + integrity sha512-aZgQ0yfXW5xPkfuEH1d44ncWV4T2LzKZd0VVPo4PL5cUrYs2/II1FaEDp5zsf3FxOR1xT3mBsjuSrtJkk4AL8Q== + dependencies: + "@nomiclabs/hardhat-docker" "^2.0.0" + chalk "4.1.2" + dockerode "^3.3.4" + "@matterlabs/hardhat-zksync-solc@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-1.2.0.tgz#c1ccd1eca0381840196f220b339da08320ad9583" @@ -1814,7 +1822,7 @@ sinon-chai "^3.7.0" undici "^6.18.2" -"@matterlabs/hardhat-zksync-solc@^1.2.4": +"@matterlabs/hardhat-zksync-solc@^1.2.4", "@matterlabs/hardhat-zksync-solc@^1.2.5": version "1.2.5" resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-1.2.5.tgz#fbeeabc3fea0dd232fa3c8cb31bd93c103eba11a" integrity sha512-iZyznWl1Hoe/Z46hnUe1s2drBZBjJOS/eN+Ql2lIBX9B6NevBl9DYzkKzH5HEIMCLGnX9sWpRAJqUQJWy9UB6w== @@ -1882,6 +1890,11 @@ resolved "https://registry.yarnpkg.com/@matterlabs/prettier-config/-/prettier-config-1.0.3.tgz#3e2eb559c0112bbe9671895f935700dad2a15d38" integrity sha512-JW7nHREPqEtjBWz3EfxLarkmJBD8vi7Kx/1AQ6eBZnz12eHc1VkOyrc6mpR5ogTf0dOUNXFAfZut+cDe2dn4kQ== +"@matterlabs/zksync-contracts@^0.6.1": + version "0.6.1" + resolved "https://registry.yarnpkg.com/@matterlabs/zksync-contracts/-/zksync-contracts-0.6.1.tgz#39f061959d5890fd0043a2f1ae710f764b172230" + integrity sha512-+hucLw4DhGmTmQlXOTEtpboYCaOm/X2VJcWmnW4abNcOgQXEHX+mTxQrxEfPjIZT0ZE6z5FTUrOK9+RgUZwBMQ== + "@metamask/eth-sig-util@^4.0.0": version "4.0.1" resolved "https://registry.yarnpkg.com/@metamask/eth-sig-util/-/eth-sig-util-4.0.1.tgz#3ad61f6ea9ad73ba5b19db780d40d9aae5157088" @@ -2305,11 +2318,21 @@ resolved "https://registry.yarnpkg.com/@openzeppelin/contracts-upgradeable/-/contracts-upgradeable-4.9.5.tgz#572b5da102fc9be1d73f34968e0ca56765969812" integrity sha512-f7L1//4sLlflAN7fVzJLoRedrf5Na3Oal5PZfIq55NFcVZ90EpV1q5xOvL4lFvg3MNICSDr2hH0JUBxwlxcoPg== +"@openzeppelin/contracts-upgradeable@4.9.5": + version "4.9.5" + resolved "https://registry.yarnpkg.com/@openzeppelin/contracts-upgradeable/-/contracts-upgradeable-4.9.5.tgz#572b5da102fc9be1d73f34968e0ca56765969812" + integrity sha512-f7L1//4sLlflAN7fVzJLoRedrf5Na3Oal5PZfIq55NFcVZ90EpV1q5xOvL4lFvg3MNICSDr2hH0JUBxwlxcoPg== + "@openzeppelin/contracts-v4@npm:@openzeppelin/contracts@4.9.5": version "4.9.5" resolved "https://registry.yarnpkg.com/@openzeppelin/contracts/-/contracts-4.9.5.tgz#1eed23d4844c861a1835b5d33507c1017fa98de8" integrity sha512-ZK+W5mVhRppff9BE6YdR8CC52C8zAvsVAiWhEtQ5+oNxFE6h1WdeWo+FJSF8KKvtxxVYZ7MTP/5KoVpAU3aSWg== +"@openzeppelin/contracts@4.9.5": + version "4.9.5" + resolved "https://registry.yarnpkg.com/@openzeppelin/contracts/-/contracts-4.9.5.tgz#1eed23d4844c861a1835b5d33507c1017fa98de8" + integrity sha512-ZK+W5mVhRppff9BE6YdR8CC52C8zAvsVAiWhEtQ5+oNxFE6h1WdeWo+FJSF8KKvtxxVYZ7MTP/5KoVpAU3aSWg== + "@openzeppelin/contracts@^4.8.0": version "4.9.6" resolved "https://registry.yarnpkg.com/@openzeppelin/contracts/-/contracts-4.9.6.tgz#2a880a24eb19b4f8b25adc2a5095f2aa27f39677" @@ -5750,6 +5773,14 @@ file-entry-cache@^6.0.1: dependencies: flat-cache "^3.0.4" +fill-keys@^1.0.2: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/fill-keys/-/fill-keys-1.0.2.tgz#9a8fa36f4e8ad634e3bf6b4f3c8882551452eb20" + integrity sha512-tcgI872xXjwFF4xgQmLxi76GnwJG3g/3isB1l4/G5Z4zrbddGpBjqZCO9oEAcB5wX0Hj/5iQB3toxfO7in1hHA== + dependencies: + is-object "~1.0.1" + merge-descriptors "~1.0.0" + fill-range@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" @@ -6827,6 +6858,13 @@ is-core-module@^2.11.0, is-core-module@^2.13.0, is-core-module@^2.13.1: dependencies: hasown "^2.0.0" +is-core-module@^2.16.0: + version "2.16.1" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.16.1.tgz#2a98801a849f43e2add644fbb6bc6229b19a4ef4" + integrity sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w== + dependencies: + hasown "^2.0.2" + is-data-view@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-data-view/-/is-data-view-1.0.1.tgz#4b4d3a511b70f3dc26d42c03ca9ca515d847759f" @@ -6895,6 +6933,11 @@ is-number@^7.0.0: resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== +is-object@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-object/-/is-object-1.0.2.tgz#a56552e1c665c9e950b4a025461da87e72f86fcf" + integrity sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA== + is-path-inside@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" @@ -8123,6 +8166,11 @@ memorystream@^0.3.1: resolved "https://registry.yarnpkg.com/memorystream/-/memorystream-0.3.1.tgz#86d7090b30ce455d63fbae12dda51a47ddcaf9b2" integrity sha512-S3UwM3yj5mtUSEfP41UZmt/0SCoVYUcU1rkXv+BQ5Ig8ndL4sPoJNBUJERafdPb5jjHJGuMgytgKvKIf58XNBw== +merge-descriptors@~1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.3.tgz#d80319a65f3c7935351e5cfdac8f9318504dbed5" + integrity sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ== + merge-stream@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" @@ -8380,6 +8428,11 @@ mocha@^9.0.2: yargs-parser "20.2.4" yargs-unparser "2.0.0" +module-not-found-error@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/module-not-found-error/-/module-not-found-error-1.0.1.tgz#cf8b4ff4f29640674d6cdd02b0e3bc523c2bbdc0" + integrity sha512-pEk4ECWQXV6z2zjhRZUongnLJNUeGQJ3w6OQ5ctGwD+i5o93qjRQUk2Rt6VdNeu3sEP0AB4LcfvdebpxBRVr4g== + moo@^0.5.0: version "0.5.2" resolved "https://registry.yarnpkg.com/moo/-/moo-0.5.2.tgz#f9fe82473bc7c184b0d32e2215d3f6e67278733c" @@ -9220,6 +9273,15 @@ proxy-from-env@^1.1.0: resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== +proxyquire@^2.1.3: + version "2.1.3" + resolved "https://registry.yarnpkg.com/proxyquire/-/proxyquire-2.1.3.tgz#2049a7eefa10a9a953346a18e54aab2b4268df39" + integrity sha512-BQWfCqYM+QINd+yawJz23tbBM40VIGXOdDw3X344KcclI/gtBbdWF6SlQ4nK/bYhF9d27KYug9WzljHC6B9Ysg== + dependencies: + fill-keys "^1.0.2" + module-not-found-error "^1.0.1" + resolve 
"^1.11.1" + prr@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" @@ -9547,6 +9609,15 @@ resolve@^1.1.6, resolve@^1.10.0, resolve@^1.12.0, resolve@^1.20.0, resolve@^1.22 path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" +resolve@^1.11.1: + version "1.22.10" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.10.tgz#b663e83ffb09bbf2386944736baae803029b8b39" + integrity sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w== + dependencies: + is-core-module "^2.16.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + responselike@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/responselike/-/responselike-3.0.0.tgz#20decb6c298aff0dbee1c355ca95461d42823626" @@ -10068,7 +10139,7 @@ source-map-support@0.5.13: buffer-from "^1.0.0" source-map "^0.6.0" -source-map-support@^0.5.13: +source-map-support@^0.5.13, source-map-support@^0.5.21: version "0.5.21" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== @@ -10196,7 +10267,7 @@ string-length@^4.0.1: char-regex "^1.0.2" strip-ansi "^6.0.0" -"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3: +"string-width-cjs@npm:string-width@^4.2.0": version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -10213,6 +10284,15 @@ string-width@^2.1.0, string-width@^2.1.1: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" +string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + string-width@^5.0.1, string-width@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" @@ -10279,7 +10359,7 @@ string_decoder@~1.1.1: dependencies: safe-buffer "~5.1.0" -"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: +"strip-ansi-cjs@npm:strip-ansi@^6.0.1": version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== @@ -10300,6 +10380,13 @@ strip-ansi@^5.1.0: dependencies: ansi-regex "^4.1.0" +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + strip-ansi@^7.0.1: version "7.1.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" @@ -10400,7 +10487,7 @@ 
synckit@^0.8.6: version "0.1.0" dependencies: "@matterlabs/hardhat-zksync-deploy" "^0.7.0" - "@matterlabs/hardhat-zksync-solc" "^1.1.4" + "@matterlabs/hardhat-zksync-solc" "=1.1.4" "@matterlabs/hardhat-zksync-verify" "^1.4.3" commander "^9.4.1" eslint "^8.51.0" @@ -10582,6 +10669,11 @@ toidentifier@1.0.1: resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== +toml@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/toml/-/toml-3.0.0.tgz#342160f1af1904ec9d204d03a5d61222d762c5ee" + integrity sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w== + tough-cookie@~2.5.0: version "2.5.0" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" @@ -11150,7 +11242,16 @@ workerpool@6.2.1: resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.2.1.tgz#46fc150c17d826b86a008e5a4508656777e9c343" integrity sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw== -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== @@ -11231,6 +11332,11 @@ yaml@^2.4.2: resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.4.2.tgz#7a2b30f2243a5fc299e1f14ca58d475ed4bc5362" integrity sha512-B3VqDZ+JAg1nZpaEmWtTXUlBneoGx6CPM9b0TENK6aoSu5t73dItudwdgmi6tHlIZZId4dZ9skcAQ2UbcyAeVA== +yaml@^2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.7.0.tgz#aef9bb617a64c937a9a748803786ad8d3ffe1e98" + integrity sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA== + yargs-parser@20.2.4: version "20.2.4" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.4.tgz#b42890f14566796f85ae8e3a25290d205f154a54" @@ -11315,3 +11421,7 @@ zksync-ethers@^6.9.0: version "6.9.0" resolved "https://registry.yarnpkg.com/zksync-ethers/-/zksync-ethers-6.9.0.tgz#efaff1d59e2cff837eeda84c4ba59fdca4972a91" integrity sha512-2CppwvLHtz689L7E9EhevbFtsqVukKC/lVicwdeUS2yqV46ET4iBR11rYdEfGW2oEo1h6yJuuwIBDFm2SybkIA== + +"zksync-ethers@https://github.com/zksync-sdk/zksync-ethers#sb-use-new-encoding-in-sdk": + version "6.12.1" + resolved "https://github.com/zksync-sdk/zksync-ethers#bc6e3ab201f743fcbb53e0216f3de421bb3a617f" diff --git a/zkstack_cli/Cargo.lock b/zkstack_cli/Cargo.lock index 1ea38d968073..cfc16535db50 100644 --- a/zkstack_cli/Cargo.lock +++ b/zkstack_cli/Cargo.lock @@ -7081,6 +7081,8 @@ dependencies = [ "zksync_consensus_crypto", "zksync_consensus_roles", "zksync_consensus_utils", + "zksync_contracts", + "zksync_eth_client", "zksync_protobuf", "zksync_protobuf_build", "zksync_protobuf_config", @@ -7139,6 +7141,7 @@ dependencies = [ "zksync_config", "zksync_protobuf", "zksync_protobuf_config", + "zksync_system_constants", ] 
[[package]] @@ -7298,6 +7301,34 @@ dependencies = [ "zksync_basic_types", ] +[[package]] +name = "zksync_eth_client" +version = "0.1.0" +dependencies = [ + "async-trait", + "jsonrpsee", + "rlp", + "thiserror", + "tracing", + "vise", + "zksync_config", + "zksync_contracts", + "zksync_eth_signer", + "zksync_types", + "zksync_web3_decl", +] + +[[package]] +name = "zksync_eth_signer" +version = "0.1.0" +dependencies = [ + "async-trait", + "rlp", + "thiserror", + "zksync_basic_types", + "zksync_crypto_primitives", +] + [[package]] name = "zksync_mini_merkle_tree" version = "0.1.0" diff --git a/zkstack_cli/Cargo.toml b/zkstack_cli/Cargo.toml index 6d18d1a531d7..f3e0d8a9c9d5 100644 --- a/zkstack_cli/Cargo.toml +++ b/zkstack_cli/Cargo.toml @@ -34,6 +34,7 @@ zksync_system_constants = { path = "../core/lib/constants" } zksync_types = { path = "../core/lib/types" } zksync_web3_decl = { path = "../core/lib/web3_decl" } zksync_eth_client = { path = "../core/lib/eth_client" } +zksync_contracts = { path = "../core/lib/contracts" } zksync_consensus_roles = "=0.7.0" zksync_consensus_crypto = "=0.7.0" zksync_consensus_utils = "=0.7.0" diff --git a/zkstack_cli/crates/common/src/contracts.rs b/zkstack_cli/crates/common/src/contracts.rs index 4cef4467f382..268c1a7ae521 100644 --- a/zkstack_cli/crates/common/src/contracts.rs +++ b/zkstack_cli/crates/common/src/contracts.rs @@ -6,32 +6,23 @@ use crate::cmd::Cmd; pub fn build_l1_contracts(shell: Shell, link_to_code: PathBuf) -> anyhow::Result<()> { let _dir_guard = shell.push_dir(link_to_code.join("contracts/l1-contracts")); + Ok(Cmd::new(cmd!(shell, "yarn build:foundry")).run()?) +} + +pub fn build_l1_da_contracts(shell: Shell, link_to_code: PathBuf) -> anyhow::Result<()> { + let _dir_guard = shell.push_dir(link_to_code.join("contracts/da-contracts")); Ok(Cmd::new(cmd!(shell, "forge build")).run()?) } pub fn build_l2_contracts(shell: Shell, link_to_code: PathBuf) -> anyhow::Result<()> { let _dir_guard = shell.push_dir(link_to_code.join("contracts/l2-contracts")); - Ok(Cmd::new(cmd!( - shell, - "forge build --zksync --zk-enable-eravm-extensions" - )) - .run()?) + Cmd::new(cmd!(shell, "yarn build:foundry")).run()?; + Ok(()) } pub fn build_system_contracts(shell: Shell, link_to_code: PathBuf) -> anyhow::Result<()> { let _dir_guard = shell.push_dir(link_to_code.join("contracts/system-contracts")); // Do not update era-contract's lockfile to avoid dirty submodule Cmd::new(cmd!(shell, "yarn install --frozen-lockfile")).run()?; - Cmd::new(cmd!(shell, "yarn preprocess:system-contracts")).run()?; - Cmd::new(cmd!( - shell, - "forge build --zksync --zk-enable-eravm-extensions" - )) - .run()?; - Cmd::new(cmd!(shell, "yarn preprocess:bootloader")).run()?; - Ok(Cmd::new(cmd!( - shell, - "forge build --zksync --zk-enable-eravm-extensions" - )) - .run()?) + Ok(Cmd::new(cmd!(shell, "yarn build:foundry")).run()?) } diff --git a/zkstack_cli/crates/common/src/forge.rs b/zkstack_cli/crates/common/src/forge.rs index a7cf08a50bc0..1bf0570873d4 100644 --- a/zkstack_cli/crates/common/src/forge.rs +++ b/zkstack_cli/crates/common/src/forge.rs @@ -69,6 +69,17 @@ impl ForgeScript { return Ok(res?); } } + + // TODO: This line is very helpful for debugging purposes, + // maybe it makes sense to make it conditionally displayed. 
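Editorial note on the TODO above, not part of the diff itself: one way to make the new forge-command printout conditional is to gate it behind an environment variable. The sketch below is illustrative only; `ZKSTACK_DEBUG_FORGE` and `maybe_print_forge_command` are hypothetical names, not existing zkstack flags or APIs.

```rust
use std::path::Path;

/// Minimal sketch: print the assembled forge command only when a (hypothetical)
/// debug environment variable is set, instead of printing it unconditionally.
fn maybe_print_forge_command(script_path: &Path, args: &[String]) {
    if std::env::var_os("ZKSTACK_DEBUG_FORGE").is_some() {
        println!(
            "Command: forge script {} --legacy {}",
            script_path.display(),
            args.join(" ")
        );
    }
}
```

If something like this were adopted, the unconditional `println!("Command: {}", command)` added in the hunk that follows could delegate to such a helper.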
+ let command = format!( + "forge script {} --legacy {}", + script_path.to_str().unwrap(), + args_no_resume.join(" ") + ); + + println!("Command: {}", command); + let mut cmd = Cmd::new(cmd!( shell, "forge script {script_path} --legacy {args_no_resume...}" @@ -291,6 +302,8 @@ pub struct ForgeScriptArgs { pub verifier_api_key: Option, #[clap(long)] pub resume: bool, + #[clap(long)] + pub zksync: bool, /// List of additional arguments that can be passed through the CLI. /// /// e.g.: `zkstack init -a --private-key=` @@ -304,6 +317,9 @@ impl ForgeScriptArgs { pub fn build(&mut self) -> Vec { self.add_verify_args(); self.cleanup_contract_args(); + if self.zksync { + self.add_arg(ForgeScriptArg::Zksync); + } self.args .iter() .map(|arg| arg.to_string()) @@ -399,6 +415,10 @@ impl ForgeScriptArgs { .iter() .any(|arg| WALLET_ARGS.contains(&arg.as_ref())) } + + pub fn with_zksync(&mut self) { + self.zksync = true; + } } #[derive(Debug, Clone, ValueEnum, Display, Serialize, Deserialize, Default)] diff --git a/zkstack_cli/crates/config/Cargo.toml b/zkstack_cli/crates/config/Cargo.toml index 5ddf36c2d30b..0926f2522cb2 100644 --- a/zkstack_cli/crates/config/Cargo.toml +++ b/zkstack_cli/crates/config/Cargo.toml @@ -29,3 +29,4 @@ zksync_protobuf_config.workspace = true zksync_protobuf.workspace = true zksync_config.workspace = true zksync_basic_types.workspace = true +zksync_system_constants.workspace = true diff --git a/zkstack_cli/crates/config/src/chain.rs b/zkstack_cli/crates/config/src/chain.rs index edd6199511d7..19f275d78f80 100644 --- a/zkstack_cli/crates/config/src/chain.rs +++ b/zkstack_cli/crates/config/src/chain.rs @@ -7,7 +7,10 @@ use serde::{Deserialize, Serialize, Serializer}; use xshell::Shell; use zkstack_cli_types::{BaseToken, L1BatchCommitmentMode, L1Network, ProverMode, WalletCreation}; use zksync_basic_types::L2ChainId; -use zksync_config::configs::{GatewayChainConfig, GatewayConfig}; +use zksync_config::{ + configs::{gateway::GatewayChainConfig, GatewayConfig}, + DAClientConfig::{Avail, NoDA}, +}; use crate::{ consts::{ @@ -19,7 +22,7 @@ use crate::{ FileConfigWithDefaultName, ReadConfig, ReadConfigWithBasePath, SaveConfig, SaveConfigWithBasePath, ZkStackConfig, }, - ContractsConfig, GeneralConfig, GenesisConfig, SecretsConfig, WalletsConfig, + ContractsConfig, GeneralConfig, GenesisConfig, SecretsConfig, WalletsConfig, GATEWAY_FILE, }; /// Chain configuration file. 
This file is created in the chain @@ -67,6 +70,13 @@ pub struct ChainConfig { pub evm_emulator: bool, } +#[derive(Debug, Clone)] +pub enum DAValidatorType { + Rollup = 0, + NoDA = 1, + Avail = 2, +} + impl Serialize for ChainConfig { fn serialize(&self, serializer: S) -> Result where @@ -101,6 +111,20 @@ impl ChainConfig { } anyhow::bail!("Wallets configs has not been found"); } + + pub fn get_da_validator_type(&self) -> anyhow::Result { + let general = self.get_general_config().expect("General config not found"); + match ( + self.l1_batch_commit_data_generator_mode, + general.da_client_config, + ) { + (L1BatchCommitmentMode::Rollup, _) => Ok(DAValidatorType::Rollup), + (L1BatchCommitmentMode::Validium, None | Some(NoDA)) => Ok(DAValidatorType::NoDA), + (L1BatchCommitmentMode::Validium, Some(Avail(_))) => Ok(DAValidatorType::Avail), + _ => anyhow::bail!("DAValidatorType is not supported"), + } + } + pub fn get_contracts_config(&self) -> anyhow::Result { ContractsConfig::read_with_base_path(self.get_shell(), &self.configs) } @@ -137,6 +161,10 @@ impl ChainConfig { self.configs.join(SECRETS_FILE) } + pub fn path_to_gateway_config(&self) -> PathBuf { + self.configs.join(GATEWAY_FILE) + } + pub fn save_general_config(&self, general_config: &GeneralConfig) -> anyhow::Result<()> { general_config.save_with_base_path(self.get_shell(), &self.configs) } diff --git a/zkstack_cli/crates/config/src/consts.rs b/zkstack_cli/crates/config/src/consts.rs index 6c5dfc8165ce..95c097bf4247 100644 --- a/zkstack_cli/crates/config/src/consts.rs +++ b/zkstack_cli/crates/config/src/consts.rs @@ -29,7 +29,7 @@ pub const ZKSYNC_ERA_GIT_REPO: &str = "https://github.com/matter-labs/zksync-era /// Name of the docker-compose file inside zksync repository pub const DOCKER_COMPOSE_FILE: &str = "docker-compose.yml"; /// Path to the config file with mnemonic for localhost wallets -pub(crate) const CONFIGS_PATH: &str = "etc/env/file_based"; +pub const CONFIGS_PATH: &str = "etc/env/file_based"; /// Path to the docker-compose file for grafana pub const ERA_OBSERVABILITY_COMPOSE_FILE: &str = "era-observability/docker-compose.yml"; /// Path to era observability repository diff --git a/zkstack_cli/crates/config/src/contracts.rs b/zkstack_cli/crates/config/src/contracts.rs index bac17958e3b7..eb88cf3af854 100644 --- a/zkstack_cli/crates/config/src/contracts.rs +++ b/zkstack_cli/crates/config/src/contracts.rs @@ -1,5 +1,6 @@ use ethers::types::{Address, H256}; use serde::{Deserialize, Serialize}; +use zksync_system_constants::{L2_ASSET_ROUTER_ADDRESS, L2_NATIVE_TOKEN_VAULT_ADDRESS}; use crate::{ consts::CONTRACTS_FILE, @@ -38,6 +39,12 @@ impl ContractsConfig { .deployed_addresses .bridges .shared_bridge_proxy_addr; + self.bridges.l1_nullifier_addr = Some( + deploy_l1_output + .deployed_addresses + .bridges + .l1_nullifier_proxy_addr, + ); self.ecosystem_contracts.bridgehub_proxy_addr = deploy_l1_output .deployed_addresses .bridgehub @@ -49,6 +56,26 @@ impl ContractsConfig { self.ecosystem_contracts.transparent_proxy_admin_addr = deploy_l1_output .deployed_addresses .transparent_proxy_admin_addr; + self.ecosystem_contracts.l1_bytecodes_supplier_addr = Some( + deploy_l1_output + .deployed_addresses + .state_transition + .bytecodes_supplier_addr, + ); + self.ecosystem_contracts.stm_deployment_tracker_proxy_addr = Some( + deploy_l1_output + .deployed_addresses + .bridgehub + .ctm_deployment_tracker_proxy_addr, + ); + self.ecosystem_contracts.force_deployments_data = Some( + deploy_l1_output + .contracts_config + .force_deployments_data 
+ .clone(), + ); + self.ecosystem_contracts.expected_rollup_l2_da_validator = + Some(deploy_l1_output.expected_rollup_l2_da_validator_addr); self.l1.default_upgrade_addr = deploy_l1_output .deployed_addresses .state_transition @@ -61,6 +88,8 @@ impl ContractsConfig { self.l1.multicall3_addr = deploy_l1_output.multicall3_addr; self.ecosystem_contracts.validator_timelock_addr = deploy_l1_output.deployed_addresses.validator_timelock_addr; + self.ecosystem_contracts.native_token_vault_addr = + Some(deploy_l1_output.deployed_addresses.native_token_vault_addr); self.l1.verifier_addr = deploy_l1_output .deployed_addresses .state_transition @@ -70,6 +99,21 @@ impl ContractsConfig { self.ecosystem_contracts .diamond_cut_data .clone_from(&deploy_l1_output.contracts_config.diamond_cut_data); + self.l1.rollup_l1_da_validator_addr = Some( + deploy_l1_output + .deployed_addresses + .rollup_l1_da_validator_addr, + ); + self.l1.no_da_validium_l1_validator_addr = Some( + deploy_l1_output + .deployed_addresses + .no_da_validium_l1_validator_addr, + ); + self.l1.avail_l1_da_validator_addr = Some( + deploy_l1_output + .deployed_addresses + .avail_l1_da_validator_addr, + ); self.l1.chain_admin_addr = deploy_l1_output.deployed_addresses.chain_admin; } @@ -77,15 +121,20 @@ impl ContractsConfig { self.l1.diamond_proxy_addr = register_chain_output.diamond_proxy_addr; self.l1.governance_addr = register_chain_output.governance_addr; self.l1.chain_admin_addr = register_chain_output.chain_admin_addr; + self.l1.access_control_restriction_addr = + Some(register_chain_output.access_control_restriction_addr); + self.l1.chain_proxy_admin_addr = Some(register_chain_output.chain_proxy_admin_addr); + self.l2.legacy_shared_bridge_addr = register_chain_output.l2_legacy_shared_bridge_addr; } pub fn set_l2_shared_bridge( &mut self, initialize_bridges_output: &InitializeBridgeOutput, ) -> anyhow::Result<()> { - self.bridges.shared.l2_address = Some(initialize_bridges_output.l2_shared_bridge_proxy); - self.bridges.erc20.l2_address = Some(initialize_bridges_output.l2_shared_bridge_proxy); - self.l2.legacy_shared_bridge_addr = Some(initialize_bridges_output.l2_shared_bridge_proxy); + self.bridges.shared.l2_address = Some(L2_ASSET_ROUTER_ADDRESS); + self.bridges.erc20.l2_address = Some(L2_ASSET_ROUTER_ADDRESS); + self.l2.l2_native_token_vault_proxy_addr = Some(L2_NATIVE_TOKEN_VAULT_ADDRESS); + self.l2.da_validator_addr = Some(initialize_bridges_output.l2_da_validator_address); Ok(()) } @@ -135,27 +184,25 @@ pub struct EcosystemContracts { pub state_transition_proxy_addr: Address, pub transparent_proxy_admin_addr: Address, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub stm_deployment_tracker_proxy_addr: Option
, pub validator_timelock_addr: Address, pub diamond_cut_data: String, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub force_deployments_data: Option, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub native_token_vault_addr: Option
, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub l1_bytecodes_supplier_addr: Option
, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub expected_rollup_l2_da_validator: Option
, + // `Option` to be able to parse configs from pre-gateway protocol version. + #[serde(skip_serializing_if = "Option::is_none")] + pub l1_wrapped_base_token_store: Option
<Address>, } impl ZkStackConfig for EcosystemContracts {} @@ -165,7 +212,6 @@ pub struct BridgesContracts { pub erc20: BridgeContractsDefinition, pub shared: BridgeContractsDefinition, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub l1_nullifier_addr: Option
<Address>, } @@ -185,11 +231,9 @@ pub struct L1Contracts { #[serde(default)] pub chain_admin_addr: Address, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub access_control_restriction_addr: Option
<Address>, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub chain_proxy_admin_addr: Option
<Address>, pub multicall3_addr: Address, @@ -197,23 +241,18 @@ pub struct L1Contracts { pub verifier_addr: Address, pub validator_timelock_addr: Address, pub base_token_addr: Address, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub base_token_asset_id: Option<H256>, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub rollup_l1_da_validator_addr: Option
<Address>, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub avail_l1_da_validator_addr: Option
<Address>, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub no_da_validium_l1_validator_addr: Option
<Address>, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub transaction_filterer_addr: Option
<Address>, } @@ -223,19 +262,15 @@ pub struct L2Contracts { pub testnet_paymaster_addr: Address, pub default_l2_upgrader: Address, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub da_validator_addr: Option
<Address>, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub l2_native_token_vault_proxy_addr: Option
<Address>, + // `Option` to be able to parse configs from previous protocol version + #[serde(skip_serializing_if = "Option::is_none")] + pub legacy_shared_bridge_addr: Option
<Address>, pub consensus_registry: Option
<Address>, pub multicall3: Option
<Address>, - pub legacy_shared_bridge_addr: Option
<Address>, pub timestamp_asserter_addr: Option
<Address>, - // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. - #[serde(skip_serializing_if = "Option::is_none")] - pub predeployed_l2_wrapped_base_token_address: Option
<Address>, } diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/input.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/input.rs index 17b2bac38a3f..47fe66143250 100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/input.rs @@ -6,6 +6,7 @@ use ethers::{ }; use rand::Rng; use serde::{Deserialize, Serialize}; +use zkstack_cli_types::L1Network; use zksync_basic_types::L2ChainId; use crate::{ @@ -111,6 +112,7 @@ pub struct DeployL1Config { pub era_chain_id: L2ChainId, pub owner_address: Address, pub testnet_verifier: bool, + pub support_l2_legacy_shared_bridge_test: bool, pub contracts: ContractsDeployL1Config, pub tokens: TokensDeployL1Config, } @@ -124,11 +126,14 @@ impl DeployL1Config { initial_deployment_config: &InitialDeploymentConfig, era_chain_id: L2ChainId, testnet_verifier: bool, + l1_network: L1Network, + support_l2_legacy_shared_bridge_test: bool, ) -> Self { Self { era_chain_id, testnet_verifier, owner_address: wallets_config.governor.address, + support_l2_legacy_shared_bridge_test, contracts: ContractsDeployL1Config { create2_factory_addr: initial_deployment_config.create2_factory_addr, create2_factory_salt: initial_deployment_config.create2_factory_salt, @@ -162,6 +167,7 @@ impl DeployL1Config { priority_tx_max_gas_limit: initial_deployment_config.priority_tx_max_gas_limit, validator_timelock_execution_delay: initial_deployment_config .validator_timelock_execution_delay, + avail_l1_da_validator_addr: l1_network.avail_l1_da_validator_addr(), }, tokens: TokensDeployL1Config { token_weth_address: initial_deployment_config.token_weth_address, @@ -196,6 +202,8 @@ pub struct ContractsDeployL1Config { pub bootloader_hash: H256, pub default_aa_hash: H256, pub evm_emulator_hash: Option<H256>, + #[serde(skip_serializing_if = "Option::is_none")] + pub avail_l1_da_validator_addr: Option<Address>
, } #[derive(Debug, Deserialize, Serialize, Clone)] diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/output.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/output.rs index 7a922cbdf3c0..a0bca69cafd5 100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/output.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/output.rs @@ -16,26 +16,10 @@ pub struct DeployL1Output { pub era_chain_id: u32, pub l1_chain_id: u32, pub multicall3_addr: Address, - pub owner_addr: Address, + pub owner_address: Address, pub contracts_config: DeployL1ContractsConfigOutput, pub deployed_addresses: DeployL1DeployedAddressesOutput, -} - -impl ZkStackConfig for DeployL1Output {} - -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct DeployL1ContractsConfigOutput { - pub diamond_init_max_l2_gas_per_batch: u64, - pub diamond_init_batch_overhead_l1_gas: u64, - pub diamond_init_max_pubdata_per_batch: u64, - pub diamond_init_minimal_l2_gas_price: u64, - pub diamond_init_priority_tx_max_pubdata: u64, - pub diamond_init_pubdata_pricing_mode: u64, - pub priority_tx_max_gas_limit: u64, - pub recursion_circuits_set_vks_hash: H256, - pub recursion_leaf_level_vk_hash: H256, - pub recursion_node_level_vk_hash: H256, - pub diamond_cut_data: String, + pub expected_rollup_l2_da_validator_addr: Address, } #[derive(Debug, Deserialize, Serialize, Clone)] @@ -45,15 +29,33 @@ pub struct DeployL1DeployedAddressesOutput { pub transparent_proxy_admin_addr: Address, pub validator_timelock_addr: Address, pub chain_admin: Address, + pub access_control_restriction_addr: Address, pub bridgehub: L1BridgehubOutput, pub bridges: L1BridgesOutput, pub state_transition: L1StateTransitionOutput, + pub rollup_l1_da_validator_addr: Address, + pub no_da_validium_l1_validator_addr: Address, + pub avail_l1_da_validator_addr: Address, + pub l1_rollup_da_manager: Address, + pub native_token_vault_addr: Address, +} + +impl ZkStackConfig for DeployL1Output {} + +#[derive(Debug, Deserialize, Serialize, Clone)] +pub struct DeployL1ContractsConfigOutput { + pub diamond_cut_data: String, + pub force_deployments_data: String, } #[derive(Debug, Deserialize, Serialize, Clone)] pub struct L1BridgehubOutput { pub bridgehub_implementation_addr: Address, pub bridgehub_proxy_addr: Address, + pub ctm_deployment_tracker_proxy_addr: Address, + pub ctm_deployment_tracker_implementation_addr: Address, + pub message_root_proxy_addr: Address, + pub message_root_implementation_addr: Address, } #[derive(Debug, Deserialize, Serialize, Clone)] @@ -62,21 +64,24 @@ pub struct L1BridgesOutput { pub erc20_bridge_proxy_addr: Address, pub shared_bridge_implementation_addr: Address, pub shared_bridge_proxy_addr: Address, + pub l1_nullifier_implementation_addr: Address, + pub l1_nullifier_proxy_addr: Address, } #[derive(Debug, Deserialize, Serialize, Clone)] pub struct L1StateTransitionOutput { + pub state_transition_proxy_addr: Address, + pub state_transition_implementation_addr: Address, + pub verifier_addr: Address, pub admin_facet_addr: Address, - pub default_upgrade_addr: Address, - pub diamond_init_addr: Address, - pub diamond_proxy_addr: Address, + pub mailbox_facet_addr: Address, pub executor_facet_addr: Address, - pub genesis_upgrade_addr: Address, pub getters_facet_addr: Address, - pub mailbox_facet_addr: Address, - pub state_transition_implementation_addr: Address, - pub state_transition_proxy_addr: Address, - pub verifier_addr: Address, + pub diamond_init_addr: Address, + pub 
genesis_upgrade_addr: Address, + pub default_upgrade_addr: Address, + pub diamond_proxy_addr: Address, + pub bytecodes_supplier_addr: Address, } #[derive(Debug, Deserialize, Serialize, Clone)] diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/input.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/input.rs index 4143a286da62..afd71cd97757 100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/input.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. use ethers::abi::Address; use serde::{Deserialize, Serialize}; use zkstack_cli_types::ProverMode; @@ -91,7 +90,6 @@ impl DeployGatewayCTMInput { l1_chain_id: U256::from(ecosystem_config.l1_network.chain_id()), testnet_verifier: ecosystem_config.prover_version == ProverMode::NoProofs, - recursion_node_level_vk_hash: H256::zero(), recursion_leaf_level_vk_hash: H256::zero(), recursion_circuits_set_vks_hash: H256::zero(), diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/mod.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/mod.rs index cb22b3529e85..7d1a54844d0c 100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/mod.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/mod.rs @@ -1,3 +1,2 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. pub mod input; pub mod output; diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/output.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/output.rs index ee85d11a5eb6..33661fb6ebef 100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/output.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/output.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. 
use ethers::abi::Address; use serde::{Deserialize, Serialize}; diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/input.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/input.rs index 3836dca9d24c..78ffcd16eaa8 100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/input.rs @@ -1,8 +1,8 @@ use ethers::types::Address; use serde::{Deserialize, Serialize}; -use zksync_basic_types::L2ChainId; +use zksync_basic_types::{L2ChainId, U256}; -use crate::{traits::ZkStackConfig, ChainConfig}; +use crate::{traits::ZkStackConfig, ChainConfig, ContractsConfig}; impl ZkStackConfig for DeployL2ContractsInput {} @@ -16,20 +16,27 @@ pub struct DeployL2ContractsInput { pub bridgehub: Address, pub governance: Address, pub erc20_bridge: Address, + pub da_validator_type: U256, pub consensus_registry_owner: Address, } impl DeployL2ContractsInput { - pub fn new(chain_config: &ChainConfig, era_chain_id: L2ChainId) -> anyhow::Result { + pub fn new( + chain_config: &ChainConfig, + contracts_config: &ContractsConfig, + era_chain_id: L2ChainId, + ) -> anyhow::Result { let contracts = chain_config.get_contracts_config()?; let wallets = chain_config.get_wallets_config()?; + Ok(Self { era_chain_id, chain_id: chain_config.chain_id, l1_shared_bridge: contracts.bridges.shared.l1_address, bridgehub: contracts.ecosystem_contracts.bridgehub_proxy_addr, - governance: wallets.governor.address, + governance: contracts_config.l1.governance_addr, erc20_bridge: contracts.bridges.erc20.l1_address, + da_validator_type: U256::from(chain_config.get_da_validator_type()? as u8), consensus_registry_owner: wallets.governor.address, }) } diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/output.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/output.rs index 7b2b56c81548..e797686561ae 100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/output.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/output.rs @@ -12,8 +12,7 @@ impl ZkStackConfig for TimestampAsserterOutput {} #[derive(Debug, Clone, Serialize, Deserialize)] pub struct InitializeBridgeOutput { - pub l2_shared_bridge_implementation: Address, - pub l2_shared_bridge_proxy: Address, + pub l2_da_validator_address: Address, } #[derive(Debug, Clone, Serialize, Deserialize)] diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/input.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/input.rs index 9bedabf59a2d..41100c55a2ae 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/input.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. use ethers::types::Address; use serde::{Deserialize, Serialize}; use zkstack_cli_types::L1BatchCommitmentMode; diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/mod.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/mod.rs index cb22b3529e85..7d1a54844d0c 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/mod.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/mod.rs @@ -1,3 +1,2 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. 
pub mod input; pub mod output; diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/output.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/output.rs index 57c82effcc47..94b6d25a52bf 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/output.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/output.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. use ethers::types::Address; use serde::{Deserialize, Serialize}; diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/input.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/input.rs index d71f327ede45..8bd300f50581 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/input.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. use ethers::types::{Address, H256}; use serde::{Deserialize, Serialize}; use zksync_basic_types::L2ChainId; @@ -15,6 +14,7 @@ pub struct GatewayEcosystemUpgradeInput { pub testnet_verifier: bool, pub contracts: GatewayUpgradeContractsConfig, pub tokens: GatewayUpgradeTokensConfig, + pub governance_upgrade_timer_initial_delay: u64, } impl ZkStackConfig for GatewayEcosystemUpgradeInput {} @@ -33,6 +33,8 @@ impl GatewayEcosystemUpgradeInput { era_chain_id, testnet_verifier, owner_address: current_contracts_config.l1.governance_addr, + // TODO: for local testing, even 0 is fine - but before prod, we should load it from some configuration. + governance_upgrade_timer_initial_delay: 0, contracts: GatewayUpgradeContractsConfig { create2_factory_addr: initial_deployment_config.create2_factory_addr, create2_factory_salt: initial_deployment_config.create2_factory_salt, diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/mod.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/mod.rs index cb22b3529e85..7d1a54844d0c 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/mod.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/mod.rs @@ -1,3 +1,2 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. pub mod input; pub mod output; diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/output.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/output.rs index 8f030eb47b73..2aab8a1e5422 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/output.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/output.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. use ethers::types::{Address, H256}; use serde::{Deserialize, Serialize}; use zksync_basic_types::web3::Bytes; @@ -19,6 +18,10 @@ pub struct GatewayEcosystemUpgradeOutput { pub contracts_config: GatewayEcosystemUpgradeContractsOutput, pub deployed_addresses: GatewayEcosystemUpgradeDeployedAddresses, + /// List of transactions that were executed during the upgrade. + /// This is added later by the zkstack and not present in the toml file that solidity creates. 
+ #[serde(default)] + pub transactions: Vec, } impl FileConfigWithDefaultName for GatewayEcosystemUpgradeOutput { @@ -45,6 +48,12 @@ pub struct GatewayEcosystemUpgradeContractsOutput { pub recursion_circuits_set_vks_hash: H256, pub recursion_leaf_level_vk_hash: H256, pub recursion_node_level_vk_hash: H256, + + pub new_protocol_version: u64, + pub old_protocol_version: u64, + + pub old_validator_timelock: Address, + pub l1_legacy_shared_bridge: Address, } #[derive(Debug, Deserialize, Serialize, Clone)] diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/input.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/input.rs index 263905d9fb35..6c4fc4d764a5 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/input.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. use ethers::utils::hex; use serde::{Deserialize, Serialize}; use zksync_basic_types::{web3::Bytes, Address}; diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/mod.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/mod.rs index cb22b3529e85..7d1a54844d0c 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/mod.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/mod.rs @@ -1,3 +1,2 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. pub mod input; pub mod output; diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/output.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/output.rs index 7d27725b2825..c201625be28b 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/output.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/output.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. 
use serde::{Deserialize, Serialize}; use zksync_basic_types::{Address, H256}; diff --git a/zkstack_cli/crates/config/src/forge_interface/register_chain/input.rs b/zkstack_cli/crates/config/src/forge_interface/register_chain/input.rs index 1e6a31009506..f44b8c1f50c3 100644 --- a/zkstack_cli/crates/config/src/forge_interface/register_chain/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/register_chain/input.rs @@ -2,38 +2,52 @@ use ethers::types::Address; use rand::Rng; use serde::{Deserialize, Serialize}; use zkstack_cli_types::L1BatchCommitmentMode; -use zksync_basic_types::L2ChainId; +use zksync_basic_types::{L2ChainId, H256}; use crate::{traits::ZkStackConfig, ChainConfig, ContractsConfig}; +#[derive(Debug, Deserialize, Serialize, Clone)] +pub struct RegisterChainL1Config { + contracts_config: Contracts, + deployed_addresses: DeployedAddresses, + chain: ChainL1Config, + owner_address: Address, + governance: Address, + create2_factory_address: Address, + create2_salt: H256, + initialize_legacy_bridge: bool, +} + #[derive(Debug, Deserialize, Serialize, Clone)] struct Bridgehub { bridgehub_proxy_addr: Address, } +#[derive(Debug, Deserialize, Serialize, Clone)] +struct Bridges { + shared_bridge_proxy_addr: Address, + l1_nullifier_proxy_addr: Address, + erc20_bridge_proxy_addr: Address, +} + #[derive(Debug, Deserialize, Serialize, Clone)] struct StateTransition { - state_transition_proxy_addr: Address, + chain_type_manager_proxy_addr: Address, } #[derive(Debug, Deserialize, Serialize, Clone)] struct DeployedAddresses { state_transition: StateTransition, bridgehub: Bridgehub, + bridges: Bridges, validator_timelock_addr: Address, + native_token_vault_addr: Address, } #[derive(Debug, Deserialize, Serialize, Clone)] struct Contracts { diamond_cut_data: String, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct RegisterChainL1Config { - contracts_config: Contracts, - deployed_addresses: DeployedAddresses, - chain: ChainL1Config, - owner_address: Address, + force_deployments_data: String, } #[derive(Debug, Deserialize, Serialize, Clone)] @@ -55,21 +69,39 @@ impl ZkStackConfig for RegisterChainL1Config {} impl RegisterChainL1Config { pub fn new(chain_config: &ChainConfig, contracts: &ContractsConfig) -> anyhow::Result { + let initialize_legacy_bridge = chain_config.legacy_bridge.unwrap_or_default(); let wallets_config = chain_config.get_wallets_config()?; Ok(Self { contracts_config: Contracts { diamond_cut_data: contracts.ecosystem_contracts.diamond_cut_data.clone(), + force_deployments_data: contracts + .ecosystem_contracts + .force_deployments_data + .clone() + .expect("force_deployment_data"), }, deployed_addresses: DeployedAddresses { state_transition: StateTransition { - state_transition_proxy_addr: contracts + chain_type_manager_proxy_addr: contracts .ecosystem_contracts .state_transition_proxy_addr, }, bridgehub: Bridgehub { bridgehub_proxy_addr: contracts.ecosystem_contracts.bridgehub_proxy_addr, }, + bridges: Bridges { + shared_bridge_proxy_addr: contracts.bridges.shared.l1_address, + l1_nullifier_proxy_addr: contracts + .bridges + .l1_nullifier_addr + .expect("l1_nullifier_addr"), + erc20_bridge_proxy_addr: contracts.bridges.erc20.l1_address, + }, validator_timelock_addr: contracts.ecosystem_contracts.validator_timelock_addr, + native_token_vault_addr: contracts + .ecosystem_contracts + .native_token_vault_addr + .expect("native_token_vault_addr"), }, chain: ChainL1Config { chain_chain_id: chain_config.chain_id, @@ -88,6 +120,10 @@ impl RegisterChainL1Config { 
allow_evm_emulator: chain_config.evm_emulator, }, owner_address: wallets_config.governor.address, + governance: contracts.l1.governance_addr, + create2_factory_address: contracts.create2_factory_addr, + create2_salt: H256::random(), + initialize_legacy_bridge, }) } } diff --git a/zkstack_cli/crates/config/src/forge_interface/register_chain/output.rs b/zkstack_cli/crates/config/src/forge_interface/register_chain/output.rs index a3e23f7bae42..951f36aa9fa8 100644 --- a/zkstack_cli/crates/config/src/forge_interface/register_chain/output.rs +++ b/zkstack_cli/crates/config/src/forge_interface/register_chain/output.rs @@ -8,6 +8,9 @@ pub struct RegisterChainOutput { pub diamond_proxy_addr: Address, pub governance_addr: Address, pub chain_admin_addr: Address, + pub l2_legacy_shared_bridge_addr: Option<Address>
, + pub access_control_restriction_addr: Address, + pub chain_proxy_admin_addr: Address, } impl ZkStackConfig for RegisterChainOutput {} diff --git a/zkstack_cli/crates/config/src/forge_interface/script_params.rs b/zkstack_cli/crates/config/src/forge_interface/script_params.rs index eb693c83a54c..b7496540ab18 100644 --- a/zkstack_cli/crates/config/src/forge_interface/script_params.rs +++ b/zkstack_cli/crates/config/src/forge_interface/script_params.rs @@ -39,9 +39,9 @@ pub const DEPLOY_L2_CONTRACTS_SCRIPT_PARAMS: ForgeScriptParams = ForgeScriptPara }; pub const REGISTER_CHAIN_SCRIPT_PARAMS: ForgeScriptParams = ForgeScriptParams { - input: "script-config/register-hyperchain.toml", - output: "script-out/output-register-hyperchain.toml", - script_path: "deploy-scripts/RegisterHyperchain.s.sol", + input: "script-config/register-zk-chain.toml", + output: "script-out/output-register-zk-chain.toml", + script_path: "deploy-scripts/RegisterZKChain.s.sol", }; pub const DEPLOY_ERC20_SCRIPT_PARAMS: ForgeScriptParams = ForgeScriptParams { @@ -74,39 +74,33 @@ pub const ENABLE_EVM_EMULATOR_PARAMS: ForgeScriptParams = ForgeScriptParams { script_path: "deploy-scripts/EnableEvmEmulator.s.sol", }; -// TODO(EVM-927): the following script does not work without gateway contracts. pub const DEPLOY_GATEWAY_CTM: ForgeScriptParams = ForgeScriptParams { input: "script-config/config-deploy-gateway-ctm.toml", output: "script-out/output-deploy-gateway-ctm.toml", script_path: "deploy-scripts/GatewayCTMFromL1.s.sol", }; -// TODO(EVM-927): the following script does not work without gateway contracts. pub const GATEWAY_PREPARATION: ForgeScriptParams = ForgeScriptParams { input: "script-config/gateway-preparation-l1.toml", output: "script-out/output-gateway-preparation-l1.toml", script_path: "deploy-scripts/GatewayPreparation.s.sol", }; -// TODO(EVM-927): the following script does not work without gateway contracts. pub const GATEWAY_GOVERNANCE_TX_PATH1: &str = "contracts/l1-contracts/script-out/gateway-deploy-governance-txs-1.json"; -// TODO(EVM-927): the following script does not work without gateway contracts. pub const GATEWAY_UPGRADE_ECOSYSTEM_PARAMS: ForgeScriptParams = ForgeScriptParams { input: "script-config/gateway-upgrade-ecosystem.toml", output: "script-out/gateway-upgrade-ecosystem.toml", script_path: "deploy-scripts/upgrade/EcosystemUpgrade.s.sol", }; -// TODO(EVM-927): the following script does not work without gateway contracts. pub const GATEWAY_UPGRADE_CHAIN_PARAMS: ForgeScriptParams = ForgeScriptParams { input: "script-config/gateway-upgrade-chain.toml", output: "script-out/gateway-upgrade-chain.toml", script_path: "deploy-scripts/upgrade/ChainUpgrade.s.sol", }; -// TODO(EVM-927): the following script does not work without gateway contracts. 
pub const FINALIZE_UPGRADE_SCRIPT_PARAMS: ForgeScriptParams = ForgeScriptParams { input: "script-config/gateway-finalize-upgrade.toml", output: "script-out/gateway-finalize-upgrade.toml", diff --git a/zkstack_cli/crates/config/src/forge_interface/setup_legacy_bridge/mod.rs b/zkstack_cli/crates/config/src/forge_interface/setup_legacy_bridge/mod.rs index 201cf86b734b..aa0764864606 100644 --- a/zkstack_cli/crates/config/src/forge_interface/setup_legacy_bridge/mod.rs +++ b/zkstack_cli/crates/config/src/forge_interface/setup_legacy_bridge/mod.rs @@ -8,6 +8,8 @@ pub struct SetupLegacyBridgeInput { pub bridgehub: Address, pub diamond_proxy: Address, pub shared_bridge_proxy: Address, + pub l1_nullifier_proxy: Address, + pub l1_native_token_vault: Address, pub transparent_proxy_admin: Address, pub erc20bridge_proxy: Address, pub token_weth_address: Address, diff --git a/zkstack_cli/crates/config/src/gateway.rs b/zkstack_cli/crates/config/src/gateway.rs index 0bdbcdf25475..67b5ad327cc2 100644 --- a/zkstack_cli/crates/config/src/gateway.rs +++ b/zkstack_cli/crates/config/src/gateway.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. use ethers::utils::hex; use zksync_config::configs::{gateway::GatewayChainConfig, GatewayConfig}; diff --git a/zkstack_cli/crates/types/src/l1_network.rs b/zkstack_cli/crates/types/src/l1_network.rs index cc7b47147548..609af7ef3e7c 100644 --- a/zkstack_cli/crates/types/src/l1_network.rs +++ b/zkstack_cli/crates/types/src/l1_network.rs @@ -1,4 +1,7 @@ +use std::str::FromStr; + use clap::ValueEnum; +use ethers::types::Address; use serde::{Deserialize, Serialize}; use strum::EnumIter; @@ -35,4 +38,14 @@ impl L1Network { L1Network::Mainnet => 1, } } + + pub fn avail_l1_da_validator_addr(&self) -> Option<Address>
{ + match self { + L1Network::Localhost => None, + L1Network::Sepolia | L1Network::Holesky => { + Some(Address::from_str("0xd99d6569785547ac72150d0309aeDb30C7871b51").unwrap()) + } + L1Network::Mainnet => None, // TODO: add mainnet address after it is known + } + } } diff --git a/zkstack_cli/crates/zkstack/Cargo.toml b/zkstack_cli/crates/zkstack/Cargo.toml index 96d1dbf25be6..169fe593ba14 100644 --- a/zkstack_cli/crates/zkstack/Cargo.toml +++ b/zkstack_cli/crates/zkstack/Cargo.toml @@ -43,11 +43,13 @@ zksync_consensus_roles.workspace = true zksync_consensus_crypto.workspace = true zksync_protobuf.workspace = true zksync_protobuf_config.workspace = true -prost.workspace = true -reqwest = "0.12.8" zksync_types.workspace = true zksync_web3_decl.workspace = true zksync_system_constants.workspace = true +zksync_eth_client.workspace = true +zksync_contracts.workspace = true +prost.workspace = true +reqwest = "0.12.8" [dev-dependencies] rand.workspace = true @@ -60,3 +62,8 @@ dirs.workspace = true ethers.workspace = true xshell.workspace = true zksync_protobuf_build.workspace = true + +[features] +# Features that allows gateway-chain related actions. +# These should be available for outside users until stabilized. +gateway = [] diff --git a/zkstack_cli/crates/zkstack/completion/_zkstack.zsh b/zkstack_cli/crates/zkstack/completion/_zkstack.zsh index 3b0bed93eb4f..9c42f83f0020 100644 --- a/zkstack_cli/crates/zkstack/completion/_zkstack.zsh +++ b/zkstack_cli/crates/zkstack/completion/_zkstack.zsh @@ -108,6 +108,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -133,13 +134,17 @@ _arguments "${_arguments_options[@]}" : \ '-o+[Enable Grafana]' \ '--observability=[Enable Grafana]' \ '--update-submodules=[]:UPDATE_SUBMODULES:(true false)' \ +'--validium-type=[Type of the Validium network]:VALIDIUM_TYPE:(no-da avail)' \ +'--support-l2-legacy-shared-bridge-test=[]' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-d[]' \ '--dont-drop[]' \ '--ecosystem-only[Initialize ecosystem only and skip chain initialization (chain can be initialized later with \`chain init\` subcommand)]' \ '--dev[Use defaults for all options and flags. 
Suitable for local development]' \ '--no-port-reallocation[Do not reallocate ports]' \ +'--skip-contract-compilation-override[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -269,6 +274,7 @@ _arguments "${_arguments_options[@]}" : \ '--l1-rpc-url=[L1 RPC URL]:L1_RPC_URL:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -289,8 +295,10 @@ _arguments "${_arguments_options[@]}" : \ '--deploy-paymaster=[]' \ '--l1-rpc-url=[L1 RPC URL]:L1_RPC_URL:_default' \ '--update-submodules=[]:UPDATE_SUBMODULES:(true false)' \ +'--validium-type=[Type of the Validium network]:VALIDIUM_TYPE:(no-da avail)' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-d[]' \ '--dont-drop[]' \ '--no-port-reallocation[Do not reallocate ports]' \ @@ -448,6 +456,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -465,6 +474,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -482,23 +492,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ -'-v[Verbose mode]' \ -'--verbose[Verbose mode]' \ -'--ignore-prerequisites[Ignores prerequisites checks]' \ -'-h[Print help (see more with '\''--help'\'')]' \ -'--help[Print help (see more with '\''--help'\'')]' \ -&& ret=0 -;; -(initialize-bridges) -_arguments "${_arguments_options[@]}" : \ -'--verify=[Verify deployed contracts]' \ -'--verifier=[Verifier to use]:VERIFIER:(etherscan sourcify blockscout oklink)' \ -'--verifier-url=[Verifier URL, if using a custom provider]:VERIFIER_URL:_default' \ -'--verifier-api-key=[Verifier API key]:VERIFIER_API_KEY:_default' \ -'*-a+[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ -'*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ -'--chain=[Chain to use]:CHAIN:_default' \ -'--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -516,6 +510,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -533,6 +528,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores 
prerequisites checks]' \ @@ -550,6 +546,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -567,6 +564,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -584,6 +582,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -601,6 +600,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -618,6 +618,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -701,10 +702,6 @@ _arguments "${_arguments_options[@]}" : \ _arguments "${_arguments_options[@]}" : \ && ret=0 ;; -(initialize-bridges) -_arguments "${_arguments_options[@]}" : \ -&& ret=0 -;; (deploy-consensus-registry) _arguments "${_arguments_options[@]}" : \ && ret=0 @@ -1496,6 +1493,7 @@ esac (contracts) _arguments "${_arguments_options[@]}" : \ '--l1-contracts=[Build L1 contracts]' \ +'--l1-da-contracts=[Build L1 DA contracts]' \ '--l2-contracts=[Build L2 contracts]' \ '--system-contracts=[Build system contracts]' \ '--chain=[Chain to use]:CHAIN:_default' \ @@ -2711,10 +2709,6 @@ _arguments "${_arguments_options[@]}" : \ _arguments "${_arguments_options[@]}" : \ && ret=0 ;; -(initialize-bridges) -_arguments "${_arguments_options[@]}" : \ -&& ret=0 -;; (deploy-consensus-registry) _arguments "${_arguments_options[@]}" : \ && ret=0 @@ -3264,7 +3258,6 @@ _zkstack__chain_commands() { 'register-chain:Register a new chain on L1 (executed by L1 governor). This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. Note\: After completion, L2 governor can accept ownership by running \`accept-chain-ownership\`' \ 'deploy-l2-contracts:Deploy all L2 contracts (executed by L1 governor)' \ 'accept-chain-ownership:Accept ownership of L2 chain (executed by L2 governor). 
This command should be run after \`register-chain\` to accept ownership of newly created DiamondProxy contract' \ -'initialize-bridges:Initialize bridges on L2' \ 'deploy-consensus-registry:Deploy L2 consensus registry' \ 'deploy-multicall3:Deploy L2 multicall3' \ 'deploy-timestamp-asserter:Deploy L2 TimestampAsserter' \ @@ -3379,7 +3372,6 @@ _zkstack__chain__help_commands() { 'register-chain:Register a new chain on L1 (executed by L1 governor). This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. Note\: After completion, L2 governor can accept ownership by running \`accept-chain-ownership\`' \ 'deploy-l2-contracts:Deploy all L2 contracts (executed by L1 governor)' \ 'accept-chain-ownership:Accept ownership of L2 chain (executed by L2 governor). This command should be run after \`register-chain\` to accept ownership of newly created DiamondProxy contract' \ -'initialize-bridges:Initialize bridges on L2' \ 'deploy-consensus-registry:Deploy L2 consensus registry' \ 'deploy-multicall3:Deploy L2 multicall3' \ 'deploy-timestamp-asserter:Deploy L2 TimestampAsserter' \ @@ -3476,11 +3468,6 @@ _zkstack__chain__help__init__configs_commands() { local commands; commands=() _describe -t commands 'zkstack chain help init configs commands' commands "$@" } -(( $+functions[_zkstack__chain__help__initialize-bridges_commands] )) || -_zkstack__chain__help__initialize-bridges_commands() { - local commands; commands=() - _describe -t commands 'zkstack chain help initialize-bridges commands' commands "$@" -} (( $+functions[_zkstack__chain__help__register-chain_commands] )) || _zkstack__chain__help__register-chain_commands() { local commands; commands=() @@ -3522,11 +3509,6 @@ _zkstack__chain__init__help__help_commands() { local commands; commands=() _describe -t commands 'zkstack chain init help help commands' commands "$@" } -(( $+functions[_zkstack__chain__initialize-bridges_commands] )) || -_zkstack__chain__initialize-bridges_commands() { - local commands; commands=() - _describe -t commands 'zkstack chain initialize-bridges commands' commands "$@" -} (( $+functions[_zkstack__chain__register-chain_commands] )) || _zkstack__chain__register-chain_commands() { local commands; commands=() @@ -4703,7 +4685,6 @@ _zkstack__help__chain_commands() { 'register-chain:Register a new chain on L1 (executed by L1 governor). This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. Note\: After completion, L2 governor can accept ownership by running \`accept-chain-ownership\`' \ 'deploy-l2-contracts:Deploy all L2 contracts (executed by L1 governor)' \ 'accept-chain-ownership:Accept ownership of L2 chain (executed by L2 governor). 
This command should be run after \`register-chain\` to accept ownership of newly created DiamondProxy contract' \ -'initialize-bridges:Initialize bridges on L2' \ 'deploy-consensus-registry:Deploy L2 consensus registry' \ 'deploy-multicall3:Deploy L2 multicall3' \ 'deploy-timestamp-asserter:Deploy L2 TimestampAsserter' \ @@ -4794,11 +4775,6 @@ _zkstack__help__chain__init__configs_commands() { local commands; commands=() _describe -t commands 'zkstack help chain init configs commands' commands "$@" } -(( $+functions[_zkstack__help__chain__initialize-bridges_commands] )) || -_zkstack__help__chain__initialize-bridges_commands() { - local commands; commands=() - _describe -t commands 'zkstack help chain initialize-bridges commands' commands "$@" -} (( $+functions[_zkstack__help__chain__register-chain_commands] )) || _zkstack__help__chain__register-chain_commands() { local commands; commands=() diff --git a/zkstack_cli/crates/zkstack/completion/zkstack.fish b/zkstack_cli/crates/zkstack/completion/zkstack.fish index b9d4e58f6322..8ba5afc5a49f 100644 --- a/zkstack_cli/crates/zkstack/completion/zkstack.fish +++ b/zkstack_cli/crates/zkstack/completion/zkstack.fish @@ -113,6 +113,7 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_se complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -s h -l help -d 'Print help (see more with \'--help\')' @@ -139,12 +140,18 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_se false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l update-submodules -r -f -a "true\t'' false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l validium-type -d 'Type of the Validium network' -r -f -a "no-da\t'' +avail\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l support-l2-legacy-shared-bridge-test -r -f -a "true\t'' +false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -s d -l dont-drop complete -c zkstack -n 
"__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l ecosystem-only -d 'Initialize ecosystem only and skip chain initialization (chain can be initialized later with `chain init` subcommand)' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l dev -d 'Use defaults for all options and flags. Suitable for local development' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l no-port-reallocation -d 'Do not reallocate ports' +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l skip-contract-compilation-override complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -s h -l help -d 'Print help (see more with \'--help\')' @@ -162,26 +169,25 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_se complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from help" -f -a "change-default-chain" -d 'Change the default chain' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from help" -f -a "setup-observability" -d 'Setup observability for the ecosystem, downloading Grafana dashboards from the era-observability repo' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -l chain -d 'Chain to use' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -s v -l verbose -d 'Verbose mode' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -l ignore-prerequisites -d 'Ignores prerequisites checks' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -s h -l help -d 'Print help' -complete -c zkstack -n 
"__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "create" -d 'Create a new chain, setting the necessary configurations for later initialization' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "build-transactions" -d 'Create unsigned transactions for chain deployment' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "init" -d 'Initialize chain, deploying necessary contracts and performing on-chain operations' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "genesis" -d 'Run server genesis' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "register-chain" -d 'Register a new chain on L1 (executed by L1 governor). This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. Note: After completion, L2 governor can accept ownership by running `accept-chain-ownership`' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-l2-contracts" -d 'Deploy all L2 contracts (executed by L1 governor)' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "accept-chain-ownership" -d 'Accept ownership of L2 chain (executed by L2 governor). 
This command should be run after `register-chain` to accept ownership of newly created DiamondProxy contract' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "initialize-bridges" -d 'Initialize bridges on L2' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-consensus-registry" -d 'Deploy L2 consensus registry' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-multicall3" -d 'Deploy L2 multicall3' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-timestamp-asserter" -d 'Deploy L2 TimestampAsserter' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-upgrader" -d 'Deploy Default Upgrader' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-paymaster" -d 'Deploy paymaster smart contract' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "update-token-multiplier-setter" -d 'Update Token Multiplier Setter address on L1' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter 
enable-evm-emulator help" -f -a "enable-evm-emulator" -d 'Enable EVM emulation on chain (Not supported yet)' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "create" -d 'Create a new chain, setting the necessary configurations for later initialization' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "build-transactions" -d 'Create unsigned transactions for chain deployment' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "init" -d 'Initialize chain, deploying necessary contracts and performing on-chain operations' +complete -c zkstack -n "__fish_zkstack_using_subcommand 
chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "genesis" -d 'Run server genesis' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "register-chain" -d 'Register a new chain on L1 (executed by L1 governor). This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. Note: After completion, L2 governor can accept ownership by running `accept-chain-ownership`' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-l2-contracts" -d 'Deploy all L2 contracts (executed by L1 governor)' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "accept-chain-ownership" -d 'Accept ownership of L2 chain (executed by L2 governor). 
This command should be run after `register-chain` to accept ownership of newly created DiamondProxy contract' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-consensus-registry" -d 'Deploy L2 consensus registry' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-multicall3" -d 'Deploy L2 multicall3' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-timestamp-asserter" -d 'Deploy L2 TimestampAsserter' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-upgrader" -d 'Deploy Default Upgrader' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-paymaster" -d 'Deploy paymaster smart contract' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "update-token-multiplier-setter" -d 'Update Token Multiplier Setter address on L1' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "enable-evm-emulator" -d 'Enable EVM emulation on chain (Not supported yet)' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' complete -c zkstack -n 
"__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l chain-name -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l chain-id -d 'Chain ID' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l prover-mode -d 'Prover options' -r -f -a "no-proofs\t'' @@ -220,6 +226,7 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_s complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -l l1-rpc-url -d 'L1 RPC URL' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -s h -l help -d 'Print help (see more with \'--help\')' @@ -239,8 +246,11 @@ false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l l1-rpc-url -d 'L1 RPC URL' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l update-submodules -r -f -a "true\t'' false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l validium-type -d 'Type of the Validium network' -r -f -a "no-da\t'' +avail\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -s d -l dont-drop complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l no-port-reallocation -d 'Do not reallocate ports' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l dev -d 'Use defaults for all options and flags. 
Suitable for local development' @@ -271,6 +281,7 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_s complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -s h -l help -d 'Print help (see more with \'--help\')' @@ -285,6 +296,7 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_s complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -s h -l help -d 'Print help (see more with \'--help\')' @@ -299,23 +311,10 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_s complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and 
__fish_seen_subcommand_from accept-chain-ownership" -s h -l help -d 'Print help (see more with \'--help\')' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -l verify -d 'Verify deployed contracts' -r -f -a "true\t'' -false\t''" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -l verifier -d 'Verifier to use' -r -f -a "etherscan\t'' -sourcify\t'' -blockscout\t'' -oklink\t''" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -l verifier-url -d 'Verifier URL, if using a custom provider' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -l verifier-api-key -d 'Verifier API key' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -l chain -d 'Chain to use' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -l resume -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -s v -l verbose -d 'Verbose mode' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -l ignore-prerequisites -d 'Ignores prerequisites checks' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -s h -l help -d 'Print help (see more with \'--help\')' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -l verify -d 'Verify deployed contracts' -r -f -a "true\t'' false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -l verifier -d 'Verifier to use' -r -f -a "etherscan\t'' @@ -327,6 +326,7 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_s complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -s h -l help -d 'Print help (see more with \'--help\')' @@ -341,6 +341,7 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and 
__fish_seen_s complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -s h -l help -d 'Print help (see more with \'--help\')' @@ -355,6 +356,7 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_s complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -s h -l help -d 'Print help (see more with \'--help\')' @@ -369,6 +371,7 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_s complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -s h -l help -d 'Print help (see more with \'--help\')' @@ -383,6 +386,7 
@@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_s complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -s h -l help -d 'Print help (see more with \'--help\')' @@ -397,6 +401,7 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_s complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -s h -l help -d 'Print help (see more with \'--help\')' @@ -411,6 +416,7 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_s complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; 
and __fish_seen_subcommand_from enable-evm-emulator" -s h -l help -d 'Print help (see more with \'--help\')' @@ -421,7 +427,6 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_s complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "register-chain" -d 'Register a new chain on L1 (executed by L1 governor). This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. Note: After completion, L2 governor can accept ownership by running `accept-chain-ownership`' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-l2-contracts" -d 'Deploy all L2 contracts (executed by L1 governor)' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "accept-chain-ownership" -d 'Accept ownership of L2 chain (executed by L2 governor). This command should be run after `register-chain` to accept ownership of newly created DiamondProxy contract' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "initialize-bridges" -d 'Initialize bridges on L2' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-consensus-registry" -d 'Deploy L2 consensus registry' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-multicall3" -d 'Deploy L2 multicall3' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-timestamp-asserter" -d 'Deploy L2 TimestampAsserter' @@ -521,6 +526,8 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_sub complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from prover" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l l1-contracts -d 'Build L1 contracts' -r -f -a "true\t'' false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l l1-da-contracts -d 'Build L1 DA contracts' -r -f -a "true\t'' +false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l l2-contracts -d 'Build L2 contracts' -r -f -a "true\t'' false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l system-contracts -d 'Build system contracts' -r -f -a "true\t'' @@ -898,7 +905,6 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_su complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "register-chain" -d 'Register a new chain on L1 (executed by L1 governor). This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. 
Note: After completion, L2 governor can accept ownership by running `accept-chain-ownership`' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-l2-contracts" -d 'Deploy all L2 contracts (executed by L1 governor)' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "accept-chain-ownership" -d 'Accept ownership of L2 chain (executed by L2 governor). This command should be run after `register-chain` to accept ownership of newly created DiamondProxy contract' -complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "initialize-bridges" -d 'Initialize bridges on L2' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-consensus-registry" -d 'Deploy L2 consensus registry' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-multicall3" -d 'Deploy L2 multicall3' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-timestamp-asserter" -d 'Deploy L2 TimestampAsserter' diff --git a/zkstack_cli/crates/zkstack/completion/zkstack.sh b/zkstack_cli/crates/zkstack/completion/zkstack.sh index ae934b0e5d3a..280863339c52 100644 --- a/zkstack_cli/crates/zkstack/completion/zkstack.sh +++ b/zkstack_cli/crates/zkstack/completion/zkstack.sh @@ -96,9 +96,6 @@ _zkstack() { zkstack__chain,init) cmd="zkstack__chain__init" ;; - zkstack__chain,initialize-bridges) - cmd="zkstack__chain__initialize__bridges" - ;; zkstack__chain,register-chain) cmd="zkstack__chain__register__chain" ;; @@ -162,9 +159,6 @@ _zkstack() { zkstack__chain__help,init) cmd="zkstack__chain__help__init" ;; - zkstack__chain__help,initialize-bridges) - cmd="zkstack__chain__help__initialize__bridges" - ;; zkstack__chain__help,register-chain) cmd="zkstack__chain__help__register__chain" ;; @@ -804,9 +798,6 @@ _zkstack() { zkstack__help__chain,init) cmd="zkstack__help__chain__init" ;; - zkstack__help__chain,initialize-bridges) - cmd="zkstack__help__chain__initialize__bridges" - ;; zkstack__help__chain,register-chain) cmd="zkstack__help__chain__register__chain" ;; @@ -1144,7 +1135,7 @@ _zkstack() { return 0 ;; zkstack__chain) - opts="-v -h --verbose --chain --ignore-prerequisites --help create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" + opts="-v -h --verbose --chain --ignore-prerequisites --help create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1162,7 +1153,7 @@ _zkstack() { return 0 ;; zkstack__chain__accept__chain__ownership) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 
]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1204,7 +1195,7 @@ _zkstack() { return 0 ;; zkstack__chain__build__transactions) - opts="-o -a -v -h --out --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --l1-rpc-url --verbose --chain --ignore-prerequisites --help" + opts="-o -a -v -h --out --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --l1-rpc-url --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1335,7 +1326,7 @@ _zkstack() { return 0 ;; zkstack__chain__deploy__consensus__registry) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1377,7 +1368,7 @@ _zkstack() { return 0 ;; zkstack__chain__deploy__l2__contracts) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1419,7 +1410,7 @@ _zkstack() { return 0 ;; zkstack__chain__deploy__multicall3) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1461,7 +1452,7 @@ _zkstack() { return 0 ;; zkstack__chain__deploy__paymaster) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1503,7 +1494,7 @@ _zkstack() { return 0 ;; zkstack__chain__deploy__timestamp__asserter) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1545,7 +1536,7 @@ _zkstack() { return 0 ;; zkstack__chain__deploy__upgrader) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then 
COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1587,7 +1578,7 @@ _zkstack() { return 0 ;; zkstack__chain__enable__evm__emulator) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1755,7 +1746,7 @@ _zkstack() { return 0 ;; zkstack__chain__help) - opts="create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" + opts="create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1992,20 +1983,6 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; - zkstack__chain__help__initialize__bridges) - opts="" - if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then - COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) - return 0 - fi - case "${prev}" in - *) - COMPREPLY=() - ;; - esac - COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) - return 0 - ;; zkstack__chain__help__register__chain) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then @@ -2035,7 +2012,7 @@ _zkstack() { return 0 ;; zkstack__chain__init) - opts="-a -d -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --server-db-url --server-db-name --dont-drop --deploy-paymaster --l1-rpc-url --no-port-reallocation --update-submodules --dev --verbose --chain --ignore-prerequisites --help configs help" + opts="-a -d -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --server-db-url --server-db-name --dont-drop --deploy-paymaster --l1-rpc-url --no-port-reallocation --update-submodules --dev --validium-type --verbose --chain --ignore-prerequisites --help configs help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -2085,6 +2062,10 @@ _zkstack() { COMPREPLY=($(compgen -W "true false" -- "${cur}")) return 0 ;; + --validium-type) + COMPREPLY=($(compgen -W "no-da avail" -- "${cur}")) + return 0 + ;; --chain) COMPREPLY=($(compgen -f "${cur}")) return 0 @@ -2168,50 +2149,8 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; - zkstack__chain__initialize__bridges) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" - if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then - COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) - return 0 - fi - case "${prev}" in - --verify) - COMPREPLY=($(compgen -W "true false" -- "${cur}")) - return 0 - ;; - --verifier) - COMPREPLY=($(compgen -W "etherscan sourcify blockscout oklink" -- "${cur}")) - return 0 - ;; - --verifier-url) - COMPREPLY=($(compgen -f "${cur}")) - return 0 - ;; - --verifier-api-key) - COMPREPLY=($(compgen -f "${cur}")) - 
return 0 - ;; - --additional-args) - COMPREPLY=($(compgen -f "${cur}")) - return 0 - ;; - -a) - COMPREPLY=($(compgen -f "${cur}")) - return 0 - ;; - --chain) - COMPREPLY=($(compgen -f "${cur}")) - return 0 - ;; - *) - COMPREPLY=() - ;; - esac - COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) - return 0 - ;; zkstack__chain__register__chain) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -2253,7 +2192,7 @@ _zkstack() { return 0 ;; zkstack__chain__update__token__multiplier__setter) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -2871,7 +2810,7 @@ _zkstack() { return 0 ;; zkstack__dev__contracts) - opts="-v -h --l1-contracts --l2-contracts --system-contracts --verbose --chain --ignore-prerequisites --help" + opts="-v -h --l1-contracts --l1-da-contracts --l2-contracts --system-contracts --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -2881,6 +2820,10 @@ _zkstack() { COMPREPLY=($(compgen -W "true false" -- "${cur}")) return 0 ;; + --l1-da-contracts) + COMPREPLY=($(compgen -W "true false" -- "${cur}")) + return 0 + ;; --l2-contracts) COMPREPLY=($(compgen -W "true false" -- "${cur}")) return 0 @@ -4907,7 +4850,7 @@ _zkstack() { return 0 ;; zkstack__ecosystem__build__transactions) - opts="-o -a -v -h --sender --l1-rpc-url --out --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-o -a -v -h --sender --l1-rpc-url --out --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -5181,7 +5124,7 @@ _zkstack() { return 0 ;; zkstack__ecosystem__init) - opts="-a -d -o -v -h --deploy-erc20 --deploy-ecosystem --ecosystem-contracts-path --l1-rpc-url --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --deploy-paymaster --server-db-url --server-db-name --dont-drop --ecosystem-only --dev --observability --no-port-reallocation --update-submodules --verbose --chain --ignore-prerequisites --help" + opts="-a -d -o -v -h --deploy-erc20 --deploy-ecosystem --ecosystem-contracts-path --l1-rpc-url --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --deploy-paymaster --server-db-url --server-db-name --dont-drop --ecosystem-only --dev --observability --no-port-reallocation --update-submodules --validium-type --support-l2-legacy-shared-bridge-test --skip-contract-compilation-override --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 
@@ -5251,6 +5194,14 @@ _zkstack() { COMPREPLY=($(compgen -W "true false" -- "${cur}")) return 0 ;; + --validium-type) + COMPREPLY=($(compgen -W "no-da avail" -- "${cur}")) + return 0 + ;; + --support-l2-legacy-shared-bridge-test) + COMPREPLY=($(compgen -W "true false" -- "${cur}")) + return 0 + ;; --chain) COMPREPLY=($(compgen -f "${cur}")) return 0 @@ -5701,7 +5652,7 @@ _zkstack() { return 0 ;; zkstack__help__chain) - opts="create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator" + opts="create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -5924,20 +5875,6 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; - zkstack__help__chain__initialize__bridges) - opts="" - if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then - COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) - return 0 - fi - case "${prev}" in - *) - COMPREPLY=() - ;; - esac - COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) - return 0 - ;; zkstack__help__chain__register__chain) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then diff --git a/zkstack_cli/crates/zkstack/src/accept_ownership.rs b/zkstack_cli/crates/zkstack/src/accept_ownership.rs index cde1a365e8a0..1123df0f2240 100644 --- a/zkstack_cli/crates/zkstack/src/accept_ownership.rs +++ b/zkstack_cli/crates/zkstack/src/accept_ownership.rs @@ -27,6 +27,12 @@ lazy_static! { parse_abi(&[ "function governanceAcceptOwner(address governor, address target) public", "function chainAdminAcceptAdmin(address admin, address target) public", + "function setDAValidatorPair(address chainAdmin, address target, address l1DaValidator, address l2DaValidator) public", + "function makePermanentRollup(address chainAdmin, address target) public", + "function governanceExecuteCalls(bytes calldata callsToExecute, address target) public", + "function adminExecuteUpgrade(bytes memory diamondCut, address adminAddr, address accessControlRestriction, address chainDiamondProxy)", + "function adminScheduleUpgrade(address adminAddr, address accessControlRestriction, uint256 newProtocolVersion, uint256 timestamp)", + "function updateValidator(address adminAddr,address accessControlRestriction,address validatorTimelock,uint256 chainId,address validatorAddress,bool addValidator) public" ]) .unwrap(), ); @@ -92,8 +98,6 @@ pub async fn accept_owner( accept_ownership(shell, governor, forge).await } -// TODO(EVM-927): this function does not work without the Gateway contracts. -#[allow(unused)] #[allow(clippy::too_many_arguments)] pub async fn set_da_validator_pair( shell: &Shell, @@ -106,9 +110,6 @@ pub async fn set_da_validator_pair( forge_args: &ForgeScriptArgs, l1_rpc_url: String, ) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. - anyhow::bail!("Gateway upgrade not supported yet!"); - // resume doesn't properly work here. 
let mut forge_args = forge_args.clone(); forge_args.resume = false; @@ -137,8 +138,6 @@ pub async fn set_da_validator_pair( accept_ownership(shell, governor, forge).await } -// TODO(EVM-927): this function does not work without the Gateway contracts. -#[allow(unused)] #[allow(clippy::too_many_arguments)] pub async fn make_permanent_rollup( shell: &Shell, @@ -149,9 +148,6 @@ pub async fn make_permanent_rollup( forge_args: &ForgeScriptArgs, l1_rpc_url: String, ) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. - anyhow::bail!("Gateway upgrade not supported yet!"); - // resume doesn't properly work here. let mut forge_args = forge_args.clone(); forge_args.resume = false; @@ -175,8 +171,6 @@ pub async fn make_permanent_rollup( accept_ownership(shell, governor, forge).await } -// TODO(EVM-927): this function does not work without the Gateway contracts. -#[allow(unused)] #[allow(clippy::too_many_arguments)] pub async fn governance_execute_calls( shell: &Shell, @@ -186,9 +180,6 @@ pub async fn governance_execute_calls( forge_args: &ForgeScriptArgs, l1_rpc_url: String, ) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. - anyhow::bail!("Gateway upgrade not supported yet!"); - // resume doesn't properly work here. let mut forge_args = forge_args.clone(); forge_args.resume = false; @@ -214,8 +205,6 @@ pub async fn governance_execute_calls( accept_ownership(shell, governor, forge).await } -// TODO(EVM-927): this function does not work without the Gateway contracts. -#[allow(unused)] #[allow(clippy::too_many_arguments)] pub async fn admin_execute_upgrade( shell: &Shell, @@ -226,9 +215,6 @@ pub async fn admin_execute_upgrade( forge_args: &ForgeScriptArgs, l1_rpc_url: String, ) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. - anyhow::bail!("Gateway upgrade not supported yet!"); - // resume doesn't properly work here. let mut forge_args = forge_args.clone(); forge_args.resume = false; @@ -264,8 +250,6 @@ pub async fn admin_execute_upgrade( accept_ownership(shell, governor, forge).await } -// TODO(EVM-927): this function does not work without the Gateway contracts. -#[allow(unused)] #[allow(clippy::too_many_arguments)] pub async fn admin_schedule_upgrade( shell: &Shell, @@ -277,9 +261,6 @@ pub async fn admin_schedule_upgrade( forge_args: &ForgeScriptArgs, l1_rpc_url: String, ) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. - anyhow::bail!("Gateway upgrade not supported yet!"); - // resume doesn't properly work here. let mut forge_args = forge_args.clone(); forge_args.resume = false; @@ -314,8 +295,6 @@ pub async fn admin_schedule_upgrade( accept_ownership(shell, governor, forge).await } -// TODO(EVM-927): this function does not work without the Gateway contracts. -#[allow(unused)] #[allow(clippy::too_many_arguments)] pub async fn admin_update_validator( shell: &Shell, @@ -328,9 +307,6 @@ pub async fn admin_update_validator( forge_args: &ForgeScriptArgs, l1_rpc_url: String, ) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. - anyhow::bail!("Gateway upgrade not supported yet!"); - // resume doesn't properly work here. 
     let mut forge_args = forge_args.clone();
     forge_args.resume = false;
diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/args/init/configs.rs b/zkstack_cli/crates/zkstack/src/commands/chain/args/init/configs.rs
index adfb5ffaf9b7..828ad4ebcd43 100644
--- a/zkstack_cli/crates/zkstack/src/commands/chain/args/init/configs.rs
+++ b/zkstack_cli/crates/zkstack/src/commands/chain/args/init/configs.rs
@@ -8,7 +8,7 @@ use zkstack_cli_types::L1Network;
 use crate::{
     commands::chain::args::{
         genesis::{GenesisArgs, GenesisArgsFinal},
-        init::InitArgsFinal,
+        init::{da_configs::ValidiumType, InitArgsFinal},
     },
     defaults::LOCAL_RPC_URL,
     messages::{
@@ -28,11 +28,12 @@ pub struct InitConfigsArgs {
     pub no_port_reallocation: bool,
 }
 
-#[derive(Debug, Serialize, Deserialize, Clone)]
+#[derive(Debug, Clone)]
 pub struct InitConfigsArgsFinal {
     pub genesis_args: GenesisArgsFinal,
     pub l1_rpc_url: String,
     pub no_port_reallocation: bool,
+    pub validium_config: Option<ValidiumType>,
 }
 
 impl InitConfigsArgs {
@@ -55,6 +56,7 @@ impl InitConfigsArgs {
             genesis_args: self.genesis_args.fill_values_with_prompt(config),
             l1_rpc_url,
             no_port_reallocation: self.no_port_reallocation,
+            validium_config: Some(ValidiumType::read()),
         }
     }
 }
@@ -65,6 +67,7 @@ impl InitConfigsArgsFinal {
             genesis_args: init_args.genesis_args.clone(),
             l1_rpc_url: init_args.l1_rpc_url.clone(),
             no_port_reallocation: init_args.no_port_reallocation,
+            validium_config: init_args.validium_config.clone(),
         }
     }
 }
diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/args/init/da_configs.rs b/zkstack_cli/crates/zkstack/src/commands/chain/args/init/da_configs.rs
new file mode 100644
index 000000000000..4d0e97e7ef05
--- /dev/null
+++ b/zkstack_cli/crates/zkstack/src/commands/chain/args/init/da_configs.rs
@@ -0,0 +1,146 @@
+use clap::{Parser, ValueEnum};
+use serde::{Deserialize, Serialize};
+use strum::{Display, EnumIter, IntoEnumIterator};
+use url::Url;
+use zkstack_cli_common::{Prompt, PromptSelect};
+use zksync_config::{
+    configs::da_client::avail::{
+        AvailClientConfig, AvailDefaultConfig, AvailGasRelayConfig, AvailSecrets,
+    },
+    AvailConfig,
+};
+
+use crate::{
+    defaults::{AVAIL_BRIDGE_API_URL, AVAIL_RPC_URL},
+    messages::{
+        MSG_AVAIL_API_NODE_URL_PROMPT, MSG_AVAIL_API_TIMEOUT_MS, MSG_AVAIL_APP_ID_PROMPT,
+        MSG_AVAIL_BRIDGE_API_URL_PROMPT, MSG_AVAIL_CLIENT_TYPE_PROMPT,
+        MSG_AVAIL_FINALITY_STATE_PROMPT, MSG_AVAIL_GAS_RELAY_API_KEY_PROMPT,
+        MSG_AVAIL_GAS_RELAY_API_URL_PROMPT, MSG_AVAIL_GAS_RELAY_MAX_RETRIES_PROMPT,
+        MSG_AVAIL_SEED_PHRASE_PROMPT, MSG_INVALID_URL_ERR, MSG_VALIDIUM_TYPE_PROMPT,
+    },
+};
+
+#[derive(Debug, Serialize, Deserialize, Parser, Clone)]
+pub struct ValidiumTypeArgs {
+    #[clap(long, help = "Type of the Validium network")]
+    pub validium_type: Option<ValidiumTypeInternal>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, EnumIter, Display, ValueEnum)]
+pub enum ValidiumTypeInternal {
+    NoDA,
+    Avail,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, EnumIter, Display, ValueEnum)]
+pub enum AvailClientTypeInternal {
+    FullClient,
+    GasRelay,
+}
+
+#[derive(Debug, Clone, PartialEq)]
+pub enum ValidiumType {
+    NoDA,
+    Avail((AvailConfig, AvailSecrets)),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, EnumIter, Display, ValueEnum)]
+pub enum AvailFinalityState {
+    InBlock,
+    Finalized,
+}
+
+impl ValidiumType {
+    pub fn read() -> Self {
+        match PromptSelect::new(MSG_VALIDIUM_TYPE_PROMPT, ValidiumTypeInternal::iter()).ask() {
+            ValidiumTypeInternal::NoDA => ValidiumType::NoDA,
+            ValidiumTypeInternal::Avail => {
+                let avail_client_type = PromptSelect::new(
+                    MSG_AVAIL_CLIENT_TYPE_PROMPT,
+                    AvailClientTypeInternal::iter(),
+                )
+                .ask();
+
+                let client_config =
+                    match avail_client_type {
+                        AvailClientTypeInternal::FullClient => {
+                            AvailClientConfig::FullClient(AvailDefaultConfig {
+                                api_node_url: Prompt::new(MSG_AVAIL_API_NODE_URL_PROMPT)
+                                    .default(AVAIL_RPC_URL.as_str())
+                                    .validate_with(url_validator)
+                                    .ask(),
+                                app_id: Prompt::new(MSG_AVAIL_APP_ID_PROMPT)
+                                    .validate_with(|input: &String| -> Result<(), String> {
+                                        input.parse::<u32>().map(|_| ()).map_err(|_| {
+                                            "Please enter a positive number".to_string()
+                                        })
+                                    })
+                                    .ask(),
+                                finality_state: Some(
+                                    PromptSelect::new(
+                                        MSG_AVAIL_FINALITY_STATE_PROMPT,
+                                        AvailFinalityState::iter(),
+                                    )
+                                    .ask()
+                                    .to_string(),
+                                ),
+                            })
+                        }
+                        AvailClientTypeInternal::GasRelay => {
+                            AvailClientConfig::GasRelay(AvailGasRelayConfig {
+                                gas_relay_api_url: Prompt::new(MSG_AVAIL_GAS_RELAY_API_URL_PROMPT)
+                                    .validate_with(url_validator)
+                                    .ask(),
+                                max_retries: Prompt::new(MSG_AVAIL_GAS_RELAY_MAX_RETRIES_PROMPT)
+                                    .validate_with(|input: &String| -> Result<(), String> {
+                                        input.parse::<usize>().map(|_| ()).map_err(|_| {
+                                            "Please enter a positive number".to_string()
+                                        })
+                                    })
+                                    .ask(),
+                            })
+                        }
+                    };
+
+                let avail_config = AvailConfig {
+                    bridge_api_url: Prompt::new(MSG_AVAIL_BRIDGE_API_URL_PROMPT)
+                        .default(AVAIL_BRIDGE_API_URL.as_str())
+                        .validate_with(url_validator)
+                        .ask(),
+                    timeout_ms: Prompt::new(MSG_AVAIL_API_TIMEOUT_MS)
+                        .validate_with(|input: &String| -> Result<(), String> {
+                            input
+                                .parse::<usize>()
+                                .map(|_| ())
+                                .map_err(|_| "Please enter a positive number".to_string())
+                        })
+                        .ask(),
+                    config: client_config,
+                };
+
+                let avail_secrets = match avail_client_type {
+                    AvailClientTypeInternal::FullClient => AvailSecrets {
+                        seed_phrase: Some(Prompt::new(MSG_AVAIL_SEED_PHRASE_PROMPT).ask()),
+                        gas_relay_api_key: None,
+                    },
+                    AvailClientTypeInternal::GasRelay => AvailSecrets {
+                        seed_phrase: None,
+                        gas_relay_api_key: Some(
+                            Prompt::new(MSG_AVAIL_GAS_RELAY_API_KEY_PROMPT).ask(),
+                        ),
+                    },
+                };
+
+                ValidiumType::Avail((avail_config, avail_secrets))
+            }
+        }
+    }
+}
+
+#[allow(clippy::ptr_arg)]
+fn url_validator(val: &String) -> Result<(), String> {
+    Url::parse(val)
+        .map(|_| ())
+        .map_err(|_| MSG_INVALID_URL_ERR.to_string())
+}
diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/args/init/mod.rs b/zkstack_cli/crates/zkstack/src/commands/chain/args/init/mod.rs
index 23e32306519c..2d8539620ef0 100644
--- a/zkstack_cli/crates/zkstack/src/commands/chain/args/init/mod.rs
+++ b/zkstack_cli/crates/zkstack/src/commands/chain/args/init/mod.rs
@@ -3,10 +3,13 @@ use serde::{Deserialize, Serialize};
 use url::Url;
 use zkstack_cli_common::{forge::ForgeScriptArgs, Prompt};
 use zkstack_cli_config::ChainConfig;
-use zkstack_cli_types::L1Network;
+use zkstack_cli_types::{L1BatchCommitmentMode, L1Network};
 
 use crate::{
-    commands::chain::args::genesis::{GenesisArgs, GenesisArgsFinal},
+    commands::chain::args::{
+        genesis::{GenesisArgs, GenesisArgsFinal},
+        init::da_configs::ValidiumType,
+    },
     defaults::LOCAL_RPC_URL,
     messages::{
         MSG_DEPLOY_PAYMASTER_PROMPT, MSG_DEV_ARG_HELP, MSG_L1_RPC_URL_HELP,
@@ -16,6 +19,7 @@ use crate::{
 };
 
 pub mod configs;
+pub(crate) mod da_configs;
 
 #[derive(Debug, Clone, Serialize, Deserialize, Parser)]
 pub struct InitArgs {
@@ -39,6 +43,8 @@ pub struct InitArgs {
     pub update_submodules: Option<bool>,
     #[clap(long, help = MSG_DEV_ARG_HELP)]
    pub dev: bool,
+    #[clap(flatten)]
+    pub validium_args: da_configs::ValidiumTypeArgs,
 }
 
 impl InitArgs {
@@ -82,23 +88,34 @@ impl InitArgs {
             })
         };
 
+        let validium_config = match config.l1_batch_commit_data_generator_mode {
+            L1BatchCommitmentMode::Validium => match self.validium_args.validium_type {
+                None => Some(ValidiumType::read()),
+                Some(da_configs::ValidiumTypeInternal::NoDA) => Some(ValidiumType::NoDA),
+                Some(da_configs::ValidiumTypeInternal::Avail) => panic!(
+                    "Avail is not supported via CLI args, use interactive mode" // TODO: Add support for configuration via CLI args
+                ),
+            },
+            _ => None,
+        };
+
         InitArgsFinal {
             forge_args: self.forge_args,
             genesis_args: genesis.fill_values_with_prompt(config),
             deploy_paymaster,
             l1_rpc_url,
             no_port_reallocation: self.no_port_reallocation,
-            dev: self.dev,
+            validium_config,
         }
     }
 }
 
-#[derive(Debug, Serialize, Deserialize, Clone)]
+#[derive(Debug, Clone)]
 pub struct InitArgsFinal {
     pub forge_args: ForgeScriptArgs,
     pub genesis_args: GenesisArgsFinal,
     pub deploy_paymaster: bool,
     pub l1_rpc_url: String,
     pub no_port_reallocation: bool,
-    pub dev: bool,
+    pub validium_config: Option<ValidiumType>,
 }
diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/build_transactions.rs b/zkstack_cli/crates/zkstack/src/commands/chain/build_transactions.rs
index 2e25d15b0fab..3bf4db7188f7 100644
--- a/zkstack_cli/crates/zkstack/src/commands/chain/build_transactions.rs
+++ b/zkstack_cli/crates/zkstack/src/commands/chain/build_transactions.rs
@@ -18,13 +18,12 @@ use crate::{
     },
 };
 
-const REGISTER_CHAIN_TXNS_FILE_SRC: &str =
-    "contracts/l1-contracts/broadcast/RegisterHyperchain.s.sol/9/dry-run/run-latest.json";
-const REGISTER_CHAIN_TXNS_FILE_DST: &str = "register-hyperchain-txns.json";
+pub const REGISTER_CHAIN_TXNS_FILE_SRC: &str =
+    "contracts/l1-contracts/broadcast/RegisterZKChain.s.sol/9/dry-run/run-latest.json";
+pub const REGISTER_CHAIN_TXNS_FILE_DST: &str = "register-zk-chain-txns.json";
 
-const SCRIPT_CONFIG_FILE_SRC: &str =
-    "contracts/l1-contracts/script-config/register-hyperchain.toml";
-const SCRIPT_CONFIG_FILE_DST: &str = "register-hyperchain.toml";
+const SCRIPT_CONFIG_FILE_SRC: &str = "contracts/l1-contracts/script-config/register-zk-chain.toml";
+const SCRIPT_CONFIG_FILE_DST: &str = "register-zk-chain.toml";
 
 pub(crate) async fn run(args: BuildTransactionsArgs, shell: &Shell) -> anyhow::Result<()> {
     let config = EcosystemConfig::from_file(shell)?;
diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/convert_to_gateway.rs b/zkstack_cli/crates/zkstack/src/commands/chain/convert_to_gateway.rs
index 0ac534382a26..0b06cd8de3c2 100644
--- a/zkstack_cli/crates/zkstack/src/commands/chain/convert_to_gateway.rs
+++ b/zkstack_cli/crates/zkstack/src/commands/chain/convert_to_gateway.rs
@@ -1,4 +1,3 @@
-/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts.
 use anyhow::Context;
 use ethers::{abi::parse_abi, contract::BaseContract, types::Bytes, utils::hex};
 use lazy_static::lazy_static;
@@ -49,11 +48,7 @@ lazy_static! {
     );
 }
 
-#[allow(unused)]
 pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> {
-    // TODO(EVM-927): this function does not work without the Gateway contracts.
- anyhow::bail!("Gateway upgrade not supported yet!"); - let chain_name = global_config().chain_name.clone(); let ecosystem_config = EcosystemConfig::from_file(shell)?; let chain_config = ecosystem_config @@ -163,7 +158,7 @@ pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { Ok(()) } -async fn calculate_gateway_ctm( +pub async fn calculate_gateway_ctm( shell: &Shell, forge_args: ForgeScriptArgs, config: &EcosystemConfig, @@ -214,7 +209,7 @@ async fn calculate_gateway_ctm( Ok(gateway_config) } -async fn deploy_gateway_ctm( +pub async fn deploy_gateway_ctm( shell: &Shell, forge_args: ForgeScriptArgs, config: &EcosystemConfig, @@ -259,7 +254,7 @@ async fn deploy_gateway_ctm( Ok(()) } -async fn gateway_governance_whitelisting( +pub async fn gateway_governance_whitelisting( shell: &Shell, forge_args: ForgeScriptArgs, config: &EcosystemConfig, @@ -397,7 +392,7 @@ async fn gateway_governance_whitelisting( } #[allow(clippy::too_many_arguments)] -async fn call_script( +pub async fn call_script( shell: &Shell, forge_args: ForgeScriptArgs, data: &Bytes, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/create.rs b/zkstack_cli/crates/zkstack/src/commands/chain/create.rs index fa5ee1a59df7..529d861a2559 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/create.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/create.rs @@ -25,7 +25,7 @@ pub fn run(args: ChainCreateArgs, shell: &Shell) -> anyhow::Result<()> { create(args, &mut ecosystem_config, shell) } -fn create( +pub fn create( args: ChainCreateArgs, ecosystem_config: &mut EcosystemConfig, shell: &Shell, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs index d404b29b5a98..7cf628b1170b 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs @@ -33,7 +33,6 @@ use crate::{ pub enum Deploy2ContractsOption { All, Upgrader, - InitiailizeBridges, ConsensusRegistry, Multicall3, TimestampAsserter, @@ -61,6 +60,7 @@ pub async fn run( &ecosystem_config, &mut contracts, args, + true, ) .await?; } @@ -104,16 +104,6 @@ pub async fn run( ) .await?; } - Deploy2ContractsOption::InitiailizeBridges => { - initialize_bridges( - shell, - &chain_config, - &ecosystem_config, - &mut contracts, - args, - ) - .await? 
- } } contracts.save_with_base_path(shell, &chain_config.configs)?; @@ -131,39 +121,23 @@ async fn build_and_deploy( forge_args: ForgeScriptArgs, signature: Option<&str>, mut update_config: impl FnMut(&Shell, &Path) -> anyhow::Result<()>, + with_broadcast: bool, ) -> anyhow::Result<()> { build_l2_contracts(shell.clone(), ecosystem_config.link_to_code.clone())?; - call_forge(shell, chain_config, ecosystem_config, forge_args, signature).await?; - update_config( - shell, - &DEPLOY_L2_CONTRACTS_SCRIPT_PARAMS.output(&chain_config.link_to_code), - )?; - Ok(()) -} - -pub async fn initialize_bridges( - shell: &Shell, - chain_config: &ChainConfig, - ecosystem_config: &EcosystemConfig, - contracts_config: &mut ContractsConfig, - forge_args: ForgeScriptArgs, -) -> anyhow::Result<()> { - let signature = if let Some(true) = chain_config.legacy_bridge { - Some("runDeployLegacySharedBridge") - } else { - Some("runDeploySharedBridge") - }; - build_and_deploy( + call_forge( shell, chain_config, ecosystem_config, forge_args, signature, - |shell, out| { - contracts_config.set_l2_shared_bridge(&InitializeBridgeOutput::read(shell, out)?) - }, + with_broadcast, ) - .await + .await?; + update_config( + shell, + &DEPLOY_L2_CONTRACTS_SCRIPT_PARAMS.output(&chain_config.link_to_code), + )?; + Ok(()) } pub async fn deploy_upgrader( @@ -182,6 +156,7 @@ pub async fn deploy_upgrader( |shell, out| { contracts_config.set_default_l2_upgrade(&DefaultL2UpgradeOutput::read(shell, out)?) }, + true, ) .await } @@ -202,6 +177,7 @@ pub async fn deploy_consensus_registry( |shell, out| { contracts_config.set_consensus_registry(&ConsensusRegistryOutput::read(shell, out)?) }, + true, ) .await } @@ -220,6 +196,7 @@ pub async fn deploy_multicall3( forge_args, Some("runDeployMulticall3"), |shell, out| contracts_config.set_multicall3(&Multicall3Output::read(shell, out)?), + true, ) .await } @@ -241,6 +218,7 @@ pub async fn deploy_timestamp_asserter( contracts_config .set_timestamp_asserter_addr(&TimestampAsserterOutput::read(shell, out)?) 
}, + true, ) .await } @@ -251,18 +229,14 @@ pub async fn deploy_l2_contracts( ecosystem_config: &EcosystemConfig, contracts_config: &mut ContractsConfig, forge_args: ForgeScriptArgs, + with_broadcast: bool, ) -> anyhow::Result<()> { - let signature = if let Some(true) = chain_config.legacy_bridge { - Some("runWithLegacyBridge") - } else { - None - }; build_and_deploy( shell, chain_config, ecosystem_config, forge_args, - signature, + None, |shell, out| { contracts_config.set_l2_shared_bridge(&InitializeBridgeOutput::read(shell, out)?)?; contracts_config.set_default_l2_upgrade(&DefaultL2UpgradeOutput::read(shell, out)?)?; @@ -272,6 +246,7 @@ pub async fn deploy_l2_contracts( .set_timestamp_asserter_addr(&TimestampAsserterOutput::read(shell, out)?)?; Ok(()) }, + with_broadcast, ) .await } @@ -282,8 +257,13 @@ async fn call_forge( ecosystem_config: &EcosystemConfig, forge_args: ForgeScriptArgs, signature: Option<&str>, + with_broadcast: bool, ) -> anyhow::Result<()> { - let input = DeployL2ContractsInput::new(chain_config, ecosystem_config.era_chain_id)?; + let input = DeployL2ContractsInput::new( + chain_config, + &ecosystem_config.get_contracts_config()?, + ecosystem_config.era_chain_id, + )?; let foundry_contracts_path = chain_config.path_to_l1_foundry(); let secrets = chain_config.get_secrets_config()?; input.save( @@ -304,8 +284,10 @@ async fn call_forge( .l1_rpc_url .expose_str() .to_string(), - ) - .with_broadcast(); + ); + if with_broadcast { + forge = forge.with_broadcast(); + } if let Some(signature) = signature { forge = forge.with_signature(signature); diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/gateway_upgrade.rs b/zkstack_cli/crates/zkstack/src/commands/chain/gateway_upgrade.rs index ed84df9d13c4..f096daef032a 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/gateway_upgrade.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/gateway_upgrade.rs @@ -1,9 +1,11 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. 
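On the `with_broadcast` flag now threaded through `build_and_deploy` and `call_forge`: broadcasting becomes opt-in, so the same forge script invocation can also be run as a dry run that only produces transaction data. A minimal sketch of the pattern, with a stand-in for the forge script builder used above:

// Stand-in builder; only the conditional `.with_broadcast()` chaining is shown.
#[derive(Default, Debug)]
struct ForgeScript {
    broadcast: bool,
    signature: Option<String>,
}

impl ForgeScript {
    fn with_broadcast(mut self) -> Self {
        self.broadcast = true;
        self
    }

    fn with_signature(mut self, sig: &str) -> Self {
        self.signature = Some(sig.to_string());
        self
    }
}

fn configure_script(with_broadcast: bool, signature: Option<&str>) -> ForgeScript {
    let mut forge = ForgeScript::default();
    // Broadcast only when the caller actually wants the transactions sent;
    // otherwise the forge run stays a dry run.
    if with_broadcast {
        forge = forge.with_broadcast();
    }
    if let Some(signature) = signature {
        forge = forge.with_signature(signature);
    }
    forge
}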
use anyhow::Context; use clap::{Parser, ValueEnum}; use ethers::{ - abi::{encode, parse_abi}, + abi::parse_abi, contract::BaseContract, + providers::{Http, Middleware, Provider}, + signers::Signer, + types::{transaction::eip2718::TypedTransaction, Eip1559TransactionRequest}, utils::hex, }; use lazy_static::lazy_static; @@ -16,23 +18,20 @@ use zkstack_cli_common::{ }; use zkstack_cli_config::{ forge_interface::{ - gateway_chain_upgrade::{ - input::GatewayChainUpgradeInput, output::GatewayChainUpgradeOutput, - }, gateway_ecosystem_upgrade::output::GatewayEcosystemUpgradeOutput, - script_params::{GATEWAY_UPGRADE_CHAIN_PARAMS, GATEWAY_UPGRADE_ECOSYSTEM_PARAMS}, + script_params::{ACCEPT_GOVERNANCE_SCRIPT_PARAMS, GATEWAY_UPGRADE_ECOSYSTEM_PARAMS}, }, - traits::{ReadConfig, ReadConfigWithBasePath, SaveConfig, SaveConfigWithBasePath}, + traits::{ReadConfig, ReadConfigWithBasePath, SaveConfigWithBasePath}, ChainConfig, EcosystemConfig, }; use zkstack_cli_types::L1BatchCommitmentMode; -use zksync_basic_types::{H256, U256}; -use zksync_types::{web3::keccak256, Address, L2_NATIVE_TOKEN_VAULT_ADDRESS}; +use zksync_basic_types::U256; +use zksync_types::Address; use crate::{ - accept_ownership::{ - admin_execute_upgrade, admin_schedule_upgrade, admin_update_validator, - set_da_validator_pair, + commands::dev::commands::gateway::{ + check_chain_readiness, fetch_chain_info, get_admin_call_builder, + set_upgrade_timestamp_calldata, DAMode, GatewayUpgradeArgsInner, GatewayUpgradeInfo, }, messages::{MSG_CHAIN_NOT_INITIALIZED, MSG_L1_SECRETS_MUST_BE_PRESENTED}, utils::forge::{fill_forge_private_key, WalletOwner}, @@ -47,9 +46,6 @@ lazy_static! { Debug, Serialize, Deserialize, Clone, Copy, ValueEnum, EnumIter, strum::Display, PartialEq, Eq, )] pub enum GatewayChainUpgradeStage { - // some config paaram - AdaptConfig, - // Does not require admin, still needs to be done to update configs, etc PrepareStage1, @@ -78,6 +74,8 @@ pub struct GatewayUpgradeArgs { pub forge_args: ForgeScriptArgs, chain_upgrade_stage: GatewayChainUpgradeStage, + + l2_wrapped_base_token_addr: Option
, } lazy_static! { @@ -90,11 +88,7 @@ lazy_static! { ); } -#[allow(unused)] pub async fn run(args: GatewayUpgradeArgs, shell: &Shell) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. - anyhow::bail!("Gateway upgrade not supported yet!"); - let ecosystem_config = EcosystemConfig::from_file(shell)?; let chain_name = global_config().chain_name.clone(); @@ -111,15 +105,14 @@ pub async fn run(args: GatewayUpgradeArgs, shell: &Shell) -> anyhow::Result<()> .to_string(); match args.chain_upgrade_stage { - GatewayChainUpgradeStage::AdaptConfig => adapt_config(shell, chain_config).await, GatewayChainUpgradeStage::PrepareStage1 => { - prepare_stage1(shell, args, ecosystem_config, chain_config, l1_url).await + prepare_stage1(shell, ecosystem_config, chain_config, l1_url).await } GatewayChainUpgradeStage::ScheduleStage1 => { - schedule_stage1(shell, args, ecosystem_config, chain_config, l1_url).await + schedule_stage1(shell, ecosystem_config, chain_config, l1_url).await } GatewayChainUpgradeStage::FinalizeStage1 => { - finalize_stage1(shell, args, ecosystem_config, chain_config, l1_url).await + finalize_stage1(shell, ecosystem_config, chain_config, l1_url).await } GatewayChainUpgradeStage::FinalizeStage2 => { finalize_stage2(shell, ecosystem_config, chain_config).await @@ -133,173 +126,130 @@ pub async fn run(args: GatewayUpgradeArgs, shell: &Shell) -> anyhow::Result<()> } } -fn encode_ntv_asset_id(l1_chain_id: U256, addr: Address) -> H256 { - let encoded_data = encode(&[ - ethers::abi::Token::Uint(l1_chain_id), - ethers::abi::Token::Address(L2_NATIVE_TOKEN_VAULT_ADDRESS), - ethers::abi::Token::Address(addr), - ]); - - H256(keccak256(&encoded_data)) -} - -async fn adapt_config(shell: &Shell, chain_config: ChainConfig) -> anyhow::Result<()> { - println!("Adapting config"); - let mut contracts_config = chain_config.get_contracts_config()?; - let genesis_config = chain_config.get_genesis_config()?; - - contracts_config.l2.legacy_shared_bridge_addr = contracts_config.bridges.shared.l2_address; - contracts_config.l1.base_token_asset_id = Some(encode_ntv_asset_id( - genesis_config.l1_chain_id.0.into(), - contracts_config.l1.base_token_addr, - )); - - contracts_config.save_with_base_path(shell, &chain_config.configs)?; - println!("Done"); - - Ok(()) -} - async fn prepare_stage1( shell: &Shell, - args: GatewayUpgradeArgs, ecosystem_config: EcosystemConfig, chain_config: ChainConfig, l1_url: String, ) -> anyhow::Result<()> { - let chain_upgrade_config_path = - GATEWAY_UPGRADE_CHAIN_PARAMS.input(&ecosystem_config.link_to_code); - - let gateway_upgrade_input = GatewayChainUpgradeInput::new(&chain_config); - gateway_upgrade_input.save(shell, chain_upgrade_config_path.clone())?; - - let mut forge = Forge::new(&ecosystem_config.path_to_l1_foundry()) - .script( - &GATEWAY_UPGRADE_CHAIN_PARAMS.script(), - args.forge_args.clone(), - ) - .with_ffi() - .with_rpc_url(l1_url) - .with_slow() - .with_broadcast(); - - forge = fill_forge_private_key( - forge, - Some(&chain_config.get_wallets_config()?.governor), - WalletOwner::Governor, - )?; - - println!("Preparing the chain for the upgrade!"); - - forge.run(shell)?; - - println!("done!"); - - let chain_output = GatewayChainUpgradeOutput::read( - shell, - GATEWAY_UPGRADE_CHAIN_PARAMS.output(&ecosystem_config.link_to_code), - )?; - let gateway_ecosystem_preparation_output = GatewayEcosystemUpgradeOutput::read_with_base_path(shell, ecosystem_config.config)?; // No need to save it, we have enough for now let mut 
contracts_config = chain_config.get_contracts_config()?; + let general_config = chain_config.get_general_config()?; + let genesis_config = chain_config.get_genesis_config()?; - contracts_config - .ecosystem_contracts - .stm_deployment_tracker_proxy_addr = Some( - gateway_ecosystem_preparation_output - .deployed_addresses - .bridgehub - .ctm_deployment_tracker_proxy_addr, - ); - // This is force deployment data for creating new contracts, not really relevant here tbh, - contracts_config.ecosystem_contracts.force_deployments_data = Some(hex::encode( - &gateway_ecosystem_preparation_output - .contracts_config - .force_deployments_data - .0, - )); - contracts_config.ecosystem_contracts.native_token_vault_addr = Some( - gateway_ecosystem_preparation_output - .deployed_addresses - .native_token_vault_addr, + let upgrade_info = GatewayUpgradeInfo::from_gateway_ecosystem_upgrade( + contracts_config.ecosystem_contracts.bridgehub_proxy_addr, + gateway_ecosystem_preparation_output, ); - contracts_config - .ecosystem_contracts - .l1_bytecodes_supplier_addr = Some( - gateway_ecosystem_preparation_output - .deployed_addresses - .l1_bytecodes_supplier_addr, - ); - contracts_config.l1.access_control_restriction_addr = - Some(chain_output.access_control_restriction); - contracts_config.l1.chain_admin_addr = chain_output.chain_admin_addr; - contracts_config.l1.rollup_l1_da_validator_addr = Some( - gateway_ecosystem_preparation_output - .deployed_addresses - .rollup_l1_da_validator_addr, - ); - contracts_config.l1.no_da_validium_l1_validator_addr = Some( - gateway_ecosystem_preparation_output - .deployed_addresses - .validium_l1_da_validator_addr, - ); + let da_mode: DAMode = + if genesis_config.l1_batch_commit_data_generator_mode == L1BatchCommitmentMode::Rollup { + DAMode::PermanentRollup + } else { + DAMode::Validium + }; + + let chain_info = fetch_chain_info( + &upgrade_info, + &GatewayUpgradeArgsInner { + chain_id: chain_config.chain_id.as_u64(), + l1_rpc_url: l1_url, + l2_rpc_url: general_config + .api_config + .context("api config")? + .web3_json_rpc + .http_url, + validator_addr1: chain_config.get_wallets_config()?.operator.address, + validator_addr2: chain_config.get_wallets_config()?.blob_operator.address, + da_mode, + dangerous_no_cross_check: false, + }, + ) + .await?; - let validum = chain_config - .get_genesis_config()? - .l1_batch_commit_data_generator_mode - == L1BatchCommitmentMode::Validium; - - // We do not use chain output because IMHO we should delete it altogether from there - contracts_config.l2.da_validator_addr = if !validum { - Some( - gateway_ecosystem_preparation_output - .contracts_config - .expected_rollup_l2_da_validator, - ) - } else { - Some( - gateway_ecosystem_preparation_output - .contracts_config - .expected_validium_l2_da_validator, - ) - }; - contracts_config.l2.l2_native_token_vault_proxy_addr = Some(L2_NATIVE_TOKEN_VAULT_ADDRESS); - contracts_config.l2.legacy_shared_bridge_addr = contracts_config.bridges.shared.l2_address; + upgrade_info.update_contracts_config(&mut contracts_config, &chain_info, da_mode, true); contracts_config.save_with_base_path(shell, chain_config.configs)?; Ok(()) } -const NEW_PROTOCOL_VERSION: u64 = 0x1b00000000; +async fn call_chain_admin( + l1_url: String, + chain_config: ChainConfig, + data: Vec, +) -> anyhow::Result<()> { + let wallet = chain_config + .get_wallets_config()? 
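The `da_mode` chosen here feeds both the cross-checks in `fetch_chain_info` and the admin calls built later, and the same mapping is repeated in `finalize_stage1` below: rollup chains become `PermanentRollup` (and will also get the `makePermanentRollup` call appended during finalization), everything else is treated as `Validium`. A small sketch of that mapping, with stand-in enums mirroring `L1BatchCommitmentMode` and the `DAMode` type introduced further down in this patch:

// Stand-ins for `L1BatchCommitmentMode` and `DAMode`; only the mapping used by
// `prepare_stage1` / `finalize_stage1` is spelled out.
#[derive(Clone, Copy, PartialEq)]
enum CommitmentMode {
    Rollup,
    Validium,
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum DaMode {
    Validium,
    TemporaryRollup,
    PermanentRollup,
}

fn da_mode_for(mode: CommitmentMode) -> DaMode {
    if mode == CommitmentMode::Rollup {
        // Rollup chains are upgraded as permanent rollups.
        DaMode::PermanentRollup
    } else {
        DaMode::Validium
    }
}

fn is_rollup(mode: DaMode) -> bool {
    // Both rollup flavours count as "rollup" when picking DA validators.
    matches!(mode, DaMode::TemporaryRollup | DaMode::PermanentRollup)
}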
+ .governor + .private_key + .context("gov pk missing")?; + let contracts_config = chain_config.get_contracts_config()?; + + // Initialize provider + let provider = Provider::::try_from(l1_url)?; + + // Initialize wallet + let chain_id = provider.get_chainid().await?.as_u64(); + let wallet = wallet.with_chain_id(chain_id); + + let tx = TypedTransaction::Eip1559(Eip1559TransactionRequest { + to: Some(contracts_config.l1.chain_admin_addr.into()), + // 10m should be always enough + gas: Some(U256::from(10_000_000)), + data: Some(data.into()), + value: Some(U256::zero()), + nonce: Some( + provider + .get_transaction_count(wallet.address(), None) + .await?, + ), + max_fee_per_gas: Some(provider.get_gas_price().await?), + max_priority_fee_per_gas: Some(U256::zero()), + chain_id: Some(chain_id.into()), + ..Default::default() + }); + + let signed_tx = wallet.sign_transaction(&tx).await.unwrap(); + + let tx = provider + .send_raw_transaction(tx.rlp_signed(&signed_tx)) + .await + .unwrap(); + println!("Sent tx with hash: {}", hex::encode(tx.0)); + + let receipt = tx.await?.context("receipt not present")?; + + if receipt.status.unwrap() != 1.into() { + anyhow::bail!("Transaction failed!"); + } + + Ok(()) +} async fn schedule_stage1( shell: &Shell, - args: GatewayUpgradeArgs, ecosystem_config: EcosystemConfig, chain_config: ChainConfig, l1_url: String, ) -> anyhow::Result<()> { + let gateway_ecosystem_preparation_output = + GatewayEcosystemUpgradeOutput::read_with_base_path(shell, ecosystem_config.config)?; + println!("Schedule stage1 of the upgrade!!"); + let calldata = set_upgrade_timestamp_calldata( + gateway_ecosystem_preparation_output + .contracts_config + .new_protocol_version, + // Immediatelly + 0, + ); - admin_schedule_upgrade( - shell, - &ecosystem_config, - &chain_config.get_contracts_config()?, - // For now it is hardcoded both in scripts and here - U256::from(NEW_PROTOCOL_VERSION), - // We only do instant upgrades for now - U256::zero(), - &chain_config.get_wallets_config()?.governor, - &args.forge_args, - l1_url.clone(), - ) - .await?; + call_chain_admin(l1_url, chain_config, calldata).await?; println!("done!"); @@ -308,109 +258,69 @@ async fn schedule_stage1( async fn finalize_stage1( shell: &Shell, - args: GatewayUpgradeArgs, ecosystem_config: EcosystemConfig, chain_config: ChainConfig, l1_url: String, ) -> anyhow::Result<()> { println!("Finalizing stage1 of chain upgrade!"); - let mut contracts_config = chain_config.get_contracts_config()?; + let contracts_config = chain_config.get_contracts_config()?; + let general_config = chain_config.get_general_config()?; + let genesis_config = chain_config.get_genesis_config()?; + + println!("Checking chain readiness..."); + check_chain_readiness( + l1_url.clone(), + general_config + .api_config + .as_ref() + .context("api")? 
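For reference on what `schedule_stage1` now sends: the calldata is a single `ChainAdmin.setUpgradeTimestamp(protocolVersion, timestamp)` call, where timestamp `0` means the upgrade may be executed immediately and the protocol version is in its packed form (the removed `NEW_PROTOCOL_VERSION = 0x1b00000000` is 27 shifted left by 32 bits, i.e. minor 27, patch 0, assuming the usual packed encoding). A minimal encoding sketch mirroring the `set_upgrade_timestamp_calldata` helper this stage calls:

use ethers::{abi::parse_abi, contract::BaseContract, utils::hex};

// Mirrors the helper used by `schedule_stage1`: encode
// `setUpgradeTimestamp(uint256,uint256)` for the ChainAdmin contract.
fn set_upgrade_timestamp_calldata(packed_protocol_version: u64, timestamp: u64) -> Vec<u8> {
    let chain_admin = BaseContract::from(
        parse_abi(&[
            "function setUpgradeTimestamp(uint256 _protocolVersion, uint256 _upgradeTimestamp) external",
        ])
        .unwrap(),
    );

    chain_admin
        .encode("setUpgradeTimestamp", (packed_protocol_version, timestamp))
        .unwrap()
        .to_vec()
}

fn main() {
    // Packed protocol version for minor 27, patch 0, scheduled with no delay.
    let calldata = set_upgrade_timestamp_calldata(27u64 << 32, 0);
    println!("0x{}", hex::encode(calldata));
}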
+ .web3_json_rpc + .http_url + .clone(), + chain_config.chain_id.as_u64(), + ) + .await?; + + println!("The chain is ready!"); + let gateway_ecosystem_preparation_output = GatewayEcosystemUpgradeOutput::read_with_base_path(shell, &ecosystem_config.config)?; - let old_validator_timelock = contracts_config.l1.validator_timelock_addr; - let new_validator_timelock = gateway_ecosystem_preparation_output - .deployed_addresses - .validator_timelock_addr; - - let validators = [ - chain_config.get_wallets_config()?.operator.address, - chain_config.get_wallets_config()?.blob_operator.address, - ]; - - println!("Setting new validators!"); - for val in validators { - admin_update_validator( - shell, - &ecosystem_config, - &chain_config, - old_validator_timelock, - val, - false, - &chain_config.get_wallets_config()?.governor, - &args.forge_args, - l1_url.clone(), - ) - .await?; - - admin_update_validator( - shell, - &ecosystem_config, - &chain_config, - new_validator_timelock, - val, - true, - &chain_config.get_wallets_config()?.governor, - &args.forge_args, - l1_url.clone(), - ) - .await?; - } + let da_mode: DAMode = + if genesis_config.l1_batch_commit_data_generator_mode == L1BatchCommitmentMode::Rollup { + DAMode::PermanentRollup + } else { + DAMode::Validium + }; - println!("Setting new validators done!"); + let upgrade_info = GatewayUpgradeInfo::from_gateway_ecosystem_upgrade( + contracts_config.ecosystem_contracts.bridgehub_proxy_addr, + gateway_ecosystem_preparation_output, + ); + let args = GatewayUpgradeArgsInner { + chain_id: chain_config.chain_id.as_u64(), + l1_rpc_url: l1_url.clone(), + l2_rpc_url: general_config + .api_config + .context("api config")? + .web3_json_rpc + .http_url, + validator_addr1: chain_config.get_wallets_config()?.operator.address, + validator_addr2: chain_config.get_wallets_config()?.blob_operator.address, + da_mode, + dangerous_no_cross_check: false, + }; - contracts_config.l1.validator_timelock_addr = gateway_ecosystem_preparation_output - .deployed_addresses - .validator_timelock_addr; + let chain_info = fetch_chain_info(&upgrade_info, &args).await?; - admin_execute_upgrade( - shell, - &ecosystem_config, - &chain_config.get_contracts_config()?, - &chain_config.get_wallets_config()?.governor, - gateway_ecosystem_preparation_output - .chain_upgrade_diamond_cut - .0, - &args.forge_args, - l1_url.clone(), - ) - .await?; + let admin_calls_finalize = get_admin_call_builder(&upgrade_info, &chain_info, args); - let l1_da_validator_contract = if chain_config - .get_genesis_config()? - .l1_batch_commit_data_generator_mode - == L1BatchCommitmentMode::Rollup - { - ecosystem_config - .get_contracts_config()? - .l1 - .rollup_l1_da_validator_addr - } else { - ecosystem_config - .get_contracts_config()? 
- .l1 - .no_da_validium_l1_validator_addr - } - .context("l1 da validator")?; + admin_calls_finalize.display(); - set_da_validator_pair( - shell, - &ecosystem_config, - contracts_config.l1.chain_admin_addr, - &chain_config.get_wallets_config()?.governor, - contracts_config.l1.diamond_proxy_addr, - l1_da_validator_contract, - contracts_config - .l2 - .da_validator_addr - .context("l2_da_validator_addr")?, - &args.forge_args, - l1_url, - ) - .await?; + let admin_calldata = admin_calls_finalize.compile_full_calldata(); - contracts_config.save_with_base_path(shell, &chain_config.configs)?; + call_chain_admin(l1_url, chain_config, admin_calldata).await?; println!("done!"); @@ -460,7 +370,6 @@ async fn set_weth_for_chain( ]) .unwrap(), ); - let contracts_config = chain_config.get_contracts_config()?; let calldata = contract .encode( "addL2WethToStore", @@ -474,17 +383,15 @@ async fn set_weth_for_chain( .l1 .chain_admin_addr, chain_config.chain_id.as_u64(), - contracts_config - .l2 - .predeployed_l2_wrapped_base_token_address - .expect("No predeployed_l2_wrapped_base_token_address"), + args.l2_wrapped_base_token_addr + .context("l2_wrapped_base_token_addr")?, ), ) .unwrap(); let mut forge = Forge::new(&ecosystem_config.path_to_l1_foundry()) .script( - &GATEWAY_UPGRADE_ECOSYSTEM_PARAMS.script(), + &ACCEPT_GOVERNANCE_SCRIPT_PARAMS.script(), forge_args.clone(), ) .with_ffi() diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/init/configs.rs b/zkstack_cli/crates/zkstack/src/commands/chain/init/configs.rs index 5a27f903a72f..4db4c9927de1 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/init/configs.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/init/configs.rs @@ -6,12 +6,17 @@ use zkstack_cli_config::{ copy_configs, set_l1_rpc_url, traits::SaveConfigWithBasePath, update_from_chain_config, ChainConfig, ContractsConfig, EcosystemConfig, }; +use zksync_config::configs::DataAvailabilitySecrets; use crate::{ commands::{ chain::{ - args::init::configs::{InitConfigsArgs, InitConfigsArgsFinal}, + args::init::{ + configs::{InitConfigsArgs, InitConfigsArgsFinal}, + da_configs::ValidiumType, + }, genesis, + utils::encode_ntv_asset_id, }, portal::update_portal_config, }, @@ -56,6 +61,13 @@ pub async fn init_configs( )?; } + let consensus_keys = generate_consensus_keys(); + + // Initialize secrets config + let mut secrets = chain_config.get_secrets_config()?; + set_l1_rpc_url(&mut secrets, init_args.l1_rpc_url.clone())?; + secrets.consensus = Some(get_consensus_secrets(&consensus_keys)); + let mut general_config = chain_config.get_general_config()?; if general_config.proof_data_handler_config.is_some() && general_config.prover_gateway.is_some() @@ -73,10 +85,22 @@ pub async fn init_configs( .consensus_config .context(MSG_CONSENSUS_CONFIG_MISSING_ERR)?; - let consensus_keys = generate_consensus_keys(); consensus_config.genesis_spec = Some(get_genesis_specs(chain_config, &consensus_keys)); general_config.consensus_config = Some(consensus_config); + if let Some(validium_config) = init_args.validium_config.clone() { + match validium_config { + ValidiumType::NoDA => { + general_config.da_client_config = None; + } + ValidiumType::Avail((avail_config, avail_secrets)) => { + general_config.da_client_config = Some(avail_config.into()); + secrets.data_availability = Some(DataAvailabilitySecrets::Avail(avail_secrets)); + } + } + } + + secrets.save_with_base_path(shell, &chain_config.configs)?; general_config.save_with_base_path(shell, &chain_config.configs)?; // Initialize genesis config @@ -90,14 
+114,12 @@ pub async fn init_configs( contracts_config.l1.governance_addr = Address::zero(); contracts_config.l1.chain_admin_addr = Address::zero(); contracts_config.l1.base_token_addr = chain_config.base_token.address; + contracts_config.l1.base_token_asset_id = Some(encode_ntv_asset_id( + genesis_config.l1_chain_id.0.into(), + contracts_config.l1.base_token_addr, + )); contracts_config.save_with_base_path(shell, &chain_config.configs)?; - // Initialize secrets config - let mut secrets = chain_config.get_secrets_config()?; - set_l1_rpc_url(&mut secrets, init_args.l1_rpc_url.clone())?; - secrets.consensus = Some(get_consensus_secrets(&consensus_keys)); - secrets.save_with_base_path(shell, &chain_config.configs)?; - genesis::database::update_configs(init_args.genesis_args.clone(), shell, chain_config)?; update_portal_config(shell, chain_config) diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/init/mod.rs b/zkstack_cli/crates/zkstack/src/commands/chain/init/mod.rs index 4100fee22d89..f115048d1181 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/init/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/init/mod.rs @@ -3,10 +3,12 @@ use clap::{command, Parser, Subcommand}; use xshell::Shell; use zkstack_cli_common::{git, logger, spinner::Spinner}; use zkstack_cli_config::{traits::SaveConfigWithBasePath, ChainConfig, EcosystemConfig}; -use zkstack_cli_types::BaseToken; +use zkstack_cli_types::{BaseToken, L1BatchCommitmentMode}; +use zksync_config::DAClientConfig; +use zksync_types::Address; use crate::{ - accept_ownership::accept_admin, + accept_ownership::{accept_admin, make_permanent_rollup, set_da_validator_pair}, commands::chain::{ args::init::{ configs::{InitConfigsArgs, InitConfigsArgsFinal}, @@ -23,8 +25,8 @@ use crate::{ enable_evm_emulator::enable_evm_emulator, messages::{ msg_initializing_chain, MSG_ACCEPTING_ADMIN_SPINNER, MSG_CHAIN_INITIALIZED, - MSG_CHAIN_NOT_FOUND_ERR, MSG_DEPLOYING_PAYMASTER, MSG_GENESIS_DATABASE_ERR, - MSG_REGISTERING_CHAIN_SPINNER, MSG_SELECTED_CONFIG, + MSG_CHAIN_NOT_FOUND_ERR, MSG_DA_PAIR_REGISTRATION_SPINNER, MSG_DEPLOYING_PAYMASTER, + MSG_GENESIS_DATABASE_ERR, MSG_REGISTERING_CHAIN_SPINNER, MSG_SELECTED_CONFIG, MSG_UPDATING_TOKEN_MULTIPLIER_SETTER_SPINNER, MSG_WALLET_TOKEN_MULTIPLIER_SETTER_NOT_FOUND, }, }; @@ -123,18 +125,24 @@ pub async fn init( // Set token multiplier setter address (run by L2 Governor) if chain_config.base_token != BaseToken::eth() { let spinner = Spinner::new(MSG_UPDATING_TOKEN_MULTIPLIER_SETTER_SPINNER); + let chain_contracts = chain_config.get_contracts_config()?; set_token_multiplier_setter( shell, ecosystem_config, &chain_config.get_wallets_config()?.governor, - contracts_config.l1.chain_admin_addr, + chain_contracts + .l1 + .access_control_restriction_addr + .context("chain_contracts.l1.access_control_restriction_addr")?, + chain_contracts.l1.diamond_proxy_addr, chain_config .get_wallets_config() .unwrap() .token_multiplier_setter .context(MSG_WALLET_TOKEN_MULTIPLIER_SETTER_NOT_FOUND)? 
.address, - &init_args.forge_args, + chain_contracts.l1.chain_admin_addr, + &init_args.forge_args.clone(), init_args.l1_rpc_url.clone(), ) .await?; @@ -162,10 +170,46 @@ pub async fn init( ecosystem_config, &mut contracts_config, init_args.forge_args.clone(), + true, ) .await?; contracts_config.save_with_base_path(shell, &chain_config.configs)?; + let l1_da_validator_addr = get_l1_da_validator(chain_config); + + let spinner = Spinner::new(MSG_DA_PAIR_REGISTRATION_SPINNER); + set_da_validator_pair( + shell, + ecosystem_config, + contracts_config.l1.chain_admin_addr, + &chain_config.get_wallets_config()?.governor, + contracts_config.l1.diamond_proxy_addr, + l1_da_validator_addr.context("l1_da_validator_addr")?, + contracts_config + .l2 + .da_validator_addr + .context("da_validator_addr")?, + &init_args.forge_args.clone(), + init_args.l1_rpc_url.clone(), + ) + .await?; + spinner.finish(); + + if chain_config.l1_batch_commit_data_generator_mode == L1BatchCommitmentMode::Rollup { + println!("Making permanent rollup!"); + make_permanent_rollup( + shell, + ecosystem_config, + contracts_config.l1.chain_admin_addr, + &chain_config.get_wallets_config()?.governor, + contracts_config.l1.diamond_proxy_addr, + &init_args.forge_args.clone(), + init_args.l1_rpc_url.clone(), + ) + .await?; + println!("Done"); + } + // Setup legacy bridge - shouldn't be used for new chains (run by L1 Governor) if let Some(true) = chain_config.legacy_bridge { setup_legacy_bridge( @@ -200,3 +244,26 @@ pub async fn init( Ok(()) } + +pub(crate) fn get_l1_da_validator(chain_config: &ChainConfig) -> anyhow::Result
{ + let contracts_config = chain_config.get_contracts_config()?; + + let l1_da_validator_contract = match chain_config.l1_batch_commit_data_generator_mode { + L1BatchCommitmentMode::Rollup => contracts_config.l1.rollup_l1_da_validator_addr, + L1BatchCommitmentMode::Validium => { + let general_config = chain_config.get_general_config()?; + if let Some(da_client_config) = general_config.da_client_config { + match da_client_config { + DAClientConfig::Avail(_) => contracts_config.l1.avail_l1_da_validator_addr, + DAClientConfig::NoDA => contracts_config.l1.no_da_validium_l1_validator_addr, + _ => anyhow::bail!("DA client config is not supported"), + } + } else { + contracts_config.l1.no_da_validium_l1_validator_addr + } + } + } + .context("l1 da validator")?; + + Ok(l1_da_validator_contract) +} diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/migrate_from_gateway.rs b/zkstack_cli/crates/zkstack/src/commands/chain/migrate_from_gateway.rs index 573dcf56345f..cf9b9e8e6399 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/migrate_from_gateway.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/migrate_from_gateway.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. use anyhow::Context; use clap::Parser; use ethers::{ @@ -58,11 +57,7 @@ lazy_static! { ); } -#[allow(unused)] pub async fn run(args: MigrateFromGatewayArgs, shell: &Shell) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. - anyhow::bail!("Gateway upgrade not supported yet!"); - let ecosystem_config = EcosystemConfig::from_file(shell)?; let chain_name = global_config().chain_name.clone(); diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/migrate_to_gateway.rs b/zkstack_cli/crates/zkstack/src/commands/chain/migrate_to_gateway.rs index f6bb25613039..c51f6414ce97 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/migrate_to_gateway.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/migrate_to_gateway.rs @@ -67,12 +67,7 @@ lazy_static! { ); } -// TODO(EVM-927): merge gateway contracts -#[allow(unused)] pub async fn run(args: MigrateToGatewayArgs, shell: &Shell) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. - anyhow::bail!("Gateway upgrade not supported yet!"); - let ecosystem_config = EcosystemConfig::from_file(shell)?; let chain_name = global_config().chain_name.clone(); diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs b/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs index 7bbe8acf99f3..d6c1851d0c96 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs @@ -13,20 +13,25 @@ use crate::commands::chain::{ mod accept_chain_ownership; pub(crate) mod args; mod build_transactions; -mod common; -mod convert_to_gateway; -mod create; +pub(crate) mod common; +#[cfg(feature = "gateway")] +pub(crate) mod convert_to_gateway; +pub(crate) mod create; pub mod deploy_l2_contracts; pub mod deploy_paymaster; mod enable_evm_emulator; +#[cfg(feature = "gateway")] mod gateway_upgrade; pub mod genesis; pub mod init; +#[cfg(feature = "gateway")] mod migrate_from_gateway; +#[cfg(feature = "gateway")] mod migrate_to_gateway; pub mod register_chain; mod set_token_multiplier_setter; mod setup_legacy_bridge; +mod utils; #[derive(Subcommand, Debug)] pub enum ChainCommands { @@ -52,9 +57,6 @@ pub enum ChainCommands { /// DiamondProxy contract. 
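The selection in `get_l1_da_validator` above boils down to a small decision table: rollup chains always register the rollup L1 DA validator, while validium chains pick between the Avail validator and the no-DA validator based on the configured DA client, with a missing DA client treated like the explicit no-DA case. A compact restatement with stand-in types, mainly as documentation of that table:

use anyhow::Context;

// Stand-ins for the commitment mode, the DA client config and the relevant
// `ContractsConfig` addresses; the comments name the real fields used above.
#[derive(Clone, Copy)]
enum CommitmentMode {
    Rollup,
    Validium,
}

#[derive(Clone, Copy)]
enum DaClient {
    Avail,
    NoDa,
}

#[derive(Clone, Copy, Default)]
struct L1DaValidators {
    rollup: Option<u64>,         // rollup_l1_da_validator_addr (an address in the real config)
    avail: Option<u64>,          // avail_l1_da_validator_addr
    no_da_validium: Option<u64>, // no_da_validium_l1_validator_addr
}

fn pick_l1_da_validator(
    mode: CommitmentMode,
    da_client: Option<DaClient>,
    addrs: L1DaValidators,
) -> anyhow::Result<u64> {
    let picked = match mode {
        CommitmentMode::Rollup => addrs.rollup,
        CommitmentMode::Validium => match da_client {
            Some(DaClient::Avail) => addrs.avail,
            // No DA client configured behaves like the explicit no-DA client;
            // the real code bails on any other DA client kind.
            Some(DaClient::NoDa) | None => addrs.no_da_validium,
        },
    };
    picked.context("l1 da validator")
}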
#[command(alias = "accept-ownership")] AcceptChainOwnership(ForgeScriptArgs), - /// Initialize bridges on L2 - #[command(alias = "bridge")] - InitializeBridges(ForgeScriptArgs), /// Deploy L2 consensus registry #[command(alias = "consensus")] DeployConsensusRegistry(ForgeScriptArgs), @@ -72,6 +74,18 @@ pub enum ChainCommands { DeployPaymaster(ForgeScriptArgs), /// Update Token Multiplier Setter address on L1 UpdateTokenMultiplierSetter(ForgeScriptArgs), + /// Prepare chain to be an eligible gateway + #[cfg(feature = "gateway")] + ConvertToGateway(ForgeScriptArgs), + /// Migrate chain to gateway + #[cfg(feature = "gateway")] + MigrateToGateway(migrate_to_gateway::MigrateToGatewayArgs), + /// Migrate chain from gateway + #[cfg(feature = "gateway")] + MigrateFromGateway(migrate_from_gateway::MigrateFromGatewayArgs), + /// Upgrade to the protocol version that supports Gateway + #[cfg(feature = "gateway")] + GatewayUpgrade(gateway_upgrade::GatewayUpgradeArgs), /// Enable EVM emulation on chain (Not supported yet) EnableEvmEmulator(ForgeScriptArgs), } @@ -99,13 +113,18 @@ pub(crate) async fn run(shell: &Shell, args: ChainCommands) -> anyhow::Result<() ChainCommands::DeployUpgrader(args) => { deploy_l2_contracts::run(args, shell, Deploy2ContractsOption::Upgrader).await } - ChainCommands::InitializeBridges(args) => { - deploy_l2_contracts::run(args, shell, Deploy2ContractsOption::InitiailizeBridges).await - } ChainCommands::DeployPaymaster(args) => deploy_paymaster::run(args, shell).await, ChainCommands::UpdateTokenMultiplierSetter(args) => { set_token_multiplier_setter::run(args, shell).await } + #[cfg(feature = "gateway")] + ChainCommands::ConvertToGateway(args) => convert_to_gateway::run(args, shell).await, + #[cfg(feature = "gateway")] + ChainCommands::MigrateToGateway(args) => migrate_to_gateway::run(args, shell).await, + #[cfg(feature = "gateway")] + ChainCommands::MigrateFromGateway(args) => migrate_from_gateway::run(args, shell).await, + #[cfg(feature = "gateway")] + ChainCommands::GatewayUpgrade(args) => gateway_upgrade::run(args, shell).await, ChainCommands::EnableEvmEmulator(args) => enable_evm_emulator::run(args, shell).await, } } diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/set_token_multiplier_setter.rs b/zkstack_cli/crates/zkstack/src/commands/chain/set_token_multiplier_setter.rs index 69a823f8f852..e1a57dcd0f00 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/set_token_multiplier_setter.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/set_token_multiplier_setter.rs @@ -25,7 +25,7 @@ use crate::{ lazy_static! 
{ static ref SET_TOKEN_MULTIPLIER_SETTER: BaseContract = BaseContract::from( parse_abi(&[ - "function chainSetTokenMultiplierSetter(address chainAdmin, address target) public" + "function chainSetTokenMultiplierSetter(address chainAdmin, address accessControlRestriction, address diamondProxyAddress, address setter) public" ]) .unwrap(), ); @@ -56,8 +56,13 @@ pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { shell, &ecosystem_config, &chain_config.get_wallets_config()?.governor, - contracts_config.l1.chain_admin_addr, + contracts_config + .l1 + .access_control_restriction_addr + .context("access_control_restriction_addr")?, + contracts_config.l1.diamond_proxy_addr, token_multiplier_setter_address, + contracts_config.l1.chain_admin_addr, &args.clone(), l1_url, ) @@ -72,12 +77,15 @@ pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { Ok(()) } +#[allow(clippy::too_many_arguments)] pub async fn set_token_multiplier_setter( shell: &Shell, ecosystem_config: &EcosystemConfig, governor: &Wallet, - chain_admin_address: Address, - target_address: Address, + access_control_restriction_address: Address, + diamond_proxy_address: Address, + new_setter_address: Address, + chain_admin_addr: Address, forge_args: &ForgeScriptArgs, l1_rpc_url: String, ) -> anyhow::Result<()> { @@ -90,7 +98,12 @@ pub async fn set_token_multiplier_setter( let calldata = SET_TOKEN_MULTIPLIER_SETTER .encode( "chainSetTokenMultiplierSetter", - (chain_admin_address, target_address), + ( + chain_admin_addr, + access_control_restriction_address, + diamond_proxy_address, + new_setter_address, + ), ) .unwrap(); let foundry_contracts_path = ecosystem_config.path_to_l1_foundry(); diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/setup_legacy_bridge.rs b/zkstack_cli/crates/zkstack/src/commands/chain/setup_legacy_bridge.rs index 631dffffac41..24ef9d3c16d9 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/setup_legacy_bridge.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/setup_legacy_bridge.rs @@ -31,6 +31,14 @@ pub async fn setup_legacy_bridge( transparent_proxy_admin: contracts_config .ecosystem_contracts .transparent_proxy_admin_addr, + l1_nullifier_proxy: contracts_config + .bridges + .l1_nullifier_addr + .context("`l1_nullifier` missing")?, + l1_native_token_vault: contracts_config + .ecosystem_contracts + .native_token_vault_addr + .context("`native_token_vault` missing")?, erc20bridge_proxy: contracts_config.bridges.erc20.l1_address, token_weth_address: Default::default(), chain_id: chain_config.chain_id, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/utils.rs b/zkstack_cli/crates/zkstack/src/commands/chain/utils.rs new file mode 100644 index 000000000000..830be06a7bb3 --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/commands/chain/utils.rs @@ -0,0 +1,12 @@ +use ethers::abi::encode; +use zksync_types::{web3::keccak256, Address, H256, L2_NATIVE_TOKEN_VAULT_ADDRESS, U256}; + +pub fn encode_ntv_asset_id(l1_chain_id: U256, addr: Address) -> H256 { + let encoded_data = encode(&[ + ethers::abi::Token::Uint(l1_chain_id), + ethers::abi::Token::Address(L2_NATIVE_TOKEN_VAULT_ADDRESS), + ethers::abi::Token::Address(addr), + ]); + + H256(keccak256(&encoded_data)) +} diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/contracts.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/contracts.rs index 9a9aeeb30305..a3150bc96235 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/contracts.rs +++ 
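Since `encode_ntv_asset_id` is now used both at chain init time and during the gateway upgrade, a standalone worked example may help: the base-token asset id is `keccak256(abi.encode(l1ChainId, L2_NATIVE_TOKEN_VAULT_ADDRESS, baseTokenAddress))`. The sketch below depends on ethers only; the hard-coded vault address is an assumption standing in for the `L2_NATIVE_TOKEN_VAULT_ADDRESS` constant that the real helper takes from `zksync_types`.

use ethers::{
    abi::{encode, Token},
    types::{Address, H256, U256},
    utils::keccak256,
};

// Standalone restatement of `encode_ntv_asset_id`.
fn ntv_asset_id(l1_chain_id: U256, base_token: Address) -> H256 {
    // Assumed value of L2_NATIVE_TOKEN_VAULT_ADDRESS (0x...010004).
    let l2_native_token_vault = Address::from_low_u64_be(0x10004);

    let encoded = encode(&[
        Token::Uint(l1_chain_id),
        Token::Address(l2_native_token_vault),
        Token::Address(base_token),
    ]);
    H256(keccak256(encoded))
}

fn main() {
    // Example: an ETH-based chain, using the conventional 0x...01 placeholder
    // as the base token address and 9 as an illustrative L1 chain id.
    let eth = Address::from_low_u64_be(1);
    println!("{:#x}", ntv_asset_id(U256::from(9u64), eth));
}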
b/zkstack_cli/crates/zkstack/src/commands/dev/commands/contracts.rs @@ -3,7 +3,9 @@ use std::path::PathBuf; use clap::Parser; use xshell::Shell; use zkstack_cli_common::{ - contracts::{build_l1_contracts, build_l2_contracts, build_system_contracts}, + contracts::{ + build_l1_contracts, build_l1_da_contracts, build_l2_contracts, build_system_contracts, + }, logger, spinner::Spinner, }; @@ -11,8 +13,9 @@ use zkstack_cli_config::EcosystemConfig; use crate::commands::dev::messages::{ MSG_BUILDING_CONTRACTS, MSG_BUILDING_CONTRACTS_SUCCESS, MSG_BUILDING_L1_CONTRACTS_SPINNER, - MSG_BUILDING_L2_CONTRACTS_SPINNER, MSG_BUILDING_SYSTEM_CONTRACTS_SPINNER, - MSG_BUILD_L1_CONTRACTS_HELP, MSG_BUILD_L2_CONTRACTS_HELP, MSG_BUILD_SYSTEM_CONTRACTS_HELP, + MSG_BUILDING_L1_DA_CONTRACTS_SPINNER, MSG_BUILDING_L2_CONTRACTS_SPINNER, + MSG_BUILDING_SYSTEM_CONTRACTS_SPINNER, MSG_BUILD_L1_CONTRACTS_HELP, + MSG_BUILD_L1_DA_CONTRACTS_HELP, MSG_BUILD_L2_CONTRACTS_HELP, MSG_BUILD_SYSTEM_CONTRACTS_HELP, MSG_NOTHING_TO_BUILD_MSG, }; @@ -20,6 +23,8 @@ use crate::commands::dev::messages::{ pub struct ContractsArgs { #[clap(long, alias = "l1", help = MSG_BUILD_L1_CONTRACTS_HELP, default_missing_value = "true", num_args = 0..=1)] pub l1_contracts: Option, + #[clap(long, alias = "l1-da", help = MSG_BUILD_L1_DA_CONTRACTS_HELP, default_missing_value = "true", num_args = 0..=1)] + pub l1_da_contracts: Option, #[clap(long, alias = "l2", help = MSG_BUILD_L2_CONTRACTS_HELP, default_missing_value = "true", num_args = 0..=1)] pub l2_contracts: Option, #[clap(long, alias = "sc", help = MSG_BUILD_SYSTEM_CONTRACTS_HELP, default_missing_value = "true", num_args = 0..=1)] @@ -31,9 +36,11 @@ impl ContractsArgs { if self.l1_contracts.is_none() && self.l2_contracts.is_none() && self.system_contracts.is_none() + && self.l1_da_contracts.is_none() { return vec![ ContractType::L1, + ContractType::L1DA, ContractType::L2, ContractType::SystemContracts, ]; @@ -43,6 +50,9 @@ impl ContractsArgs { if self.l1_contracts.unwrap_or(false) { contracts.push(ContractType::L1); } + if self.l1_da_contracts.unwrap_or(false) { + contracts.push(ContractType::L1DA); + } if self.l2_contracts.unwrap_or(false) { contracts.push(ContractType::L2); } @@ -56,6 +66,7 @@ impl ContractsArgs { #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum ContractType { L1, + L1DA, L2, SystemContracts, } @@ -74,6 +85,11 @@ impl ContractBuilder { msg: MSG_BUILDING_L1_CONTRACTS_SPINNER.to_string(), link_to_code: ecosystem.link_to_code.clone(), }, + ContractType::L1DA => Self { + cmd: Box::new(build_l1_da_contracts), + msg: MSG_BUILDING_L1_DA_CONTRACTS_SPINNER.to_string(), + link_to_code: ecosystem.link_to_code.clone(), + }, ContractType::L2 => Self { cmd: Box::new(build_l2_contracts), msg: MSG_BUILDING_L2_CONTRACTS_SPINNER.to_string(), diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/gateway.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/gateway.rs new file mode 100644 index 000000000000..ea8f96de6bcd --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/gateway.rs @@ -0,0 +1,815 @@ +use std::{num::NonZeroUsize, str::FromStr, sync::Arc}; + +use anyhow::Context; +use clap::{Parser, ValueEnum}; +use ethers::{ + abi::{encode, parse_abi, Token}, + contract::{abigen, BaseContract}, + providers::{Http, Middleware, Provider}, + utils::hex, +}; +use serde::{Deserialize, Serialize}; +use strum::EnumIter; +use xshell::Shell; +use zkstack_cli_config::{ + forge_interface::gateway_ecosystem_upgrade::output::GatewayEcosystemUpgradeOutput, + 
traits::{ReadConfig, ZkStackConfig}, + ContractsConfig, +}; +use zksync_contracts::{chain_admin_contract, hyperchain_contract, DIAMOND_CUT}; +use zksync_types::{ + ethabi, + url::SensitiveUrl, + web3::{keccak256, Bytes}, + Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, H256, + L2_NATIVE_TOKEN_VAULT_ADDRESS, U256, +}; +use zksync_web3_decl::{ + client::{Client, DynClient, L2}, + namespaces::{UnstableNamespaceClient, ZksNamespaceClient}, +}; + +/// To support both functionality of assignment inside local tests +/// and to print out the changes to the user the following function is used. +#[macro_export] +macro_rules! assign_or_print { + ($statement:expr, $value:expr, $should_assign:expr) => { + if $should_assign { + $statement = $value; + } else { + println!("{} = {:#?}", stringify!($statement), $value); + } + }; +} + +#[macro_export] +macro_rules! amend_config_pre_upgrade { + () => { + assign_or_print!() + }; +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub(crate) struct GatewayUpgradeInfo { + // Information about pre-upgrade contracts. + l1_chain_id: u32, + bridgehub_addr: Address, + old_validator_timelock: Address, + l1_legacy_shared_bridge: Address, + + // Information about the post-upgrade contracts. + ctm_deployment_tracker_proxy_addr: Address, + native_token_vault_addr: Address, + l1_bytecodes_supplier_addr: Address, + rollup_l1_da_validator_addr: Address, + no_da_validium_l1_validator_addr: Address, + expected_rollup_l2_da_validator: Address, + expected_validium_l2_da_validator: Address, + new_validator_timelock: Address, + + l1_wrapped_base_token_store: Address, + chain_upgrade_diamond_cut: Bytes, + + new_protocol_version: u64, + old_protocol_version: u64, +} + +#[derive(Debug, Default)] +pub struct FetchedChainInfo { + l2_legacy_shared_bridge_addr: Address, + hyperchain_addr: Address, + base_token_addr: Address, +} + +// Bridgehub ABI +abigen!( + BridgehubAbi, + r"[ + function getHyperchain(uint256)(address) +]" +); + +// L1SharedBridgeLegacyStore ABI +abigen!( + L1SharedBridgeLegacyAbi, + r"[ + function l2BridgeAddress(uint256 _chainId)(address) +]" +); + +// L2WrappedBaseTokenStore ABI +abigen!( + L2WrappedBaseTokenStoreAbi, + r"[ + function l2WBaseTokenAddress(uint256 _chainId)(address) +]" +); + +// ZKChain ABI +abigen!( + ZKChainAbi, + r"[ + function getPubdataPricingMode()(uint256) + function getBaseToken()(address) + function getTotalBatchesCommitted() external view returns (uint256) + function getTotalBatchesVerified() external view returns (uint256) +]" +); + +// ZKChain ABI +abigen!( + ValidatorTimelockAbi, + r"[ + function validators(uint256 _chainId, address _validator)(bool) +]" +); + +async fn verify_next_batch_new_version( + batch_number: u32, + main_node_client: &DynClient, +) -> anyhow::Result<()> { + let (_, right_bound) = main_node_client + .get_l2_block_range(L1BatchNumber(batch_number)) + .await? + .context("Range must be present for a batch")?; + + let next_l2_block = right_bound + 1; + + let block_details = main_node_client + .get_block_details(L2BlockNumber(next_l2_block.as_u32())) + .await? 
+ .with_context(|| format!("No L2 block is present after the batch {}", batch_number))?; + + let protocol_version = block_details.protocol_version.with_context(|| { + format!( + "Protocol version not present for block {}", + next_l2_block.as_u64() + ) + })?; + anyhow::ensure!( + protocol_version >= ProtocolVersionId::gateway_upgrade(), + "THe block does not yet contain the gateway upgrade" + ); + + Ok(()) +} + +pub async fn check_chain_readiness( + l1_rpc_url: String, + l2_rpc_url: String, + l2_chain_id: u64, +) -> anyhow::Result<()> { + let l1_provider = match Provider::::try_from(&l1_rpc_url) { + Ok(provider) => provider, + Err(err) => { + anyhow::bail!("Connection error: {:#?}", err); + } + }; + let l1_client = Arc::new(l1_provider); + + let l2_client = Client::http(SensitiveUrl::from_str(&l2_rpc_url).unwrap()) + .context("failed creating JSON-RPC client for main node")? + .for_network(L2ChainId::new(l2_chain_id).unwrap().into()) + .with_allowed_requests_per_second(NonZeroUsize::new(100_usize).unwrap()) + .build(); + let l2_client = Box::new(l2_client) as Box>; + + let inflight_txs_count: usize = l2_client.get_unconfirmed_txs_count().await?; + let diamond_proxy_addr = l2_client.get_main_contract().await?; + + if inflight_txs_count != 0 { + anyhow::bail!("Chain not ready since there are inflight txs!"); + } + + let zkchain = ZKChainAbi::new(diamond_proxy_addr, l1_client.clone()); + let batches_committed = zkchain.get_total_batches_committed().await?.as_u32(); + let batches_verified = zkchain.get_total_batches_verified().await?.as_u32(); + + verify_next_batch_new_version(batches_committed, l2_client.as_ref()).await?; + verify_next_batch_new_version(batches_verified, l2_client.as_ref()).await?; + + Ok(()) +} + +async fn verify_correct_l2_wrapped_base_token( + l2_rpc_url: String, + addr: Address, +) -> anyhow::Result<()> { + // Connect to the L1 Ethereum network + let l2_provider = match Provider::::try_from(&l2_rpc_url) { + Ok(provider) => provider, + Err(err) => { + anyhow::bail!("Connection error: {:#?}", err); + } + }; + + let code = l2_provider.get_code(addr, None).await?; + + if code.len() == 0 { + anyhow::bail!("L2 wrapped base token code can not be empty"); + } + + // TODO(EVM-939): also verify that the code is correct. + + Ok(()) +} + +pub async fn fetch_chain_info( + upgrade_info: &GatewayUpgradeInfo, + args: &GatewayUpgradeArgsInner, +) -> anyhow::Result { + // Connect to the L1 Ethereum network + let provider = match Provider::::try_from(&args.l1_rpc_url) { + Ok(provider) => provider, + Err(err) => { + anyhow::bail!("Connection error: {:#?}", err); + } + }; + + let client = Arc::new(provider); + let chain_id = U256::from(args.chain_id); + + let bridgehub = BridgehubAbi::new(upgrade_info.bridgehub_addr, client.clone()); + let hyperchain_addr = bridgehub.get_hyperchain(chain_id).await?; + if hyperchain_addr == Address::zero() { + anyhow::bail!("Chain not present in bridgehub"); + } + let l1_legacy_bridge = + L1SharedBridgeLegacyAbi::new(upgrade_info.l1_legacy_shared_bridge, client.clone()); + + let l2_legacy_shared_bridge_addr = l1_legacy_bridge.l_2_bridge_address(chain_id).await?; + // Creation of the shared bridge is one of the steps for chain creation, + // so it is very weird that a chain does not have it, so we fail here. + anyhow::ensure!( + l2_legacy_shared_bridge_addr != Address::zero(), + "Chain not registered inside the L1 shared bridge!" 
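A possible caller-side pattern for `check_chain_readiness` above: the chain only counts as ready once there are no in-flight txs and the blocks right after the last committed and verified batches are already on the gateway protocol version, so a finalize flow can simply poll it. The retry policy below is illustrative only (tokio-based, not part of this patch); `check_chain_readiness` itself is the function defined above.

use std::time::Duration;

use anyhow::Result;

// Illustrative polling wrapper around the patch's `check_chain_readiness`.
async fn wait_until_ready(l1_rpc_url: String, l2_rpc_url: String, l2_chain_id: u64) -> Result<()> {
    const MAX_ATTEMPTS: usize = 30;
    for attempt in 1..=MAX_ATTEMPTS {
        match check_chain_readiness(l1_rpc_url.clone(), l2_rpc_url.clone(), l2_chain_id).await {
            Ok(()) => return Ok(()),
            Err(err) if attempt < MAX_ATTEMPTS => {
                // Typical transient reasons: in-flight txs, or the committed /
                // verified batch cursors not yet past the version bump.
                println!("Chain not ready yet ({err:#}), retrying in 10s...");
                tokio::time::sleep(Duration::from_secs(10)).await;
            }
            Err(err) => return Err(err),
        }
    }
    unreachable!("the loop always returns from its last iteration");
}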
+ ); + + let l2_wrapped_base_token_store = + L2WrappedBaseTokenStoreAbi::new(upgrade_info.l1_wrapped_base_token_store, client.clone()); + + let l2_predeployed_wrapped_base_token = l2_wrapped_base_token_store + .l_2w_base_token_address(chain_id) + .await?; + + // Even in case the user does not want the script to fail due to this issue, + // we still display it just in case. + if l2_predeployed_wrapped_base_token == Address::zero() && args.dangerous_no_cross_check { + println!("\n\nWARNING: the chain does not contain wrapped base token. It is dangerous since the security of it depends on the ecosystem admin\n\n"); + } + + let zkchain = ZKChainAbi::new(hyperchain_addr, client.clone()); + + let base_token_addr = zkchain.get_base_token().await?; + + if !args.dangerous_no_cross_check { + // Firstly, check that the validators are present in the current timelock + let old_timelock = + ValidatorTimelockAbi::new(upgrade_info.old_validator_timelock, client.clone()); + + if !old_timelock + .validators(chain_id, args.validator_addr1) + .await? + { + anyhow::bail!( + "{} not validator", + hex_address_display(args.validator_addr1) + ); + } + if !old_timelock + .validators(chain_id, args.validator_addr2) + .await? + { + anyhow::bail!( + "{} not validator", + hex_address_display(args.validator_addr2) + ); + } + + if l2_predeployed_wrapped_base_token == Address::zero() { + anyhow::bail!("the chain does not contain wrapped base token. It is dangerous since the security of it depends on the ecosystem admin"); + } + + verify_correct_l2_wrapped_base_token( + args.l2_rpc_url.clone(), + l2_predeployed_wrapped_base_token, + ) + .await?; + + // Secondly, we check that the DA layer corresponds to the current pubdata pricing mode. + + // On L1 it is an enum with 0 meaaning a rollup and 1 meaning a validium. + // In the old version, it denoted how the pubdata will be checked. 
We use it to cross-check the + // user's input + let pricing_mode = zkchain.get_pubdata_pricing_mode().await?; + let pricing_mode_rollup = pricing_mode == U256::zero(); + + if args.da_mode.is_rollup() != pricing_mode_rollup { + anyhow::bail!("DA mode in consistent with the current system"); + } + } + + Ok(FetchedChainInfo { + l2_legacy_shared_bridge_addr, + hyperchain_addr, + base_token_addr, + }) +} + +impl ZkStackConfig for GatewayUpgradeInfo {} + +pub fn encode_ntv_asset_id(l1_chain_id: U256, addr: Address) -> H256 { + let encoded_data = encode(&[ + ethers::abi::Token::Uint(l1_chain_id), + ethers::abi::Token::Address(L2_NATIVE_TOKEN_VAULT_ADDRESS), + ethers::abi::Token::Address(addr), + ]); + + H256(keccak256(&encoded_data)) +} + +impl GatewayUpgradeInfo { + pub fn from_gateway_ecosystem_upgrade( + bridgehub_addr: Address, + gateway_ecosystem_upgrade: GatewayEcosystemUpgradeOutput, + ) -> Self { + Self { + l1_chain_id: gateway_ecosystem_upgrade.l1_chain_id, + bridgehub_addr, + old_validator_timelock: gateway_ecosystem_upgrade + .contracts_config + .old_validator_timelock, + l1_legacy_shared_bridge: gateway_ecosystem_upgrade + .contracts_config + .l1_legacy_shared_bridge, + ctm_deployment_tracker_proxy_addr: gateway_ecosystem_upgrade + .deployed_addresses + .bridgehub + .ctm_deployment_tracker_proxy_addr, + native_token_vault_addr: gateway_ecosystem_upgrade + .deployed_addresses + .native_token_vault_addr, + l1_bytecodes_supplier_addr: gateway_ecosystem_upgrade + .deployed_addresses + .l1_bytecodes_supplier_addr, + rollup_l1_da_validator_addr: gateway_ecosystem_upgrade + .deployed_addresses + .rollup_l1_da_validator_addr, + no_da_validium_l1_validator_addr: gateway_ecosystem_upgrade + .deployed_addresses + .validium_l1_da_validator_addr, + expected_rollup_l2_da_validator: gateway_ecosystem_upgrade + .contracts_config + .expected_rollup_l2_da_validator, + expected_validium_l2_da_validator: gateway_ecosystem_upgrade + .contracts_config + .expected_validium_l2_da_validator, + new_validator_timelock: gateway_ecosystem_upgrade + .deployed_addresses + .validator_timelock_addr, + // Note that on the contract side of things this contract is called `L2WrappedBaseTokenStore`, + // while on the server side for consistency with the conventions, where the prefix denotes + // the location of the contracts we call it `l1_wrapped_base_token_store` + l1_wrapped_base_token_store: gateway_ecosystem_upgrade + .deployed_addresses + .l2_wrapped_base_token_store_addr, + chain_upgrade_diamond_cut: gateway_ecosystem_upgrade.chain_upgrade_diamond_cut, + new_protocol_version: gateway_ecosystem_upgrade + .contracts_config + .new_protocol_version, + old_protocol_version: gateway_ecosystem_upgrade + .contracts_config + .old_protocol_version, + } + } + + fn get_l1_da_validator(&self, da_mode: DAMode) -> Address { + if da_mode.is_rollup() { + self.rollup_l1_da_validator_addr + } else { + self.no_da_validium_l1_validator_addr + } + } + + fn get_l2_da_validator(&self, da_mode: DAMode) -> Address { + if da_mode.is_rollup() { + self.expected_rollup_l2_da_validator + } else { + self.expected_validium_l2_da_validator + } + } + + pub fn update_contracts_config( + &self, + contracts_config: &mut ContractsConfig, + chain_info: &FetchedChainInfo, + da_mode: DAMode, + assign: bool, + ) { + assign_or_print!( + contracts_config.l2.legacy_shared_bridge_addr, + Some(chain_info.l2_legacy_shared_bridge_addr), + assign + ); + + let base_token_id = + encode_ntv_asset_id(U256::from(self.l1_chain_id), chain_info.base_token_addr); + 
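Usage note for the `assign_or_print!` macro relied on throughout `update_contracts_config`: with the flag set to true (the chain-upgrade CLI path) it writes into the config, and with it set to false (the standalone calldata-generator path, see `run` at the end of this module) it only prints what would have been assigned. A tiny sketch with a stand-in struct:

// Stand-in config; the real call sites assign into `ContractsConfig` fields.
#[derive(Debug, Default)]
struct DemoConfig {
    da_validator_addr: Option<u32>,
}

fn demo(should_assign: bool) {
    let mut config = DemoConfig::default();

    // Either performs `config.da_validator_addr = Some(42);`, or prints the
    // field path together with the value it would have been assigned.
    assign_or_print!(config.da_validator_addr, Some(42), should_assign);

    if should_assign {
        assert_eq!(config.da_validator_addr, Some(42));
    }
}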
assign_or_print!( + contracts_config.l1.base_token_asset_id, + Some(base_token_id), + assign + ); + + assign_or_print!( + contracts_config + .ecosystem_contracts + .l1_wrapped_base_token_store, + Some(self.l1_wrapped_base_token_store), + assign + ); + + assign_or_print!( + contracts_config + .ecosystem_contracts + .stm_deployment_tracker_proxy_addr, + Some(self.ctm_deployment_tracker_proxy_addr), + assign + ); + assign_or_print!( + contracts_config + .ecosystem_contracts + .stm_deployment_tracker_proxy_addr, + Some(self.ctm_deployment_tracker_proxy_addr), + assign + ); + assign_or_print!( + contracts_config.ecosystem_contracts.native_token_vault_addr, + Some(self.native_token_vault_addr), + assign + ); + assign_or_print!( + contracts_config + .ecosystem_contracts + .l1_bytecodes_supplier_addr, + Some(self.l1_bytecodes_supplier_addr), + assign + ); + assign_or_print!( + contracts_config.l1.rollup_l1_da_validator_addr, + Some(self.rollup_l1_da_validator_addr), + assign + ); + assign_or_print!( + contracts_config.l1.no_da_validium_l1_validator_addr, + Some(self.no_da_validium_l1_validator_addr), + assign + ); + + assign_or_print!( + contracts_config.l2.da_validator_addr, + Some(self.get_l2_da_validator(da_mode)), + assign + ); + + assign_or_print!( + contracts_config.l2.l2_native_token_vault_proxy_addr, + Some(L2_NATIVE_TOKEN_VAULT_ADDRESS), + assign + ); + } + + // Updates to the config that should be done somewhere after the upgrade is fully over. + // They do not have to updated for the system to work smoothly during the upgrade, but after + // "stage 2" they are desirable to be updated for consistency + pub fn _post_upgrade_update_contracts_config( + &self, + _config: &mut ContractsConfig, + _assign: bool, + ) { + todo!() + } +} + +#[derive( + Debug, Serialize, Deserialize, Clone, Copy, ValueEnum, EnumIter, strum::Display, PartialEq, Eq, +)] +pub(crate) enum DAMode { + Validium, + TemporaryRollup, + PermanentRollup, +} + +impl DAMode { + fn is_rollup(&self) -> bool { + matches!(self, Self::TemporaryRollup | Self::PermanentRollup) + } +} + +#[derive(Debug, Clone, Serialize)] +struct AdminCall { + description: String, + target: Address, + #[serde(serialize_with = "serialize_hex")] + data: Vec, + value: U256, +} + +impl AdminCall { + fn into_token(self) -> Token { + let Self { + target, + data, + value, + .. 
+        } = self;
+        Token::Tuple(vec![
+            Token::Address(target),
+            Token::Uint(value),
+            Token::Bytes(data),
+        ])
+    }
+}
+
+fn hex_address_display(addr: Address) -> String {
+    format!("0x{}", hex::encode(addr.0))
+}
+
+fn serialize_hex<S>(bytes: &Vec<u8>, serializer: S) -> Result<S::Ok, S::Error>
+where
+    S: serde::Serializer,
+{
+    let hex_string = format!("0x{}", hex::encode(bytes));
+    serializer.serialize_str(&hex_string)
+}
+
+#[derive(Debug, Clone)]
+pub struct AdminCallBuilder {
+    calls: Vec<AdminCall>,
+    validator_timelock_abi: BaseContract,
+    zkchain_abi: ethabi::Contract,
+    chain_admin_abi: ethabi::Contract,
+}
+
+impl AdminCallBuilder {
+    pub fn new() -> Self {
+        Self {
+            calls: vec![],
+            validator_timelock_abi: BaseContract::from(
+                parse_abi(&[
+                    "function addValidator(uint256 _chainId, address _newValidator) external",
+                ])
+                .unwrap(),
+            ),
+            zkchain_abi: hyperchain_contract(),
+            chain_admin_abi: chain_admin_contract(),
+        }
+    }
+
+    pub fn append_validator(
+        &mut self,
+        chain_id: u64,
+        validator_timelock_addr: Address,
+        validator_addr: Address,
+    ) {
+        let data = self
+            .validator_timelock_abi
+            .encode("addValidator", (U256::from(chain_id), validator_addr))
+            .unwrap();
+        let description = format!(
+            "Adding validator 0x{}",
+            hex::encode(validator_timelock_addr.0)
+        );
+
+        let call = AdminCall {
+            description,
+            data: data.to_vec(),
+            target: validator_timelock_addr,
+            value: U256::zero(),
+        };
+
+        self.calls.push(call);
+    }
+
+    pub fn append_execute_upgrade(
+        &mut self,
+        hyperchain_addr: Address,
+        protocol_version: u64,
+        diamond_cut_data: Bytes,
+    ) {
+        let diamond_cut = DIAMOND_CUT.decode_input(&diamond_cut_data.0).unwrap()[0].clone();
+
+        let data = self
+            .zkchain_abi
+            .function("upgradeChainFromVersion")
+            .unwrap()
+            .encode_input(&[Token::Uint(protocol_version.into()), diamond_cut])
+            .unwrap();
+        let description = "Executing upgrade:".to_string();
+
+        let call = AdminCall {
+            description,
+            data: data.to_vec(),
+            target: hyperchain_addr,
+            value: U256::zero(),
+        };
+
+        self.calls.push(call);
+    }
+
+    pub fn append_set_da_validator_pair(
+        &mut self,
+        hyperchain_addr: Address,
+        l1_da_validator: Address,
+        l2_da_validator: Address,
+    ) {
+        let data = self
+            .zkchain_abi
+            .function("setDAValidatorPair")
+            .unwrap()
+            .encode_input(&[
+                Token::Address(l1_da_validator),
+                Token::Address(l2_da_validator),
+            ])
+            .unwrap();
+        let description = "Setting DA validator pair:".to_string();
+
+        let call = AdminCall {
+            description,
+            data: data.to_vec(),
+            target: hyperchain_addr,
+            value: U256::zero(),
+        };
+
+        self.calls.push(call);
+    }
+
+    pub fn append_make_permanent_rollup(&mut self, hyperchain_addr: Address) {
+        let data = self
+            .zkchain_abi
+            .function("makePermanentRollup")
+            .unwrap()
+            .encode_input(&[])
+            .unwrap();
+        let description = "Make permanent rollup:".to_string();
+
+        let call = AdminCall {
+            description,
+            data: data.to_vec(),
+            target: hyperchain_addr,
+            value: U256::zero(),
+        };
+
+        self.calls.push(call);
+    }
+
+    pub fn display(&self) {
+        // Serialize with pretty printing
+        let serialized = serde_json::to_string_pretty(&self.calls).unwrap();
+
+        // Output the serialized JSON
+        println!("{}", serialized);
+    }
+
+    pub fn compile_full_calldata(self) -> Vec<u8> {
+        let tokens: Vec<_> = self.calls.into_iter().map(|x| x.into_token()).collect();
+
+        let data = self
+            .chain_admin_abi
+            .function("multicall")
+            .unwrap()
+            .encode_input(&[Token::Array(tokens), Token::Bool(true)])
+            .unwrap();
+
+        data.to_vec()
+    }
+}
+
+fn chain_admin_abi() -> BaseContract {
+    BaseContract::from(
+        parse_abi(&[
+            "function setUpgradeTimestamp(uint256 _protocolVersion, uint256 _upgradeTimestamp) external",
+        ])
+        .unwrap(),
+    )
+}
+
+pub fn set_upgrade_timestamp_calldata(packed_protocol_version: u64, timestamp: u64) -> Vec<u8> {
+    let chain_admin = chain_admin_abi();
+
+    chain_admin
+        .encode("setUpgradeTimestamp", (packed_protocol_version, timestamp))
+        .unwrap()
+        .to_vec()
+}
+
+#[derive(Parser, Debug, Clone)]
+pub struct GatewayUpgradeCalldataArgs {
+    upgrade_description_path: String,
+    chain_id: u64,
+    l1_rpc_url: String,
+    l2_rpc_url: String,
+    validator_addr1: Address,
+    validator_addr2: Address,
+    server_upgrade_timestamp: u64,
+    da_mode: DAMode,
+    #[clap(long, default_missing_value = "false")]
+    dangerous_no_cross_check: Option<bool>,
+}
+
+pub struct GatewayUpgradeArgsInner {
+    pub chain_id: u64,
+    pub l1_rpc_url: String,
+    pub l2_rpc_url: String,
+    pub validator_addr1: Address,
+    pub validator_addr2: Address,
+    pub da_mode: DAMode,
+    pub dangerous_no_cross_check: bool,
+}
+
+impl From<GatewayUpgradeCalldataArgs> for GatewayUpgradeArgsInner {
+    fn from(value: GatewayUpgradeCalldataArgs) -> Self {
+        Self {
+            chain_id: value.chain_id,
+            l1_rpc_url: value.l1_rpc_url,
+            l2_rpc_url: value.l2_rpc_url,
+            validator_addr1: value.validator_addr1,
+            validator_addr2: value.validator_addr2,
+            da_mode: value.da_mode,
+            dangerous_no_cross_check: value.dangerous_no_cross_check.unwrap_or_default(),
+        }
+    }
+}
+
+pub fn get_admin_call_builder(
+    upgrade_info: &GatewayUpgradeInfo,
+    chain_info: &FetchedChainInfo,
+    args: GatewayUpgradeArgsInner,
+) -> AdminCallBuilder {
+    let mut admin_calls_finalize = AdminCallBuilder::new();
+
+    admin_calls_finalize.append_validator(
+        args.chain_id,
+        upgrade_info.new_validator_timelock,
+        args.validator_addr1,
+    );
+    admin_calls_finalize.append_validator(
+        args.chain_id,
+        upgrade_info.new_validator_timelock,
+        args.validator_addr2,
+    );
+
+    admin_calls_finalize.append_execute_upgrade(
+        chain_info.hyperchain_addr,
+        upgrade_info.old_protocol_version,
+        upgrade_info.chain_upgrade_diamond_cut.clone(),
+    );
+
+    admin_calls_finalize.append_set_da_validator_pair(
+        chain_info.hyperchain_addr,
+        upgrade_info.get_l1_da_validator(args.da_mode),
+        upgrade_info.get_l2_da_validator(args.da_mode),
+    );
+
+    if args.da_mode == DAMode::PermanentRollup {
+        admin_calls_finalize.append_make_permanent_rollup(chain_info.hyperchain_addr);
+    }
+
+    admin_calls_finalize
+}
+
+pub(crate) async fn run(shell: &Shell, args: GatewayUpgradeCalldataArgs) -> anyhow::Result<()> {
+    // 0. Read the GatewayUpgradeInfo
+
+    let upgrade_info = GatewayUpgradeInfo::read(shell, &args.upgrade_description_path)?;
+
+    // 1. Update all the configs
+
+    let chain_info = fetch_chain_info(&upgrade_info, &args.clone().into()).await?;
+
+    upgrade_info.update_contracts_config(&mut Default::default(), &chain_info, args.da_mode, false);
+
+    // 2.
Generate calldata + + let schedule_calldata = set_upgrade_timestamp_calldata( + args.server_upgrade_timestamp, + upgrade_info.new_protocol_version, + ); + + println!( + "Calldata to schedule upgrade: {}", + hex::encode(&schedule_calldata) + ); + + let admin_calls_finalize = get_admin_call_builder(&upgrade_info, &chain_info, args.into()); + + admin_calls_finalize.display(); + + let chain_admin_calldata = admin_calls_finalize.compile_full_calldata(); + + println!( + "Full calldata to call `ChainAdmin` with : {}", + hex::encode(&chain_admin_calldata) + ); + + Ok(()) +} diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/mod.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/mod.rs index a292168dc6e0..ac41d76b3c4e 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/mod.rs @@ -3,6 +3,8 @@ pub mod config_writer; pub mod contracts; pub mod database; pub mod fmt; +#[cfg(feature = "gateway")] +pub mod gateway; pub mod genesis; pub mod lint; pub(crate) mod lint_utils; diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/integration.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/integration.rs index 7a140644dcec..67976c340cb5 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/integration.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/integration.rs @@ -43,7 +43,7 @@ pub async fn run(shell: &Shell, args: IntegrationArgs) -> anyhow::Result<()> { let test_pattern = args.test_pattern; let mut command = cmd!( shell, - "yarn jest --forceExit --testTimeout 120000 -t {test_pattern...}" + "yarn jest --forceExit --testTimeout 350000 -t {test_pattern...}" ) .env("CHAIN_NAME", ecosystem_config.current_chain()) .env("MASTER_WALLET_PK", wallets.get_test_pk(&chain_config)?); diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/messages.rs b/zkstack_cli/crates/zkstack/src/commands/dev/messages.rs index 235aa95ee492..b65750b34341 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/messages.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/messages.rs @@ -13,6 +13,9 @@ pub(super) const MSG_SUBCOMMAND_CLEAN: &str = "Clean artifacts"; pub(super) const MSG_SUBCOMMAND_LINT_ABOUT: &str = "Lint code"; pub(super) const MSG_CONTRACTS_ABOUT: &str = "Build contracts"; pub(super) const MSG_CONFIG_WRITER_ABOUT: &str = "Overwrite general config"; +#[cfg(feature = "gateway")] +pub(super) const MSG_GATEWAY_UPGRADE_CALLDATA: &str = + "Gateway upgrade checker and calldata generator"; pub(super) const MSG_SUBCOMMAND_FMT_ABOUT: &str = "Format code"; @@ -110,9 +113,11 @@ pub(super) const MSG_NOTHING_TO_BUILD_MSG: &str = "Nothing to build!"; pub(super) const MSG_BUILDING_CONTRACTS: &str = "Building contracts"; pub(super) const MSG_BUILDING_L2_CONTRACTS_SPINNER: &str = "Building L2 contracts.."; pub(super) const MSG_BUILDING_L1_CONTRACTS_SPINNER: &str = "Building L1 contracts.."; +pub(super) const MSG_BUILDING_L1_DA_CONTRACTS_SPINNER: &str = "Building L1 DA contracts.."; pub(super) const MSG_BUILDING_SYSTEM_CONTRACTS_SPINNER: &str = "Building system contracts.."; pub(super) const MSG_BUILDING_CONTRACTS_SUCCESS: &str = "Contracts built successfully"; pub(super) const MSG_BUILD_L1_CONTRACTS_HELP: &str = "Build L1 contracts"; +pub(super) const MSG_BUILD_L1_DA_CONTRACTS_HELP: &str = "Build L1 DA contracts"; pub(super) const MSG_BUILD_L2_CONTRACTS_HELP: &str = "Build L2 contracts"; pub(super) const MSG_BUILD_SYSTEM_CONTRACTS_HELP: &str = "Build system 
contracts"; diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/mod.rs b/zkstack_cli/crates/zkstack/src/commands/dev/mod.rs index 409c3a764eb1..45e429129082 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/mod.rs @@ -1,5 +1,7 @@ use clap::Subcommand; use commands::status::args::StatusArgs; +#[cfg(feature = "gateway")] +use messages::MSG_GATEWAY_UPGRADE_CALLDATA; use messages::MSG_STATUS_ABOUT; use xshell::Shell; @@ -15,7 +17,7 @@ use crate::commands::dev::messages::{ MSG_SUBCOMMAND_SNAPSHOTS_CREATOR_ABOUT, MSG_SUBCOMMAND_TESTS_ABOUT, }; -mod commands; +pub(crate) mod commands; mod consts; mod dals; mod defaults; @@ -47,6 +49,9 @@ pub enum DevCommands { Status(StatusArgs), #[command(about = MSG_GENERATE_GENESIS_ABOUT, alias = "genesis")] GenerateGenesis, + #[cfg(feature = "gateway")] + #[command(about = MSG_GATEWAY_UPGRADE_CALLDATA)] + GatewayUpgradeCalldata(commands::gateway::GatewayUpgradeCalldataArgs), } pub async fn run(shell: &Shell, args: DevCommands) -> anyhow::Result<()> { @@ -65,6 +70,8 @@ pub async fn run(shell: &Shell, args: DevCommands) -> anyhow::Result<()> { } DevCommands::Status(args) => commands::status::run(shell, args).await?, DevCommands::GenerateGenesis => commands::genesis::run(shell).await?, + #[cfg(feature = "gateway")] + DevCommands::GatewayUpgradeCalldata(args) => commands::gateway::run(shell, args).await?, } Ok(()) } diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/create.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/create.rs index c49739d022c9..d997230d5611 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/create.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/create.rs @@ -70,7 +70,7 @@ impl EcosystemCreateArgs { link_to_code, wallet_creation: chain.wallet_creation, wallet_path: chain.wallet_path.clone(), - chain_args: chain, + chain_args: chain.clone(), start_containers, update_submodules: self.update_submodules, }) diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/gateway_upgrade.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/gateway_upgrade.rs index 0301853e1acf..50be243ab704 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/gateway_upgrade.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/gateway_upgrade.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. 
use std::path::PathBuf; use clap::{Parser, ValueEnum}; diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/init.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/init.rs index d6c3d675c053..cb4206c8a995 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/init.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/init.rs @@ -7,7 +7,7 @@ use zkstack_cli_common::{forge::ForgeScriptArgs, Prompt, PromptConfirm}; use zkstack_cli_types::L1Network; use crate::{ - commands::chain::args::genesis::GenesisArgs, + commands::chain::args::{genesis::GenesisArgs, init::da_configs::ValidiumTypeArgs}, defaults::LOCAL_RPC_URL, messages::{ MSG_DEPLOY_ECOSYSTEM_PROMPT, MSG_DEPLOY_ERC20_PROMPT, MSG_DEV_ARG_HELP, @@ -105,6 +105,12 @@ pub struct EcosystemInitArgs { pub no_port_reallocation: bool, #[clap(long)] pub update_submodules: Option, + #[clap(flatten)] + pub validium_args: ValidiumTypeArgs, + #[clap(long, default_missing_value = "false", num_args = 0..=1)] + pub support_l2_legacy_shared_bridge_test: Option, + #[clap(long, default_missing_value = "false")] + pub skip_contract_compilation_override: bool, } impl EcosystemInitArgs { @@ -146,6 +152,11 @@ impl EcosystemInitArgs { observability, ecosystem_only: self.ecosystem_only, no_port_reallocation: self.no_port_reallocation, + skip_contract_compilation_override: self.skip_contract_compilation_override, + validium_args: self.validium_args, + support_l2_legacy_shared_bridge_test: self + .support_l2_legacy_shared_bridge_test + .unwrap_or_default(), } } } @@ -159,4 +170,7 @@ pub struct EcosystemInitArgsFinal { pub observability: bool, pub ecosystem_only: bool, pub no_port_reallocation: bool, + pub skip_contract_compilation_override: bool, + pub validium_args: ValidiumTypeArgs, + pub support_l2_legacy_shared_bridge_test: bool, } diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/build_transactions.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/build_transactions.rs index 3d81e6f3b0b7..1cd76debbe15 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/build_transactions.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/build_transactions.rs @@ -49,6 +49,7 @@ pub async fn run(args: BuildTransactionsArgs, shell: &Shell) -> anyhow::Result<( &args.l1_rpc_url, Some(args.sender), false, + false, ) .await?; diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/common.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/common.rs index c31aa6252971..7255ba9e1ca5 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/common.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/common.rs @@ -16,6 +16,7 @@ use zkstack_cli_types::{L1Network, ProverMode}; use crate::utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}; +#[allow(clippy::too_many_arguments)] pub async fn deploy_l1( shell: &Shell, forge_args: &ForgeScriptArgs, @@ -24,8 +25,10 @@ pub async fn deploy_l1( l1_rpc_url: &str, sender: Option, broadcast: bool, + support_l2_legacy_shared_bridge_test: bool, ) -> anyhow::Result { let deploy_config_path = DEPLOY_ECOSYSTEM_SCRIPT_PARAMS.input(&config.link_to_code); + dbg!(config.get_default_configs_path()); let default_genesis_config = GenesisConfig::read_with_base_path(shell, config.get_default_configs_path()) .context("failed reading genesis config")?; @@ -38,6 +41,8 @@ pub async fn deploy_l1( initial_deployment_config, config.era_chain_id, config.prover_version == ProverMode::NoProofs, + config.l1_network, + 
support_l2_legacy_shared_bridge_test, ); deploy_config.save(shell, deploy_config_path)?; diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/gateway_upgrade.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/gateway_upgrade.rs new file mode 100644 index 000000000000..01905afb9a5d --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/gateway_upgrade.rs @@ -0,0 +1,645 @@ +use anyhow::Context; +use ethers::{abi::parse_abi, contract::BaseContract, utils::hex}; +use lazy_static::lazy_static; +use serde::Deserialize; +use xshell::Shell; +use zkstack_cli_common::{db::DatabaseConfig, forge::Forge, git, spinner::Spinner}; +use zkstack_cli_config::{ + forge_interface::{ + gateway_ecosystem_upgrade::{ + input::GatewayEcosystemUpgradeInput, output::GatewayEcosystemUpgradeOutput, + }, + gateway_preparation::input::GatewayPreparationConfig, + script_params::{ + FINALIZE_UPGRADE_SCRIPT_PARAMS, GATEWAY_PREPARATION, GATEWAY_UPGRADE_ECOSYSTEM_PARAMS, + }, + }, + traits::{ReadConfig, ReadConfigWithBasePath, SaveConfig, SaveConfigWithBasePath}, + EcosystemConfig, GenesisConfig, CONFIGS_PATH, +}; +use zkstack_cli_types::ProverMode; +use zksync_basic_types::commitment::L1BatchCommitmentMode; +use zksync_types::{H160, L2_NATIVE_TOKEN_VAULT_ADDRESS, SHARED_BRIDGE_ETHER_TOKEN_ADDRESS, U256}; + +use super::args::gateway_upgrade::{GatewayUpgradeArgs, GatewayUpgradeArgsFinal}; +use crate::{ + accept_ownership::{ + accept_admin, governance_execute_calls, make_permanent_rollup, set_da_validator_pair, + }, + commands::{ + chain, + chain::{ + args::genesis::GenesisArgsFinal, + convert_to_gateway::{ + calculate_gateway_ctm, call_script, GATEWAY_PREPARATION_INTERFACE, + }, + genesis::genesis, + }, + ecosystem::args::gateway_upgrade::GatewayUpgradeStage, + }, + defaults::{generate_db_names, DBNames, DATABASE_SERVER_URL}, + messages::{MSG_CHAIN_NOT_FOUND_ERR, MSG_GENESIS_DATABASE_ERR, MSG_INTALLING_DEPS_SPINNER}, + utils::forge::{fill_forge_private_key, WalletOwner}, +}; + +pub async fn run(args: GatewayUpgradeArgs, shell: &Shell) -> anyhow::Result<()> { + println!("Running ecosystem gateway upgrade args"); + + let mut ecosystem_config = EcosystemConfig::from_file(shell)?; + git::submodule_update(shell, ecosystem_config.link_to_code.clone())?; + + let mut final_ecosystem_args = args.fill_values_with_prompt(ecosystem_config.l1_network, true); + + match final_ecosystem_args.ecosystem_upgrade_stage { + GatewayUpgradeStage::NoGovernancePrepare => { + no_governance_prepare(&mut final_ecosystem_args, shell, &ecosystem_config).await?; + no_governance_prepare_gateway(shell, &mut ecosystem_config).await?; + } + GatewayUpgradeStage::GovernanceStage1 => { + governance_stage_1(&mut final_ecosystem_args, shell, &ecosystem_config).await?; + } + GatewayUpgradeStage::GovernanceStage2 => { + governance_stage_2(&mut final_ecosystem_args, shell, &ecosystem_config).await?; + } + GatewayUpgradeStage::NoGovernanceStage2 => { + no_governance_stage_2(&mut final_ecosystem_args, shell, &ecosystem_config).await?; + } + GatewayUpgradeStage::GovernanceStage3 => { + governance_stage_3(&mut final_ecosystem_args, shell, &ecosystem_config).await?; + } + GatewayUpgradeStage::NoGovernanceStage3 => { + no_governance_stage_3(&mut final_ecosystem_args, shell, &ecosystem_config).await?; + } + } + + Ok(()) +} + +#[derive(Debug, Deserialize)] +struct BroadcastFile { + pub transactions: Vec, +} +#[derive(Debug, Deserialize)] +struct BroadcastFileTransactions { + pub hash: String, +} + +async fn no_governance_prepare( + init_args: &mut 
GatewayUpgradeArgsFinal, + shell: &Shell, + ecosystem_config: &EcosystemConfig, +) -> anyhow::Result<()> { + let spinner = Spinner::new(MSG_INTALLING_DEPS_SPINNER); + spinner.finish(); + + let forge_args = init_args.forge_args.clone(); + let l1_rpc_url = init_args.l1_rpc_url.clone(); + + let new_genesis_config = GenesisConfig::read_with_base_path(shell, CONFIGS_PATH)?; + let current_contracts_config = ecosystem_config.get_contracts_config()?; + let initial_deployment_config = ecosystem_config.get_initial_deployment_config()?; + + let ecosystem_upgrade_config_path = + GATEWAY_UPGRADE_ECOSYSTEM_PARAMS.input(&ecosystem_config.link_to_code); + + let era_config = ecosystem_config + .load_chain(Some("era".to_string())) + .context("No era")?; + + // FIXME: we will have to force this in production environment + // assert_eq!(era_config.chain_id, ecosystem_config.era_chain_id); + + let gateway_upgrade_input = GatewayEcosystemUpgradeInput::new( + &new_genesis_config, + ¤t_contracts_config, + &initial_deployment_config, + ecosystem_config.era_chain_id, + era_config.get_contracts_config()?.l1.diamond_proxy_addr, + ecosystem_config.prover_version == ProverMode::NoProofs, + ); + gateway_upgrade_input.save(shell, ecosystem_upgrade_config_path.clone())?; + + let mut forge = Forge::new(&ecosystem_config.path_to_l1_foundry()) + .script( + &GATEWAY_UPGRADE_ECOSYSTEM_PARAMS.script(), + forge_args.clone(), + ) + .with_ffi() + .with_rpc_url(l1_rpc_url) + .with_slow() + .with_gas_limit(1_000_000_000_000) + .with_broadcast(); + + forge = fill_forge_private_key( + forge, + ecosystem_config.get_wallets()?.deployer.as_ref(), + WalletOwner::Deployer, + )?; + + println!("Preparing the ecosystem for the upgrade!"); + + forge.run(shell)?; + + println!("done!"); + + let l1_chain_id = era_config.l1_network.chain_id(); + + let broadcast_file: BroadcastFile = { + let file_content = std::fs::read_to_string( + ecosystem_config + .link_to_code + .join("contracts/l1-contracts") + .join(format!( + "broadcast/EcosystemUpgrade.s.sol/{}/run-latest.json", + l1_chain_id + )), + ) + .context("Failed to read broadcast file")?; + serde_json::from_str(&file_content).context("Failed to parse broadcast file")? + }; + + let mut output = GatewayEcosystemUpgradeOutput::read( + shell, + GATEWAY_UPGRADE_ECOSYSTEM_PARAMS.output(&ecosystem_config.link_to_code), + )?; + + // Add all the transaction hashes. 
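+    // `run-latest.json` is written by `forge script --broadcast`; each entry's `hash` below is the
+    // L1 transaction hash of one of the upgrade-preparation transactions sent above.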
+ for tx in broadcast_file.transactions { + output.transactions.push(tx.hash); + } + + output.save_with_base_path(shell, &ecosystem_config.config)?; + + Ok(()) +} + +async fn no_governance_prepare_gateway( + shell: &Shell, + ecosystem_config: &mut EcosystemConfig, +) -> anyhow::Result<()> { + let spinner = Spinner::new(MSG_INTALLING_DEPS_SPINNER); + spinner.finish(); + + let mut contracts_config = ecosystem_config.get_contracts_config()?; + + let output = GatewayEcosystemUpgradeOutput::read( + shell, + GATEWAY_UPGRADE_ECOSYSTEM_PARAMS.output(&ecosystem_config.link_to_code), + )?; + + let mut s: String = "0x".to_string(); + s += &hex::encode(output.contracts_config.diamond_cut_data.0); + contracts_config.ecosystem_contracts.diamond_cut_data = s; + + s = "0x".to_string(); + s += &hex::encode(output.contracts_config.force_deployments_data.0); + contracts_config.ecosystem_contracts.force_deployments_data = Some(s); + + contracts_config.l1.rollup_l1_da_validator_addr = + Some(output.deployed_addresses.rollup_l1_da_validator_addr); + contracts_config.l1.no_da_validium_l1_validator_addr = + Some(output.deployed_addresses.validium_l1_da_validator_addr); + + contracts_config + .ecosystem_contracts + .stm_deployment_tracker_proxy_addr = Some( + output + .deployed_addresses + .bridgehub + .ctm_deployment_tracker_proxy_addr, + ); + contracts_config.ecosystem_contracts.native_token_vault_addr = + Some(output.deployed_addresses.native_token_vault_addr); + contracts_config + .ecosystem_contracts + .l1_bytecodes_supplier_addr = Some(output.deployed_addresses.l1_bytecodes_supplier_addr); + contracts_config.bridges.l1_nullifier_addr = Some(contracts_config.bridges.shared.l1_address); + contracts_config.ecosystem_contracts.validator_timelock_addr = + output.deployed_addresses.validator_timelock_addr; + contracts_config.l1.validator_timelock_addr = output.deployed_addresses.validator_timelock_addr; + contracts_config.bridges.shared.l1_address = + output.deployed_addresses.bridges.shared_bridge_proxy_addr; + contracts_config + .ecosystem_contracts + .expected_rollup_l2_da_validator = + Some(output.contracts_config.expected_rollup_l2_da_validator); + + contracts_config.save_with_base_path(shell, &ecosystem_config.config)?; + Ok(()) +} + +// Governance has approved the proposal, now it will insert the new protocol version into our STM (CTM) +async fn governance_stage_1( + init_args: &mut GatewayUpgradeArgsFinal, + shell: &Shell, + ecosystem_config: &EcosystemConfig, +) -> anyhow::Result<()> { + println!("Executing governance stage 1!"); + + let previous_output = GatewayEcosystemUpgradeOutput::read( + shell, + GATEWAY_UPGRADE_ECOSYSTEM_PARAMS.output(&ecosystem_config.link_to_code), + )?; + previous_output.save_with_base_path(shell, &ecosystem_config.config)?; + + // These are ABI-encoded + let stage1_calls = previous_output.governance_stage1_calls; + + governance_execute_calls( + shell, + ecosystem_config, + &ecosystem_config.get_wallets()?.governor, + stage1_calls.0, + &init_args.forge_args.clone(), + init_args.l1_rpc_url.clone(), + ) + .await?; + + let gateway_ecosystem_preparation_output = + GatewayEcosystemUpgradeOutput::read_with_base_path(shell, &ecosystem_config.config)?; + + let mut contracts_config = ecosystem_config.get_contracts_config()?; + + contracts_config + .ecosystem_contracts + .stm_deployment_tracker_proxy_addr = Some( + gateway_ecosystem_preparation_output + .deployed_addresses + .bridgehub + .ctm_deployment_tracker_proxy_addr, + ); + // This is force deployment data for creating new 
contracts, not really relevant here tbh, + contracts_config.ecosystem_contracts.force_deployments_data = Some(hex::encode( + &gateway_ecosystem_preparation_output + .contracts_config + .force_deployments_data + .0, + )); + contracts_config.ecosystem_contracts.native_token_vault_addr = Some( + gateway_ecosystem_preparation_output + .deployed_addresses + .native_token_vault_addr, + ); + contracts_config + .ecosystem_contracts + .l1_bytecodes_supplier_addr = Some( + gateway_ecosystem_preparation_output + .deployed_addresses + .l1_bytecodes_supplier_addr, + ); + + contracts_config.l1.rollup_l1_da_validator_addr = Some( + gateway_ecosystem_preparation_output + .deployed_addresses + .rollup_l1_da_validator_addr, + ); + + contracts_config.l1.no_da_validium_l1_validator_addr = Some( + gateway_ecosystem_preparation_output + .deployed_addresses + .validium_l1_da_validator_addr, + ); + + // This value is meaningless for the ecosystem, but we'll populate it for consistency + contracts_config.l2.da_validator_addr = Some(H160::zero()); + contracts_config.l2.l2_native_token_vault_proxy_addr = Some(L2_NATIVE_TOKEN_VAULT_ADDRESS); + contracts_config.l2.legacy_shared_bridge_addr = contracts_config.bridges.shared.l2_address; + + contracts_config.save_with_base_path(shell, &ecosystem_config.config)?; + + Ok(()) +} + +// Governance has approved the proposal, now it will insert the new protocol version into our STM (CTM) +async fn governance_stage_2( + init_args: &mut GatewayUpgradeArgsFinal, + shell: &Shell, + ecosystem_config: &EcosystemConfig, +) -> anyhow::Result<()> { + println!("Executing governance stage 2!"); + + let previous_output = + GatewayEcosystemUpgradeOutput::read_with_base_path(shell, &ecosystem_config.config)?; + + // These are ABI-encoded + let stage2_calls = previous_output.governance_stage2_calls; + + governance_execute_calls( + shell, + ecosystem_config, + &ecosystem_config.get_wallets()?.governor, + stage2_calls.0, + &init_args.forge_args.clone(), + init_args.l1_rpc_url.clone(), + ) + .await?; + + let mut contracts_config = ecosystem_config.get_contracts_config()?; + contracts_config.bridges.shared.l1_address = previous_output + .deployed_addresses + .bridges + .shared_bridge_proxy_addr; + + contracts_config.save_with_base_path(shell, &ecosystem_config.config)?; + println!("Stage2 finalized!"); + + Ok(()) +} + +lazy_static! { + static ref FINALIZE_UPGRADE: BaseContract = BaseContract::from( + parse_abi(&[ + "function initChains(address bridgehub, uint256[] chains) public", + "function initTokens(address l1NativeTokenVault, address[] tokens, uint256[] chains) public", + ]) + .unwrap(), + ); +} + +// Governance has approved the proposal, now it will insert the new protocol version into our STM (CTM) +async fn no_governance_stage_2( + init_args: &mut GatewayUpgradeArgsFinal, + shell: &Shell, + ecosystem_config: &EcosystemConfig, +) -> anyhow::Result<()> { + let contracts_config = ecosystem_config.get_contracts_config()?; + let wallets = ecosystem_config.get_wallets()?; + let deployer_private_key = wallets + .deployer + .context("deployer_wallet")? 
+        .private_key_h256()
+        .context("deployer_private_key")?;
+
+    println!("Finalizing stage2 of the upgrade!");
+
+    let chains: Vec<_> = ecosystem_config
+        .list_of_chains()
+        .into_iter()
+        .filter_map(|name| {
+            let chain = ecosystem_config
+                .load_chain(Some(name))
+                .expect("Invalid chain");
+            (chain.name != "gateway").then_some(chain)
+        })
+        .collect();
+
+    let chain_ids: Vec<_> = chains
+        .into_iter()
+        .map(|c| ethers::abi::Token::Uint(U256::from(c.chain_id.as_u64())))
+        .collect();
+    let mut tokens: Vec<_> = ecosystem_config
+        .get_erc20_tokens()
+        .into_iter()
+        .map(|t| ethers::abi::Token::Address(t.address))
+        .collect();
+    tokens.push(ethers::abi::Token::Address(
+        SHARED_BRIDGE_ETHER_TOKEN_ADDRESS,
+    ));
+
+    // `--resume` does not work properly for accepting admin. Foundry assumes that two calls with
+    // the same function signature are the same call, but we invoke this function multiple times
+    // during the init process, so Foundry would execute it only once. We actually need to accept
+    // admin multiple times, hence resume is disabled here.
+    let mut forge_args = init_args.forge_args.clone();
+    forge_args.resume = false;
+
+    let init_chains_calldata = FINALIZE_UPGRADE
+        .encode(
+            "initChains",
+            (
+                ethers::abi::Token::Address(
+                    contracts_config.ecosystem_contracts.bridgehub_proxy_addr,
+                ),
+                ethers::abi::Token::Array(chain_ids.clone()),
+            ),
+        )
+        .unwrap();
+    let init_tokens_calldata = FINALIZE_UPGRADE
+        .encode(
+            "initTokens",
+            (
+                ethers::abi::Token::Address(
+                    contracts_config
+                        .ecosystem_contracts
+                        .native_token_vault_addr
+                        .context("native_token_vault_addr")?,
+                ),
+                ethers::abi::Token::Array(tokens),
+                ethers::abi::Token::Array(chain_ids),
+            ),
+        )
+        .unwrap();
+
+    println!("Initializing chains!");
+    let foundry_contracts_path = ecosystem_config.path_to_l1_foundry();
+    let forge = Forge::new(&foundry_contracts_path)
+        .script(&FINALIZE_UPGRADE_SCRIPT_PARAMS.script(), forge_args.clone())
+        .with_ffi()
+        .with_rpc_url(init_args.l1_rpc_url.clone())
+        .with_broadcast()
+        .with_calldata(&init_chains_calldata)
+        .with_private_key(deployer_private_key);
+
+    forge.run(shell)?;
+
+    println!("Initializing tokens!");
+
+    let forge = Forge::new(&foundry_contracts_path)
+        .script(&FINALIZE_UPGRADE_SCRIPT_PARAMS.script(), forge_args.clone())
+        .with_ffi()
+        .with_rpc_url(init_args.l1_rpc_url.clone())
+        .with_broadcast()
+        .with_calldata(&init_tokens_calldata)
+        .with_private_key(deployer_private_key);
+
+    forge.run(shell)?;
+
+    println!("Done!");
+
+    Ok(())
+}
+
+async fn governance_stage_3(
+    init_args: &mut GatewayUpgradeArgsFinal,
+    shell: &Shell,
+    ecosystem_config: &EcosystemConfig,
+) -> anyhow::Result<()> {
+    let chain_config = ecosystem_config
+        .load_chain(Some("gateway".to_string()))
+        .context(MSG_CHAIN_NOT_FOUND_ERR)?;
+    call_script(
+        shell,
+        init_args.forge_args.clone(),
+        &GATEWAY_PREPARATION_INTERFACE
+            .encode("executeGovernanceTxs", ())
+            .unwrap(),
+        ecosystem_config,
+        &chain_config,
+        &ecosystem_config.get_wallets()?.governor,
+        init_args.l1_rpc_url.clone(),
+        true,
+    )
+    .await?;
+
+    Ok(())
+}
+
+async fn no_governance_stage_3(
+    init_args: &mut GatewayUpgradeArgsFinal,
+    shell: &Shell,
+    ecosystem_config: &EcosystemConfig,
+) -> anyhow::Result<()> {
+    let chain_config = ecosystem_config
+        .load_chain(Some("gateway".to_string()))
+        .context(MSG_CHAIN_NOT_FOUND_ERR)?;
+
+    let chain_genesis_config = chain_config.get_genesis_config()?;
+    let mut chain_contracts_config = chain_config.get_contracts_config()?;
+
+    // Fund gateway's governor
(chain_config.get_wallets_config()?.governor) + chain::common::distribute_eth( + ecosystem_config, + &chain_config, + init_args.l1_rpc_url.clone(), + ) + .await?; + + // Accept ownership for DiamondProxy (run by L2 Governor) + accept_admin( + shell, + ecosystem_config, + chain_contracts_config.l1.chain_admin_addr, + &chain_config.get_wallets_config()?.governor, + chain_contracts_config.l1.diamond_proxy_addr, + &init_args.forge_args.clone(), + init_args.l1_rpc_url.clone(), + ) + .await?; + + // prepare script input + let gateway_config = calculate_gateway_ctm( + shell, + init_args.forge_args.clone(), + ecosystem_config, + &chain_config, + &chain_genesis_config, + &ecosystem_config.get_initial_deployment_config().unwrap(), + init_args.l1_rpc_url.clone(), + ) + .await?; + + let gateway_preparation_config_path = GATEWAY_PREPARATION.input(&chain_config.link_to_code); + let preparation_config = GatewayPreparationConfig::new( + &chain_config, + &chain_contracts_config, + &ecosystem_config.get_contracts_config()?, + &gateway_config, + )?; + preparation_config.save(shell, gateway_preparation_config_path)?; + + // deploy filterer + let output = call_script( + shell, + init_args.forge_args.clone(), + &GATEWAY_PREPARATION_INTERFACE + .encode("deployAndSetGatewayTransactionFilterer", ()) + .unwrap(), + ecosystem_config, + &chain_config, + &chain_config.get_wallets_config()?.governor, + init_args.l1_rpc_url.clone(), + true, + ) + .await?; + + chain_contracts_config.set_transaction_filterer(output.gateway_transaction_filterer_proxy); + + // whitelist deployer + call_script( + shell, + init_args.forge_args.clone(), + &GATEWAY_PREPARATION_INTERFACE + .encode( + "grantWhitelist", + ( + output.gateway_transaction_filterer_proxy, + vec![ + ecosystem_config.get_contracts_config()?.l1.governance_addr, + ecosystem_config + .get_wallets()? + .deployer + .context("no deployer addr")? + .address, + ], + ), + ) + .unwrap(), + ecosystem_config, + &chain_config, + &chain_config.get_wallets_config()?.governor, + init_args.l1_rpc_url.clone(), + true, + ) + .await?; + + // deploy ctm + chain::convert_to_gateway::deploy_gateway_ctm( + shell, + init_args.forge_args.clone(), + ecosystem_config, + &chain_config, + &chain_genesis_config, + &ecosystem_config.get_initial_deployment_config().unwrap(), + init_args.l1_rpc_url.clone(), + ) + .await?; + + chain_contracts_config.save_with_base_path(shell, &chain_config.configs)?; + + // Set da validators + let validium_mode = + chain_config.l1_batch_commit_data_generator_mode == L1BatchCommitmentMode::Validium; + let l1_da_validator_addr = if validium_mode { + chain_contracts_config.l1.no_da_validium_l1_validator_addr + } else { + chain_contracts_config.l1.rollup_l1_da_validator_addr + }; + set_da_validator_pair( + shell, + ecosystem_config, + chain_contracts_config.l1.chain_admin_addr, + &chain_config.get_wallets_config()?.governor, + chain_contracts_config.l1.diamond_proxy_addr, + l1_da_validator_addr.context("l1_da_validator_addr")?, + chain_contracts_config + .l2 + .da_validator_addr + .context("da_validator_addr")?, + &init_args.forge_args.clone(), + init_args.l1_rpc_url.clone(), + ) + .await?; + if !validium_mode { + make_permanent_rollup( + shell, + ecosystem_config, + chain_contracts_config.l1.chain_admin_addr, + &chain_config.get_wallets_config()?.governor, + chain_contracts_config.l1.diamond_proxy_addr, + &init_args.forge_args.clone(), + init_args.l1_rpc_url.clone(), + ) + .await?; + } + + let DBNames { server_name, .. 
} = generate_db_names(&chain_config); + let args = GenesisArgsFinal { + server_db: DatabaseConfig::new(DATABASE_SERVER_URL.clone(), server_name), + dont_drop: false, + }; + // Run genesis (create DB and run server with --genesis) + genesis(args, shell, &chain_config) + .await + .context(MSG_GENESIS_DATABASE_ERR)?; + + Ok(()) +} diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/init.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/init.rs index 56c196fe7be9..ac66d47a831c 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/init.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/init.rs @@ -4,7 +4,7 @@ use anyhow::Context; use xshell::Shell; use zkstack_cli_common::{ config::global_config, - contracts::build_system_contracts, + contracts::{build_l1_contracts, build_l2_contracts, build_system_contracts}, forge::{Forge, ForgeScriptArgs}, git, logger, spinner::Spinner, @@ -27,6 +27,7 @@ use super::{ args::init::{EcosystemArgsFinal, EcosystemInitArgs, EcosystemInitArgsFinal}, common::deploy_l1, setup_observability, + utils::{build_da_contracts, install_yarn_dependencies}, }; use crate::{ accept_ownership::{accept_admin, accept_owner}, @@ -110,7 +111,13 @@ async fn init_ecosystem( initial_deployment_config: &InitialDeploymentConfig, ) -> anyhow::Result { let spinner = Spinner::new(MSG_INTALLING_DEPS_SPINNER); - build_system_contracts(shell.clone(), ecosystem_config.link_to_code.clone())?; + install_yarn_dependencies(shell, &ecosystem_config.link_to_code)?; + if !init_args.skip_contract_compilation_override { + build_da_contracts(shell, &ecosystem_config.link_to_code)?; + build_l1_contracts(shell.clone(), ecosystem_config.link_to_code.clone())?; + build_system_contracts(shell.clone(), ecosystem_config.link_to_code.clone())?; + build_l2_contracts(shell.clone(), ecosystem_config.link_to_code.clone())?; + } spinner.finish(); let contracts = deploy_ecosystem( @@ -119,6 +126,7 @@ async fn init_ecosystem( init_args.forge_args.clone(), ecosystem_config, initial_deployment_config, + init_args.support_l2_legacy_shared_bridge_test, ) .await?; contracts.save_with_base_path(shell, &ecosystem_config.config)?; @@ -177,6 +185,7 @@ async fn deploy_ecosystem( forge_args: ForgeScriptArgs, ecosystem_config: &EcosystemConfig, initial_deployment_config: &InitialDeploymentConfig, + support_l2_legacy_shared_bridge_test: bool, ) -> anyhow::Result { if ecosystem.deploy_ecosystem { return deploy_ecosystem_inner( @@ -185,6 +194,7 @@ async fn deploy_ecosystem( ecosystem_config, initial_deployment_config, ecosystem.l1_rpc_url.clone(), + support_l2_legacy_shared_bridge_test, ) .await; } @@ -246,6 +256,7 @@ async fn deploy_ecosystem_inner( config: &EcosystemConfig, initial_deployment_config: &InitialDeploymentConfig, l1_rpc_url: String, + support_l2_legacy_shared_bridge_test: bool, ) -> anyhow::Result { let spinner = Spinner::new(MSG_DEPLOYING_ECOSYSTEM_CONTRACTS_SPINNER); let contracts_config = deploy_l1( @@ -256,6 +267,7 @@ async fn deploy_ecosystem_inner( &l1_rpc_url, None, true, + support_l2_legacy_shared_bridge_test, ) .await?; spinner.finish(); @@ -293,21 +305,26 @@ async fn deploy_ecosystem_inner( ) .await?; - accept_admin( + // Note, that there is no admin in L1 asset router, so we do + // need to accept it + + accept_owner( shell, config, - contracts_config.l1.chain_admin_addr, + contracts_config.l1.governance_addr, &config.get_wallets()?.governor, - contracts_config.bridges.shared.l1_address, + contracts_config + .ecosystem_contracts + .state_transition_proxy_addr, &forge_args, 
l1_rpc_url.clone(), ) .await?; - accept_owner( + accept_admin( shell, config, - contracts_config.l1.governance_addr, + contracts_config.l1.chain_admin_addr, &config.get_wallets()?.governor, contracts_config .ecosystem_contracts @@ -317,14 +334,15 @@ async fn deploy_ecosystem_inner( ) .await?; - accept_admin( + accept_owner( shell, config, - contracts_config.l1.chain_admin_addr, + contracts_config.l1.governance_addr, &config.get_wallets()?.governor, contracts_config .ecosystem_contracts - .state_transition_proxy_addr, + .stm_deployment_tracker_proxy_addr + .context("stm_deployment_tracker_proxy_addr")?, &forge_args, l1_rpc_url.clone(), ) @@ -373,6 +391,7 @@ async fn init_chains( no_port_reallocation: final_init_args.no_port_reallocation, update_submodules: init_args.update_submodules, dev: final_init_args.dev, + validium_args: final_init_args.validium_args.clone(), }; let final_chain_init_args = chain_init_args.fill_values_with_prompt(&chain_config); diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/mod.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/mod.rs index 3f4aa7565e19..19c2888edd0d 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/mod.rs @@ -12,6 +12,8 @@ mod change_default; mod common; mod create; pub mod create_configs; +#[cfg(feature = "gateway")] +mod gateway_upgrade; pub(crate) mod init; pub(crate) mod setup_observability; mod utils; @@ -34,6 +36,9 @@ pub enum EcosystemCommands { /// downloading Grafana dashboards from the era-observability repo #[command(alias = "obs")] SetupObservability, + /// Gateway version upgrade + #[cfg(feature = "gateway")] + GatewayUpgrade(crate::commands::ecosystem::args::gateway_upgrade::GatewayUpgradeArgs), } pub(crate) async fn run(shell: &Shell, args: EcosystemCommands) -> anyhow::Result<()> { @@ -43,5 +48,7 @@ pub(crate) async fn run(shell: &Shell, args: EcosystemCommands) -> anyhow::Resul EcosystemCommands::Init(args) => init::run(args, shell).await, EcosystemCommands::ChangeDefaultChain(args) => change_default::run(args, shell), EcosystemCommands::SetupObservability => setup_observability::run(shell), + #[cfg(feature = "gateway")] + EcosystemCommands::GatewayUpgrade(args) => gateway_upgrade::run(args, shell).await, } } diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/utils.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/utils.rs index 5f6994ed38f9..77fc45ff9f83 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/utils.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/utils.rs @@ -12,3 +12,8 @@ pub(super) fn build_system_contracts(shell: &Shell, link_to_code: &Path) -> anyh let _dir_guard = shell.push_dir(link_to_code.join("contracts")); Ok(Cmd::new(cmd!(shell, "yarn sc build")).run()?) } + +pub(super) fn build_da_contracts(shell: &Shell, link_to_code: &Path) -> anyhow::Result<()> { + let _dir_guard = shell.push_dir(link_to_code.join("contracts")); + Ok(Cmd::new(cmd!(shell, "yarn da build:foundry")).run()?) 
+} diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/prepare_configs.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/prepare_configs.rs index ed591597584c..ae36c1c9a1f4 100644 --- a/zkstack_cli/crates/zkstack/src/commands/external_node/prepare_configs.rs +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/prepare_configs.rs @@ -60,6 +60,7 @@ fn prepare_configs( let mut ports = EcosystemPortsScanner::scan(shell)?; let genesis = config.get_genesis_config()?; let general = config.get_general_config()?; + let gateway = config.get_gateway_chain_config().ok(); let en_config = ENConfig { l2_chain_id: genesis.l2_chain_id, l1_chain_id: genesis.l1_chain_id, @@ -74,7 +75,7 @@ fn prepare_configs( )?, main_node_rate_limit_rps: None, bridge_addresses_refresh_interval_sec: None, - gateway_chain_id: None, + gateway_chain_id: gateway.map(|g| g.gateway_chain_id), }; let mut general_en = general.clone(); general_en.consensus_config = None; diff --git a/zkstack_cli/crates/zkstack/src/commands/server.rs b/zkstack_cli/crates/zkstack/src/commands/server.rs index cf7abf7dea21..4088a888d534 100644 --- a/zkstack_cli/crates/zkstack/src/commands/server.rs +++ b/zkstack_cli/crates/zkstack/src/commands/server.rs @@ -10,6 +10,7 @@ use zkstack_cli_config::{ traits::FileConfigWithDefaultName, ChainConfig, ContractsConfig, EcosystemConfig, GeneralConfig, GenesisConfig, SecretsConfig, WalletsConfig, }; +use zksync_config::configs::gateway::GatewayChainConfig; use crate::{ commands::args::{RunServerArgs, ServerArgs, ServerCommand, WaitArgs}, @@ -60,6 +61,19 @@ fn run_server( } else { ServerMode::Normal }; + + let gateway_config = chain_config.get_gateway_chain_config().ok(); + let mut gateway_contracts = None; + if let Some(gateway_config) = gateway_config { + gateway_contracts = if gateway_config.gateway_chain_id.0 != 0_u64 { + Some(GatewayChainConfig::get_path_with_base_path( + &chain_config.configs, + )) + } else { + None + }; + } + server .run( shell, @@ -69,7 +83,7 @@ fn run_server( GeneralConfig::get_path_with_base_path(&chain_config.configs), SecretsConfig::get_path_with_base_path(&chain_config.configs), ContractsConfig::get_path_with_base_path(&chain_config.configs), - None, + gateway_contracts, vec![], ) .context(MSG_FAILED_TO_RUN_SERVER_ERR) diff --git a/zkstack_cli/crates/zkstack/src/defaults.rs b/zkstack_cli/crates/zkstack/src/defaults.rs index 4fa15be8a118..843a15e047e9 100644 --- a/zkstack_cli/crates/zkstack/src/defaults.rs +++ b/zkstack_cli/crates/zkstack/src/defaults.rs @@ -9,6 +9,9 @@ lazy_static! { Url::parse("postgres://postgres:notsecurepassword@localhost:5432").unwrap(); pub static ref DATABASE_EXPLORER_URL: Url = Url::parse("postgres://postgres:notsecurepassword@localhost:5432").unwrap(); + pub static ref AVAIL_RPC_URL: Url = Url::parse("wss://turing-rpc.avail.so/ws").unwrap(); + pub static ref AVAIL_BRIDGE_API_URL: Url = + Url::parse("https://turing-bridge-api.avail.so").unwrap(); } pub const DEFAULT_OBSERVABILITY_PORT: u16 = 3000; diff --git a/zkstack_cli/crates/zkstack/src/messages.rs b/zkstack_cli/crates/zkstack/src/messages.rs index 216c4bd64d3a..179f7100ef9e 100644 --- a/zkstack_cli/crates/zkstack/src/messages.rs +++ b/zkstack_cli/crates/zkstack/src/messages.rs @@ -72,6 +72,7 @@ pub(super) const MSG_DEPLOY_ECOSYSTEM_PROMPT: &str = "Do you want to deploy ecosystem contracts? 
(Not needed if you already have an existing one)"; pub(super) const MSG_L1_RPC_URL_PROMPT: &str = "What is the RPC URL of the L1 network?"; pub(super) const MSG_DEPLOY_PAYMASTER_PROMPT: &str = "Do you want to deploy Paymaster contract?"; +pub(super) const MSG_VALIDIUM_TYPE_PROMPT: &str = "Select the Validium type"; pub(super) const MSG_DEPLOY_ERC20_PROMPT: &str = "Do you want to deploy some test ERC20s?"; pub(super) const MSG_ECOSYSTEM_CONTRACTS_PATH_PROMPT: &str = "Provide the path to the ecosystem contracts or keep it empty and you will use ZKsync ecosystem config. \ For using this config, you need to have governance wallet"; @@ -97,6 +98,7 @@ pub(super) const MSG_DEPLOYING_ECOSYSTEM_CONTRACTS_SPINNER: &str = "Deploying ecosystem contracts..."; pub(super) const MSG_REGISTERING_CHAIN_SPINNER: &str = "Registering chain..."; pub(super) const MSG_ACCEPTING_ADMIN_SPINNER: &str = "Accepting admin..."; +pub(super) const MSG_DA_PAIR_REGISTRATION_SPINNER: &str = "Registering DA pair..."; pub(super) const MSG_UPDATING_TOKEN_MULTIPLIER_SETTER_SPINNER: &str = "Updating token multiplier setter..."; pub(super) const MSG_TOKEN_MULTIPLIER_SETTER_UPDATED_TO: &str = @@ -592,3 +594,16 @@ pub(super) fn msg_wait_consensus_registry_started_polling(addr: Address, url: &U pub(super) fn msg_consensus_registry_wait_success(addr: Address, code_len: usize) -> String { format!("Consensus registry is deployed at {addr:?}: {code_len} bytes") } + +/// DA clients related messages +pub(super) const MSG_AVAIL_CLIENT_TYPE_PROMPT: &str = "Avail client type"; +pub(super) const MSG_AVAIL_API_TIMEOUT_MS: &str = "Avail API timeout in milliseconds"; +pub(super) const MSG_AVAIL_API_NODE_URL_PROMPT: &str = "Avail API node URL"; +pub(super) const MSG_AVAIL_APP_ID_PROMPT: &str = "Avail app id"; +pub(super) const MSG_AVAIL_FINALITY_STATE_PROMPT: &str = "Avail finality state"; +pub(super) const MSG_AVAIL_GAS_RELAY_API_URL_PROMPT: &str = "Gas relay API URL"; +pub(super) const MSG_AVAIL_GAS_RELAY_MAX_RETRIES_PROMPT: &str = "Gas relay max retries"; +pub(super) const MSG_AVAIL_BRIDGE_API_URL_PROMPT: &str = "Attestation bridge API URL"; +pub(super) const MSG_AVAIL_SEED_PHRASE_PROMPT: &str = "Seed phrase"; +pub(super) const MSG_AVAIL_GAS_RELAY_API_KEY_PROMPT: &str = "Gas relay API key"; +pub(super) const MSG_INVALID_URL_ERR: &str = "Invalid URL format"; diff --git a/zkstack_cli/zkstackup/zkstackup b/zkstack_cli/zkstackup/zkstackup index e91bbc17905c..2c928d8b1194 100755 --- a/zkstack_cli/zkstackup/zkstackup +++ b/zkstack_cli/zkstackup/zkstackup @@ -86,6 +86,10 @@ parse_args() { shift ZKSTACKUP_VERSION=$1 ;; + --cargo-features) + shift + ZKSTACKUP_FEATURES=$1 + ;; -h | --help) usage exit 0 @@ -114,10 +118,12 @@ Options: -b, --branch Git branch to use when installing from a repository. Ignored if --commit or --version is provided. -c, --commit Git commit hash to use when installing from a repository. Ignored if --branch or --version is provided. -v, --version Git tag to use when installing from a repository. Ignored if --branch or --commit is provided. + --cargo-features One or more features passed to cargo install (e.g., "gateway"). -h, --help Show this help message and exit. 
Examples: $(basename "$0") --repo matter-labs/zksync-era --version 0.1.1 + $(basename "$0") --local --cargo-features "gateway" EOF } @@ -143,7 +149,10 @@ install_local() { for bin in "${BINS[@]}"; do say "Installing $bin" - ensure cargo install --root $LOCAL_DIR --path ./crates/$bin --force + ensure cargo install --root "$LOCAL_DIR" \ + --path "./crates/$bin" \ + --force \ + ${ZKSTACKUP_FEATURES:+--features "$ZKSTACKUP_FEATURES"} chmod +x "$BIN_DIR/$bin" done } @@ -161,19 +170,42 @@ install_from_repo() { if [ -n "$ZKSTACKUP_COMMIT" ] || [ -n "$ZKSTACKUP_BRANCH" ]; then warn "Ignoring --commit and --branch arguments when installing by version" fi - ensure cargo install --root $LOCAL_DIR --git "https://github.com/$ZKSTACKUP_REPO" --tag "zkstack_cli-v$ZKSTACKUP_VERSION" --locked "${BINS[@]}" --force + ensure cargo install --root "$LOCAL_DIR" \ + --git "https://github.com/$ZKSTACKUP_REPO" \ + --tag "zkstack_cli-v$ZKSTACKUP_VERSION" \ + --locked "${BINS[@]}" \ + --force \ + ${ZKSTACKUP_FEATURES:+--features "$ZKSTACKUP_FEATURES"} + elif [ -n "$ZKSTACKUP_COMMIT" ]; then if [ -n "$ZKSTACKUP_BRANCH" ]; then warn "Ignoring --branch argument when installing by commit" fi - ensure cargo install --root $LOCAL_DIR --git "https://github.com/$ZKSTACKUP_REPO" --rev "$ZKSTACKUP_COMMIT" --locked "${BINS[@]}" --force + ensure cargo install --root "$LOCAL_DIR" \ + --git "https://github.com/$ZKSTACKUP_REPO" \ + --rev "$ZKSTACKUP_COMMIT" \ + --locked "${BINS[@]}" \ + --force \ + ${ZKSTACKUP_FEATURES:+--features "$ZKSTACKUP_FEATURES"} + elif [ -n "$ZKSTACKUP_BRANCH" ]; then - ensure cargo install --root $LOCAL_DIR --git "https://github.com/$ZKSTACKUP_REPO" --branch "$ZKSTACKUP_BRANCH" --locked "${BINS[@]}" --force + ensure cargo install --root "$LOCAL_DIR" \ + --git "https://github.com/$ZKSTACKUP_REPO" \ + --branch "$ZKSTACKUP_BRANCH" \ + --locked "${BINS[@]}" \ + --force \ + ${ZKSTACKUP_FEATURES:+--features "$ZKSTACKUP_FEATURES"} + else - ensure cargo install --root $LOCAL_DIR --git "https://github.com/$ZKSTACKUP_REPO" --locked "${BINS[@]}" --force + ensure cargo install --root "$LOCAL_DIR" \ + --git "https://github.com/$ZKSTACKUP_REPO" \ + --locked "${BINS[@]}" \ + --force \ + ${ZKSTACKUP_FEATURES:+--features "$ZKSTACKUP_FEATURES"} fi } + add_bin_folder_to_path() { if [[ ":$PATH:" == *":${BIN_DIR}:"* ]]; then echo "found ${BIN_DIR} in PATH"