From f5bbca91d7b22cd0ff27d2bdb9c3d643c264111f Mon Sep 17 00:00:00 2001
From: Arpit Temani
Date: Mon, 11 Nov 2024 13:39:55 +0530
Subject: [PATCH] review comments

---
 .../workflows/cron_jerigon_zero_testing.yml   | 26 ++++++------
 scripts/jerigon_zero_benchmark.sh             | 43 ++++++++----------
 ...on-data.tar.gz => erigon-test-data.tar.gz} | Bin
 3 files changed, 34 insertions(+), 35 deletions(-)
 rename test_data/{erigon-data.tar.gz => erigon-test-data.tar.gz} (100%)

diff --git a/.github/workflows/cron_jerigon_zero_testing.yml b/.github/workflows/cron_jerigon_zero_testing.yml
index cba6604c3..6e8629009 100644
--- a/.github/workflows/cron_jerigon_zero_testing.yml
+++ b/.github/workflows/cron_jerigon_zero_testing.yml
@@ -1,10 +1,11 @@
 name: Jerigon Zero Testing

 on:
-  # Uncomment when ready to run on a schedule
+  # TODO - Change this before merge
+  # # Uncomment when ready to run on a schedule
   # schedule:
   #   # Run every Sunday at 12:00 AM (UTC)
-  #   - cron: "0 0 * * 0"
+  #   - cron: "0 12 * * SUN"
   push:
     branches: [develop]
   pull_request:
@@ -21,7 +22,10 @@ env:
 jobs:
   jerigon_zero_testing:
     name: Jerigon Zero Testing - Integration and Benchmarking
-    runs-on: zero-ci
+    runs-on: zero-reg
+    concurrency:
+      group: jerigon_zero_testing
+      cancel-in-progress: true
     steps:
       - name: Checkout zk_evm code
         uses: actions/checkout@v4
@@ -52,8 +56,9 @@ jobs:
       - name: Run Erigon Network
         run: |
           cd ..
-          tar xf "$(pwd)/zk_evm/test_data/erigon-data.tar.gz" || {
-            echo "Failed to extract erigon-data.tar.gz"; exit 1;
+          # TODO - Download from IPFS
+          tar xf "$(pwd)/zk_evm/test_data/erigon-test-data.tar.gz" || {
+            echo "Failed to extract erigon-test-data.tar.gz"; exit 1;
           }
           docker pull ghcr.io/0xpolygonzero/erigon:feat-zero
           docker run -d --name erigon \
@@ -73,11 +78,8 @@ jobs:
         run: |
           export ETH_RPC_URL="http://localhost:8545"
           rm -rf proofs/* circuits/* ./proofs.json test.out verify.out leader.out
-          random_numbers=($(shuf -i 1-500 -n 5))
-          for number in "${random_numbers[@]}"; do
-            hex_number="0x$(echo "obase=16; $number" | bc)"
-            OUTPUT_TO_TERMINAL=true RUN_VERIFICATION=true ./scripts/prove_rpc.sh $hex_number $hex_number $ETH_RPC_URL jerigon true 3000 100
-          done
+          # TODO - Change block number and checkpoint before merge
+          OUTPUT_TO_TERMINAL=true ./scripts/prove_rpc.sh 1 10 $ETH_RPC_URL jerigon 0 3000 100 test_only

       - name: Download Previous Results
         uses: dawidd6/action-download-artifact@v6
@@ -97,6 +99,8 @@ jobs:
         uses: actions/upload-artifact@v4
         with:
           name: jerigon_zero_benchmark
-          path: ./jerigon_zero_output.log
+          path: |
+            ./jerigon_zero_output.log
+            ./jerigon_zero_error.log
           retention-days: 90
           overwrite: true
diff --git a/scripts/jerigon_zero_benchmark.sh b/scripts/jerigon_zero_benchmark.sh
index b92964480..2f62f06fe 100755
--- a/scripts/jerigon_zero_benchmark.sh
+++ b/scripts/jerigon_zero_benchmark.sh
@@ -2,9 +2,6 @@
 # ------------------------------------------------------------------------------
 set -exo pipefail

-# Args:
-# 1 --> Output file (Not used in the current script)
-
 # Get the number of processors for parallelism
 if [[ "$OSTYPE" == "darwin"* ]]; then
   num_procs=$(sysctl -n hw.physicalcpu)
@@ -20,6 +17,7 @@ BLOCK_BATCH_SIZE="${BLOCK_BATCH_SIZE:-8}"
 # Logging setup
 OUTPUT_LOG="jerigon_zero_output.log"
 BLOCK_OUTPUT_LOG="jerigon_zero_block_output.log"
+ERROR_LOG="jerigon_zero_error.log"
 PROOFS_FILE_LIST="${PROOF_OUTPUT_DIR}/proof_files.json"

 # Ensure necessary directories exist
@@ -33,55 +31,52 @@ export RUST_BACKTRACE=full
 export RUST_LOG=info

 # Log the current date and time
-echo "$(date +"%Y-%m-%d %H:%M:%S")" &>> "$OUTPUT_LOG"
-
-# Define the blocks to process
-blocks=(100 200 300 400 500)
+date +"%Y-%m-%d %H:%M:%S" &>> "$OUTPUT_LOG"

 # Function to process each block
 process_block() {
-  local block=$1
-
-  echo "Processing block: $block" &>> "$OUTPUT_LOG"
+  local block start_time end_time duration_sec PERF_TIME PERF_USER_TIME PERF_SYS_TIME
+  block=$1

   # Fetch block data
-  if ! ./target/release/rpc --rpc-url "$ETH_RPC_URL" fetch --start-block "$block" --end-block "$block" > "output_${block}.json"; then
-    echo "Failed to fetch block data for block: $block" &>> "$OUTPUT_LOG"
+  if ! ./target/release/rpc --rpc-url "$ETH_RPC_URL" fetch --start-block "$block" --end-block "$block" > "witness_${block}.json"; then
+    echo "Failed to fetch block data for block: $block" &>> "$ERROR_LOG"
     exit 1
   fi

-  local start_time=$(date +%s%N)
+  start_time=$(date +%s%N)

   # Run performance stats
-  if ! perf stat -e cycles ./target/release/leader --runtime in-memory --load-strategy monolithic --block-batch-size "$BLOCK_BATCH_SIZE" --proof-output-dir "$PROOF_OUTPUT_DIR" stdio < "output_${block}.json" &> "$BLOCK_OUTPUT_LOG"; then
+  if ! perf stat -e cycles ./target/release/leader --runtime in-memory --use-test-config --load-strategy on-demand --block-batch-size "$BLOCK_BATCH_SIZE" --proof-output-dir "$PROOF_OUTPUT_DIR" stdio < "witness_${block}.json" &> "$BLOCK_OUTPUT_LOG"; then
     echo "Performance command failed for block: $block" &>> "$OUTPUT_LOG"
-    cat "$BLOCK_OUTPUT_LOG" &>> "$OUTPUT_LOG"
+    cat "$BLOCK_OUTPUT_LOG" &>> "$ERROR_LOG"
     exit 1
   fi

-  local end_time=$(date +%s%N)
+  end_time=$(date +%s%N)

   set +o pipefail
-  if ! cat "$BLOCK_OUTPUT_LOG" | grep "Successfully wrote to disk proof file " | awk '{print $NF}' | tee "$PROOFS_FILE_LIST"; then
+  if ! grep "Successfully wrote to disk proof file " "$BLOCK_OUTPUT_LOG" | awk '{print $NF}' | tee "$PROOFS_FILE_LIST"; then
     echo "Proof list not generated for block: $block. Check the log for details." &>> "$OUTPUT_LOG"
-    cat "$BLOCK_OUTPUT_LOG" &>> "$OUTPUT_LOG"
+    cat "$BLOCK_OUTPUT_LOG" &>> "$ERROR_LOG"
     exit 1
   fi

-  local duration_sec=$(echo "scale=3; ($end_time - $start_time) / 1000000000" | bc -l)
+  duration_sec=$(echo "scale=3; ($end_time - $start_time) / 1000000000" | bc -l)

   # Extract performance timings
-  local PERF_TIME=$(grep "seconds time elapsed" "$BLOCK_OUTPUT_LOG" | tail -1 | awk '{ print ($1)}')
-  local PERF_USER_TIME=$(grep "seconds user" "$BLOCK_OUTPUT_LOG" | tail -1 | awk '{ print ($1)}')
-  local PERF_SYS_TIME=$(grep "seconds sys" "$BLOCK_OUTPUT_LOG" | tail -1 | awk '{ print ($1)}')
+  PERF_TIME=$(grep "seconds time elapsed" "$BLOCK_OUTPUT_LOG" | tail -1 | awk '{ print ($1)}')
+  PERF_USER_TIME=$(grep "seconds user" "$BLOCK_OUTPUT_LOG" | tail -1 | awk '{ print ($1)}')
+  PERF_SYS_TIME=$(grep "seconds sys" "$BLOCK_OUTPUT_LOG" | tail -1 | awk '{ print ($1)}')

   echo "Success for block: $block!"
   echo "Proving duration for block $block: $duration_sec seconds, performance time: $PERF_TIME, performance user time: $PERF_USER_TIME, performance system time: $PERF_SYS_TIME" &>> "$OUTPUT_LOG"
 }

 # Process each block
-for block in "${blocks[@]}"; do
-  process_block "$block"
+# TODO - Change block numbers before merge
+for i in $(seq 1 2); do
+  process_block "$i"
 done

 # Finalize logging
diff --git a/test_data/erigon-data.tar.gz b/test_data/erigon-test-data.tar.gz
similarity index 100%
rename from test_data/erigon-data.tar.gz
rename to test_data/erigon-test-data.tar.gz
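
Aside, not part of the commit: the proof-list extraction in process_block() assumes the leader
log contains lines ending with the proof file path, and keeps only the last whitespace-separated
field of each match. A quick sanity check of that pipeline; only the grep pattern and the tee
target name come from the script, the sample log line and paths are made up for illustration:

    $ echo 'INFO Successfully wrote to disk proof file ./proofs/block_1.json' > block_output.log
    $ grep "Successfully wrote to disk proof file " block_output.log | awk '{print $NF}' | tee proof_files.json
    ./proofs/block_1.json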