diff --git a/.github/workflows/publish-dev.yml b/.github/workflows/publish-dev.yml index fb4197b75da9..4e8c76e0dfdd 100644 --- a/.github/workflows/publish-dev.yml +++ b/.github/workflows/publish-dev.yml @@ -112,12 +112,12 @@ jobs: - uses: actions/checkout@v4 # https://github.com/docker/setup-qemu-action - name: Set up QEMU - uses: docker/setup-qemu-action@v1 + uses: docker/setup-qemu-action@v3 # https://github.com/docker/setup-buildx-action - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 + uses: docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@v1 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/publish-rc.yml b/.github/workflows/publish-rc.yml index f8f3b21ff349..936072de42c9 100644 --- a/.github/workflows/publish-rc.yml +++ b/.github/workflows/publish-rc.yml @@ -114,10 +114,11 @@ jobs: # In case of failure - name: Rollback on failure if: failure() - uses: author/action-rollback@9ec72a6af74774e00343c6de3e946b0901c23013 + uses: author/action-rollback@1.0.4 with: - id: ${{ steps.create_release.outputs.id }} + release_id: ${{ steps.create_release.outputs.id }} tag: ${{ needs.tag.outputs.tag }} + delete_orphan_tag: true env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -131,12 +132,12 @@ jobs: - run: scripts/await-release.sh ${{ needs.tag.outputs.tag }} rc 900 # https://github.com/docker/setup-qemu-action - name: Set up QEMU - uses: docker/setup-qemu-action@v1 + uses: docker/setup-qemu-action@v3 # https://github.com/docker/setup-buildx-action - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 + uses: docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@v1 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/publish-stable.yml b/.github/workflows/publish-stable.yml 
index 9c41693f26f2..c2909a7e4e24 100644 --- a/.github/workflows/publish-stable.yml +++ b/.github/workflows/publish-stable.yml @@ -104,10 +104,11 @@ jobs: # In case of failure - name: Rollback on failure if: failure() - uses: author/action-rollback@9ec72a6af74774e00343c6de3e946b0901c23013 + uses: author/action-rollback@1.0.4 with: - id: ${{ steps.create_release.outputs.id }} + release_id: ${{ steps.create_release.outputs.id }} tag: ${{ needs.tag.outputs.tag }} + delete_orphan_tag: true env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -131,12 +132,12 @@ jobs: - run: scripts/await-release.sh ${{ needs.tag.outputs.tag }} latest 900 # https://github.com/docker/setup-qemu-action - name: Set up QEMU - uses: docker/setup-qemu-action@v1 + uses: docker/setup-qemu-action@v3 # https://github.com/docker/setup-buildx-action - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 + uses: docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@v1 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/Dockerfile b/Dockerfile index c65cac28d51d..0ee8083c85e2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,10 +1,10 @@ # --platform=$BUILDPLATFORM is used build javascript source with host arch # Otherwise TS builds on emulated archs and can be extremely slow (+1h) -FROM --platform=${BUILDPLATFORM:-amd64} node:22.4-alpine as build_src +FROM --platform=${BUILDPLATFORM:-amd64} node:22.4-slim AS build_src ARG COMMIT WORKDIR /usr/app -RUN apk update && apk add --no-cache g++ make python3 py3-setuptools && rm -rf /var/cache/apk/* +RUN apt-get update && apt-get install -y g++ make python3 python3-setuptools && apt-get clean && rm -rf /var/lib/apt/lists/* COPY . . @@ -21,21 +21,21 @@ RUN cd packages/cli && GIT_COMMIT=${COMMIT} yarn write-git-data # Copy built src + node_modules to build native packages for archs different than host. 
# Note: This step is redundant for the host arch -FROM node:22.4-alpine as build_deps +FROM node:22.4-slim AS build_deps WORKDIR /usr/app -RUN apk update && apk add --no-cache g++ make python3 py3-setuptools && rm -rf /var/cache/apk/* +RUN apt-get update && apt-get install -y g++ make python3 python3-setuptools && apt-get clean && rm -rf /var/lib/apt/lists/* COPY --from=build_src /usr/app . # Do yarn --force to trigger a rebuild of the native packages -# Emmulates `yarn rebuild` which is not available in v1 https://yarnpkg.com/cli/rebuild +# Emmulates `yarn rebuild` which is not available in v1 https://yarnpkg.com/cli/rebuild RUN yarn install --non-interactive --frozen-lockfile --production --force # Rebuild leveldb bindings (required for arm64 build) RUN cd node_modules/classic-level && yarn rebuild # Copy built src + node_modules to a new layer to prune unnecessary fs # Previous layer weights 7.25GB, while this final 488MB (as of Oct 2020) -FROM node:22.4-alpine +FROM node:22.4-slim WORKDIR /usr/app COPY --from=build_deps /usr/app . 
diff --git a/RELEASE.md b/RELEASE.md index 3379b40f3720..bb40b611ebe5 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -191,7 +191,7 @@ Merge `stable` into `unstable`, resolving conflicts: - `git checkout unstable && git merge stable` - Resolve conflicts -- Sanity check locally before pushing by using: `git diff unstable origin/unstable` +- Sanity check locally before pushing by using: `git diff origin/unstable unstable` - Disable `unstable` branch protection - `git push` - Enable `unstable` branch protection diff --git a/dashboards/lodestar_block_processor.json b/dashboards/lodestar_block_processor.json index 50513613c680..3258efa72fc0 100644 --- a/dashboards/lodestar_block_processor.json +++ b/dashboards/lodestar_block_processor.json @@ -7132,7 +7132,7 @@ "condition": "", "key": "instance", "operator": "=", - "value": "unstable-lg1k-hzax41" + "value": "unstable-lg1k-hzax41-dkr" } ], "hide": 0, diff --git a/dashboards/lodestar_block_production.json b/dashboards/lodestar_block_production.json index 5e33a72ef9af..58c7ba3f3684 100644 --- a/dashboards/lodestar_block_production.json +++ b/dashboards/lodestar_block_production.json @@ -2211,7 +2211,7 @@ "condition": "", "key": "instance", "operator": "=", - "value": "unstable-lg1k-hzax41" + "value": "unstable-lg1k-hzax41-dkr" } ], "hide": 0, diff --git a/dashboards/lodestar_bls_thread_pool.json b/dashboards/lodestar_bls_thread_pool.json index 867d9fd322c4..0fd2572d5a1d 100644 --- a/dashboards/lodestar_bls_thread_pool.json +++ b/dashboards/lodestar_bls_thread_pool.json @@ -1483,7 +1483,7 @@ "condition": "", "key": "instance", "operator": "=", - "value": "unstable-lg1k-hzax41" + "value": "unstable-lg1k-hzax41-dkr" } ], "hide": 0, diff --git a/dashboards/lodestar_debug_gossipsub.json b/dashboards/lodestar_debug_gossipsub.json index 0486bd58fbd3..26cd9b9b791a 100644 --- a/dashboards/lodestar_debug_gossipsub.json +++ b/dashboards/lodestar_debug_gossipsub.json @@ -8876,7 +8876,7 @@ "condition": "", "key": "instance", "operator": 
"=", - "value": "unstable-lg1k-hzax41" + "value": "unstable-lg1k-hzax41-dkr" } ], "hide": 0, diff --git a/dashboards/lodestar_discv5.json b/dashboards/lodestar_discv5.json index 31f115936df2..ca7f300060b4 100644 --- a/dashboards/lodestar_discv5.json +++ b/dashboards/lodestar_discv5.json @@ -2168,7 +2168,7 @@ "condition": "", "key": "instance", "operator": "=", - "value": "unstable-lg1k-hzax41" + "value": "unstable-lg1k-hzax41-dkr" } ], "hide": 0, diff --git a/dashboards/lodestar_execution_engine.json b/dashboards/lodestar_execution_engine.json index 2c4cadc131f1..63c1ab636e51 100644 --- a/dashboards/lodestar_execution_engine.json +++ b/dashboards/lodestar_execution_engine.json @@ -3439,7 +3439,7 @@ "condition": "", "key": "instance", "operator": "=", - "value": "unstable-lg1k-hzax41" + "value": "unstable-lg1k-hzax41-dkr" } ], "hide": 0, diff --git a/dashboards/lodestar_historical_state_regen.json b/dashboards/lodestar_historical_state_regen.json index 20eddcd1f31e..bc7106bf8de9 100644 --- a/dashboards/lodestar_historical_state_regen.json +++ b/dashboards/lodestar_historical_state_regen.json @@ -2601,7 +2601,7 @@ "condition": "", "key": "instance", "operator": "=", - "value": "unstable-lg1k-hzax41" + "value": "unstable-lg1k-hzax41-dkr" } ], "hide": 0, diff --git a/dashboards/lodestar_libp2p.json b/dashboards/lodestar_libp2p.json index 0fe72e0a4032..8d391dee2bbe 100644 --- a/dashboards/lodestar_libp2p.json +++ b/dashboards/lodestar_libp2p.json @@ -1348,7 +1348,7 @@ "condition": "", "key": "instance", "operator": "=", - "value": "unstable-lg1k-hzax41" + "value": "unstable-lg1k-hzax41-dkr" } ], "hide": 0, diff --git a/dashboards/lodestar_multinode.json b/dashboards/lodestar_multinode.json index 9a8fecaf0128..6c7f18dc3148 100644 --- a/dashboards/lodestar_multinode.json +++ b/dashboards/lodestar_multinode.json @@ -844,7 +844,7 @@ "condition": "", "key": "instance", "operator": "=", - "value": "unstable-lg1k-hzax41" + "value": "unstable-lg1k-hzax41-dkr" } ], "hide": 0, 
diff --git a/dashboards/lodestar_networking.json b/dashboards/lodestar_networking.json index d1c40e659194..a239d47ac62a 100644 --- a/dashboards/lodestar_networking.json +++ b/dashboards/lodestar_networking.json @@ -6421,7 +6421,7 @@ "condition": "", "key": "instance", "operator": "=", - "value": "unstable-lg1k-hzax41" + "value": "unstable-lg1k-hzax41-dkr" } ], "hide": 0, diff --git a/dashboards/lodestar_rest_api.json b/dashboards/lodestar_rest_api.json index 6445873f4dc5..7ef29bb89ec5 100644 --- a/dashboards/lodestar_rest_api.json +++ b/dashboards/lodestar_rest_api.json @@ -649,7 +649,7 @@ "condition": "", "key": "instance", "operator": "=", - "value": "unstable-lg1k-hzax41" + "value": "unstable-lg1k-hzax41-dkr" } ], "hide": 0, diff --git a/dashboards/lodestar_state_cache_regen.json b/dashboards/lodestar_state_cache_regen.json index be52d414ea3b..1cca1d26c561 100644 --- a/dashboards/lodestar_state_cache_regen.json +++ b/dashboards/lodestar_state_cache_regen.json @@ -425,7 +425,32 @@ }, "mappings": [] }, - "overrides": [] + "overrides": [ + { + "__systemRef": "hideSeriesFrom", + "matcher": { + "id": "byNames", + "options": { + "mode": "exclude", + "names": [ + "epochs_in-memory" + ], + "prefix": "All except:", + "readOnly": true + } + }, + "properties": [ + { + "id": "custom.hideFrom", + "value": { + "legend": false, + "tooltip": false, + "viz": true + } + } + ] + } + ] }, "gridPos": { "h": 8, @@ -1325,30 +1350,6 @@ "value": "none" } ] - }, - { - "__systemRef": "hideSeriesFrom", - "matcher": { - "id": "byNames", - "options": { - "mode": "exclude", - "names": [ - "count_per_epoch" - ], - "prefix": "All except:", - "readOnly": true - } - }, - "properties": [ - { - "id": "custom.hideFrom", - "value": { - "legend": false, - "tooltip": false, - "viz": true - } - } - ] } ] }, @@ -1378,7 +1379,7 @@ "uid": "${DS_PROMETHEUS}" }, "editorMode": "code", - "expr": 
"rate(lodestar_cp_state_cache_state_serialize_seconds_sum[$rate_interval])\n/\nrate(lodestar_cp_state_cache_state_serialize_seconds_count[$rate_interval])", + "expr": "rate(lodestar_state_serialize_seconds_sum{source=\"persistent_checkpoints_cache_state\"}[$rate_interval])\n/\nrate(lodestar_state_serialize_seconds_count{source=\"persistent_checkpoints_cache_state\"}[$rate_interval])", "hide": false, "instant": false, "legendFormat": "serialize_duration", @@ -1475,6 +1476,30 @@ "id": "unit" } ] + }, + { + "__systemRef": "hideSeriesFrom", + "matcher": { + "id": "byNames", + "options": { + "mode": "exclude", + "names": [ + "reload_duration" + ], + "prefix": "All except:", + "readOnly": true + } + }, + "properties": [ + { + "id": "custom.hideFrom", + "value": { + "legend": false, + "tooltip": false, + "viz": true + } + } + ] } ] }, @@ -1610,7 +1635,32 @@ }, "mappings": [] }, - "overrides": [] + "overrides": [ + { + "__systemRef": "hideSeriesFrom", + "matcher": { + "id": "byNames", + "options": { + "mode": "exclude", + "names": [ + "from_memory" + ], + "prefix": "All except:", + "readOnly": true + } + }, + "properties": [ + { + "id": "custom.hideFrom", + "value": { + "legend": false, + "tooltip": false, + "viz": true + } + } + ] + } + ] }, "gridPos": { "h": 8, @@ -1705,7 +1755,32 @@ }, "mappings": [] }, - "overrides": [] + "overrides": [ + { + "__systemRef": "hideSeriesFrom", + "matcher": { + "id": "byNames", + "options": { + "mode": "exclude", + "names": [ + "state_reload_validator_serialization" + ], + "prefix": "All except:", + "readOnly": true + } + }, + "properties": [ + { + "id": "custom.hideFrom", + "value": { + "legend": false, + "tooltip": false, + "viz": true + } + } + ] + } + ] }, "gridPos": { "h": 8, @@ -1827,7 +1902,32 @@ }, "mappings": [] }, - "overrides": [] + "overrides": [ + { + "__systemRef": "hideSeriesFrom", + "matcher": { + "id": "byNames", + "options": { + "mode": "exclude", + "names": [ + "getState" + ], + "prefix": "All except:", + "readOnly": 
true + } + }, + "properties": [ + { + "id": "custom.hideFrom", + "value": { + "legend": false, + "tooltip": false, + "viz": true + } + } + ] + } + ] }, "gridPos": { "h": 8, @@ -2447,28 +2547,15 @@ }, { "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, "gridPos": { "h": 1, "w": 24, "x": 0, "y": 82 }, - "id": 54, + "id": 60, "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "${DS_PROMETHEUS}" - }, - "refId": "A" - } - ], - "title": "Regen queue", + "title": "Regen - getState", "type": "row" }, { @@ -2489,10 +2576,9 @@ "axisPlacement": "auto", "barAlignment": 0, "drawStyle": "line", - "fillOpacity": 10, - "gradientMode": "opacity", + "fillOpacity": 0, + "gradientMode": "none", "hideFrom": { - "graph": false, "legend": false, "tooltip": false, "viz": false @@ -2504,7 +2590,7 @@ "scaleDistribution": { "type": "linear" }, - "showPoints": "never", + "showPoints": "auto", "spanNulls": false, "stacking": { "group": "A", @@ -2515,45 +2601,44 @@ } }, "mappings": [], - "unit": "percentunit" + "unit": "s" }, "overrides": [] }, "gridPos": { - "h": 7, + "h": 8, "w": 12, "x": 0, "y": 83 }, - "id": 42, + "id": 61, "options": { - "graph": {}, "legend": { "calcs": [], "displayMode": "list", "placement": "bottom", - "showLegend": false + "showLegend": true }, "tooltip": { - "mode": "multi", + "mode": "single", "sort": "none" } }, - "pluginVersion": "7.4.5", "targets": [ { "datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" }, - "exemplar": false, - "expr": "rate(lodestar_regen_queue_job_time_seconds_sum[32m])", - "interval": "", - "legendFormat": "regen_queue", + "editorMode": "code", + "expr": "rate(lodestar_regen_get_state_get_seed_state_seconds_sum[$rate_interval])\n/\nrate(lodestar_regen_get_state_get_seed_state_seconds_count[$rate_interval])", + "instant": false, + "legendFormat": "{{caller}}", + "range": true, "refId": "A" } ], - "title": "Regen queue - Utilization ratio", + "title": "Get 
seed state duration", "type": "timeseries" }, { @@ -2574,10 +2659,9 @@ "axisPlacement": "auto", "barAlignment": 0, "drawStyle": "line", - "fillOpacity": 10, - "gradientMode": "opacity", + "fillOpacity": 0, + "gradientMode": "none", "hideFrom": { - "graph": false, "legend": false, "tooltip": false, "viz": false @@ -2589,7 +2673,7 @@ "scaleDistribution": { "type": "linear" }, - "showPoints": "never", + "showPoints": "auto", "spanNulls": false, "stacking": { "group": "A", @@ -2600,44 +2684,44 @@ } }, "mappings": [], - "unit": "none" + "unit": "s" }, "overrides": [] }, "gridPos": { - "h": 7, - "w": 6, + "h": 8, + "w": 12, "x": 12, "y": 83 }, - "id": 44, + "id": 62, "options": { - "graph": {}, "legend": { "calcs": [], "displayMode": "list", "placement": "bottom", - "showLegend": false + "showLegend": true }, "tooltip": { - "mode": "multi", + "mode": "single", "sort": "none" } }, - "pluginVersion": "7.4.5", "targets": [ { "datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" }, - "expr": "12*rate(lodestar_regen_queue_job_time_seconds_count[$rate_interval])", - "interval": "", - "legendFormat": "regen_queue", + "editorMode": "code", + "expr": "rate(lodestar_regen_get_state_load_blocks_seconds_sum[$rate_interval])\n/\nrate(lodestar_regen_get_state_load_blocks_seconds_count[$rate_interval])", + "instant": false, + "legendFormat": "{{caller}}", + "range": true, "refId": "A" } ], - "title": "Regen queue - Jobs / slot", + "title": "Load blocks duration", "type": "timeseries" }, { @@ -2658,10 +2742,9 @@ "axisPlacement": "auto", "barAlignment": 0, "drawStyle": "line", - "fillOpacity": 10, - "gradientMode": "opacity", + "fillOpacity": 0, + "gradientMode": "none", "hideFrom": { - "graph": false, "legend": false, "tooltip": false, "viz": false @@ -2673,7 +2756,7 @@ "scaleDistribution": { "type": "linear" }, - "showPoints": "never", + "showPoints": "auto", "spanNulls": false, "stacking": { "group": "A", @@ -2684,15 +2767,426 @@ } }, "mappings": [], - "unit": "percentunit" 
+ "unit": "s" }, - "overrides": [] + "overrides": [ + { + "__systemRef": "hideSeriesFrom", + "matcher": { + "id": "byNames", + "options": { + "mode": "exclude", + "names": [ + "validateGossipBlock" + ], + "prefix": "All except:", + "readOnly": true + } + }, + "properties": [ + { + "id": "custom.hideFrom", + "value": { + "legend": false, + "tooltip": false, + "viz": true + } + } + ] + } + ] }, "gridPos": { - "h": 7, + "h": 8, + "w": 12, + "x": 0, + "y": 91 + }, + "id": 63, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "editorMode": "code", + "expr": "rate(lodestar_regen_get_state_state_transition_seconds_sum[$rate_interval])\n/\nrate(lodestar_regen_get_state_state_transition_seconds_count[$rate_interval])", + "instant": false, + "legendFormat": "{{caller}}", + "range": true, + "refId": "A" + } + ], + "title": "State transition duration", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [] + }, + "overrides": [ + { + "__systemRef": "hideSeriesFrom", + "matcher": { + "id": "byNames", + "options": { + 
"mode": "exclude", + "names": [ + "validateGossipBlock" + ], + "prefix": "All except:", + "readOnly": true + } + }, + "properties": [ + { + "id": "custom.hideFrom", + "value": { + "legend": false, + "tooltip": false, + "viz": true + } + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 91 + }, + "id": 64, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "editorMode": "code", + "expr": "rate(lodestar_regen_get_state_block_count_sum[$rate_interval])\n/\nrate(lodestar_regen_get_state_block_count_count[$rate_interval])", + "instant": false, + "legendFormat": "{{caller}}", + "range": true, + "refId": "A" + } + ], + "title": "Reprocessed block count", + "type": "timeseries" + }, + { + "collapsed": false, + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 99 + }, + "id": 54, + "panels": [], + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "refId": "A" + } + ], + "title": "Regen queue", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "opacity", + "hideFrom": { + "graph": false, + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", 
+ "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "unit": "percentunit" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 100 + }, + "id": 42, + "options": { + "graph": {}, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "7.4.5", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "exemplar": false, + "expr": "rate(lodestar_regen_queue_job_time_seconds_sum[32m])", + "interval": "", + "legendFormat": "regen_queue", + "refId": "A" + } + ], + "title": "Regen queue - Utilization ratio", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "opacity", + "hideFrom": { + "graph": false, + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "unit": "none" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 6, + "x": 12, + "y": 100 + }, + "id": 44, + "options": { + "graph": {}, + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": false + }, + "tooltip": { + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "7.4.5", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" 
+ }, + "expr": "12*rate(lodestar_regen_queue_job_time_seconds_count[$rate_interval])", + "interval": "", + "legendFormat": "regen_queue", + "refId": "A" + } + ], + "title": "Regen queue - Jobs / slot", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "opacity", + "hideFrom": { + "graph": false, + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "unit": "percentunit" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, "w": 6, "x": 18, - "y": 83 + "y": 100 }, "id": 48, "options": { @@ -2776,7 +3270,7 @@ "h": 7, "w": 12, "x": 0, - "y": 90 + "y": 107 }, "id": 46, "options": { @@ -2860,7 +3354,7 @@ "h": 7, "w": 6, "x": 12, - "y": 90 + "y": 107 }, "id": 50, "options": { @@ -2944,7 +3438,7 @@ "h": 7, "w": 6, "x": 18, - "y": 90 + "y": 107 }, "id": 52, "options": { @@ -3088,7 +3582,7 @@ "condition": "", "key": "instance", "operator": "=", - "value": "unstable-lg1k-hzax41" + "value": "unstable-lg1k-hzax41-dkr" } ], "hide": 0, diff --git a/dashboards/lodestar_summary.json b/dashboards/lodestar_summary.json index 5e8773c05d4e..87eaed30bd3e 100644 --- a/dashboards/lodestar_summary.json +++ b/dashboards/lodestar_summary.json @@ -2929,7 +2929,7 @@ "condition": "", "key": "instance", "operator": "=", - "value": "unstable-lg1k-hzax41" + "value": "unstable-lg1k-hzax41-dkr" } ], "hide": 0, diff --git 
a/dashboards/lodestar_sync.json b/dashboards/lodestar_sync.json index 6cc82bedde47..f58ccf308f9b 100644 --- a/dashboards/lodestar_sync.json +++ b/dashboards/lodestar_sync.json @@ -1776,7 +1776,7 @@ "condition": "", "key": "instance", "operator": "=", - "value": "unstable-lg1k-hzax41" + "value": "unstable-lg1k-hzax41-dkr" } ], "hide": 0, diff --git a/dashboards/lodestar_validator_client.json b/dashboards/lodestar_validator_client.json index 8ec6a04437b1..5e4459d1d1b9 100644 --- a/dashboards/lodestar_validator_client.json +++ b/dashboards/lodestar_validator_client.json @@ -71,6 +71,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "msg / slot", @@ -211,10 +212,12 @@ "fields": "", "values": false }, + "showPercentChange": false, "text": {}, - "textMode": "name" + "textMode": "name", + "wideLayout": true }, - "pluginVersion": "10.1.1", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -231,7 +234,6 @@ } ], "title": "Lodestar version", - "transformations": [], "type": "stat" }, { @@ -267,10 +269,12 @@ "fields": "", "values": false }, + "showPercentChange": false, "text": {}, - "textMode": "name" + "textMode": "name", + "wideLayout": true }, - "pluginVersion": "10.1.1", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -321,10 +325,12 @@ "fields": "", "values": false }, + "showPercentChange": false, "text": {}, - "textMode": "name" + "textMode": "name", + "wideLayout": true }, - "pluginVersion": "10.1.1", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -359,7 +365,7 @@ }, "gridPos": { "h": 2, - "w": 4, + "w": 2, "x": 20, "y": 0 }, @@ -376,9 +382,11 @@ "fields": "", "values": false }, - "textMode": "auto" + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "10.1.1", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -395,6 +403,83 @@ "title": "VC indices", "type": "stat" }, + { + 
"datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [ + { + "options": { + "0": { + "color": "green", + "index": 0, + "text": "Ready" + }, + "1": { + "color": "yellow", + "index": 1, + "text": "Syncing" + }, + "2": { + "color": "red", + "index": 2, + "text": "Error" + } + }, + "type": "value" + } + ] + }, + "overrides": [] + }, + "gridPos": { + "h": 2, + "w": 2, + "x": 22, + "y": 0 + }, + "id": 47, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "10.4.1", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "${DS_PROMETHEUS}" + }, + "editorMode": "code", + "expr": "vc_beacon_health", + "instant": false, + "legendFormat": "__auto", + "range": true, + "refId": "A" + } + ], + "title": "Beacon health", + "type": "stat" + }, { "datasource": { "type": "prometheus", @@ -429,9 +514,11 @@ "fields": "", "values": false }, - "textMode": "auto" + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "10.1.1", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -482,9 +569,11 @@ "fields": "", "values": false }, - "textMode": "auto" + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true }, - "pluginVersion": "10.1.1", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -622,7 +711,7 @@ }, "showHeader": false }, - "pluginVersion": "10.1.1", + "pluginVersion": "10.4.1", "targets": [ { "datasource": { @@ -701,6 +790,7 @@ "mode": "palette-classic" }, "custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -866,6 +956,7 @@ "mode": "palette-classic" }, 
"custom": { + "axisBorderShow": false, "axisCenteredZero": false, "axisColorMode": "text", "axisLabel": "", @@ -1019,7 +1110,8 @@ }, "showValue": "never", "tooltip": { - "show": true, + "mode": "single", + "showColorScale": false, "yHistogram": false }, "yAxis": { @@ -1028,7 +1120,7 @@ "unit": "s" } }, - "pluginVersion": "10.1.1", + "pluginVersion": "10.4.1", "reverseYBuckets": false, "targets": [ { @@ -1133,7 +1225,8 @@ }, "showValue": "never", "tooltip": { - "show": true, + "mode": "single", + "showColorScale": false, "yHistogram": false }, "yAxis": { @@ -1142,7 +1235,7 @@ "unit": "s" } }, - "pluginVersion": "10.1.1", + "pluginVersion": "10.4.1", "reverseYBuckets": false, "targets": [ { @@ -2344,8 +2437,7 @@ ], "refresh": "10s", "revision": 1, - "schemaVersion": 38, - "style": "dark", + "schemaVersion": 39, "tags": [ "lodestar" ], @@ -2455,7 +2547,7 @@ "condition": "", "key": "instance", "operator": "=", - "value": "unstable-lg1k-hzax41" + "value": "unstable-lg1k-hzax41-dkr" } ], "hide": 0, diff --git a/dashboards/lodestar_validator_monitor.json b/dashboards/lodestar_validator_monitor.json index 7579a595b550..3305e6796115 100644 --- a/dashboards/lodestar_validator_monitor.json +++ b/dashboards/lodestar_validator_monitor.json @@ -500,7 +500,7 @@ "type": "prometheus", "uid": "${DS_PROMETHEUS}" }, - "description": "Percent of attestations having correct head.", + "description": "Percent of attestations having incorrect head.", "fieldConfig": { "defaults": { "color": { @@ -1896,7 +1896,7 @@ "condition": "", "key": "instance", "operator": "=", - "value": "unstable-lg1k-hzax41" + "value": "unstable-lg1k-hzax41-dkr" } ], "hide": 0, diff --git a/dashboards/lodestar_vm_host.json b/dashboards/lodestar_vm_host.json index 457d4d120fa4..23e1c0418aba 100644 --- a/dashboards/lodestar_vm_host.json +++ b/dashboards/lodestar_vm_host.json @@ -5924,7 +5924,7 @@ "condition": "", "key": "instance", "operator": "=", - "value": "unstable-lg1k-hzax41" + "value": 
"unstable-lg1k-hzax41-dkr" } ], "hide": 0, diff --git a/docs/yarn.lock b/docs/yarn.lock index 41966c013183..27123932fe26 100644 --- a/docs/yarn.lock +++ b/docs/yarn.lock @@ -2430,22 +2430,6 @@ dependencies: "@types/ms" "*" -"@types/eslint-scope@^3.7.3": - version "3.7.7" - resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.7.tgz#3108bd5f18b0cdb277c867b3dd449c9ed7079ac5" - integrity sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg== - dependencies: - "@types/eslint" "*" - "@types/estree" "*" - -"@types/eslint@*": - version "8.56.5" - resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.56.5.tgz#94b88cab77588fcecdd0771a6d576fa1c0af9d02" - integrity sha512-u5/YPJHo1tvkSF2CE0USEkxon82Z5DBy2xR+qfyYNszpX9qcs4sT6uq2kBbj4BXY1+DBGDPnrhMZV3pKWGNukw== - dependencies: - "@types/estree" "*" - "@types/json-schema" "*" - "@types/estree-jsx@^1.0.0": version "1.0.5" resolved "https://registry.yarnpkg.com/@types/estree-jsx/-/estree-jsx-1.0.5.tgz#858a88ea20f34fe65111f005a689fa1ebf70dc18" @@ -2536,7 +2520,7 @@ dependencies: "@types/istanbul-lib-report" "*" -"@types/json-schema@*", "@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": +"@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": version "7.0.15" resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841" integrity sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA== @@ -2736,10 +2720,10 @@ resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.2.0.tgz#756641adb587851b5ccb3e095daf27ae581c8406" integrity sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ== -"@webassemblyjs/ast@1.11.6", "@webassemblyjs/ast@^1.11.5": - version "1.11.6" - resolved 
"https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.6.tgz#db046555d3c413f8966ca50a95176a0e2c642e24" - integrity sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q== +"@webassemblyjs/ast@1.12.1", "@webassemblyjs/ast@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.12.1.tgz#bb16a0e8b1914f979f45864c23819cc3e3f0d4bb" + integrity sha512-EKfMUOPRRUTy5UII4qJDGPpqfwjOmZ5jeGFwid9mnoqIFK+e0vqoi1qH56JpmZSzEL53jKnNzScdmftJyG5xWg== dependencies: "@webassemblyjs/helper-numbers" "1.11.6" "@webassemblyjs/helper-wasm-bytecode" "1.11.6" @@ -2754,10 +2738,10 @@ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz#6132f68c4acd59dcd141c44b18cbebbd9f2fa768" integrity sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q== -"@webassemblyjs/helper-buffer@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz#b66d73c43e296fd5e88006f18524feb0f2c7c093" - integrity sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA== +"@webassemblyjs/helper-buffer@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.12.1.tgz#6df20d272ea5439bf20ab3492b7fb70e9bfcb3f6" + integrity sha512-nzJwQw99DNDKr9BVCOZcLuJJUlqkJh+kVzVl6Fmq/tI5ZtEyWT1KZMyOXltXLZJmDtvLCDgwsyrkohEtopTXCw== "@webassemblyjs/helper-numbers@1.11.6": version "1.11.6" @@ -2773,15 +2757,15 @@ resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz#bb2ebdb3b83aa26d9baad4c46d4315283acd51e9" integrity sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA== -"@webassemblyjs/helper-wasm-section@1.11.6": - version "1.11.6" - resolved 
"https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz#ff97f3863c55ee7f580fd5c41a381e9def4aa577" - integrity sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g== +"@webassemblyjs/helper-wasm-section@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.12.1.tgz#3da623233ae1a60409b509a52ade9bc22a37f7bf" + integrity sha512-Jif4vfB6FJlUlSbgEMHUyk1j234GTNG9dBJ4XJdOySoj518Xj0oGsNi59cUQF4RRMS9ouBUxDDdyBVfPTypa5g== dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-buffer" "1.11.6" + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - "@webassemblyjs/wasm-gen" "1.11.6" + "@webassemblyjs/wasm-gen" "1.12.1" "@webassemblyjs/ieee754@1.11.6": version "1.11.6" @@ -2802,59 +2786,59 @@ resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.6.tgz#90f8bc34c561595fe156603be7253cdbcd0fab5a" integrity sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA== -"@webassemblyjs/wasm-edit@^1.11.5": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz#c72fa8220524c9b416249f3d94c2958dfe70ceab" - integrity sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw== +"@webassemblyjs/wasm-edit@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.12.1.tgz#9f9f3ff52a14c980939be0ef9d5df9ebc678ae3b" + integrity sha512-1DuwbVvADvS5mGnXbE+c9NfA8QRcZ6iKquqjjmR10k6o+zzsRVesil54DKexiowcFCPdr/Q0qaMgB01+SQ1u6g== dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-buffer" "1.11.6" + "@webassemblyjs/ast" "1.12.1" + "@webassemblyjs/helper-buffer" "1.12.1" "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - "@webassemblyjs/helper-wasm-section" 
"1.11.6" - "@webassemblyjs/wasm-gen" "1.11.6" - "@webassemblyjs/wasm-opt" "1.11.6" - "@webassemblyjs/wasm-parser" "1.11.6" - "@webassemblyjs/wast-printer" "1.11.6" - -"@webassemblyjs/wasm-gen@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz#fb5283e0e8b4551cc4e9c3c0d7184a65faf7c268" - integrity sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA== - dependencies: - "@webassemblyjs/ast" "1.11.6" + "@webassemblyjs/helper-wasm-section" "1.12.1" + "@webassemblyjs/wasm-gen" "1.12.1" + "@webassemblyjs/wasm-opt" "1.12.1" + "@webassemblyjs/wasm-parser" "1.12.1" + "@webassemblyjs/wast-printer" "1.12.1" + +"@webassemblyjs/wasm-gen@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.12.1.tgz#a6520601da1b5700448273666a71ad0a45d78547" + integrity sha512-TDq4Ojh9fcohAw6OIMXqiIcTq5KUXTGRkVxbSo1hQnSy6lAM5GSdfwWeSxpAo0YzgsgF182E/U0mDNhuA0tW7w== + dependencies: + "@webassemblyjs/ast" "1.12.1" "@webassemblyjs/helper-wasm-bytecode" "1.11.6" "@webassemblyjs/ieee754" "1.11.6" "@webassemblyjs/leb128" "1.11.6" "@webassemblyjs/utf8" "1.11.6" -"@webassemblyjs/wasm-opt@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz#d9a22d651248422ca498b09aa3232a81041487c2" - integrity sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g== +"@webassemblyjs/wasm-opt@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.12.1.tgz#9e6e81475dfcfb62dab574ac2dda38226c232bc5" + integrity sha512-Jg99j/2gG2iaz3hijw857AVYekZe2SAskcqlWIZXjji5WStnOpVoat3gQfT/Q5tb2djnCjBtMocY/Su1GfxPBg== dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-buffer" "1.11.6" - "@webassemblyjs/wasm-gen" "1.11.6" - "@webassemblyjs/wasm-parser" "1.11.6" + "@webassemblyjs/ast" "1.12.1" + 
"@webassemblyjs/helper-buffer" "1.12.1" + "@webassemblyjs/wasm-gen" "1.12.1" + "@webassemblyjs/wasm-parser" "1.12.1" -"@webassemblyjs/wasm-parser@1.11.6", "@webassemblyjs/wasm-parser@^1.11.5": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz#bb85378c527df824004812bbdb784eea539174a1" - integrity sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ== +"@webassemblyjs/wasm-parser@1.12.1", "@webassemblyjs/wasm-parser@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.12.1.tgz#c47acb90e6f083391e3fa61d113650eea1e95937" + integrity sha512-xikIi7c2FHXysxXe3COrVUPSheuBtpcfhbpFj4gmu7KRLYOzANztwUU0IbsqvMqzuNK2+glRGWCEqZo1WCLyAQ== dependencies: - "@webassemblyjs/ast" "1.11.6" + "@webassemblyjs/ast" "1.12.1" "@webassemblyjs/helper-api-error" "1.11.6" "@webassemblyjs/helper-wasm-bytecode" "1.11.6" "@webassemblyjs/ieee754" "1.11.6" "@webassemblyjs/leb128" "1.11.6" "@webassemblyjs/utf8" "1.11.6" -"@webassemblyjs/wast-printer@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz#a7bf8dd7e362aeb1668ff43f35cb849f188eff20" - integrity sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A== +"@webassemblyjs/wast-printer@1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.12.1.tgz#bcecf661d7d1abdaf989d8341a4833e33e2b31ac" + integrity sha512-+X4WAlOisVWQMikjbcvY2e0rwPsKQ9F688lksZhBcPycBBuii3O7m8FACbDMWDojpAqvjIncrG8J0XHKyQfVeA== dependencies: - "@webassemblyjs/ast" "1.11.6" + "@webassemblyjs/ast" "1.12.1" "@xtuc/long" "4.2.2" "@xtuc/ieee754@^1.2.0": @@ -2875,10 +2859,10 @@ accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: mime-types "~2.1.34" negotiator "0.6.3" -acorn-import-assertions@^1.9.0: - version "1.9.0" - resolved 
"https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz#507276249d684797c84e0734ef84860334cfb1ac" - integrity sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA== +acorn-import-attributes@^1.9.5: + version "1.9.5" + resolved "https://registry.yarnpkg.com/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz#7eb1557b1ba05ef18b5ed0ec67591bfab04688ef" + integrity sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ== acorn-jsx@^5.0.0: version "5.3.2" @@ -3136,10 +3120,10 @@ binary-extensions@^2.0.0: resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== -body-parser@1.20.2: - version "1.20.2" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd" - integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA== +body-parser@1.20.3: + version "1.20.3" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.3.tgz#1953431221c6fb5cd63c4b36d53fab0928e548c6" + integrity sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g== dependencies: bytes "3.1.2" content-type "~1.0.5" @@ -3149,7 +3133,7 @@ body-parser@1.20.2: http-errors "2.0.0" iconv-lite "0.4.24" on-finished "2.4.1" - qs "6.11.0" + qs "6.13.0" raw-body "2.5.2" type-is "~1.6.18" unpipe "1.0.0" @@ -4362,9 +4346,9 @@ domhandler@^5.0.2, domhandler@^5.0.3: domelementtype "^2.3.0" dompurify@^3.0.5: - version "3.0.9" - resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-3.0.9.tgz#b3f362f24b99f53498c75d43ecbd784b0b3ad65e" - integrity sha512-uyb4NDIvQ3hRn6NiC+SIFaP4mJ/MdXlvtunaqK9Bn6dD3RuB/1S/gasEjDHD8eiaqdSael2vBv+hOs7Y+jhYOQ== + version "3.1.6" + resolved 
"https://registry.yarnpkg.com/dompurify/-/dompurify-3.1.6.tgz#43c714a94c6a7b8801850f82e756685300a027e2" + integrity sha512-cTOAhc36AalkjtBpfG6O8JimdTMWNXjiePT2xQH/ppBGi/4uIpmj8eKyIkMJErXWARyINV/sB38yf8JCLF5pbQ== domutils@^2.5.2, domutils@^2.8.0: version "2.8.0" @@ -4454,10 +4438,15 @@ encodeurl@~1.0.2: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== -enhanced-resolve@^5.15.0: - version "5.15.1" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.15.1.tgz#384391e025f099e67b4b00bfd7f0906a408214e1" - integrity sha512-3d3JRbwsCLJsYgvb6NuWEG44jjPSOMuS73L/6+7BZuoKm3W+qXnSoIYVHi8dG7Qcg4inAY4jbzkZ7MnskePeDg== +encodeurl@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-2.0.0.tgz#7b8ea898077d7e409d3ac45474ea38eaf0857a58" + integrity sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg== + +enhanced-resolve@^5.17.1: + version "5.17.1" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz#67bfbbcc2f81d511be77d686a90267ef7f898a15" + integrity sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg== dependencies: graceful-fs "^4.2.4" tapable "^2.2.0" @@ -4659,36 +4648,36 @@ execa@^5.0.0: strip-final-newline "^2.0.0" express@^4.17.3: - version "4.19.2" - resolved "https://registry.yarnpkg.com/express/-/express-4.19.2.tgz#e25437827a3aa7f2a827bc8171bbbb664a356465" - integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q== + version "4.21.0" + resolved "https://registry.yarnpkg.com/express/-/express-4.21.0.tgz#d57cb706d49623d4ac27833f1cbc466b668eb915" + integrity sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng== dependencies: accepts "~1.3.8" 
array-flatten "1.1.1" - body-parser "1.20.2" + body-parser "1.20.3" content-disposition "0.5.4" content-type "~1.0.4" cookie "0.6.0" cookie-signature "1.0.6" debug "2.6.9" depd "2.0.0" - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" etag "~1.8.1" - finalhandler "1.2.0" + finalhandler "1.3.1" fresh "0.5.2" http-errors "2.0.0" - merge-descriptors "1.0.1" + merge-descriptors "1.0.3" methods "~1.1.2" on-finished "2.4.1" parseurl "~1.3.3" - path-to-regexp "0.1.7" + path-to-regexp "0.1.10" proxy-addr "~2.0.7" - qs "6.11.0" + qs "6.13.0" range-parser "~1.2.1" safe-buffer "5.2.1" - send "0.18.0" - serve-static "1.15.0" + send "0.19.0" + serve-static "1.16.2" setprototypeof "1.2.0" statuses "2.0.1" type-is "~1.6.18" @@ -4783,13 +4772,13 @@ fill-range@^7.0.1: dependencies: to-regex-range "^5.0.1" -finalhandler@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" - integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== +finalhandler@1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.3.1.tgz#0c575f1d1d324ddd1da35ad7ece3df7d19088019" + integrity sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ== dependencies: debug "2.6.9" - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" on-finished "2.4.1" parseurl "~1.3.3" @@ -5071,7 +5060,7 @@ graceful-fs@4.2.10: resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== -graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: +graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.11, 
graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== @@ -6345,10 +6334,10 @@ memfs@^3.1.2, memfs@^3.4.3: dependencies: fs-monkey "^1.0.4" -merge-descriptors@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" - integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== +merge-descriptors@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.3.tgz#d80319a65f3c7935351e5cfdac8f9318504dbed5" + integrity sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ== merge-stream@^2.0.0: version "2.0.0" @@ -7426,10 +7415,10 @@ path-parse@^1.0.7: resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== -path-to-regexp@0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" - integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== +path-to-regexp@0.1.10: + version "0.1.10" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.10.tgz#67e9108c5c0551b9e5326064387de4763c4d5f8b" + integrity sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w== path-to-regexp@2.2.1: version "2.2.1" @@ -7854,12 +7843,12 @@ pupa@^3.1.0: dependencies: escape-goat "^4.0.0" -qs@6.11.0: - version "6.11.0" - resolved 
"https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" - integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== +qs@6.13.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.13.0.tgz#6ca3bd58439f7e245655798997787b0d88a51906" + integrity sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg== dependencies: - side-channel "^1.0.4" + side-channel "^1.0.6" queue-microtask@^1.2.2: version "1.2.3" @@ -8458,10 +8447,10 @@ semver@^7.3.2, semver@^7.3.5, semver@^7.3.7, semver@^7.5.4: dependencies: lru-cache "^6.0.0" -send@0.18.0: - version "0.18.0" - resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" - integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== +send@0.19.0: + version "0.19.0" + resolved "https://registry.yarnpkg.com/send/-/send-0.19.0.tgz#bbc5a388c8ea6c048967049dbeac0e4a3f09d7f8" + integrity sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw== dependencies: debug "2.6.9" depd "2.0.0" @@ -8511,15 +8500,15 @@ serve-index@^1.9.1: mime-types "~2.1.17" parseurl "~1.3.2" -serve-static@1.15.0: - version "1.15.0" - resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" - integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== +serve-static@1.16.2: + version "1.16.2" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.16.2.tgz#b6a5343da47f6bdd2673848bf45754941e803296" + integrity sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw== dependencies: - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" parseurl "~1.3.3" - send "0.18.0" + send "0.19.0" set-function-length@^1.2.1: version "1.2.1" @@ 
-8581,7 +8570,7 @@ shelljs@^0.8.5: interpret "^1.0.0" rechoir "^0.6.2" -side-channel@^1.0.4: +side-channel@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.6.tgz#abd25fb7cd24baf45466406b1096b7831c9215f2" integrity sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA== @@ -9246,10 +9235,10 @@ vfile@^6.0.0, vfile@^6.0.1: unist-util-stringify-position "^4.0.0" vfile-message "^4.0.0" -watchpack@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" - integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== +watchpack@^2.4.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.2.tgz#2feeaed67412e7c33184e5a79ca738fbd38564da" + integrity sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw== dependencies: glob-to-regexp "^0.4.1" graceful-fs "^4.1.2" @@ -9352,25 +9341,24 @@ webpack-sources@^3.2.2, webpack-sources@^3.2.3: integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== webpack@^5.88.1: - version "5.90.3" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.90.3.tgz#37b8f74d3ded061ba789bb22b31e82eed75bd9ac" - integrity sha512-h6uDYlWCctQRuXBs1oYpVe6sFcWedl0dpcVaTf/YF67J9bKvwJajFulMVSYKHrksMB3I/pIagRzDxwxkebuzKA== + version "5.94.0" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.94.0.tgz#77a6089c716e7ab90c1c67574a28da518a20970f" + integrity sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg== dependencies: - "@types/eslint-scope" "^3.7.3" "@types/estree" "^1.0.5" - "@webassemblyjs/ast" "^1.11.5" - "@webassemblyjs/wasm-edit" "^1.11.5" - "@webassemblyjs/wasm-parser" "^1.11.5" + "@webassemblyjs/ast" "^1.12.1" + "@webassemblyjs/wasm-edit" "^1.12.1" + 
"@webassemblyjs/wasm-parser" "^1.12.1" acorn "^8.7.1" - acorn-import-assertions "^1.9.0" + acorn-import-attributes "^1.9.5" browserslist "^4.21.10" chrome-trace-event "^1.0.2" - enhanced-resolve "^5.15.0" + enhanced-resolve "^5.17.1" es-module-lexer "^1.2.1" eslint-scope "5.1.1" events "^3.2.0" glob-to-regexp "^0.4.1" - graceful-fs "^4.2.9" + graceful-fs "^4.2.11" json-parse-even-better-errors "^2.3.1" loader-runner "^4.2.0" mime-types "^2.1.27" @@ -9378,7 +9366,7 @@ webpack@^5.88.1: schema-utils "^3.2.0" tapable "^2.1.1" terser-webpack-plugin "^5.3.10" - watchpack "^2.4.0" + watchpack "^2.4.1" webpack-sources "^3.2.3" webpackbar@^5.0.2: diff --git a/funding.json b/funding.json new file mode 100644 index 000000000000..3ff79f54b693 --- /dev/null +++ b/funding.json @@ -0,0 +1,10 @@ +{ + "opRetro": { + "projectId": "0x8ec88058175ef4c1c9b1f26910c4d4f2cfa733d6fcd1dbd9385476a313d9e12d" + }, + "drips": { + "ethereum": { + "ownedBy": "0x94107e24Ba695aeb884fe9e896BA0Bbc14D3B509" + } + } +} diff --git a/lerna.json b/lerna.json index ccdcaca872ec..0ffc65fe5402 100644 --- a/lerna.json +++ b/lerna.json @@ -4,7 +4,7 @@ ], "npmClient": "yarn", "useNx": true, - "version": "1.20.2", + "version": "1.22.0", "stream": true, "command": { "version": { diff --git a/package.json b/package.json index 06fac0de8891..85d9662ab920 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "root", "private": true, "engines": { - "node": ">=20.1.0 <21 || >=22 <22.5" + "node": ">=20.1.0 <21 || >=22 <23" }, "packageManager": "yarn@1.22.22+sha256.c17d3797fb9a9115bf375e31bfd30058cac6bc9c3b8807a3d8cb2094794b51ca", "workspaces": [ diff --git a/packages/api/package.json b/packages/api/package.json index 5b91b46c128e..7d851d9513ba 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.20.2", + "version": "1.22.0", "type": "module", "exports": { ".": { @@ -70,12 +70,12 @@ 
"check-readme": "typescript-docs-verifier" }, "dependencies": { - "@chainsafe/persistent-merkle-tree": "^0.7.1", - "@chainsafe/ssz": "^0.15.1", - "@lodestar/config": "^1.20.2", - "@lodestar/params": "^1.20.2", - "@lodestar/types": "^1.20.2", - "@lodestar/utils": "^1.20.2", + "@chainsafe/persistent-merkle-tree": "^0.8.0", + "@chainsafe/ssz": "^0.17.1", + "@lodestar/config": "^1.22.0", + "@lodestar/params": "^1.22.0", + "@lodestar/types": "^1.22.0", + "@lodestar/utils": "^1.22.0", "eventsource": "^2.0.2", "qs": "^6.11.1" }, @@ -83,7 +83,7 @@ "@types/eventsource": "^1.1.11", "@types/qs": "^6.9.7", "ajv": "^8.12.0", - "fastify": "^4.27.0" + "fastify": "^5.0.0" }, "keywords": [ "ethereum", diff --git a/packages/api/src/beacon/client/events.ts b/packages/api/src/beacon/client/events.ts index 2d63925a738a..34f14f2e8397 100644 --- a/packages/api/src/beacon/client/events.ts +++ b/packages/api/src/beacon/client/events.ts @@ -13,7 +13,7 @@ export type ApiClient = ApiClientMethods; */ export function getClient(config: ChainForkConfig, baseUrl: string): ApiClient { const definitions = getDefinitions(config); - const eventSerdes = getEventSerdes(); + const eventSerdes = getEventSerdes(config); return { eventstream: async ({topics, signal, onEvent, onError, onClose}) => { diff --git a/packages/api/src/beacon/routes/beacon/block.ts b/packages/api/src/beacon/routes/beacon/block.ts index 73680ac0afc2..be6789753e0f 100644 --- a/packages/api/src/beacon/routes/beacon/block.ts +++ b/packages/api/src/beacon/routes/beacon/block.ts @@ -11,8 +11,10 @@ import { BeaconBlockBody, SignedBeaconBlockOrContents, SignedBlindedBeaconBlock, + SignedBlockContents, + sszTypesFor, } from "@lodestar/types"; -import {ForkName, ForkPreExecution, ForkSeq, isForkExecution} from "@lodestar/params"; +import {ForkName, ForkPreElectra, ForkPreExecution, isForkBlobs, isForkExecution} from "@lodestar/params"; import {Endpoint, RequestCodec, RouteDefinitions, Schema} from "../../../utils/index.js"; import 
{EmptyMeta, EmptyResponseCodec, EmptyResponseData, WithVersion} from "../../../utils/codecs.js"; import { @@ -37,19 +39,10 @@ export const BlockHeadersResponseType = new ListCompositeType(BlockHeaderRespons export const RootResponseType = new ContainerType({ root: ssz.Root, }); -export const SignedBlockContentsType = new ContainerType( - { - signedBlock: ssz.deneb.SignedBeaconBlock, - kzgProofs: ssz.deneb.KZGProofs, - blobs: ssz.deneb.Blobs, - }, - {jsonCase: "eth2"} -); export type BlockHeaderResponse = ValueOf; export type BlockHeadersResponse = ValueOf; export type RootResponse = ValueOf; -export type SignedBlockContents = ValueOf; export type BlockId = RootHex | Slot | "head" | "genesis" | "finalized" | "justified"; @@ -108,10 +101,22 @@ export type Endpoints = { "GET", BlockArgs, {params: {block_id: string}}, - BeaconBlockBody["attestations"], + BeaconBlockBody["attestations"], ExecutionOptimisticAndFinalizedMeta >; + /** + * Get block attestations + * Retrieves attestation included in requested block. + */ + getBlockAttestationsV2: Endpoint< + "GET", + BlockArgs, + {params: {block_id: string}}, + BeaconBlockBody["attestations"], + ExecutionOptimisticFinalizedAndVersionMeta + >; + /** * Get block header * Retrieves block header for given block id. @@ -258,6 +263,15 @@ export function getDefinitions(config: ChainForkConfig): RouteDefinitions ssz[fork].BeaconBlockBody.fields.attestations), + meta: ExecutionOptimisticFinalizedAndVersionCodec, + }, + }, getBlockHeader: { url: "/eth/v1/beacon/headers/{block_id}", method: "GET", @@ -297,11 +311,12 @@ export function getDefinitions(config: ChainForkConfig): RouteDefinitions { const slot = isSignedBlockContents(signedBlockOrContents) ? signedBlockOrContents.signedBlock.message.slot : signedBlockOrContents.message.slot; + const fork = config.getForkName(slot); + return { - body: - config.getForkSeq(slot) < ForkSeq.deneb - ? 
config.getForkTypes(slot).SignedBeaconBlock.serialize(signedBlockOrContents as SignedBeaconBlock) - : SignedBlockContentsType.serialize(signedBlockOrContents as SignedBlockContents), + body: isForkBlobs(fork) + ? sszTypesFor(fork).SignedBlockContents.serialize(signedBlockOrContents as SignedBlockContents) + : sszTypesFor(fork).SignedBeaconBlock.serialize(signedBlockOrContents as SignedBeaconBlock), headers: { [MetaHeader.Version]: config.getForkName(slot), }, @@ -345,12 +359,10 @@ export function getDefinitions(config: ChainForkConfig): RouteDefinitions { const forkName = toForkName(fromHeaders(headers, MetaHeader.Version)); - const forkSeq = config.forks[forkName].seq; return { - signedBlockOrContents: - forkSeq < ForkSeq.deneb - ? ssz[forkName].SignedBeaconBlock.deserialize(body) - : SignedBlockContentsType.deserialize(body), + signedBlockOrContents: isForkBlobs(forkName) + ? sszTypesFor(forkName).SignedBlockContents.deserialize(body) + : ssz[forkName].SignedBeaconBlock.deserialize(body), }; }, schema: { @@ -371,25 +383,23 @@ export function getDefinitions(config: ChainForkConfig): RouteDefinitions { const forkName = toForkName(fromHeaders(headers, MetaHeader.Version)); - const forkSeq = config.forks[forkName].seq; return { - signedBlockOrContents: - forkSeq < ForkSeq.deneb - ? ssz[forkName].SignedBeaconBlock.fromJson(body) - : SignedBlockContentsType.fromJson(body), + signedBlockOrContents: isForkBlobs(forkName) + ? sszTypesFor(forkName).SignedBlockContents.fromJson(body) + : ssz[forkName].SignedBeaconBlock.fromJson(body), broadcastValidation: query.broadcast_validation as BroadcastValidation, }; }, @@ -397,25 +407,24 @@ export function getDefinitions(config: ChainForkConfig): RouteDefinitions { const forkName = toForkName(fromHeaders(headers, MetaHeader.Version)); - const forkSeq = config.forks[forkName].seq; return { - signedBlockOrContents: - forkSeq < ForkSeq.deneb - ? 
ssz[forkName].SignedBeaconBlock.deserialize(body) - : SignedBlockContentsType.deserialize(body), + signedBlockOrContents: isForkBlobs(forkName) + ? sszTypesFor(forkName).SignedBlockContents.deserialize(body) + : ssz[forkName].SignedBeaconBlock.deserialize(body), broadcastValidation: query.broadcast_validation as BroadcastValidation, }; }, diff --git a/packages/api/src/beacon/routes/beacon/index.ts b/packages/api/src/beacon/routes/beacon/index.ts index f70792f9d76f..39d7d995dfb1 100644 --- a/packages/api/src/beacon/routes/beacon/index.ts +++ b/packages/api/src/beacon/routes/beacon/index.ts @@ -25,6 +25,7 @@ export type { export type { StateId, ValidatorId, + ValidatorIdentities, ValidatorStatus, FinalityCheckpoints, ValidatorResponse, diff --git a/packages/api/src/beacon/routes/beacon/pool.ts b/packages/api/src/beacon/routes/beacon/pool.ts index f957390131fe..4fe3efd4daf2 100644 --- a/packages/api/src/beacon/routes/beacon/pool.ts +++ b/packages/api/src/beacon/routes/beacon/pool.ts @@ -1,7 +1,8 @@ /* eslint-disable @typescript-eslint/naming-convention */ import {ValueOf} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; -import {phase0, capella, CommitteeIndex, Slot, ssz} from "@lodestar/types"; +import {isForkPostElectra} from "@lodestar/params"; +import {phase0, capella, CommitteeIndex, Slot, ssz, electra, AttesterSlashing} from "@lodestar/types"; import {Schema, Endpoint, RouteDefinitions} from "../../../utils/index.js"; import { ArrayOf, @@ -12,19 +13,31 @@ import { EmptyRequest, EmptyResponseCodec, EmptyResponseData, + WithVersion, } from "../../../utils/codecs.js"; +import {MetaHeader, VersionCodec, VersionMeta} from "../../../utils/metadata.js"; +import {toForkName} from "../../../utils/fork.js"; +import {fromHeaders} from "../../../utils/headers.js"; // See /packages/api/src/routes/index.ts for reasoning and instructions to add new routes -const AttestationListType = ArrayOf(ssz.phase0.Attestation); -const AttesterSlashingListType = 
ArrayOf(ssz.phase0.AttesterSlashing); +const AttestationListTypePhase0 = ArrayOf(ssz.phase0.Attestation); +const AttestationListTypeElectra = ArrayOf(ssz.electra.Attestation); +const AttesterSlashingListTypePhase0 = ArrayOf(ssz.phase0.AttesterSlashing); +const AttesterSlashingListTypeElectra = ArrayOf(ssz.electra.AttesterSlashing); const ProposerSlashingListType = ArrayOf(ssz.phase0.ProposerSlashing); const SignedVoluntaryExitListType = ArrayOf(ssz.phase0.SignedVoluntaryExit); const SignedBLSToExecutionChangeListType = ArrayOf(ssz.capella.SignedBLSToExecutionChange); const SyncCommitteeMessageListType = ArrayOf(ssz.altair.SyncCommitteeMessage); -type AttestationList = ValueOf; -type AttesterSlashingList = ValueOf; +type AttestationListPhase0 = ValueOf; +type AttestationListElectra = ValueOf; +type AttestationList = AttestationListPhase0 | AttestationListElectra; + +type AttesterSlashingListPhase0 = ValueOf; +type AttesterSlashingListElectra = ValueOf; +type AttesterSlashingList = AttesterSlashingListPhase0 | AttesterSlashingListElectra; + type ProposerSlashingList = ValueOf; type SignedVoluntaryExitList = ValueOf; type SignedBLSToExecutionChangeList = ValueOf; @@ -39,10 +52,22 @@ export type Endpoints = { "GET", {slot?: Slot; committeeIndex?: CommitteeIndex}, {query: {slot?: number; committee_index?: number}}, - AttestationList, + AttestationListPhase0, EmptyMeta >; + /** + * Get Attestations from operations pool + * Retrieves attestations known by the node but not necessarily incorporated into any block + */ + getPoolAttestationsV2: Endpoint< + "GET", + {slot?: Slot; committeeIndex?: CommitteeIndex}, + {query: {slot?: number; committee_index?: number}}, + AttestationList, + VersionMeta + >; + /** * Get AttesterSlashings from operations pool * Retrieves attester slashings known by the node but not necessarily incorporated into any block @@ -52,10 +77,23 @@ export type Endpoints = { "GET", EmptyArgs, EmptyRequest, - AttesterSlashingList, + 
AttesterSlashingListPhase0, EmptyMeta >; + /** + * Get AttesterSlashings from operations pool + * Retrieves attester slashings known by the node but not necessarily incorporated into any block + */ + getPoolAttesterSlashingsV2: Endpoint< + // ⏎ + "GET", + EmptyArgs, + EmptyRequest, + AttesterSlashingList, + VersionMeta + >; + /** * Get ProposerSlashings from operations pool * Retrieves proposer slashings known by the node but not necessarily incorporated into any block @@ -105,12 +143,28 @@ export type Endpoints = { */ submitPoolAttestations: Endpoint< "POST", - {signedAttestations: AttestationList}, + {signedAttestations: AttestationListPhase0}, {body: unknown}, EmptyResponseData, EmptyMeta >; + /** + * Submit Attestation objects to node + * Submits Attestation objects to the node. Each attestation in the request body is processed individually. + * + * If an attestation is validated successfully the node MUST publish that attestation on the appropriate subnet. + * + * If one or more attestations fail validation the node MUST return a 400 error with details of which attestations have failed, and why. + */ + submitPoolAttestationsV2: Endpoint< + "POST", + {signedAttestations: AttestationList}, + {body: unknown; headers: {[MetaHeader.Version]: string}}, + EmptyResponseData, + EmptyMeta + >; + /** * Submit AttesterSlashing object to node's pool * Submits AttesterSlashing object to node's pool and if passes validation node MUST broadcast it to network. @@ -123,6 +177,18 @@ export type Endpoints = { EmptyMeta >; + /** + * Submit AttesterSlashing object to node's pool + * Submits AttesterSlashing object to node's pool and if passes validation node MUST broadcast it to network. 
+ */ + submitPoolAttesterSlashingsV2: Endpoint< + "POST", + {attesterSlashing: AttesterSlashing}, + {body: unknown; headers: {[MetaHeader.Version]: string}}, + EmptyResponseData, + EmptyMeta + >; + /** * Submit ProposerSlashing object to node's pool * Submits ProposerSlashing object to node's pool and if passes validation node MUST broadcast it to network. @@ -172,7 +238,7 @@ export type Endpoints = { >; }; -export function getDefinitions(_config: ChainForkConfig): RouteDefinitions { +export function getDefinitions(config: ChainForkConfig): RouteDefinitions { return { getPoolAttestations: { url: "/eth/v1/beacon/pool/attestations", @@ -183,19 +249,43 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({query: {slot, committee_index: committeeIndex}}), + parseReq: ({query}) => ({slot: query.slot, committeeIndex: query.committee_index}), + schema: {query: {slot: Schema.Uint, committee_index: Schema.Uint}}, + }, + resp: { + data: WithVersion((fork) => (isForkPostElectra(fork) ? AttestationListTypeElectra : AttestationListTypePhase0)), + meta: VersionCodec, + }, + }, getPoolAttesterSlashings: { url: "/eth/v1/beacon/pool/attester_slashings", method: "GET", req: EmptyRequestCodec, resp: { - data: AttesterSlashingListType, + data: AttesterSlashingListTypePhase0, meta: EmptyMetaCodec, }, }, + getPoolAttesterSlashingsV2: { + url: "/eth/v2/beacon/pool/attester_slashings", + method: "GET", + req: EmptyRequestCodec, + resp: { + data: WithVersion((fork) => + isForkPostElectra(fork) ? 
AttesterSlashingListTypeElectra : AttesterSlashingListTypePhase0 + ), + meta: VersionCodec, + }, + }, getPoolProposerSlashings: { url: "/eth/v1/beacon/pool/proposer_slashings", method: "GET", @@ -227,16 +317,61 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({body: AttestationListType.toJson(signedAttestations)}), - parseReqJson: ({body}) => ({signedAttestations: AttestationListType.fromJson(body)}), - writeReqSsz: ({signedAttestations}) => ({body: AttestationListType.serialize(signedAttestations)}), - parseReqSsz: ({body}) => ({signedAttestations: AttestationListType.deserialize(body)}), + writeReqJson: ({signedAttestations}) => ({body: AttestationListTypePhase0.toJson(signedAttestations)}), + parseReqJson: ({body}) => ({signedAttestations: AttestationListTypePhase0.fromJson(body)}), + writeReqSsz: ({signedAttestations}) => ({body: AttestationListTypePhase0.serialize(signedAttestations)}), + parseReqSsz: ({body}) => ({signedAttestations: AttestationListTypePhase0.deserialize(body)}), schema: { body: Schema.ObjectArray, }, }, resp: EmptyResponseCodec, }, + submitPoolAttestationsV2: { + url: "/eth/v2/beacon/pool/attestations", + method: "POST", + req: { + writeReqJson: ({signedAttestations}) => { + const fork = config.getForkName(signedAttestations[0]?.data.slot ?? 0); + return { + body: isForkPostElectra(fork) + ? AttestationListTypeElectra.toJson(signedAttestations as AttestationListElectra) + : AttestationListTypePhase0.toJson(signedAttestations as AttestationListPhase0), + headers: {[MetaHeader.Version]: fork}, + }; + }, + parseReqJson: ({body, headers}) => { + const fork = toForkName(fromHeaders(headers, MetaHeader.Version)); + return { + signedAttestations: isForkPostElectra(fork) + ? AttestationListTypeElectra.fromJson(body) + : AttestationListTypePhase0.fromJson(body), + }; + }, + writeReqSsz: ({signedAttestations}) => { + const fork = config.getForkName(signedAttestations[0]?.data.slot ?? 
0); + return { + body: isForkPostElectra(fork) + ? AttestationListTypeElectra.serialize(signedAttestations as AttestationListElectra) + : AttestationListTypePhase0.serialize(signedAttestations as AttestationListPhase0), + headers: {[MetaHeader.Version]: fork}, + }; + }, + parseReqSsz: ({body, headers}) => { + const fork = toForkName(fromHeaders(headers, MetaHeader.Version)); + return { + signedAttestations: isForkPostElectra(fork) + ? AttestationListTypeElectra.deserialize(body) + : AttestationListTypePhase0.deserialize(body), + }; + }, + schema: { + body: Schema.ObjectArray, + headers: {[MetaHeader.Version]: Schema.String}, + }, + }, + resp: EmptyResponseCodec, + }, submitPoolAttesterSlashings: { url: "/eth/v1/beacon/pool/attester_slashings", method: "POST", @@ -251,6 +386,51 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions { + const fork = config.getForkName(Number(attesterSlashing.attestation1.data.slot)); + return { + body: isForkPostElectra(fork) + ? ssz.electra.AttesterSlashing.toJson(attesterSlashing) + : ssz.phase0.AttesterSlashing.toJson(attesterSlashing), + headers: {[MetaHeader.Version]: fork}, + }; + }, + parseReqJson: ({body, headers}) => { + const fork = toForkName(fromHeaders(headers, MetaHeader.Version)); + return { + attesterSlashing: isForkPostElectra(fork) + ? ssz.electra.AttesterSlashing.fromJson(body) + : ssz.phase0.AttesterSlashing.fromJson(body), + }; + }, + writeReqSsz: ({attesterSlashing}) => { + const fork = config.getForkName(Number(attesterSlashing.attestation1.data.slot)); + return { + body: isForkPostElectra(fork) + ? ssz.electra.AttesterSlashing.serialize(attesterSlashing as electra.AttesterSlashing) + : ssz.phase0.AttesterSlashing.serialize(attesterSlashing as phase0.AttesterSlashing), + headers: {[MetaHeader.Version]: fork}, + }; + }, + parseReqSsz: ({body, headers}) => { + const fork = toForkName(fromHeaders(headers, MetaHeader.Version)); + return { + attesterSlashing: isForkPostElectra(fork) + ? 
ssz.electra.AttesterSlashing.deserialize(body) + : ssz.phase0.AttesterSlashing.deserialize(body), + }; + }, + schema: { + body: Schema.Object, + headers: {[MetaHeader.Version]: Schema.String}, + }, + }, + resp: EmptyResponseCodec, + }, submitPoolProposerSlashings: { url: "/eth/v1/beacon/pool/proposer_slashings", method: "POST", diff --git a/packages/api/src/beacon/routes/beacon/state.ts b/packages/api/src/beacon/routes/beacon/state.ts index 4f8d414f5b6c..1489f48c297e 100644 --- a/packages/api/src/beacon/routes/beacon/state.ts +++ b/packages/api/src/beacon/routes/beacon/state.ts @@ -2,7 +2,7 @@ import {ContainerType, ValueOf} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; import {MAX_VALIDATORS_PER_COMMITTEE} from "@lodestar/params"; -import {phase0, CommitteeIndex, Slot, Epoch, ssz, RootHex, StringType} from "@lodestar/types"; +import {phase0, CommitteeIndex, Slot, Epoch, ssz, RootHex, StringType, ValidatorStatus} from "@lodestar/types"; import {Endpoint, RequestCodec, RouteDefinitions, Schema} from "../../../utils/index.js"; import {ArrayOf, JsonOnlyReq} from "../../../utils/codecs.js"; import {ExecutionOptimisticAndFinalizedCodec, ExecutionOptimisticAndFinalizedMeta} from "../../../utils/metadata.js"; @@ -24,17 +24,7 @@ export type StateArgs = { export type ValidatorId = string | number; -export type ValidatorStatus = - | "active" - | "pending_initialized" - | "pending_queued" - | "active_ongoing" - | "active_exiting" - | "active_slashed" - | "exited_unslashed" - | "exited_slashed" - | "withdrawal_possible" - | "withdrawal_done"; +export type {ValidatorStatus}; export const RandaoResponseType = new ContainerType({ randao: ssz.Root, @@ -53,6 +43,14 @@ export const ValidatorResponseType = new ContainerType({ status: new StringType(), validator: ssz.phase0.Validator, }); +export const ValidatorIdentityType = new ContainerType( + { + index: ssz.ValidatorIndex, + pubkey: ssz.BLSPubkey, + activationEpoch: ssz.UintNum64, + }, + {jsonCase: 
"eth2"} +); export const EpochCommitteeResponseType = new ContainerType({ index: ssz.CommitteeIndex, slot: ssz.Slot, @@ -73,6 +71,7 @@ export const EpochSyncCommitteeResponseType = new ContainerType( {jsonCase: "eth2"} ); export const ValidatorResponseListType = ArrayOf(ValidatorResponseType); +export const ValidatorIdentitiesType = ArrayOf(ValidatorIdentityType); export const EpochCommitteeResponseListType = ArrayOf(EpochCommitteeResponseType); export const ValidatorBalanceListType = ArrayOf(ValidatorBalanceType); @@ -84,6 +83,7 @@ export type ValidatorBalance = ValueOf; export type EpochSyncCommitteeResponse = ValueOf; export type ValidatorResponseList = ValueOf; +export type ValidatorIdentities = ValueOf; export type EpochCommitteeResponseList = ValueOf; export type ValidatorBalanceList = ValueOf; @@ -191,6 +191,26 @@ export type Endpoints = { ExecutionOptimisticAndFinalizedMeta >; + /** + * Get validator identities from state + * + * Returns filterable list of validators identities. + * + * Identities will be returned for all indices or public keys that match known validators. If an index or public key does not + * match any known validator, no identity will be returned but this will not cause an error. There are no guarantees for the + * returned data in terms of ordering. + */ + postStateValidatorIdentities: Endpoint< + "POST", + StateArgs & { + /** An array of values, with each value either a hex encoded public key (any bytes48 with 0x prefix) or a validator index */ + validatorIds?: ValidatorId[]; + }, + {params: {state_id: string}; body: string[]}, + ValidatorIdentities, + ExecutionOptimisticAndFinalizedMeta + >; + /** * Get validator balances from state * Returns filterable list of validator balances. 
@@ -404,6 +424,28 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({ + params: {state_id: stateId.toString()}, + body: toValidatorIdsStr(validatorIds) || [], + }), + parseReqJson: ({params, body = []}) => ({ + stateId: params.state_id, + validatorIds: fromValidatorIdsStr(body), + }), + schema: { + params: {state_id: Schema.StringRequired}, + body: Schema.UintOrStringArray, + }, + }), + resp: { + data: ValidatorIdentitiesType, + meta: ExecutionOptimisticAndFinalizedCodec, + }, + }, getStateValidatorBalances: { url: "/eth/v1/beacon/states/{state_id}/validator_balances", method: "GET", diff --git a/packages/api/src/beacon/routes/debug.ts b/packages/api/src/beacon/routes/debug.ts index 8099fcac020e..590ecf71dd9c 100644 --- a/packages/api/src/beacon/routes/debug.ts +++ b/packages/api/src/beacon/routes/debug.ts @@ -58,11 +58,34 @@ const DebugChainHeadType = new ContainerType( {jsonCase: "eth2"} ); +const ForkChoiceNodeType = new ContainerType( + { + slot: ssz.Slot, + blockRoot: stringType, + parentRoot: stringType, + justifiedEpoch: ssz.Epoch, + finalizedEpoch: ssz.Epoch, + weight: ssz.UintNum64, + validity: new StringType<"valid" | "invalid" | "optimistic">(), + executionBlockHash: stringType, + }, + {jsonCase: "eth2"} +); +const ForkChoiceResponseType = new ContainerType( + { + justifiedCheckpoint: ssz.phase0.Checkpoint, + finalizedCheckpoint: ssz.phase0.Checkpoint, + forkChoiceNodes: ArrayOf(ForkChoiceNodeType), + }, + {jsonCase: "eth2"} +); + const ProtoNodeListType = ArrayOf(ProtoNodeType); const DebugChainHeadListType = ArrayOf(DebugChainHeadType); type ProtoNodeList = ValueOf; type DebugChainHeadList = ValueOf; +type ForkChoiceResponse = ValueOf; export type Endpoints = { /** @@ -77,6 +100,18 @@ export type Endpoints = { EmptyMeta >; + /** + * Retrieves all current fork choice context + */ + getDebugForkChoice: Endpoint< + // ⏎ + "GET", + EmptyArgs, + EmptyRequest, + ForkChoiceResponse, + EmptyMeta + >; + /** * Dump all ProtoArray's 
nodes to debug */ @@ -115,6 +150,24 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({ + ...(data as ForkChoiceResponse), + }), + fromResponse: (resp) => ({ + data: resp as ForkChoiceResponse, + }), + }, + }, + }, getProtoArrayNodes: { url: "/eth/v0/debug/forkchoice", method: "GET", diff --git a/packages/api/src/beacon/routes/events.ts b/packages/api/src/beacon/routes/events.ts index 23be5e7c2288..1f041aa30194 100644 --- a/packages/api/src/beacon/routes/events.ts +++ b/packages/api/src/beacon/routes/events.ts @@ -13,6 +13,9 @@ import { LightClientOptimisticUpdate, LightClientFinalityUpdate, SSEPayloadAttributes, + Attestation, + AttesterSlashing, + sszTypesFor, } from "@lodestar/types"; import {ForkName} from "@lodestar/params"; @@ -104,10 +107,10 @@ export type EventData = { block: RootHex; executionOptimistic: boolean; }; - [EventType.attestation]: phase0.Attestation; + [EventType.attestation]: Attestation; [EventType.voluntaryExit]: phase0.SignedVoluntaryExit; [EventType.proposerSlashing]: phase0.ProposerSlashing; - [EventType.attesterSlashing]: phase0.AttesterSlashing; + [EventType.attesterSlashing]: AttesterSlashing; [EventType.blsToExecutionChange]: capella.SignedBLSToExecutionChange; [EventType.finalizedCheckpoint]: { block: RootHex; @@ -184,7 +187,7 @@ export type TypeJson = { fromJson: (data: unknown) => T; // client }; -export function getTypeByEvent(): {[K in EventType]: TypeJson} { +export function getTypeByEvent(config: ChainForkConfig): {[K in EventType]: TypeJson} { // eslint-disable-next-line @typescript-eslint/naming-convention const WithVersion = (getType: (fork: ForkName) => TypeJson): TypeJson<{data: T; version: ForkName}> => { return { @@ -225,10 +228,28 @@ export function getTypeByEvent(): {[K in EventType]: TypeJson} { {jsonCase: "eth2"} ), - [EventType.attestation]: ssz.phase0.Attestation, + [EventType.attestation]: { + toJson: (attestation) => { + const fork = config.getForkName(attestation.data.slot); + return 
sszTypesFor(fork).Attestation.toJson(attestation); + }, + fromJson: (attestation) => { + const fork = config.getForkName((attestation as Attestation).data.slot); + return sszTypesFor(fork).Attestation.fromJson(attestation); + }, + }, [EventType.voluntaryExit]: ssz.phase0.SignedVoluntaryExit, [EventType.proposerSlashing]: ssz.phase0.ProposerSlashing, - [EventType.attesterSlashing]: ssz.phase0.AttesterSlashing, + [EventType.attesterSlashing]: { + toJson: (attesterSlashing) => { + const fork = config.getForkName(Number(attesterSlashing.attestation1.data.slot)); + return sszTypesFor(fork).AttesterSlashing.toJson(attesterSlashing); + }, + fromJson: (attesterSlashing) => { + const fork = config.getForkName(Number((attesterSlashing as AttesterSlashing).attestation1.data.slot)); + return sszTypesFor(fork).AttesterSlashing.fromJson(attesterSlashing); + }, + }, [EventType.blsToExecutionChange]: ssz.capella.SignedBLSToExecutionChange, [EventType.finalizedCheckpoint]: new ContainerType( @@ -269,8 +290,8 @@ export function getTypeByEvent(): {[K in EventType]: TypeJson} { } // eslint-disable-next-line @typescript-eslint/explicit-function-return-type -export function getEventSerdes() { - const typeByEvent = getTypeByEvent(); +export function getEventSerdes(config: ChainForkConfig) { + const typeByEvent = getTypeByEvent(config); return { toJson: (event: BeaconEvent): unknown => { diff --git a/packages/api/src/beacon/routes/node.ts b/packages/api/src/beacon/routes/node.ts index 1ff0378c3330..0744b5f07452 100644 --- a/packages/api/src/beacon/routes/node.ts +++ b/packages/api/src/beacon/routes/node.ts @@ -43,6 +43,22 @@ export const PeerCountType = new ContainerType( {jsonCase: "eth2"} ); +export const SyncingStatusType = new ContainerType( + { + /** Head slot node is trying to reach */ + headSlot: ssz.Slot, + /** How many slots node needs to process to reach head. 0 if synced. */ + syncDistance: ssz.Slot, + /** Set to true if the node is syncing, false if the node is synced. 
*/ + isSyncing: ssz.Boolean, + /** Set to true if the node is optimistically tracking head. */ + isOptimistic: ssz.Boolean, + /** Set to true if the connected el client is offline */ + elOffline: ssz.Boolean, + }, + {jsonCase: "eth2"} +); + export type NetworkIdentity = ValueOf; export type PeerState = "disconnected" | "connecting" | "connected" | "disconnecting"; @@ -66,18 +82,7 @@ export type FilterGetPeers = { direction?: PeerDirection[]; }; -export type SyncingStatus = { - /** Head slot node is trying to reach */ - headSlot: string; - /** How many slots node needs to process to reach head. 0 if synced. */ - syncDistance: string; - /** Set to true if the node is syncing, false if the node is synced. */ - isSyncing: boolean; - /** Set to true if the node is optimistically tracking head. */ - isOptimistic: boolean; - /** Set to true if the connected el client is offline */ - elOffline: boolean; -}; +export type SyncingStatus = ValueOf; export enum NodeHealth { READY = HttpStatusCode.OK, @@ -243,7 +248,10 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({params: {state_id: stateId}, query: {format: toHexString(descriptor)}}), - parseReq: ({params, query}) => ({stateId: params.state_id, descriptor: fromHexString(query.format)}), + writeReq: ({stateId, descriptor}) => ({params: {state_id: stateId}, query: {format: toHex(descriptor)}}), + parseReq: ({params, query}) => ({stateId: params.state_id, descriptor: fromHex(query.format)}), schema: {params: {state_id: Schema.StringRequired}, query: {format: Schema.StringRequired}}, }, resp: { @@ -63,8 +64,8 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({params: {block_id: blockId}, query: {format: toHexString(descriptor)}}), - parseReq: ({params, query}) => ({blockId: params.block_id, descriptor: fromHexString(query.format)}), + writeReq: ({blockId, descriptor}) => ({params: {block_id: blockId}, query: {format: toHex(descriptor)}}), + parseReq: ({params, query}) 
=> ({blockId: params.block_id, descriptor: fromHex(query.format)}), schema: {params: {block_id: Schema.StringRequired}, query: {format: Schema.StringRequired}}, }, resp: { diff --git a/packages/api/src/beacon/routes/validator.ts b/packages/api/src/beacon/routes/validator.ts index 33161ec789e5..a9a1423e4da2 100644 --- a/packages/api/src/beacon/routes/validator.ts +++ b/packages/api/src/beacon/routes/validator.ts @@ -1,7 +1,7 @@ /* eslint-disable @typescript-eslint/naming-convention */ -import {ContainerType, fromHexString, toHexString, Type, ValueOf} from "@chainsafe/ssz"; +import {ContainerType, Type, ValueOf} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; -import {isForkBlobs} from "@lodestar/params"; +import {isForkBlobs, isForkPostElectra} from "@lodestar/params"; import { altair, BLSSignature, @@ -17,7 +17,10 @@ import { stringType, BeaconBlockOrContents, BlindedBeaconBlock, + Attestation, + sszTypesFor, } from "@lodestar/types"; +import {fromHex, toHex, toRootHex} from "@lodestar/utils"; import {Endpoint, RouteDefinitions, Schema} from "../../utils/index.js"; import {fromGraffitiHex, toBoolean, toGraffitiHex} from "../../utils/serdes.js"; import {getExecutionForkTypes, toForkName} from "../../utils/fork.js"; @@ -41,6 +44,7 @@ import { VersionMeta, VersionType, } from "../../utils/metadata.js"; +import {fromHeaders} from "../../utils/headers.js"; // See /packages/api/src/routes/index.ts for reasoning and instructions to add new routes @@ -81,15 +85,6 @@ export type ProduceBlockV3Meta = ValueOf & { executionPayloadSource: ProducedBlockSource; }; -export const BlockContentsType = new ContainerType( - { - block: ssz.deneb.BeaconBlock, - kzgProofs: ssz.deneb.KZGProofs, - blobs: ssz.deneb.Blobs, - }, - {jsonCase: "eth2"} -); - export const AttesterDutyType = new ContainerType( { /** The validator's public key, uniquely identifying them */ @@ -208,7 +203,8 @@ export const ValidatorIndicesType = ArrayOf(ssz.ValidatorIndex); export const 
AttesterDutyListType = ArrayOf(AttesterDutyType); export const ProposerDutyListType = ArrayOf(ProposerDutyType); export const SyncDutyListType = ArrayOf(SyncDutyType); -export const SignedAggregateAndProofListType = ArrayOf(ssz.phase0.SignedAggregateAndProof); +export const SignedAggregateAndProofListPhase0Type = ArrayOf(ssz.phase0.SignedAggregateAndProof); +export const SignedAggregateAndProofListElectraType = ArrayOf(ssz.electra.SignedAggregateAndProof); export const SignedContributionAndProofListType = ArrayOf(ssz.altair.SignedContributionAndProof); export const BeaconCommitteeSubscriptionListType = ArrayOf(BeaconCommitteeSubscriptionType); export const SyncCommitteeSubscriptionListType = ArrayOf(SyncCommitteeSubscriptionType); @@ -225,7 +221,9 @@ export type ProposerDuty = ValueOf; export type ProposerDutyList = ValueOf; export type SyncDuty = ValueOf; export type SyncDutyList = ValueOf; -export type SignedAggregateAndProofList = ValueOf; +export type SignedAggregateAndProofListPhase0 = ValueOf; +export type SignedAggregateAndProofListElectra = ValueOf; +export type SignedAggregateAndProofList = SignedAggregateAndProofListPhase0 | SignedAggregateAndProofListElectra; export type SignedContributionAndProofList = ValueOf; export type BeaconCommitteeSubscription = ValueOf; export type BeaconCommitteeSubscriptionList = ValueOf; @@ -412,18 +410,48 @@ export type Endpoints = { EmptyMeta >; + /** + * Get aggregated attestation + * Aggregates all attestations matching given attestation data root, slot and committee index + * Returns an aggregated `Attestation` object with same `AttestationData` root. 
+ */ + getAggregatedAttestationV2: Endpoint< + "GET", + { + /** HashTreeRoot of AttestationData that validator want's aggregated */ + attestationDataRoot: Root; + slot: Slot; + committeeIndex: number; + }, + {query: {attestation_data_root: string; slot: number; committee_index: number}}, + Attestation, + VersionMeta + >; + /** * Publish multiple aggregate and proofs * Verifies given aggregate and proofs and publishes them on appropriate gossipsub topic. */ publishAggregateAndProofs: Endpoint< "POST", - {signedAggregateAndProofs: SignedAggregateAndProofList}, + {signedAggregateAndProofs: SignedAggregateAndProofListPhase0}, {body: unknown}, EmptyResponseData, EmptyMeta >; + /** + * Publish multiple aggregate and proofs + * Verifies given aggregate and proofs and publishes them on appropriate gossipsub topic. + */ + publishAggregateAndProofsV2: Endpoint< + "POST", + {signedAggregateAndProofs: SignedAggregateAndProofList}, + {body: unknown; headers: {[MetaHeader.Version]: string}}, + EmptyResponseData, + EmptyMeta + >; + publishContributionAndProofs: Endpoint< "POST", {contributionAndProofs: SignedContributionAndProofList}, @@ -536,7 +564,7 @@ export type Endpoints = { >; }; -export function getDefinitions(_config: ChainForkConfig): RouteDefinitions { +export function getDefinitions(config: ChainForkConfig): RouteDefinitions { return { getAttesterDuties: { url: "/eth/v1/validator/duties/attester/{epoch}", @@ -596,7 +624,7 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({ params: {slot}, query: { - randao_reveal: toHexString(randaoReveal), + randao_reveal: toHex(randaoReveal), graffiti: toGraffitiHex(graffiti), fee_recipient: feeRecipient, builder_selection: builderSelection, @@ -605,7 +633,7 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({ slot: params.slot, - randaoReveal: fromHexString(query.randao_reveal), + randaoReveal: fromHex(query.randao_reveal), graffiti: fromGraffitiHex(query.graffiti), 
feeRecipient: query.fee_recipient, builderSelection: query.builder_selection as BuilderSelection, @@ -624,7 +652,8 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions (isForkBlobs(fork) ? BlockContentsType : ssz[fork].BeaconBlock) as Type + (fork) => + (isForkBlobs(fork) ? sszTypesFor(fork).BlockContents : ssz[fork].BeaconBlock) as Type ), meta: VersionCodec, }, @@ -646,7 +675,7 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({ params: {slot}, query: { - randao_reveal: toHexString(randaoReveal), + randao_reveal: toHex(randaoReveal), graffiti: toGraffitiHex(graffiti), skip_randao_verification: writeSkipRandaoVerification(skipRandaoVerification), fee_recipient: feeRecipient, @@ -658,7 +687,7 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({ slot: params.slot, - randaoReveal: fromHexString(query.randao_reveal), + randaoReveal: fromHex(query.randao_reveal), graffiti: fromGraffitiHex(query.graffiti), skipRandaoVerification: parseSkipRandaoVerification(query.skip_randao_verification), feeRecipient: query.fee_recipient, @@ -687,7 +716,7 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ), meta: { @@ -737,11 +766,11 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({ params: {slot}, - query: {randao_reveal: toHexString(randaoReveal), graffiti: toGraffitiHex(graffiti)}, + query: {randao_reveal: toHex(randaoReveal), graffiti: toGraffitiHex(graffiti)}, }), parseReq: ({params, query}) => ({ slot: params.slot, - randaoReveal: fromHexString(query.randao_reveal), + randaoReveal: fromHex(query.randao_reveal), graffiti: fromGraffitiHex(query.graffiti), }), schema: { @@ -777,12 +806,12 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({ - query: {slot, subcommittee_index: subcommitteeIndex, beacon_block_root: toHexString(beaconBlockRoot)}, + query: {slot, subcommittee_index: subcommitteeIndex, beacon_block_root: 
toRootHex(beaconBlockRoot)}, }), parseReq: ({query}) => ({ slot: query.slot, subcommitteeIndex: query.subcommittee_index, - beaconBlockRoot: fromHexString(query.beacon_block_root), + beaconBlockRoot: fromHex(query.beacon_block_root), }), schema: { query: { @@ -802,11 +831,17 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({ - query: {attestation_data_root: toHexString(attestationDataRoot), slot}, + query: {attestation_data_root: toRootHex(attestationDataRoot), slot}, + }), + parseReq: ({query}) => ({ + attestationDataRoot: fromHex(query.attestation_data_root), + slot: query.slot, }), - parseReq: ({query}) => ({attestationDataRoot: fromHexString(query.attestation_data_root), slot: query.slot}), schema: { - query: {attestation_data_root: Schema.StringRequired, slot: Schema.UintRequired}, + query: { + attestation_data_root: Schema.StringRequired, + slot: Schema.UintRequired, + }, }, }, resp: { @@ -814,24 +849,106 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({ + query: {attestation_data_root: toHex(attestationDataRoot), slot, committee_index: committeeIndex}, + }), + parseReq: ({query}) => ({ + attestationDataRoot: fromHex(query.attestation_data_root), + slot: query.slot, + committeeIndex: query.committee_index, + }), + schema: { + query: { + attestation_data_root: Schema.StringRequired, + slot: Schema.UintRequired, + committee_index: Schema.UintRequired, + }, + }, + }, + resp: { + data: WithVersion((fork) => (isForkPostElectra(fork) ? 
ssz.electra.Attestation : ssz.phase0.Attestation)), + meta: VersionCodec, + }, + }, publishAggregateAndProofs: { url: "/eth/v1/validator/aggregate_and_proofs", method: "POST", req: { writeReqJson: ({signedAggregateAndProofs}) => ({ - body: SignedAggregateAndProofListType.toJson(signedAggregateAndProofs), + body: SignedAggregateAndProofListPhase0Type.toJson(signedAggregateAndProofs), + }), + parseReqJson: ({body}) => ({ + signedAggregateAndProofs: SignedAggregateAndProofListPhase0Type.fromJson(body), }), - parseReqJson: ({body}) => ({signedAggregateAndProofs: SignedAggregateAndProofListType.fromJson(body)}), writeReqSsz: ({signedAggregateAndProofs}) => ({ - body: SignedAggregateAndProofListType.serialize(signedAggregateAndProofs), + body: SignedAggregateAndProofListPhase0Type.serialize(signedAggregateAndProofs), + }), + parseReqSsz: ({body}) => ({ + signedAggregateAndProofs: SignedAggregateAndProofListPhase0Type.deserialize(body), }), - parseReqSsz: ({body}) => ({signedAggregateAndProofs: SignedAggregateAndProofListType.deserialize(body)}), schema: { body: Schema.ObjectArray, }, }, resp: EmptyResponseCodec, }, + publishAggregateAndProofsV2: { + url: "/eth/v2/validator/aggregate_and_proofs", + method: "POST", + req: { + writeReqJson: ({signedAggregateAndProofs}) => { + const fork = config.getForkName(signedAggregateAndProofs[0]?.message.aggregate.data.slot ?? 0); + return { + body: isForkPostElectra(fork) + ? SignedAggregateAndProofListElectraType.toJson( + signedAggregateAndProofs as SignedAggregateAndProofListElectra + ) + : SignedAggregateAndProofListPhase0Type.toJson( + signedAggregateAndProofs as SignedAggregateAndProofListPhase0 + ), + headers: {[MetaHeader.Version]: fork}, + }; + }, + parseReqJson: ({body, headers}) => { + const fork = toForkName(fromHeaders(headers, MetaHeader.Version)); + return { + signedAggregateAndProofs: isForkPostElectra(fork) + ? 
SignedAggregateAndProofListElectraType.fromJson(body) + : SignedAggregateAndProofListPhase0Type.fromJson(body), + }; + }, + writeReqSsz: ({signedAggregateAndProofs}) => { + const fork = config.getForkName(signedAggregateAndProofs[0]?.message.aggregate.data.slot ?? 0); + return { + body: isForkPostElectra(fork) + ? SignedAggregateAndProofListElectraType.serialize( + signedAggregateAndProofs as SignedAggregateAndProofListElectra + ) + : SignedAggregateAndProofListPhase0Type.serialize( + signedAggregateAndProofs as SignedAggregateAndProofListPhase0 + ), + headers: {[MetaHeader.Version]: fork}, + }; + }, + parseReqSsz: ({body, headers}) => { + const fork = toForkName(fromHeaders(headers, MetaHeader.Version)); + return { + signedAggregateAndProofs: isForkPostElectra(fork) + ? SignedAggregateAndProofListElectraType.deserialize(body) + : SignedAggregateAndProofListPhase0Type.deserialize(body), + }; + }, + schema: { + body: Schema.ObjectArray, + headers: {[MetaHeader.Version]: Schema.String}, + }, + }, + resp: EmptyResponseCodec, + }, publishContributionAndProofs: { url: "/eth/v1/validator/contribution_and_proofs", method: "POST", diff --git a/packages/api/src/beacon/server/events.ts b/packages/api/src/beacon/server/events.ts index cbeae24f6908..96212f006d8f 100644 --- a/packages/api/src/beacon/server/events.ts +++ b/packages/api/src/beacon/server/events.ts @@ -3,7 +3,7 @@ import {ApiError, ApplicationMethods, FastifyRoutes, createFastifyRoutes} from " import {Endpoints, getDefinitions, eventTypes, getEventSerdes} from "../routes/events.js"; export function getRoutes(config: ChainForkConfig, methods: ApplicationMethods): FastifyRoutes { - const eventSerdes = getEventSerdes(); + const eventSerdes = getEventSerdes(config); const serverRoutes = createFastifyRoutes(getDefinitions(config), methods); return { diff --git a/packages/api/src/builder/routes.ts b/packages/api/src/builder/routes.ts index 297c4fc5e9a9..7459e46abd0b 100644 --- a/packages/api/src/builder/routes.ts +++ 
b/packages/api/src/builder/routes.ts @@ -1,5 +1,4 @@ /* eslint-disable @typescript-eslint/naming-convention */ -import {fromHexString, toHexString} from "@chainsafe/ssz"; import { ssz, bellatrix, @@ -13,6 +12,7 @@ import { } from "@lodestar/types"; import {ForkName, isForkBlobs} from "@lodestar/params"; import {ChainForkConfig} from "@lodestar/config"; +import {fromHex, toPubkeyHex, toRootHex} from "@lodestar/utils"; import {Endpoint, RouteDefinitions, Schema} from "../utils/index.js"; import {MetaHeader, VersionCodec, VersionMeta} from "../utils/metadata.js"; @@ -105,12 +105,12 @@ export function getDefinitions(config: ChainForkConfig): RouteDefinitions ({ - params: {slot, parent_hash: toHexString(parentHash), pubkey: toHexString(proposerPubKey)}, + params: {slot, parent_hash: toRootHex(parentHash), pubkey: toPubkeyHex(proposerPubKey)}, }), parseReq: ({params}) => ({ slot: params.slot, - parentHash: fromHexString(params.parent_hash), - proposerPubkey: fromHexString(params.pubkey), + parentHash: fromHex(params.parent_hash), + proposerPubkey: fromHex(params.pubkey), }), schema: { params: {slot: Schema.UintRequired, parent_hash: Schema.StringRequired, pubkey: Schema.StringRequired}, diff --git a/packages/api/src/utils/client/response.ts b/packages/api/src/utils/client/response.ts index ed008273588a..fdcb2afda943 100644 --- a/packages/api/src/utils/client/response.ts +++ b/packages/api/src/utils/client/response.ts @@ -189,8 +189,11 @@ export class ApiResponse extends Response { private getErrorMessage(): string { const errBody = this.resolvedErrorBody(); try { - const errJson = JSON.parse(errBody) as {message?: string}; + const errJson = JSON.parse(errBody) as {message?: string; failures?: {message: string}[]}; if (errJson.message) { + if (errJson.failures) { + return `${errJson.message}\n` + errJson.failures.map((e) => e.message).join("\n"); + } return errJson.message; } else { return errBody; diff --git a/packages/api/src/utils/schema.ts 
b/packages/api/src/utils/schema.ts index 2d086fd8dfa9..3f297db6f665 100644 --- a/packages/api/src/utils/schema.ts +++ b/packages/api/src/utils/schema.ts @@ -1,3 +1,4 @@ +import {MediaType} from "./headers.js"; import {Endpoint, HeaderParams, PathParams, QueryParams} from "./types.js"; // Reasoning: Allows to declare JSON schemas for server routes in a succinct typesafe way. @@ -91,7 +92,16 @@ export function getFastifySchema(schemaDef: Schem const schema: {params?: JsonSchemaObj; querystring?: JsonSchemaObj; headers?: JsonSchemaObj; body?: JsonSchema} = {}; if (schemaDef.body != null) { - schema.body = getJsonSchemaItem(schemaDef.body); + schema.body = { + content: { + [MediaType.json]: { + schema: getJsonSchemaItem(schemaDef.body), + }, + [MediaType.ssz]: { + schema: {}, + }, + }, + }; } if (schemaDef.params) { diff --git a/packages/api/src/utils/serdes.ts b/packages/api/src/utils/serdes.ts index 233d7db9e7f8..282c2514e00d 100644 --- a/packages/api/src/utils/serdes.ts +++ b/packages/api/src/utils/serdes.ts @@ -1,4 +1,5 @@ -import {fromHexString, JsonPath, toHexString} from "@chainsafe/ssz"; +import {JsonPath} from "@chainsafe/ssz"; +import {fromHex, toHex} from "@lodestar/utils"; /** * Serialize proof path to JSON. 
@@ -82,7 +83,7 @@ export function toGraffitiHex(utf8?: string): string | undefined { return undefined; } - const hex = toHexString(new TextEncoder().encode(utf8)); + const hex = toHex(new TextEncoder().encode(utf8)); if (hex.length > GRAFFITI_HEX_LENGTH) { // remove characters from the end if hex string is too long @@ -102,7 +103,7 @@ export function fromGraffitiHex(hex?: string): string | undefined { return undefined; } try { - return new TextDecoder("utf8").decode(fromHexString(hex)); + return new TextDecoder("utf8").decode(fromHex(hex)); } catch { // allow malformed graffiti hex string return hex; diff --git a/packages/api/src/utils/server/parser.ts b/packages/api/src/utils/server/parser.ts index fd668b63757e..3300575a4845 100644 --- a/packages/api/src/utils/server/parser.ts +++ b/packages/api/src/utils/server/parser.ts @@ -2,22 +2,10 @@ import type * as fastify from "fastify"; import {MediaType} from "../headers.js"; export function addSszContentTypeParser(server: fastify.FastifyInstance): void { - // Cache body schema symbol, does not change per request - let bodySchemaSymbol: symbol | undefined; - server.addContentTypeParser( MediaType.ssz, {parseAs: "buffer"}, - async (request: fastify.FastifyRequest, payload: Buffer) => { - if (bodySchemaSymbol === undefined) { - // Get body schema symbol to be able to access validation function - // https://github.com/fastify/fastify/blob/af2ccb5ff681c1d0ac22eb7314c6fa803f73c873/lib/symbols.js#L25 - bodySchemaSymbol = Object.getOwnPropertySymbols(request.context).find((s) => s.description === "body-schema"); - } - // JSON schema validation will be applied to `Buffer` object, it is required to override validation function - // See https://github.com/fastify/help/issues/1012, it is not possible right now to define a schema per content type - (request.context as unknown as Record)[bodySchemaSymbol as symbol] = () => true; - + async (_request: fastify.FastifyRequest, payload: Buffer) => { // We could just return the `Buffer` 
here which is a subclass of `Uint8Array` but downstream code does not require it // and it's better to convert it here to avoid unexpected behavior such as `Buffer.prototype.slice` not copying memory // See https://github.com/nodejs/node/issues/41588#issuecomment-1016269584 diff --git a/packages/api/test/unit/beacon/oapiSpec.test.ts b/packages/api/test/unit/beacon/oapiSpec.test.ts index e5d473ab6a55..2b8a8254dd6b 100644 --- a/packages/api/test/unit/beacon/oapiSpec.test.ts +++ b/packages/api/test/unit/beacon/oapiSpec.test.ts @@ -21,7 +21,7 @@ import {testData as validatorTestData} from "./testData/validator.js"; // eslint-disable-next-line @typescript-eslint/naming-convention const __dirname = path.dirname(fileURLToPath(import.meta.url)); -const version = "v2.5.0"; +const version = "v3.0.0-alpha.6"; const openApiFile: OpenApiFile = { url: `https://github.com/ethereum/beacon-APIs/releases/download/${version}/beacon-node-oapi.json`, filepath: path.join(__dirname, "../../../oapi-schemas/beacon-node-oapi.json"), @@ -57,7 +57,6 @@ const ignoredOperations = [ /* missing route */ "getDepositSnapshot", // Won't fix for now, see https://github.com/ChainSafe/lodestar/issues/5697 "getNextWithdrawals", // https://github.com/ChainSafe/lodestar/issues/5696 - "getDebugForkChoice", // https://github.com/ChainSafe/lodestar/issues/5700 /* Must support ssz response body */ "getLightClientUpdatesByRange", // https://github.com/ChainSafe/lodestar/issues/6841 ]; @@ -108,7 +107,7 @@ describe("eventstream event data", () => { } }); - const eventSerdes = routes.events.getEventSerdes(); + const eventSerdes = routes.events.getEventSerdes(config); const knownTopics = new Set(Object.values(routes.events.eventTypes)); for (const [topic, {value}] of Object.entries(eventstreamExamples ?? 
{}).filter( diff --git a/packages/api/test/unit/beacon/testData/beacon.ts b/packages/api/test/unit/beacon/testData/beacon.ts index 9a89abd68a14..23ab13147454 100644 --- a/packages/api/test/unit/beacon/testData/beacon.ts +++ b/packages/api/test/unit/beacon/testData/beacon.ts @@ -49,6 +49,13 @@ export const testData: GenericServerTestCases = { args: {blockId: "head"}, res: {data: [ssz.phase0.Attestation.defaultValue()], meta: {executionOptimistic: true, finalized: false}}, }, + getBlockAttestationsV2: { + args: {blockId: "head"}, + res: { + data: [ssz.electra.Attestation.defaultValue()], + meta: {executionOptimistic: true, finalized: false, version: ForkName.electra}, + }, + }, getBlockHeader: { args: {blockId: "head"}, res: {data: blockHeaderResponse, meta: {executionOptimistic: true, finalized: false}}, @@ -94,10 +101,18 @@ export const testData: GenericServerTestCases = { args: {slot: 1, committeeIndex: 2}, res: {data: [ssz.phase0.Attestation.defaultValue()]}, }, + getPoolAttestationsV2: { + args: {slot: 1, committeeIndex: 2}, + res: {data: [ssz.electra.Attestation.defaultValue()], meta: {version: ForkName.electra}}, + }, getPoolAttesterSlashings: { args: undefined, res: {data: [ssz.phase0.AttesterSlashing.defaultValue()]}, }, + getPoolAttesterSlashingsV2: { + args: undefined, + res: {data: [ssz.electra.AttesterSlashing.defaultValue()], meta: {version: ForkName.electra}}, + }, getPoolProposerSlashings: { args: undefined, res: {data: [ssz.phase0.ProposerSlashing.defaultValue()]}, @@ -114,10 +129,18 @@ export const testData: GenericServerTestCases = { args: {signedAttestations: [ssz.phase0.Attestation.defaultValue()]}, res: undefined, }, + submitPoolAttestationsV2: { + args: {signedAttestations: [ssz.phase0.Attestation.defaultValue()]}, + res: undefined, + }, submitPoolAttesterSlashings: { args: {attesterSlashing: ssz.phase0.AttesterSlashing.defaultValue()}, res: undefined, }, + submitPoolAttesterSlashingsV2: { + args: {attesterSlashing: 
ssz.phase0.AttesterSlashing.defaultValue()}, + res: undefined, + }, submitPoolProposerSlashings: { args: {proposerSlashing: ssz.phase0.ProposerSlashing.defaultValue()}, res: undefined, @@ -168,6 +191,13 @@ export const testData: GenericServerTestCases = { args: {stateId: "head", validatorIds: [pubkeyHex, 1300], statuses: ["active_ongoing"]}, res: {data: [validatorResponse], meta: {executionOptimistic: true, finalized: false}}, }, + postStateValidatorIdentities: { + args: {stateId: "head", validatorIds: [1300]}, + res: { + data: [{index: 1300, pubkey: ssz.BLSPubkey.defaultValue(), activationEpoch: 1}], + meta: {executionOptimistic: true, finalized: false}, + }, + }, getStateValidator: { args: {stateId: "head", validatorId: pubkeyHex}, res: {data: validatorResponse, meta: {executionOptimistic: true, finalized: false}}, diff --git a/packages/api/test/unit/beacon/testData/debug.ts b/packages/api/test/unit/beacon/testData/debug.ts index aac3b379ff4d..cb2799939ae3 100644 --- a/packages/api/test/unit/beacon/testData/debug.ts +++ b/packages/api/test/unit/beacon/testData/debug.ts @@ -4,13 +4,41 @@ import {ssz} from "@lodestar/types"; import {Endpoints} from "../../../../src/beacon/routes/debug.js"; import {GenericServerTestCases} from "../../../utils/genericServerTest.js"; -const rootHex = toHexString(Buffer.alloc(32, 1)); +const root = new Uint8Array(32).fill(1); +const rootHex = toHexString(root); export const testData: GenericServerTestCases = { getDebugChainHeadsV2: { args: undefined, res: {data: [{slot: 1, root: rootHex, executionOptimistic: true}]}, }, + getDebugForkChoice: { + args: undefined, + res: { + data: { + justifiedCheckpoint: { + epoch: 2, + root, + }, + finalizedCheckpoint: { + epoch: 1, + root, + }, + forkChoiceNodes: [ + { + slot: 1, + blockRoot: rootHex, + parentRoot: rootHex, + justifiedEpoch: 1, + finalizedEpoch: 1, + weight: 1, + validity: "valid", + executionBlockHash: rootHex, + }, + ], + }, + }, + }, getProtoArrayNodes: { args: undefined, res: { 
diff --git a/packages/api/test/unit/beacon/testData/node.ts b/packages/api/test/unit/beacon/testData/node.ts index 48efc4a728bc..e46aa3e28850 100644 --- a/packages/api/test/unit/beacon/testData/node.ts +++ b/packages/api/test/unit/beacon/testData/node.ts @@ -49,7 +49,7 @@ export const testData: GenericServerTestCases = { }, getSyncingStatus: { args: undefined, - res: {data: {headSlot: "1", syncDistance: "2", isSyncing: false, isOptimistic: true, elOffline: false}}, + res: {data: {headSlot: 1, syncDistance: 2, isSyncing: false, isOptimistic: true, elOffline: false}}, }, getHealth: { args: {syncingStatus: 206}, diff --git a/packages/api/test/unit/beacon/testData/validator.ts b/packages/api/test/unit/beacon/testData/validator.ts index 11fd7dd26425..d4fae4bfe290 100644 --- a/packages/api/test/unit/beacon/testData/validator.ts +++ b/packages/api/test/unit/beacon/testData/validator.ts @@ -102,10 +102,18 @@ export const testData: GenericServerTestCases = { args: {attestationDataRoot: ZERO_HASH, slot: 32000}, res: {data: ssz.phase0.Attestation.defaultValue()}, }, + getAggregatedAttestationV2: { + args: {attestationDataRoot: ZERO_HASH, slot: 32000, committeeIndex: 2}, + res: {data: ssz.electra.Attestation.defaultValue(), meta: {version: ForkName.electra}}, + }, publishAggregateAndProofs: { args: {signedAggregateAndProofs: [ssz.phase0.SignedAggregateAndProof.defaultValue()]}, res: undefined, }, + publishAggregateAndProofsV2: { + args: {signedAggregateAndProofs: [ssz.phase0.SignedAggregateAndProof.defaultValue()]}, + res: undefined, + }, publishContributionAndProofs: { args: {contributionAndProofs: [ssz.altair.SignedContributionAndProof.defaultValue()]}, res: undefined, diff --git a/packages/api/test/unit/keymanager/oapiSpec.test.ts b/packages/api/test/unit/keymanager/oapiSpec.test.ts index 10c29f0cea55..97011f5d3660 100644 --- a/packages/api/test/unit/keymanager/oapiSpec.test.ts +++ b/packages/api/test/unit/keymanager/oapiSpec.test.ts @@ -12,7 +12,7 @@ import {testData} from 
"./testData.js"; // eslint-disable-next-line @typescript-eslint/naming-convention const __dirname = path.dirname(fileURLToPath(import.meta.url)); -const version = "v1.0.0"; +const version = "v1.1.0"; const openApiFile: OpenApiFile = { url: `https://github.com/ethereum/keymanager-APIs/releases/download/${version}/keymanager-oapi.json`, filepath: path.join(__dirname, "../../../oapi-schemas/keymanager-oapi.json"), diff --git a/packages/beacon-node/package.json b/packages/beacon-node/package.json index a59ec4440dee..6894e4a7d8b6 100644 --- a/packages/beacon-node/package.json +++ b/packages/beacon-node/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.20.2", + "version": "1.22.0", "type": "module", "exports": { ".": { @@ -94,22 +94,23 @@ "check-readme": "typescript-docs-verifier" }, "dependencies": { - "@chainsafe/as-sha256": "^0.4.1", + "@chainsafe/as-sha256": "^0.5.0", "@chainsafe/blst": "^2.0.3", "@chainsafe/discv5": "^9.0.0", "@chainsafe/enr": "^3.0.0", "@chainsafe/libp2p-gossipsub": "^13.0.0", "@chainsafe/libp2p-identify": "^1.0.0", "@chainsafe/libp2p-noise": "^15.0.0", - "@chainsafe/persistent-merkle-tree": "^0.7.1", + "@chainsafe/persistent-merkle-tree": "^0.8.0", "@chainsafe/prometheus-gc-stats": "^1.0.0", - "@chainsafe/ssz": "^0.15.1", + "@chainsafe/ssz": "^0.17.1", "@chainsafe/threads": "^1.11.1", + "@chainsafe/pubkey-index-map": "2.0.0", "@ethersproject/abi": "^5.7.0", - "@fastify/bearer-auth": "^9.0.0", - "@fastify/cors": "^8.2.1", - "@fastify/swagger": "^8.10.0", - "@fastify/swagger-ui": "^1.9.3", + "@fastify/bearer-auth": "^10.0.1", + "@fastify/cors": "^10.0.1", + "@fastify/swagger": "^9.0.0", + "@fastify/swagger-ui": "^5.0.1", "@libp2p/bootstrap": "^10.0.21", "@libp2p/identify": "^1.0.20", "@libp2p/interface": "^1.3.0", @@ -119,24 +120,24 @@ "@libp2p/peer-id-factory": "^4.1.0", "@libp2p/prometheus-metrics": "^3.0.21", "@libp2p/tcp": "9.0.23", - "@lodestar/api": "^1.20.2", - 
"@lodestar/config": "^1.20.2", - "@lodestar/db": "^1.20.2", - "@lodestar/fork-choice": "^1.20.2", - "@lodestar/light-client": "^1.20.2", - "@lodestar/logger": "^1.20.2", - "@lodestar/params": "^1.20.2", - "@lodestar/reqresp": "^1.20.2", - "@lodestar/state-transition": "^1.20.2", - "@lodestar/types": "^1.20.2", - "@lodestar/utils": "^1.20.2", - "@lodestar/validator": "^1.20.2", + "@lodestar/api": "^1.22.0", + "@lodestar/config": "^1.22.0", + "@lodestar/db": "^1.22.0", + "@lodestar/fork-choice": "^1.22.0", + "@lodestar/light-client": "^1.22.0", + "@lodestar/logger": "^1.22.0", + "@lodestar/params": "^1.22.0", + "@lodestar/reqresp": "^1.22.0", + "@lodestar/state-transition": "^1.22.0", + "@lodestar/types": "^1.22.0", + "@lodestar/utils": "^1.22.0", + "@lodestar/validator": "^1.22.0", "@multiformats/multiaddr": "^12.1.3", "c-kzg": "^2.1.2", "datastore-core": "^9.1.1", "datastore-level": "^10.1.1", "deepmerge": "^4.3.1", - "fastify": "^4.27.0", + "fastify": "^5.0.0", "interface-datastore": "^8.2.7", "it-all": "^3.0.4", "it-pipe": "^3.0.1", diff --git a/packages/beacon-node/src/api/impl/beacon/blocks/index.ts b/packages/beacon-node/src/api/impl/beacon/blocks/index.ts index 1b8a59cc8967..65e7b9373a22 100644 --- a/packages/beacon-node/src/api/impl/beacon/blocks/index.ts +++ b/packages/beacon-node/src/api/impl/beacon/blocks/index.ts @@ -1,13 +1,13 @@ import {routes} from "@lodestar/api"; -import {ApplicationMethods} from "@lodestar/api/server"; +import {ApiError, ApplicationMethods} from "@lodestar/api/server"; import { computeEpochAtSlot, computeTimeAtSlot, reconstructFullBlockOrContents, signedBeaconBlockToBlinded, } from "@lodestar/state-transition"; -import {ForkExecution, SLOTS_PER_HISTORICAL_ROOT, isForkExecution} from "@lodestar/params"; -import {sleep, fromHex, toHex} from "@lodestar/utils"; +import {ForkExecution, SLOTS_PER_HISTORICAL_ROOT, isForkExecution, isForkPostElectra} from "@lodestar/params"; +import {sleep, fromHex, toRootHex} from "@lodestar/utils"; 
import { deneb, isSignedBlockContents, @@ -97,9 +97,9 @@ export function getBeaconBlockApi({ // state transition to produce the stateRoot const slot = signedBlock.message.slot; const fork = config.getForkName(slot); - const blockRoot = toHex(chain.config.getForkTypes(slot).BeaconBlock.hashTreeRoot(signedBlock.message)); + const blockRoot = toRootHex(chain.config.getForkTypes(slot).BeaconBlock.hashTreeRoot(signedBlock.message)); // bodyRoot should be the same to produced block - const bodyRoot = toHex(chain.config.getForkTypes(slot).BeaconBlockBody.hashTreeRoot(signedBlock.message.body)); + const bodyRoot = toRootHex(chain.config.getForkTypes(slot).BeaconBlockBody.hashTreeRoot(signedBlock.message.body)); const blockLocallyProduced = chain.producedBlockRoot.has(blockRoot) || chain.producedBlindedBlockRoot.has(blockRoot); const valLogMeta = {slot, blockRoot, bodyRoot, broadcastValidation, blockLocallyProduced}; @@ -147,7 +147,7 @@ export function getBeaconBlockApi({ ); throw new BlockError(signedBlock, { code: BlockErrorCode.PARENT_UNKNOWN, - parentRoot: toHex(signedBlock.message.parentRoot), + parentRoot: toRootHex(signedBlock.message.parentRoot), }); } @@ -243,7 +243,7 @@ export function getBeaconBlockApi({ opts: PublishBlockOpts = {} ) => { const slot = signedBlindedBlock.message.slot; - const blockRoot = toHex( + const blockRoot = toRootHex( chain.config .getExecutionForkTypes(signedBlindedBlock.message.slot) .BlindedBeaconBlock.hashTreeRoot(signedBlindedBlock.message) @@ -258,7 +258,7 @@ export function getBeaconBlockApi({ chain.logger.debug("Reconstructing signedBlockOrContents", {slot, blockRoot, source}); const contents = executionPayload - ? chain.producedContentsCache.get(toHex(executionPayload.blockHash)) ?? null + ? chain.producedContentsCache.get(toRootHex(executionPayload.blockHash)) ?? 
null : null; const signedBlockOrContents = reconstructFullBlockOrContents(signedBlindedBlock, {executionPayload, contents}); @@ -354,7 +354,7 @@ export function getBeaconBlockApi({ } finalized = false; - if (summary.blockRoot !== toHex(canonicalRoot)) { + if (summary.blockRoot !== toRootHex(canonicalRoot)) { const block = await db.block.get(fromHex(summary.blockRoot)); if (block) { result.push(toBeaconHeaderResponse(config, block)); @@ -407,12 +407,29 @@ export function getBeaconBlockApi({ async getBlockAttestations({blockId}) { const {block, executionOptimistic, finalized} = await getBlockResponse(chain, blockId); + const fork = config.getForkName(block.message.slot); + + if (isForkPostElectra(fork)) { + throw new ApiError( + 400, + `Use getBlockAttestationsV2 to retrieve block attestations for post-electra fork=${fork}` + ); + } + return { - data: Array.from(block.message.body.attestations), + data: block.message.body.attestations, meta: {executionOptimistic, finalized}, }; }, + async getBlockAttestationsV2({blockId}) { + const {block, executionOptimistic, finalized} = await getBlockResponse(chain, blockId); + return { + data: block.message.body.attestations, + meta: {executionOptimistic, finalized, version: config.getForkName(block.message.slot)}, + }; + }, + async getBlockRoot({blockId}) { // Fast path: From head state already available in memory get historical blockRoot const slot = typeof blockId === "string" ? 
parseInt(blockId) : blockId; @@ -473,7 +490,7 @@ export function getBeaconBlockApi({ } if (!blobSidecars) { - throw Error(`blobSidecars not found in db for slot=${block.message.slot} root=${toHex(blockRoot)}`); + throw Error(`blobSidecars not found in db for slot=${block.message.slot} root=${toRootHex(blockRoot)}`); } return { diff --git a/packages/beacon-node/src/api/impl/beacon/pool/index.ts b/packages/beacon-node/src/api/impl/beacon/pool/index.ts index 8372b84db3b1..e01b172f1e72 100644 --- a/packages/beacon-node/src/api/impl/beacon/pool/index.ts +++ b/packages/beacon-node/src/api/impl/beacon/pool/index.ts @@ -1,7 +1,7 @@ import {routes} from "@lodestar/api"; import {ApplicationMethods} from "@lodestar/api/server"; -import {Epoch, ssz} from "@lodestar/types"; -import {SYNC_COMMITTEE_SUBNET_SIZE} from "@lodestar/params"; +import {Attestation, Epoch, isElectraAttestation, ssz} from "@lodestar/types"; +import {ForkName, SYNC_COMMITTEE_SUBNET_SIZE, isForkPostElectra} from "@lodestar/params"; import {validateApiAttestation} from "../../../../chain/validation/index.js"; import {validateApiAttesterSlashing} from "../../../../chain/validation/attesterSlashing.js"; import {validateApiProposerSlashing} from "../../../../chain/validation/proposerSlashing.js"; @@ -16,6 +16,7 @@ import { SyncCommitteeError, } from "../../../../chain/errors/index.js"; import {validateGossipFnRetryUnknownRoot} from "../../../../network/processor/gossipHandlers.js"; +import {ApiError, FailureList, IndexedError} from "../../errors.js"; export function getBeaconPoolApi({ chain, @@ -26,7 +27,15 @@ export function getBeaconPoolApi({ return { async getPoolAttestations({slot, committeeIndex}) { // Already filtered by slot - let attestations = chain.aggregatedAttestationPool.getAll(slot); + let attestations: Attestation[] = chain.aggregatedAttestationPool.getAll(slot); + const fork = chain.config.getForkName(slot ?? 
chain.clock.currentSlot); + + if (isForkPostElectra(fork)) { + throw new ApiError( + 400, + `Use getPoolAttestationsV2 to retrieve pool attestations for post-electra fork=${fork}` + ); + } if (committeeIndex !== undefined) { attestations = attestations.filter((attestation) => committeeIndex === attestation.data.index); @@ -35,10 +44,32 @@ export function getBeaconPoolApi({ return {data: attestations}; }, + async getPoolAttestationsV2({slot, committeeIndex}) { + // Already filtered by slot + let attestations = chain.aggregatedAttestationPool.getAll(slot); + const fork = chain.config.getForkName(slot ?? attestations[0]?.data.slot ?? chain.clock.currentSlot); + const isPostElectra = isForkPostElectra(fork); + + attestations = attestations.filter((attestation) => + isPostElectra ? isElectraAttestation(attestation) : !isElectraAttestation(attestation) + ); + + if (committeeIndex !== undefined) { + attestations = attestations.filter((attestation) => committeeIndex === attestation.data.index); + } + + return {data: attestations, meta: {version: fork}}; + }, + async getPoolAttesterSlashings() { return {data: chain.opPool.getAllAttesterSlashings()}; }, + async getPoolAttesterSlashingsV2() { + // TODO Electra: Determine fork based on data returned by api + return {data: chain.opPool.getAllAttesterSlashings(), meta: {version: ForkName.phase0}}; + }, + async getPoolProposerSlashings() { return {data: chain.opPool.getAllProposerSlashings()}; }, @@ -52,8 +83,12 @@ export function getBeaconPoolApi({ }, async submitPoolAttestations({signedAttestations}) { + await this.submitPoolAttestationsV2({signedAttestations}); + }, + + async submitPoolAttestationsV2({signedAttestations}) { const seenTimestampSec = Date.now() / 1000; - const errors: Error[] = []; + const failures: FailureList = []; await Promise.all( signedAttestations.map(async (attestation, i) => { @@ -65,7 +100,7 @@ export function getBeaconPoolApi({ // when a validator is configured with multiple beacon node urls, this 
attestation data may come from another beacon node // and the block hasn't been in our forkchoice since we haven't seen / processing that block // see https://github.com/ChainSafe/lodestar/issues/5098 - const {indexedAttestation, subnet, attDataRootHex} = await validateGossipFnRetryUnknownRoot( + const {indexedAttestation, subnet, attDataRootHex, committeeIndex} = await validateGossipFnRetryUnknownRoot( validateFn, network, chain, @@ -74,7 +109,7 @@ export function getBeaconPoolApi({ ); if (network.shouldAggregate(subnet, slot)) { - const insertOutcome = chain.attestationPool.add(attestation, attDataRootHex); + const insertOutcome = chain.attestationPool.add(committeeIndex, attestation, attDataRootHex); metrics?.opPool.attestationPoolInsertOutcome.inc({insertOutcome}); } @@ -92,7 +127,7 @@ export function getBeaconPoolApi({ return; } - errors.push(e as Error); + failures.push({index: i, message: (e as Error).message}); logger.error(`Error on submitPoolAttestations [${i}]`, logCtx, e as Error); if (e instanceof AttestationError && e.action === GossipAction.REJECT) { chain.persistInvalidSszValue(ssz.phase0.Attestation, attestation, "api_reject"); @@ -101,10 +136,8 @@ export function getBeaconPoolApi({ }) ); - if (errors.length > 1) { - throw Error("Multiple errors on submitPoolAttestations\n" + errors.map((e) => e.message).join("\n")); - } else if (errors.length === 1) { - throw errors[0]; + if (failures.length > 0) { + throw new IndexedError("Error processing attestations", failures); } }, @@ -114,6 +147,11 @@ export function getBeaconPoolApi({ await network.publishAttesterSlashing(attesterSlashing); }, + async submitPoolAttesterSlashingsV2({attesterSlashing}) { + // TODO Electra: Refactor submitPoolAttesterSlashings and submitPoolAttesterSlashingsV2 + await this.submitPoolAttesterSlashings({attesterSlashing}); + }, + async submitPoolProposerSlashings({proposerSlashing}) { await validateApiProposerSlashing(chain, proposerSlashing); 
chain.opPool.insertProposerSlashing(proposerSlashing); @@ -128,7 +166,7 @@ export function getBeaconPoolApi({ }, async submitPoolBLSToExecutionChange({blsToExecutionChanges}) { - const errors: Error[] = []; + const failures: FailureList = []; await Promise.all( blsToExecutionChanges.map(async (blsToExecutionChange, i) => { @@ -144,7 +182,7 @@ export function getBeaconPoolApi({ await network.publishBlsToExecutionChange(blsToExecutionChange); } } catch (e) { - errors.push(e as Error); + failures.push({index: i, message: (e as Error).message}); logger.error( `Error on submitPoolBLSToExecutionChange [${i}]`, {validatorIndex: blsToExecutionChange.message.validatorIndex}, @@ -154,10 +192,8 @@ export function getBeaconPoolApi({ }) ); - if (errors.length > 1) { - throw Error("Multiple errors on submitPoolBLSToExecutionChange\n" + errors.map((e) => e.message).join("\n")); - } else if (errors.length === 1) { - throw errors[0]; + if (failures.length > 0) { + throw new IndexedError("Error processing BLS to execution changes", failures); } }, @@ -181,7 +217,7 @@ export function getBeaconPoolApi({ // TODO: Fetch states at signature slots const state = chain.getHeadState(); - const errors: Error[] = []; + const failures: FailureList = []; await Promise.all( signatures.map(async (signature, i) => { @@ -221,7 +257,7 @@ export function getBeaconPoolApi({ return; } - errors.push(e as Error); + failures.push({index: i, message: (e as Error).message}); logger.debug( `Error on submitPoolSyncCommitteeSignatures [${i}]`, {slot: signature.slot, validatorIndex: signature.validatorIndex}, @@ -234,10 +270,8 @@ export function getBeaconPoolApi({ }) ); - if (errors.length > 1) { - throw Error("Multiple errors on submitPoolSyncCommitteeSignatures\n" + errors.map((e) => e.message).join("\n")); - } else if (errors.length === 1) { - throw errors[0]; + if (failures.length > 0) { + throw new IndexedError("Error processing sync committee signatures", failures); } }, }; diff --git 
a/packages/beacon-node/src/api/impl/beacon/state/index.ts b/packages/beacon-node/src/api/impl/beacon/state/index.ts index 9d9646ee8cf3..2bf758a8e286 100644 --- a/packages/beacon-node/src/api/impl/beacon/state/index.ts +++ b/packages/beacon-node/src/api/impl/beacon/state/index.ts @@ -9,20 +9,17 @@ import { getRandaoMix, } from "@lodestar/state-transition"; import {EPOCHS_PER_HISTORICAL_VECTOR} from "@lodestar/params"; +import {getValidatorStatus} from "@lodestar/types"; +import {fromHex} from "@lodestar/utils"; import {ApiError} from "../../errors.js"; import {ApiModules} from "../../types.js"; -import { - filterStateValidatorsByStatus, - getStateValidatorIndex, - getValidatorStatus, - getStateResponse, - toValidatorResponse, -} from "./utils.js"; +import {filterStateValidatorsByStatus, getStateValidatorIndex, getStateResponse, toValidatorResponse} from "./utils.js"; export function getBeaconStateApi({ chain, config, -}: Pick): ApplicationMethods { + logger, +}: Pick): ApplicationMethods { async function getState( stateId: routes.beacon.StateId ): Promise<{state: BeaconStateAllForks; executionOptimistic: boolean; finalized: boolean}> { @@ -98,6 +95,8 @@ export function getBeaconStateApi({ currentEpoch ); validatorResponses.push(validatorResponse); + } else { + logger.warn(resp.reason, {id}); } } return { @@ -130,6 +129,39 @@ export function getBeaconStateApi({ return this.getStateValidators(args, context); }, + async postStateValidatorIdentities({stateId, validatorIds = []}) { + const {state, executionOptimistic, finalized} = await getStateResponse(chain, stateId); + const {pubkey2index} = chain.getHeadState().epochCtx; + + let validatorIdentities: routes.beacon.ValidatorIdentities; + + if (validatorIds.length) { + validatorIdentities = []; + for (const id of validatorIds) { + const resp = getStateValidatorIndex(id, state, pubkey2index); + if (resp.valid) { + const index = resp.validatorIndex; + const {pubkey, activationEpoch} = state.validators.getReadonly(index); 
+ validatorIdentities.push({index, pubkey, activationEpoch}); + } else { + logger.warn(resp.reason, {id}); + } + } + } else { + const validatorsArr = state.validators.getAllReadonlyValues(); + validatorIdentities = new Array(validatorsArr.length) as routes.beacon.ValidatorIdentities; + for (let i = 0; i < validatorsArr.length; i++) { + const {pubkey, activationEpoch} = validatorsArr[i]; + validatorIdentities[i] = {index: i, pubkey, activationEpoch}; + } + } + + return { + data: validatorIdentities, + meta: {executionOptimistic, finalized}, + }; + }, + async getStateValidator({stateId, validatorId}) { const {state, executionOptimistic, finalized} = await getStateResponse(chain, stateId); const {pubkey2index} = chain.getHeadState().epochCtx; @@ -164,7 +196,7 @@ export function getBeaconStateApi({ } balances.push({index: id, balance: state.balances.get(id)}); } else { - const index = headState.epochCtx.pubkey2index.get(id); + const index = headState.epochCtx.pubkey2index.get(fromHex(id)); if (index != null && index <= state.validators.length) { balances.push({index, balance: state.balances.get(index)}); } @@ -202,7 +234,14 @@ export function getBeaconStateApi({ const epoch = filters.epoch ?? 
computeEpochAtSlot(state.slot); const startSlot = computeStartSlotAtEpoch(epoch); - const shuffling = stateCached.epochCtx.getShufflingAtEpoch(epoch); + const decisionRoot = stateCached.epochCtx.getShufflingDecisionRoot(epoch); + const shuffling = await chain.shufflingCache.get(epoch, decisionRoot); + if (!shuffling) { + throw new ApiError( + 500, + `No shuffling found to calculate committees for epoch: ${epoch} and decisionRoot: ${decisionRoot}` + ); + } const committees = shuffling.committees; const committeesFlat = committees.flatMap((slotCommittees, slotInEpoch) => { const slot = startSlot + slotInEpoch; diff --git a/packages/beacon-node/src/api/impl/beacon/state/utils.ts b/packages/beacon-node/src/api/impl/beacon/state/utils.ts index 40b1e2815263..5e9d3be01221 100644 --- a/packages/beacon-node/src/api/impl/beacon/state/utils.ts +++ b/packages/beacon-node/src/api/impl/beacon/state/utils.ts @@ -1,7 +1,8 @@ +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; import {routes} from "@lodestar/api"; -import {FAR_FUTURE_EPOCH, GENESIS_SLOT} from "@lodestar/params"; -import {BeaconStateAllForks, PubkeyIndexMap} from "@lodestar/state-transition"; -import {BLSPubkey, Epoch, phase0, RootHex, Slot, ValidatorIndex} from "@lodestar/types"; +import {GENESIS_SLOT} from "@lodestar/params"; +import {BeaconStateAllForks} from "@lodestar/state-transition"; +import {BLSPubkey, Epoch, getValidatorStatus, phase0, RootHex, Slot, ValidatorIndex} from "@lodestar/types"; import {fromHex} from "@lodestar/utils"; import {CheckpointWithHex, IForkChoice} from "@lodestar/fork-choice"; import {IBeaconChain} from "../../../../chain/index.js"; @@ -82,38 +83,6 @@ export async function getStateResponseWithRegen( return res; } -/** - * Get the status of the validator - * based on conditions outlined in https://hackmd.io/ofFJ5gOmQpu1jjHilHbdQQ - */ -export function getValidatorStatus(validator: phase0.Validator, currentEpoch: Epoch): routes.beacon.ValidatorStatus { - // pending - if 
(validator.activationEpoch > currentEpoch) { - if (validator.activationEligibilityEpoch === FAR_FUTURE_EPOCH) { - return "pending_initialized"; - } else if (validator.activationEligibilityEpoch < FAR_FUTURE_EPOCH) { - return "pending_queued"; - } - } - // active - if (validator.activationEpoch <= currentEpoch && currentEpoch < validator.exitEpoch) { - if (validator.exitEpoch === FAR_FUTURE_EPOCH) { - return "active_ongoing"; - } else if (validator.exitEpoch < FAR_FUTURE_EPOCH) { - return validator.slashed ? "active_slashed" : "active_exiting"; - } - } - // exited - if (validator.exitEpoch <= currentEpoch && currentEpoch < validator.withdrawableEpoch) { - return validator.slashed ? "exited_slashed" : "exited_unslashed"; - } - // withdrawal - if (validator.withdrawableEpoch <= currentEpoch) { - return validator.effectiveBalance !== 0 ? "withdrawal_possible" : "withdrawal_done"; - } - throw new Error("ValidatorStatus unknown"); -} - export function toValidatorResponse( index: ValidatorIndex, validator: phase0.Validator, @@ -187,7 +156,7 @@ export function getStateValidatorIndex( // typeof id === Uint8Array const validatorIndex = pubkey2index.get(id); - if (validatorIndex === undefined) { + if (validatorIndex === null) { return {valid: false, code: 404, reason: "Validator pubkey not found in state"}; } if (validatorIndex >= state.validators.length) { diff --git a/packages/beacon-node/src/api/impl/config/constants.ts b/packages/beacon-node/src/api/impl/config/constants.ts index 87ffce91b4d9..14ecade15b2e 100644 --- a/packages/beacon-node/src/api/impl/config/constants.ts +++ b/packages/beacon-node/src/api/impl/config/constants.ts @@ -36,13 +36,16 @@ import { SYNC_COMMITTEE_SUBNET_COUNT, BLOB_TX_TYPE, VERSIONED_HASH_VERSION_KZG, + COMPOUNDING_WITHDRAWAL_PREFIX, + UNSET_DEPOSIT_REQUESTS_START_INDEX, + FULL_EXIT_REQUEST_AMOUNT, } from "@lodestar/params"; /* eslint-disable @typescript-eslint/naming-convention */ /** * Hand-picked list of constants declared in consensus-spec 
.md files. - * This list is asserted to be up-to-date with the test `test/e2e/api/specConstants.test.ts` + * This list is asserted to be up-to-date with the test `test/e2e/api/impl/config.test.ts` */ export const specConstants = { // phase0/beacon-chain.md @@ -57,6 +60,7 @@ export const specConstants = { // ## Withdrawal prefixes BLS_WITHDRAWAL_PREFIX, ETH1_ADDRESS_WITHDRAWAL_PREFIX, + COMPOUNDING_WITHDRAWAL_PREFIX, // ## Domain types DOMAIN_BEACON_PROPOSER, DOMAIN_BEACON_ATTESTER, @@ -100,4 +104,8 @@ export const specConstants = { // Deneb types BLOB_TX_TYPE, VERSIONED_HASH_VERSION_KZG, + + // electra + UNSET_DEPOSIT_REQUESTS_START_INDEX, + FULL_EXIT_REQUEST_AMOUNT, }; diff --git a/packages/beacon-node/src/api/impl/debug/index.ts b/packages/beacon-node/src/api/impl/debug/index.ts index 4edb8ba9b2dd..e5b6450b206f 100644 --- a/packages/beacon-node/src/api/impl/debug/index.ts +++ b/packages/beacon-node/src/api/impl/debug/index.ts @@ -1,6 +1,8 @@ import {routes} from "@lodestar/api"; import {ApplicationMethods} from "@lodestar/api/server"; +import {ExecutionStatus} from "@lodestar/fork-choice"; import {BeaconState} from "@lodestar/types"; +import {ZERO_HASH_HEX} from "@lodestar/params"; import {getStateResponseWithRegen} from "../beacon/state/utils.js"; import {ApiModules} from "../types.js"; import {isOptimisticBlock} from "../../../util/forkChoice.js"; @@ -22,6 +24,35 @@ export function getDebugApi({ }; }, + async getDebugForkChoice() { + return { + data: { + justifiedCheckpoint: chain.forkChoice.getJustifiedCheckpoint(), + finalizedCheckpoint: chain.forkChoice.getFinalizedCheckpoint(), + forkChoiceNodes: chain.forkChoice.getAllNodes().map((node) => ({ + slot: node.slot, + blockRoot: node.blockRoot, + parentRoot: node.parentRoot, + justifiedEpoch: node.justifiedEpoch, + finalizedEpoch: node.finalizedEpoch, + weight: node.weight, + validity: (() => { + switch (node.executionStatus) { + case ExecutionStatus.Valid: + return "valid"; + case ExecutionStatus.Invalid: + 
return "invalid"; + case ExecutionStatus.Syncing: + case ExecutionStatus.PreMerge: + return "optimistic"; + } + })(), + executionBlockHash: node.executionPayloadBlockHash ?? ZERO_HASH_HEX, + })), + }, + }; + }, + async getProtoArrayNodes() { const nodes = chain.forkChoice.getAllNodes().map((node) => ({ // if node has executionPayloadNumber, it will overwrite the below default diff --git a/packages/beacon-node/src/api/impl/errors.ts b/packages/beacon-node/src/api/impl/errors.ts index 848691f7cf6d..609f40f83a12 100644 --- a/packages/beacon-node/src/api/impl/errors.ts +++ b/packages/beacon-node/src/api/impl/errors.ts @@ -35,3 +35,16 @@ export class OnlySupportedByDVT extends ApiError { super(501, "Only supported by distributed validator middleware clients"); } } + +// Error thrown when processing multiple items failed - https://github.com/ethereum/beacon-APIs/blob/e7f7d70423b0abfe9d9f33b701be2ec03e44eb02/types/http.yaml#L175 +export class IndexedError extends ApiError { + failures: FailureList; + + constructor(message: string, failures: FailureList) { + super(400, message); + + this.failures = failures.sort((a, b) => a.index - b.index); + } +} + +export type FailureList = {index: number; message: string}[]; diff --git a/packages/beacon-node/src/api/impl/lodestar/index.ts b/packages/beacon-node/src/api/impl/lodestar/index.ts index d3083dda8e9c..0e8d2b1fa94b 100644 --- a/packages/beacon-node/src/api/impl/lodestar/index.ts +++ b/packages/beacon-node/src/api/impl/lodestar/index.ts @@ -3,7 +3,7 @@ import path from "node:path"; import {routes} from "@lodestar/api"; import {ApplicationMethods} from "@lodestar/api/server"; import {Repository} from "@lodestar/db"; -import {toHex} from "@lodestar/utils"; +import {toHex, toRootHex} from "@lodestar/utils"; import {getLatestWeakSubjectivityCheckpointEpoch} from "@lodestar/state-transition"; import {ChainForkConfig} from "@lodestar/config"; import {ssz} from "@lodestar/types"; @@ -202,7 +202,7 @@ function regenRequestToJson(config: 
ChainForkConfig, regenRequest: RegenRequest) case "getPreState": { const slot = regenRequest.args[0].slot; return { - root: toHex(config.getForkTypes(slot).BeaconBlock.hashTreeRoot(regenRequest.args[0])), + root: toRootHex(config.getForkTypes(slot).BeaconBlock.hashTreeRoot(regenRequest.args[0])), slot, }; } diff --git a/packages/beacon-node/src/api/impl/validator/index.ts b/packages/beacon-node/src/api/impl/validator/index.ts index b2a0b8575f5c..8a500125ad06 100644 --- a/packages/beacon-node/src/api/impl/validator/index.ts +++ b/packages/beacon-node/src/api/impl/validator/index.ts @@ -3,6 +3,7 @@ import {ApplicationMethods} from "@lodestar/api/server"; import { CachedBeaconStateAllForks, computeStartSlotAtEpoch, + calculateCommitteeAssignments, proposerShufflingDecisionRoot, attesterShufflingDecisionRoot, getBlockRootAtSlot, @@ -21,6 +22,7 @@ import { ForkPreBlobs, ForkBlobs, ForkExecution, + isForkPostElectra, } from "@lodestar/params"; import {MAX_BUILDER_BOOST_FACTOR} from "@lodestar/validator"; import { @@ -39,9 +41,10 @@ import { BeaconBlock, BlockContents, BlindedBeaconBlock, + getValidatorStatus, } from "@lodestar/types"; import {ExecutionStatus, DataAvailabilityStatus} from "@lodestar/fork-choice"; -import {fromHex, toHex, resolveOrRacePromises, prettyWeiToEth} from "@lodestar/utils"; +import {fromHex, toHex, resolveOrRacePromises, prettyWeiToEth, toRootHex} from "@lodestar/utils"; import { AttestationError, AttestationErrorCode, @@ -59,7 +62,6 @@ import {validateSyncCommitteeGossipContributionAndProof} from "../../../chain/va import {CommitteeSubscription} from "../../../network/subnets/index.js"; import {ApiModules} from "../types.js"; import {RegenCaller} from "../../../chain/regen/index.js"; -import {getValidatorStatus} from "../beacon/state/utils.js"; import {validateGossipFnRetryUnknownRoot} from "../../../network/processor/gossipHandlers.js"; import {SCHEDULER_LOOKAHEAD_FACTOR} from "../../../chain/prepareNextSlot.js"; import {ChainEvent, 
CheckpointHex, CommonBlockBody} from "../../../chain/index.js"; @@ -324,7 +326,7 @@ export function getValidatorApi( function notOnOptimisticBlockRoot(beaconBlockRoot: Root): void { const protoBeaconBlock = chain.forkChoice.getBlock(beaconBlockRoot); if (!protoBeaconBlock) { - throw new ApiError(400, `Block not in forkChoice, beaconBlockRoot=${toHex(beaconBlockRoot)}`); + throw new ApiError(404, `Block not in forkChoice, beaconBlockRoot=${toRootHex(beaconBlockRoot)}`); } if (protoBeaconBlock.executionStatus === ExecutionStatus.Syncing) @@ -336,7 +338,7 @@ export function getValidatorApi( function notOnOutOfRangeData(beaconBlockRoot: Root): void { const protoBeaconBlock = chain.forkChoice.getBlock(beaconBlockRoot); if (!protoBeaconBlock) { - throw new ApiError(400, `Block not in forkChoice, beaconBlockRoot=${toHex(beaconBlockRoot)}`); + throw new ApiError(404, `Block not in forkChoice, beaconBlockRoot=${toRootHex(beaconBlockRoot)}`); } if (protoBeaconBlock.dataAvailabilityStatus === DataAvailabilityStatus.OutOfRange) @@ -416,7 +418,7 @@ export function getValidatorApi( slot, executionPayloadValue, consensusBlockValue, - root: toHex(config.getExecutionForkTypes(slot).BlindedBeaconBlock.hashTreeRoot(block)), + root: toRootHex(config.getExecutionForkTypes(slot).BlindedBeaconBlock.hashTreeRoot(block)), }); if (chain.opts.persistProducedBlocks) { @@ -494,13 +496,13 @@ export function getValidatorApi( slot, executionPayloadValue, consensusBlockValue, - root: toHex(config.getForkTypes(slot).BeaconBlock.hashTreeRoot(block)), + root: toRootHex(config.getForkTypes(slot).BeaconBlock.hashTreeRoot(block)), }); if (chain.opts.persistProducedBlocks) { void chain.persistBlock(block, "produced_engine_block"); } if (isForkBlobs(version)) { - const blockHash = toHex((block as bellatrix.BeaconBlock).body.executionPayload.blockHash); + const blockHash = toRootHex((block as bellatrix.BeaconBlock).body.executionPayload.blockHash); const contents = 
chain.producedContentsCache.get(blockHash); if (contents === undefined) { throw Error("contents missing in cache"); @@ -814,6 +816,7 @@ export function getValidatorApi( const attEpoch = computeEpochAtSlot(slot); const headBlockRootHex = chain.forkChoice.getHead().blockRoot; const headBlockRoot = fromHex(headBlockRootHex); + const fork = config.getForkName(slot); const beaconBlockRoot = slot >= headSlot @@ -845,7 +848,7 @@ export function getValidatorApi( return { data: { slot, - index: committeeIndex, + index: isForkPostElectra(fork) ? 0 : committeeIndex, beaconBlockRoot, source: attEpochState.currentJustifiedCheckpoint, target: {epoch: attEpoch, root: targetRoot}, @@ -869,7 +872,7 @@ export function getValidatorApi( // and it hasn't been in our forkchoice since we haven't seen / processing that block // see https://github.com/ChainSafe/lodestar/issues/5063 if (!chain.forkChoice.hasBlock(beaconBlockRoot)) { - const rootHex = toHex(beaconBlockRoot); + const rootHex = toRootHex(beaconBlockRoot); network.searchUnknownSlotRoot({slot, root: rootHex}); // if result of this call is false, i.e. 
block hasn't seen after 1 slot then the below notOnOptimisticBlockRoot call will throw error await chain.waitForBlock(slot, rootHex); @@ -880,7 +883,12 @@ export function getValidatorApi( notOnOutOfRangeData(beaconBlockRoot); const contribution = chain.syncCommitteeMessagePool.getContribution(subcommitteeIndex, slot, beaconBlockRoot); - if (!contribution) throw new ApiError(500, "No contribution available"); + if (!contribution) { + throw new ApiError( + 404, + `No sync committee contribution for slot=${slot}, subnet=${subcommitteeIndex}, beaconBlockRoot=${toRootHex(beaconBlockRoot)}` + ); + } metrics?.production.producedSyncContributionParticipants.observe( contribution.aggregationBits.getTrueBitIndexes().length @@ -954,7 +962,7 @@ export function getValidatorApi( return { data: duties, meta: { - dependentRoot: toHex(dependentRoot), + dependentRoot: toRootHex(dependentRoot), executionOptimistic: isOptimisticBlock(head), }, }; @@ -988,7 +996,15 @@ export function getValidatorApi( // Check that all validatorIndex belong to the state before calling getCommitteeAssignments() const pubkeys = getPubkeysForIndices(state.validators, indices); - const committeeAssignments = state.epochCtx.getCommitteeAssignments(epoch, indices); + const decisionRoot = state.epochCtx.getShufflingDecisionRoot(epoch); + const shuffling = await chain.shufflingCache.get(epoch, decisionRoot); + if (!shuffling) { + throw new ApiError( + 500, + `No shuffling found to calculate committee assignments for epoch: ${epoch} and decisionRoot: ${decisionRoot}` + ); + } + const committeeAssignments = calculateCommitteeAssignments(shuffling, indices); const duties: routes.validator.AttesterDuty[] = []; for (let i = 0, len = indices.length; i < len; i++) { const validatorIndex = indices[i]; @@ -1006,7 +1022,7 @@ export function getValidatorApi( return { data: duties, meta: { - dependentRoot: toHex(dependentRoot), + dependentRoot: toRootHex(dependentRoot), executionOptimistic: isOptimisticBlock(head), }, }; 
@@ -1071,8 +1087,16 @@ export function getValidatorApi( await waitForSlot(slot); // Must never request for a future slot > currentSlot - const dataRootHex = toHex(attestationDataRoot); - const aggregate = chain.attestationPool.getAggregate(slot, dataRootHex); + const dataRootHex = toRootHex(attestationDataRoot); + const aggregate = chain.attestationPool.getAggregate(slot, null, dataRootHex); + const fork = chain.config.getForkName(slot); + + if (isForkPostElectra(fork)) { + throw new ApiError( + 400, + `Use getAggregatedAttestationV2 to retrieve aggregated attestations for post-electra fork=${fork}` + ); + } if (!aggregate) { throw new ApiError(404, `No aggregated attestation for slot=${slot}, dataRoot=${dataRootHex}`); @@ -1085,7 +1109,34 @@ export function getValidatorApi( }; }, + async getAggregatedAttestationV2({attestationDataRoot, slot, committeeIndex}) { + notWhileSyncing(); + + await waitForSlot(slot); // Must never request for a future slot > currentSlot + + const dataRootHex = toRootHex(attestationDataRoot); + const aggregate = chain.attestationPool.getAggregate(slot, committeeIndex, dataRootHex); + + if (!aggregate) { + throw new ApiError( + 404, + `No aggregated attestation for slot=${slot}, committeeIndex=${committeeIndex}, dataRoot=${dataRootHex}` + ); + } + + metrics?.production.producedAggregateParticipants.observe(aggregate.aggregationBits.getTrueBitIndexes().length); + + return { + data: aggregate, + meta: {version: config.getForkName(slot)}, + }; + }, + async publishAggregateAndProofs({signedAggregateAndProofs}) { + await this.publishAggregateAndProofsV2({signedAggregateAndProofs}); + }, + + async publishAggregateAndProofsV2({signedAggregateAndProofs}) { notWhileSyncing(); const seenTimestampSec = Date.now() / 1000; @@ -1308,7 +1359,7 @@ export function getValidatorApi( const filteredRegistrations = registrations.filter((registration) => { const {pubkey} = registration.message; const validatorIndex = headState.epochCtx.pubkey2index.get(pubkey); - 
if (validatorIndex === undefined) return false; + if (validatorIndex === null) return false; const validator = headState.validators.getReadonly(validatorIndex); const status = getValidatorStatus(validator, currentEpoch); diff --git a/packages/beacon-node/src/api/rest/base.ts b/packages/beacon-node/src/api/rest/base.ts index 5f191bf76beb..276583dc7281 100644 --- a/packages/beacon-node/src/api/rest/base.ts +++ b/packages/beacon-node/src/api/rest/base.ts @@ -5,7 +5,7 @@ import bearerAuthPlugin from "@fastify/bearer-auth"; import {addSszContentTypeParser} from "@lodestar/api/server"; import {ErrorAborted, Gauge, Histogram, Logger} from "@lodestar/utils"; import {isLocalhostIP} from "../../util/ip.js"; -import {ApiError, NodeIsSyncing} from "../impl/errors.js"; +import {ApiError, FailureList, IndexedError, NodeIsSyncing} from "../impl/errors.js"; import {HttpActiveSocketsTracker, SocketMetrics} from "./activeSockets.js"; export type RestApiServerOpts = { @@ -15,6 +15,7 @@ export type RestApiServerOpts = { bearerToken?: string; headerLimit?: number; bodyLimit?: number; + stacktraces?: boolean; swaggerUI?: boolean; }; @@ -29,11 +30,31 @@ export type RestApiServerMetrics = SocketMetrics & { errors: Gauge<{operationId: string}>; }; +/** + * Error response body format as defined in beacon-api spec + * + * See https://github.com/ethereum/beacon-APIs/blob/v2.5.0/types/http.yaml + */ +type ErrorResponse = { + code: number; + message: string; + stacktraces?: string[]; +}; + +type IndexedErrorResponse = ErrorResponse & { + failures?: FailureList; +}; + /** * Error code used by Fastify if media type is not supported (415) */ const INVALID_MEDIA_TYPE_CODE = errorCodes.FST_ERR_CTP_INVALID_MEDIA_TYPE().code; +/** + * Error code used by Fastify if JSON schema validation failed + */ +const SCHEMA_VALIDATION_ERROR_CODE = errorCodes.FST_ERR_VALIDATION().code; + /** * REST API powered by `fastify` server. 
*/ @@ -71,15 +92,38 @@ export class RestApiServer { // To parse our ApiError -> statusCode server.setErrorHandler((err, _req, res) => { + const stacktraces = opts.stacktraces ? err.stack?.split("\n") : undefined; if (err.validation) { - void res.status(400).send(err.validation); + const {instancePath, message} = err.validation[0]; + const payload: ErrorResponse = { + code: 400, + message: `${instancePath.substring(instancePath.lastIndexOf("/") + 1)} ${message}`, + stacktraces, + }; + void res.status(400).send(payload); + } else if (err instanceof IndexedError) { + const payload: IndexedErrorResponse = { + code: err.statusCode, + message: err.message, + failures: err.failures, + stacktraces, + }; + void res.status(err.statusCode).send(payload); } else { // Convert our custom ApiError into status code const statusCode = err instanceof ApiError ? err.statusCode : 500; - void res.status(statusCode).send(err); + const payload: ErrorResponse = {code: statusCode, message: err.message, stacktraces}; + void res.status(statusCode).send(payload); } }); + server.setNotFoundHandler((req, res) => { + const message = `Route ${req.raw.method}:${req.raw.url} not found`; + this.logger.warn(message); + const payload: ErrorResponse = {code: 404, message}; + void res.code(404).send(payload); + }); + if (opts.cors) { void server.register(fastifyCors, {origin: opts.cors}); } @@ -127,7 +171,7 @@ export class RestApiServer { const operationId = getOperationId(req); - if (err instanceof ApiError || err.code === INVALID_MEDIA_TYPE_CODE) { + if (err instanceof ApiError || [INVALID_MEDIA_TYPE_CODE, SCHEMA_VALIDATION_ERROR_CODE].includes(err.code)) { this.logger.warn(`Req ${req.id} ${operationId} failed`, {reason: err.message}); } else { this.logger.error(`Req ${req.id} ${operationId} error`, {}, err); diff --git a/packages/beacon-node/src/api/rest/index.ts b/packages/beacon-node/src/api/rest/index.ts index e27ed6bd9139..6beaf061588c 100644 --- a/packages/beacon-node/src/api/rest/index.ts +++ 
b/packages/beacon-node/src/api/rest/index.ts @@ -22,6 +22,7 @@ export const beaconRestApiServerOpts: BeaconRestApiServerOpts = { cors: "*", // beacon -> validator API is trusted, and for large amounts of keys the payload is multi-MB bodyLimit: 20 * 1024 * 1024, // 20MB for big block + blobs + stacktraces: false, }; export type BeaconRestApiServerModules = RestApiServerModules & { diff --git a/packages/beacon-node/src/chain/archiver/archiveBlocks.ts b/packages/beacon-node/src/chain/archiver/archiveBlocks.ts index 76bfe651ad77..8e1ac456579a 100644 --- a/packages/beacon-node/src/chain/archiver/archiveBlocks.ts +++ b/packages/beacon-node/src/chain/archiver/archiveBlocks.ts @@ -1,7 +1,6 @@ -import {fromHexString} from "@chainsafe/ssz"; import {Epoch, Slot, RootHex} from "@lodestar/types"; import {IForkChoice} from "@lodestar/fork-choice"; -import {Logger, toHex} from "@lodestar/utils"; +import {Logger, fromHex, toRootHex} from "@lodestar/utils"; import {ForkSeq, SLOTS_PER_EPOCH} from "@lodestar/params"; import {computeEpochAtSlot, computeStartSlotAtEpoch} from "@lodestar/state-transition"; import {KeyValue} from "@lodestar/db"; @@ -48,7 +47,7 @@ export async function archiveBlocks( const finalizedCanonicalBlockRoots: BlockRootSlot[] = finalizedCanonicalBlocks.map((block) => ({ slot: block.slot, - root: fromHexString(block.blockRoot), + root: fromHex(block.blockRoot), })); if (finalizedCanonicalBlockRoots.length > 0) { @@ -68,7 +67,7 @@ export async function archiveBlocks( // deleteNonCanonicalBlocks // loop through forkchoice single time - const nonCanonicalBlockRoots = finalizedNonCanonicalBlocks.map((summary) => fromHexString(summary.blockRoot)); + const nonCanonicalBlockRoots = finalizedNonCanonicalBlocks.map((summary) => fromHex(summary.blockRoot)); if (nonCanonicalBlockRoots.length > 0) { await db.block.batchDelete(nonCanonicalBlockRoots); logger.verbose("Deleted non canonical blocks from hot DB", { @@ -137,14 +136,14 @@ async function 
migrateBlocksFromHotToColdDb(db: IBeaconDb, blocks: BlockRootSlot canonicalBlocks.map(async (block) => { const blockBuffer = await db.block.getBinary(block.root); if (!blockBuffer) { - throw Error(`No block found for slot ${block.slot} root ${toHex(block.root)}`); + throw Error(`No block found for slot ${block.slot} root ${toRootHex(block.root)}`); } return { key: block.slot, value: blockBuffer, slot: block.slot, blockRoot: block.root, - // TODO: Benchmark if faster to slice Buffer or fromHexString() + // TODO: Benchmark if faster to slice Buffer or fromHex() parentRoot: getParentRootFromSignedBlock(blockBuffer), }; }) @@ -177,7 +176,7 @@ async function migrateBlobSidecarsFromHotToColdDb( .map(async (block) => { const bytes = await db.blobSidecars.getBinary(block.root); if (!bytes) { - throw Error(`No blobSidecars found for slot ${block.slot} root ${toHex(block.root)}`); + throw Error(`No blobSidecars found for slot ${block.slot} root ${toRootHex(block.root)}`); } return {key: block.slot, value: bytes}; }) diff --git a/packages/beacon-node/src/chain/archiver/archiveStates.ts b/packages/beacon-node/src/chain/archiver/archiveStates.ts index 2231cd3ff513..8fd9081ab243 100644 --- a/packages/beacon-node/src/chain/archiver/archiveStates.ts +++ b/packages/beacon-node/src/chain/archiver/archiveStates.ts @@ -6,6 +6,9 @@ import {CheckpointWithHex} from "@lodestar/fork-choice"; import {IBeaconDb} from "../../db/index.js"; import {IStateRegenerator} from "../regen/interface.js"; import {getStateSlotFromBytes} from "../../util/multifork.js"; +import {serializeState} from "../serializeState.js"; +import {AllocSource, BufferPool} from "../../util/bufferPool.js"; +import {Metrics} from "../../metrics/metrics.js"; /** * Minimum number of epochs between single temp archived states @@ -30,7 +33,8 @@ export class StatesArchiver { private readonly regen: IStateRegenerator, private readonly db: IBeaconDb, private readonly logger: Logger, - private readonly opts: StatesArchiverOpts + 
private readonly opts: StatesArchiverOpts, + private readonly bufferPool?: BufferPool | null ) {} /** @@ -45,13 +49,13 @@ export class StatesArchiver { * epoch - 1024*2 epoch - 1024 epoch - 32 epoch * ``` */ - async maybeArchiveState(finalized: CheckpointWithHex): Promise { + async maybeArchiveState(finalized: CheckpointWithHex, metrics?: Metrics | null): Promise { const lastStoredSlot = await this.db.stateArchive.lastKey(); const lastStoredEpoch = computeEpochAtSlot(lastStoredSlot ?? 0); const {archiveStateEpochFrequency} = this.opts; if (finalized.epoch - lastStoredEpoch >= Math.min(PERSIST_TEMP_STATE_EVERY_EPOCHS, archiveStateEpochFrequency)) { - await this.archiveState(finalized); + await this.archiveState(finalized, metrics); // Only check the current and previous intervals const minEpoch = Math.max( @@ -83,7 +87,7 @@ export class StatesArchiver { * Archives finalized states from active bucket to archive bucket. * Only the new finalized state is stored to disk */ - async archiveState(finalized: CheckpointWithHex): Promise { + async archiveState(finalized: CheckpointWithHex, metrics?: Metrics | null): Promise { // starting from Mar 2024, the finalized state could be from disk or in memory const finalizedStateOrBytes = await this.regen.getCheckpointStateOrBytes(finalized); const {rootHex} = finalized; @@ -95,8 +99,17 @@ export class StatesArchiver { await this.db.stateArchive.putBinary(slot, finalizedStateOrBytes); this.logger.verbose("Archived finalized state bytes", {epoch: finalized.epoch, slot, root: rootHex}); } else { - // state - await this.db.stateArchive.put(finalizedStateOrBytes.slot, finalizedStateOrBytes); + // serialize state using BufferPool if provided + const timer = metrics?.stateSerializeDuration.startTimer({source: AllocSource.ARCHIVE_STATE}); + await serializeState( + finalizedStateOrBytes, + AllocSource.ARCHIVE_STATE, + (stateBytes) => { + timer?.(); + return this.db.stateArchive.putBinary(finalizedStateOrBytes.slot, stateBytes); + }, + 
this.bufferPool + ); // don't delete states before the finalized state, auto-prune will take care of it this.logger.verbose("Archived finalized state", { epoch: finalized.epoch, diff --git a/packages/beacon-node/src/chain/archiver/index.ts b/packages/beacon-node/src/chain/archiver/index.ts index ee0711e05e4b..45169b2fa802 100644 --- a/packages/beacon-node/src/chain/archiver/index.ts +++ b/packages/beacon-node/src/chain/archiver/index.ts @@ -4,6 +4,7 @@ import {IBeaconDb} from "../../db/index.js"; import {JobItemQueue} from "../../util/queue/index.js"; import {IBeaconChain} from "../interface.js"; import {ChainEvent} from "../emitter.js"; +import {Metrics} from "../../metrics/metrics.js"; import {StatesArchiver, StatesArchiverOpts} from "./archiveStates.js"; import {archiveBlocks} from "./archiveBlocks.js"; @@ -45,10 +46,11 @@ export class Archiver { private readonly chain: IBeaconChain, private readonly logger: Logger, signal: AbortSignal, - opts: ArchiverOpts + opts: ArchiverOpts, + private readonly metrics?: Metrics | null ) { this.archiveBlobEpochs = opts.archiveBlobEpochs; - this.statesArchiver = new StatesArchiver(chain.regen, db, logger, opts); + this.statesArchiver = new StatesArchiver(chain.regen, db, logger, opts, chain.bufferPool); this.prevFinalized = chain.forkChoice.getFinalizedCheckpoint(); this.jobQueue = new JobItemQueue<[CheckpointWithHex], void>(this.processFinalizedCheckpoint, { maxLength: PROCESS_FINALIZED_CHECKPOINT_QUEUE_LEN, @@ -105,7 +107,7 @@ export class Archiver { this.prevFinalized = finalized; // should be after ArchiveBlocksTask to handle restart cleanly - await this.statesArchiver.maybeArchiveState(finalized); + await this.statesArchiver.maybeArchiveState(finalized, this.metrics); this.chain.regen.pruneOnFinalized(finalizedEpoch); diff --git a/packages/beacon-node/src/chain/balancesCache.ts b/packages/beacon-node/src/chain/balancesCache.ts index 5f4cf218c341..50a86b31b6c8 100644 --- a/packages/beacon-node/src/chain/balancesCache.ts 
+++ b/packages/beacon-node/src/chain/balancesCache.ts @@ -7,7 +7,7 @@ import { } from "@lodestar/state-transition"; import {CheckpointWithHex} from "@lodestar/fork-choice"; import {Epoch, RootHex} from "@lodestar/types"; -import {toHexString} from "@lodestar/utils"; +import {toRootHex} from "@lodestar/utils"; /** The number of validator balance sets that are cached within `CheckpointBalancesCache`. */ const MAX_BALANCE_CACHE_SIZE = 4; @@ -33,7 +33,7 @@ export class CheckpointBalancesCache { const epoch = state.epochCtx.epoch; const epochBoundarySlot = computeStartSlotAtEpoch(epoch); const epochBoundaryRoot = - epochBoundarySlot === state.slot ? blockRootHex : toHexString(getBlockRootAtSlot(state, epochBoundarySlot)); + epochBoundarySlot === state.slot ? blockRootHex : toRootHex(getBlockRootAtSlot(state, epochBoundarySlot)); const index = this.items.findIndex((item) => item.epoch === epoch && item.rootHex == epochBoundaryRoot); if (index === -1) { diff --git a/packages/beacon-node/src/chain/blocks/importBlock.ts b/packages/beacon-node/src/chain/blocks/importBlock.ts index 906be51434c2..d19d6a60c564 100644 --- a/packages/beacon-node/src/chain/blocks/importBlock.ts +++ b/packages/beacon-node/src/chain/blocks/importBlock.ts @@ -1,4 +1,3 @@ -import {toHexString} from "@chainsafe/ssz"; import {capella, ssz, altair, BeaconBlock} from "@lodestar/types"; import {ForkLightClient, ForkSeq, INTERVALS_PER_SLOT, MAX_SEED_LOOKAHEAD, SLOTS_PER_EPOCH} from "@lodestar/params"; import { @@ -10,7 +9,7 @@ import { } from "@lodestar/state-transition"; import {routes} from "@lodestar/api"; import {ForkChoiceError, ForkChoiceErrorCode, EpochDifference, AncestorStatus} from "@lodestar/fork-choice"; -import {isErrorAborted} from "@lodestar/utils"; +import {isErrorAborted, toHex, toRootHex} from "@lodestar/utils"; import {ZERO_HASH_HEX} from "../../constants/index.js"; import {toCheckpointHex} from "../stateCache/index.js"; import {isOptimisticBlock} from "../../util/forkChoice.js"; @@ 
-62,13 +61,13 @@ export async function importBlock( const {block, source} = blockInput; const {slot: blockSlot} = block.message; const blockRoot = this.config.getForkTypes(blockSlot).BeaconBlock.hashTreeRoot(block.message); - const blockRootHex = toHexString(blockRoot); + const blockRootHex = toRootHex(blockRoot); const currentEpoch = computeEpochAtSlot(this.forkChoice.getTime()); const blockEpoch = computeEpochAtSlot(blockSlot); - const parentEpoch = computeEpochAtSlot(parentBlockSlot); const prevFinalizedEpoch = this.forkChoice.getFinalizedCheckpoint().epoch; const blockDelaySec = (fullyVerifiedBlock.seenTimestampSec - postState.genesisTime) % this.config.SECONDS_PER_SLOT; const recvToValLatency = Date.now() / 1000 - (opts.seenTimestampSec ?? Date.now() / 1000); + const fork = this.config.getForkSeq(blockSlot); // this is just a type assertion since blockinput with dataPromise type will not end up here if (blockInput.type === BlockInputType.dataPromise) { @@ -101,34 +100,6 @@ export async function importBlock( this.metrics?.importBlock.bySource.inc({source}); this.logger.verbose("Added block to forkchoice and state cache", {slot: blockSlot, root: blockRootHex}); - // We want to import block asap so call all event handler in the next event loop - callInNextEventLoop(async () => { - this.emitter.emit(routes.events.EventType.block, { - block: blockRootHex, - slot: blockSlot, - executionOptimistic: blockSummary != null && isOptimisticBlock(blockSummary), - }); - - // dataPromise will not end up here, but preDeneb could. 
In future we might also allow syncing - // out of data range blocks and import then in forkchoice although one would not be able to - // attest and propose with such head similar to optimistic sync - if (blockInput.type === BlockInputType.availableData) { - const {blobsSource, blobs} = blockInput.blockData; - - this.metrics?.importBlock.blobsBySource.inc({blobsSource}); - for (const blobSidecar of blobs) { - const {index, kzgCommitment} = blobSidecar; - this.emitter.emit(routes.events.EventType.blobSidecar, { - blockRoot: blockRootHex, - slot: blockSlot, - index, - kzgCommitment: toHexString(kzgCommitment), - versionedHash: toHexString(kzgCommitmentToVersionedHash(kzgCommitment)), - }); - } - } - }); - // 3. Import attestations to fork choice // // - For each attestation @@ -148,10 +119,11 @@ export async function importBlock( for (const attestation of attestations) { try { - const indexedAttestation = postState.epochCtx.getIndexedAttestation(attestation); + // TODO Electra: figure out how to reuse the attesting indices computed from state transition + const indexedAttestation = postState.epochCtx.getIndexedAttestation(fork, attestation); const {target, beaconBlockRoot} = attestation.data; - const attDataRoot = toHexString(ssz.phase0.AttestationData.hashTreeRoot(indexedAttestation.data)); + const attDataRoot = toRootHex(ssz.phase0.AttestationData.hashTreeRoot(indexedAttestation.data)); this.seenAggregatedAttestations.add( target.epoch, attDataRoot, @@ -241,7 +213,7 @@ export async function importBlock( if (newHead.blockRoot !== oldHead.blockRoot) { // Set head state as strong reference - this.regen.updateHeadState(newHead.stateRoot, postState); + this.regen.updateHeadState(newHead, postState); this.emitter.emit(routes.events.EventType.head, { block: newHead.blockRoot, @@ -362,12 +334,6 @@ export async function importBlock( this.logger.verbose("After importBlock caching postState without SSZ cache", {slot: postState.slot}); } - if (parentEpoch < blockEpoch) { - // 
current epoch and previous epoch are likely cached in previous states - this.shufflingCache.processState(postState, postState.epochCtx.nextShuffling.epoch); - this.logger.verbose("Processed shuffling for next epoch", {parentEpoch, blockEpoch, slot: blockSlot}); - } - if (blockSlot % SLOTS_PER_EPOCH === 0) { // Cache state to preserve epoch transition work const checkpointState = postState; @@ -399,9 +365,9 @@ export async function importBlock( const preFinalizedEpoch = parentBlockSummary.finalizedEpoch; if (finalizedEpoch > preFinalizedEpoch) { this.emitter.emit(routes.events.EventType.finalizedCheckpoint, { - block: toHexString(finalizedCheckpoint.root), + block: toRootHex(finalizedCheckpoint.root), epoch: finalizedCheckpoint.epoch, - state: toHexString(checkpointState.hashTreeRoot()), + state: toRootHex(checkpointState.hashTreeRoot()), executionOptimistic: false, }); this.logger.verbose("Checkpoint finalized", toCheckpointHex(finalizedCheckpoint)); @@ -413,32 +379,58 @@ export async function importBlock( // Send block events, only for recent enough blocks if (this.clock.currentSlot - blockSlot < EVENTSTREAM_EMIT_RECENT_BLOCK_SLOTS) { - // NOTE: Skip looping if there are no listeners from the API - if (this.emitter.listenerCount(routes.events.EventType.voluntaryExit)) { - for (const voluntaryExit of block.message.body.voluntaryExits) { - this.emitter.emit(routes.events.EventType.voluntaryExit, voluntaryExit); + // We want to import block asap so call all event handler in the next event loop + callInNextEventLoop(() => { + // NOTE: Skip emitting if there are no listeners from the API + if (this.emitter.listenerCount(routes.events.EventType.block)) { + this.emitter.emit(routes.events.EventType.block, { + block: blockRootHex, + slot: blockSlot, + executionOptimistic: blockSummary != null && isOptimisticBlock(blockSummary), + }); } - } - if (this.emitter.listenerCount(routes.events.EventType.blsToExecutionChange)) { - for (const blsToExecutionChange of 
(block.message.body as capella.BeaconBlockBody).blsToExecutionChanges ?? []) { - this.emitter.emit(routes.events.EventType.blsToExecutionChange, blsToExecutionChange); + if (this.emitter.listenerCount(routes.events.EventType.voluntaryExit)) { + for (const voluntaryExit of block.message.body.voluntaryExits) { + this.emitter.emit(routes.events.EventType.voluntaryExit, voluntaryExit); + } } - } - if (this.emitter.listenerCount(routes.events.EventType.attestation)) { - for (const attestation of block.message.body.attestations) { - this.emitter.emit(routes.events.EventType.attestation, attestation); + if (this.emitter.listenerCount(routes.events.EventType.blsToExecutionChange)) { + for (const blsToExecutionChange of (block.message as capella.BeaconBlock).body.blsToExecutionChanges ?? []) { + this.emitter.emit(routes.events.EventType.blsToExecutionChange, blsToExecutionChange); + } } - } - if (this.emitter.listenerCount(routes.events.EventType.attesterSlashing)) { - for (const attesterSlashing of block.message.body.attesterSlashings) { - this.emitter.emit(routes.events.EventType.attesterSlashing, attesterSlashing); + if (this.emitter.listenerCount(routes.events.EventType.attestation)) { + for (const attestation of block.message.body.attestations) { + this.emitter.emit(routes.events.EventType.attestation, attestation); + } } - } - if (this.emitter.listenerCount(routes.events.EventType.proposerSlashing)) { - for (const proposerSlashing of block.message.body.proposerSlashings) { - this.emitter.emit(routes.events.EventType.proposerSlashing, proposerSlashing); + if (this.emitter.listenerCount(routes.events.EventType.attesterSlashing)) { + for (const attesterSlashing of block.message.body.attesterSlashings) { + this.emitter.emit(routes.events.EventType.attesterSlashing, attesterSlashing); + } } - } + if (this.emitter.listenerCount(routes.events.EventType.proposerSlashing)) { + for (const proposerSlashing of block.message.body.proposerSlashings) { + 
this.emitter.emit(routes.events.EventType.proposerSlashing, proposerSlashing); + } + } + if ( + blockInput.type === BlockInputType.availableData && + this.emitter.listenerCount(routes.events.EventType.blobSidecar) + ) { + const {blobs} = blockInput.blockData; + for (const blobSidecar of blobs) { + const {index, kzgCommitment} = blobSidecar; + this.emitter.emit(routes.events.EventType.blobSidecar, { + blockRoot: blockRootHex, + slot: blockSlot, + index, + kzgCommitment: toHex(kzgCommitment), + versionedHash: toHex(kzgCommitmentToVersionedHash(kzgCommitment)), + }); + } + } + }); } // Register stat metrics about the block after importing it @@ -452,6 +444,13 @@ export async function importBlock( fullyVerifiedBlock.postState.epochCtx.currentSyncCommitteeIndexed.validatorIndices ); } + // dataPromise will not end up here, but preDeneb could. In future we might also allow syncing + // out of data range blocks and import then in forkchoice although one would not be able to + // attest and propose with such head similar to optimistic sync + if (blockInput.type === BlockInputType.availableData) { + const {blobsSource} = blockInput.blockData; + this.metrics?.importBlock.blobsBySource.inc({blobsSource}); + } const advancedSlot = this.clock.slotWithFutureTolerance(REPROCESS_MIN_TIME_TO_NEXT_SLOT_SEC); diff --git a/packages/beacon-node/src/chain/blocks/index.ts b/packages/beacon-node/src/chain/blocks/index.ts index eb8c2663c9b6..a7a2ced2ad7a 100644 --- a/packages/beacon-node/src/chain/blocks/index.ts +++ b/packages/beacon-node/src/chain/blocks/index.ts @@ -1,4 +1,4 @@ -import {toHex, isErrorAborted} from "@lodestar/utils"; +import {isErrorAborted, toRootHex} from "@lodestar/utils"; import {SignedBeaconBlock} from "@lodestar/types"; import {JobItemQueue, isQueueErrorAborted} from "../../util/queue/index.js"; import {Metrics} from "../../metrics/metrics.js"; @@ -127,7 +127,7 @@ export async function processBlocks( const blockSlot = signedBlock.message.slot; const {preState, 
postState} = err.type; const forkTypes = this.config.getForkTypes(blockSlot); - const invalidRoot = toHex(postState.hashTreeRoot()); + const invalidRoot = toRootHex(postState.hashTreeRoot()); const suffix = `slot_${blockSlot}_invalid_state_root_${invalidRoot}`; this.persistInvalidSszValue(forkTypes.SignedBeaconBlock, signedBlock, suffix); diff --git a/packages/beacon-node/src/chain/blocks/types.ts b/packages/beacon-node/src/chain/blocks/types.ts index da573bb76334..8b793932e951 100644 --- a/packages/beacon-node/src/chain/blocks/types.ts +++ b/packages/beacon-node/src/chain/blocks/types.ts @@ -1,7 +1,7 @@ import {CachedBeaconStateAllForks, computeEpochAtSlot} from "@lodestar/state-transition"; import {MaybeValidExecutionStatus, DataAvailabilityStatus} from "@lodestar/fork-choice"; import {deneb, Slot, RootHex, SignedBeaconBlock} from "@lodestar/types"; -import {ForkSeq, ForkName} from "@lodestar/params"; +import {ForkSeq, ForkBlobs} from "@lodestar/params"; import {ChainForkConfig} from "@lodestar/config"; export enum BlockInputType { @@ -36,7 +36,7 @@ export enum GossipedInputType { type BlobsCacheMap = Map; -type ForkBlobsInfo = {fork: ForkName.deneb}; +type ForkBlobsInfo = {fork: ForkBlobs}; type BlobsData = {blobs: deneb.BlobSidecars; blobsBytes: (Uint8Array | null)[]; blobsSource: BlobsSource}; export type BlockInputDataBlobs = ForkBlobsInfo & BlobsData; export type BlockInputData = BlockInputDataBlobs; diff --git a/packages/beacon-node/src/chain/blocks/verifyBlock.ts b/packages/beacon-node/src/chain/blocks/verifyBlock.ts index bf4cc7e60dcd..4d21342bd8cc 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlock.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlock.ts @@ -1,4 +1,3 @@ -import {toHexString} from "@chainsafe/ssz"; import { CachedBeaconStateAllForks, computeEpochAtSlot, @@ -9,7 +8,7 @@ import {bellatrix, deneb} from "@lodestar/types"; import {ForkName} from "@lodestar/params"; import {ProtoBlock, ExecutionStatus, DataAvailabilityStatus} 
from "@lodestar/fork-choice"; import {ChainForkConfig} from "@lodestar/config"; -import {Logger} from "@lodestar/utils"; +import {Logger, toRootHex} from "@lodestar/utils"; import {BlockError, BlockErrorCode} from "../errors/index.js"; import {BlockProcessOpts} from "../options.js"; import {RegenCaller} from "../regen/index.js"; @@ -198,9 +197,9 @@ export async function verifyBlocksInEpoch( } function logOnPowBlock(logger: Logger, config: ChainForkConfig, mergeBlock: bellatrix.BeaconBlock): void { - const mergeBlockHash = toHexString(config.getForkTypes(mergeBlock.slot).BeaconBlock.hashTreeRoot(mergeBlock)); - const mergeExecutionHash = toHexString(mergeBlock.body.executionPayload.blockHash); - const mergePowHash = toHexString(mergeBlock.body.executionPayload.parentHash); + const mergeBlockHash = toRootHex(config.getForkTypes(mergeBlock.slot).BeaconBlock.hashTreeRoot(mergeBlock)); + const mergeExecutionHash = toRootHex(mergeBlock.body.executionPayload.blockHash); + const mergePowHash = toRootHex(mergeBlock.body.executionPayload.parentHash); logger.info(POS_PANDA_MERGE_TRANSITION_BANNER); logger.info("Execution transitioning from PoW to PoS!!!"); logger.info("Importing block referencing terminal PoW block", { diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts index d08a747259b1..e641ff9ae6d9 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksExecutionPayloads.ts @@ -1,4 +1,3 @@ -import {toHexString} from "@chainsafe/ssz"; import { CachedBeaconStateAllForks, isExecutionStateType, @@ -6,7 +5,7 @@ import { isMergeTransitionBlock as isMergeTransitionBlockFn, isExecutionEnabled, } from "@lodestar/state-transition"; -import {bellatrix, Slot, deneb, SignedBeaconBlock} from "@lodestar/types"; +import {bellatrix, Slot, deneb, SignedBeaconBlock, electra} from "@lodestar/types"; import 
{ IForkChoice, assertValidTerminalPowBlock, @@ -17,7 +16,7 @@ import { LVHInvalidResponse, } from "@lodestar/fork-choice"; import {ChainForkConfig} from "@lodestar/config"; -import {ErrorAborted, Logger} from "@lodestar/utils"; +import {ErrorAborted, Logger, toRootHex} from "@lodestar/utils"; import {ForkSeq, SAFE_SLOTS_TO_IMPORT_OPTIMISTICALLY} from "@lodestar/params"; import {IExecutionEngine} from "../../execution/engine/interface.js"; @@ -205,10 +204,8 @@ export async function verifyBlocksExecutionPayload( // in import block if (isMergeTransitionBlock) { const mergeBlock = block.message as bellatrix.BeaconBlock; - const mergeBlockHash = toHexString( - chain.config.getForkTypes(mergeBlock.slot).BeaconBlock.hashTreeRoot(mergeBlock) - ); - const powBlockRootHex = toHexString(mergeBlock.body.executionPayload.parentHash); + const mergeBlockHash = toRootHex(chain.config.getForkTypes(mergeBlock.slot).BeaconBlock.hashTreeRoot(mergeBlock)); + const powBlockRootHex = toRootHex(mergeBlock.body.executionPayload.parentHash); const powBlock = await chain.eth1.getPowBlock(powBlockRootHex).catch((error) => { // Lets just warn the user here, errors if any will be reported on // `assertValidTerminalPowBlock` checks @@ -305,6 +302,8 @@ export async function verifyBlockExecutionPayload( ? (block.message.body as deneb.BeaconBlockBody).blobKzgCommitments.map(kzgCommitmentToVersionedHash) : undefined; const parentBlockRoot = ForkSeq[fork] >= ForkSeq.deneb ? block.message.parentRoot : undefined; + const executionRequests = + ForkSeq[fork] >= ForkSeq.electra ? 
(block.message.body as electra.BeaconBlockBody).executionRequests : undefined; const logCtx = {slot: block.message.slot, executionBlock: executionPayloadEnabled.blockNumber}; chain.logger.debug("Call engine api newPayload", logCtx); @@ -312,7 +311,8 @@ export async function verifyBlockExecutionPayload( fork, executionPayloadEnabled, versionedHashes, - parentBlockRoot + parentBlockRoot, + executionRequests ); chain.logger.debug("Receive engine api newPayload result", {...logCtx, status: execResult.status}); @@ -330,7 +330,7 @@ export async function verifyBlockExecutionPayload( const lvhResponse = { executionStatus, latestValidExecHash: execResult.latestValidHash, - invalidateFromParentBlockRoot: toHexString(block.message.parentRoot), + invalidateFromParentBlockRoot: toRootHex(block.message.parentRoot), }; const execError = new BlockError(block, { code: BlockErrorCode.EXECUTION_ENGINE_ERROR, @@ -407,7 +407,7 @@ function getSegmentErrorResponse( for (let mayBeLVHIndex = blockIndex - 1; mayBeLVHIndex >= 0; mayBeLVHIndex--) { const block = blocks[mayBeLVHIndex]; if ( - toHexString((block.message.body as bellatrix.BeaconBlockBody).executionPayload.blockHash) === + toRootHex((block.message.body as bellatrix.BeaconBlockBody).executionPayload.blockHash) === lvhResponse.latestValidExecHash ) { lvhFound = true; diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksSanityChecks.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksSanityChecks.ts index e62355a4889d..573dfa52ce8b 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlocksSanityChecks.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksSanityChecks.ts @@ -2,7 +2,7 @@ import {computeStartSlotAtEpoch} from "@lodestar/state-transition"; import {ChainForkConfig} from "@lodestar/config"; import {IForkChoice, ProtoBlock} from "@lodestar/fork-choice"; import {Slot} from "@lodestar/types"; -import {toHexString} from "@lodestar/utils"; +import {toRootHex} from "@lodestar/utils"; import {IClock} from 
"../../util/clock.js"; import {BlockError, BlockErrorCode} from "../errors/index.js"; import {BlockInput, ImportBlockOpts} from "./types.js"; @@ -67,7 +67,7 @@ export function verifyBlocksSanityChecks( parentBlockSlot = relevantBlocks[relevantBlocks.length - 1].block.message.slot; } else { // When importing a block segment, only the first NON-IGNORED block must be known to the fork-choice. - const parentRoot = toHexString(block.message.parentRoot); + const parentRoot = toRootHex(block.message.parentRoot); parentBlock = chain.forkChoice.getBlockHex(parentRoot); if (!parentBlock) { throw new BlockError(block, {code: BlockErrorCode.PARENT_UNKNOWN, parentRoot}); @@ -85,9 +85,7 @@ export function verifyBlocksSanityChecks( // Not already known // IGNORE if `partiallyVerifiedBlock.ignoreIfKnown` - const blockHash = toHexString( - chain.config.getForkTypes(block.message.slot).BeaconBlock.hashTreeRoot(block.message) - ); + const blockHash = toRootHex(chain.config.getForkTypes(block.message.slot).BeaconBlock.hashTreeRoot(block.message)); if (chain.forkChoice.hasBlockHex(blockHash)) { if (opts.ignoreIfKnown) { continue; diff --git a/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts b/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts index b0f5ab159591..89cf7ddc7556 100644 --- a/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts +++ b/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts @@ -1,4 +1,4 @@ -import {toHex} from "@lodestar/utils"; +import {toRootHex} from "@lodestar/utils"; import {BeaconChain} from "../chain.js"; import {BlockInput, BlockInputType} from "./types.js"; @@ -15,7 +15,7 @@ export async function writeBlockInputToDb(this: BeaconChain, blocksInput: BlockI for (const blockInput of blocksInput) { const {block, blockBytes} = blockInput; const blockRoot = this.config.getForkTypes(block.message.slot).BeaconBlock.hashTreeRoot(block.message); - const blockRootHex = toHex(blockRoot); + const blockRootHex = 
toRootHex(blockRoot); if (blockBytes) { // skip serializing data if we already have it this.metrics?.importBlock.persistBlockWithSerializedDataCount.inc(); @@ -59,7 +59,7 @@ export async function removeEagerlyPersistedBlockInputs(this: BeaconChain, block for (const blockInput of blockInputs) { const {block, type} = blockInput; const blockRoot = this.config.getForkTypes(block.message.slot).BeaconBlock.hashTreeRoot(block.message); - const blockRootHex = toHex(blockRoot); + const blockRootHex = toRootHex(blockRoot); if (!this.forkChoice.hasBlockHex(blockRootHex)) { blockToRemove.push(block); diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index a12ee4a21f64..371f660abe2e 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -1,5 +1,6 @@ import path from "node:path"; -import {CompositeTypeAny, fromHexString, TreeView, Type, toHexString} from "@chainsafe/ssz"; +import {CompositeTypeAny, TreeView, Type} from "@chainsafe/ssz"; +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; import { BeaconStateAllForks, CachedBeaconStateAllForks, @@ -10,9 +11,9 @@ import { getEffectiveBalanceIncrementsZeroInactive, isCachedBeaconState, Index2PubkeyCache, - PubkeyIndexMap, EpochShuffling, computeEndSlotAtEpoch, + computeAnchorCheckpoint, } from "@lodestar/state-transition"; import {BeaconConfig} from "@lodestar/config"; import { @@ -35,7 +36,7 @@ import { } from "@lodestar/types"; import {CheckpointWithHex, ExecutionStatus, IForkChoice, ProtoBlock, UpdateHeadOpt} from "@lodestar/fork-choice"; import {ProcessShutdownCallback} from "@lodestar/validator"; -import {Logger, gweiToWei, isErrorAborted, pruneSetToMax, sleep, toHex} from "@lodestar/utils"; +import {Logger, fromHex, gweiToWei, isErrorAborted, pruneSetToMax, sleep, toRootHex} from "@lodestar/utils"; import {ForkSeq, GENESIS_SLOT, SLOTS_PER_EPOCH} from "@lodestar/params"; import {GENESIS_EPOCH, ZERO_HASH} from 
"../constants/index.js"; @@ -60,7 +61,6 @@ import { import {IChainOptions} from "./options.js"; import {QueuedStateRegenerator, RegenCaller} from "./regen/index.js"; import {initializeForkChoice} from "./forkChoice/index.js"; -import {computeAnchorCheckpoint} from "./initState.js"; import {IBlsVerifier, BlsSingleThreadVerifier, BlsMultiThreadWorkerPool} from "./bls/index.js"; import { SeenAttesters, @@ -119,6 +119,7 @@ export class BeaconChain implements IBeaconChain { readonly config: BeaconConfig; readonly logger: Logger; readonly metrics: Metrics | null; + readonly bufferPool: BufferPool | null; readonly anchorStateLatestBlockSlot: Slot; @@ -133,7 +134,7 @@ export class BeaconChain implements IBeaconChain { // Ops pool readonly attestationPool: AttestationPool; - readonly aggregatedAttestationPool = new AggregatedAttestationPool(); + readonly aggregatedAttestationPool: AggregatedAttestationPool; readonly syncCommitteeMessagePool: SyncCommitteeMessagePool; readonly syncContributionAndProofPool = new SyncContributionAndProofPool(); readonly opPool = new OpPool(); @@ -226,7 +227,13 @@ export class BeaconChain implements IBeaconChain { if (!clock) clock = new Clock({config, genesisTime: this.genesisTime, signal}); const preAggregateCutOffTime = (2 / 3) * this.config.SECONDS_PER_SLOT; - this.attestationPool = new AttestationPool(clock, preAggregateCutOffTime, this.opts?.preaggregateSlotDistance); + this.attestationPool = new AttestationPool( + config, + clock, + preAggregateCutOffTime, + this.opts?.preaggregateSlotDistance + ); + this.aggregatedAttestationPool = new AggregatedAttestationPool(this.config); this.syncCommitteeMessagePool = new SyncCommitteeMessagePool( clock, preAggregateCutOffTime, @@ -239,7 +246,6 @@ export class BeaconChain implements IBeaconChain { this.beaconProposerCache = new BeaconProposerCache(opts); this.checkpointBalancesCache = new CheckpointBalancesCache(); - this.shufflingCache = new ShufflingCache(metrics, this.opts); // Restore state 
caches // anchorState may already by a CachedBeaconState. If so, don't create the cache again, since deserializing all @@ -254,9 +260,21 @@ export class BeaconChain implements IBeaconChain { pubkey2index: new PubkeyIndexMap(), index2pubkey: [], }); - this.shufflingCache.processState(cachedState, cachedState.epochCtx.previousShuffling.epoch); - this.shufflingCache.processState(cachedState, cachedState.epochCtx.currentShuffling.epoch); - this.shufflingCache.processState(cachedState, cachedState.epochCtx.nextShuffling.epoch); + + this.shufflingCache = cachedState.epochCtx.shufflingCache = new ShufflingCache(metrics, logger, this.opts, [ + { + shuffling: cachedState.epochCtx.previousShuffling, + decisionRoot: cachedState.epochCtx.previousDecisionRoot, + }, + { + shuffling: cachedState.epochCtx.currentShuffling, + decisionRoot: cachedState.epochCtx.currentDecisionRoot, + }, + { + shuffling: cachedState.epochCtx.nextShuffling, + decisionRoot: cachedState.epochCtx.nextDecisionRoot, + }, + ]); // Persist single global instance of state caches this.pubkey2index = cachedState.epochCtx.pubkey2index; @@ -266,6 +284,9 @@ export class BeaconChain implements IBeaconChain { const blockStateCache = this.opts.nHistoricalStates ? new FIFOBlockStateCache(this.opts, {metrics}) : new BlockStateCacheImpl({metrics}); + this.bufferPool = this.opts.nHistoricalStates + ? new BufferPool(anchorState.type.tree_serializedSize(anchorState.node), metrics) + : null; const checkpointStateCache = this.opts.nHistoricalStates ? new PersistentCheckpointStateCache( { @@ -274,7 +295,7 @@ export class BeaconChain implements IBeaconChain { clock, shufflingCache: this.shufflingCache, blockStateCache, - bufferPool: new BufferPool(anchorState.type.tree_serializedSize(anchorState.node), metrics), + bufferPool: this.bufferPool, datastore: fileDataStore ? 
// debug option if we want to investigate any issues with the DB new FileCPStateDatastore() @@ -324,7 +345,7 @@ export class BeaconChain implements IBeaconChain { this.bls = bls; this.emitter = emitter; - this.archiver = new Archiver(db, this, logger, signal, opts); + this.archiver = new Archiver(db, this, logger, signal, opts, metrics); // always run PrepareNextSlotScheduler except for fork_choice spec tests if (!opts?.disablePrepareNextSlot) { new PrepareNextSlotScheduler(this, this.config, metrics, this.logger, signal); @@ -511,7 +532,7 @@ export class BeaconChain implements IBeaconChain { }; } - const data = await this.db.stateArchive.getByRoot(fromHexString(stateRoot)); + const data = await this.db.stateArchive.getByRoot(fromHex(stateRoot)); return data && {state: data, executionOptimistic: false, finalized: true}; } @@ -558,7 +579,7 @@ export class BeaconChain implements IBeaconChain { // Unfinalized slot, attempt to find in fork-choice const block = this.forkChoice.getCanonicalBlockAtSlot(slot); if (block) { - const data = await this.db.block.get(fromHexString(block.blockRoot)); + const data = await this.db.block.get(fromHex(block.blockRoot)); if (data) { return {block: data, executionOptimistic: isOptimisticBlock(block), finalized: false}; } @@ -577,7 +598,7 @@ export class BeaconChain implements IBeaconChain { ): Promise<{block: SignedBeaconBlock; executionOptimistic: boolean; finalized: boolean} | null> { const block = this.forkChoice.getBlockHex(root); if (block) { - const data = await this.db.block.get(fromHexString(root)); + const data = await this.db.block.get(fromHex(root)); if (data) { return {block: data, executionOptimistic: isOptimisticBlock(block), finalized: false}; } @@ -585,14 +606,14 @@ export class BeaconChain implements IBeaconChain { // TODO: Add a lock to the archiver to have deterministic behavior on where are blocks } - const data = await this.db.blockArchive.getByRoot(fromHexString(root)); + const data = await 
this.db.blockArchive.getByRoot(fromHex(root)); return data && {block: data, executionOptimistic: false, finalized: true}; } async produceCommonBlockBody(blockAttributes: BlockAttributes): Promise { const {slot, parentBlockRoot} = blockAttributes; const state = await this.regen.getBlockSlotState( - toHexString(parentBlockRoot), + toRootHex(parentBlockRoot), slot, {dontTransferCache: true}, RegenCaller.produceBlock @@ -641,7 +662,7 @@ export class BeaconChain implements IBeaconChain { shouldOverrideBuilder?: boolean; }> { const state = await this.regen.getBlockSlotState( - toHexString(parentBlockRoot), + toRootHex(parentBlockRoot), slot, {dontTransferCache: true}, RegenCaller.produceBlock @@ -673,7 +694,7 @@ export class BeaconChain implements IBeaconChain { : this.config.getExecutionForkTypes(slot).BlindedBeaconBlockBody.hashTreeRoot(body as BlindedBeaconBlockBody); this.logger.debug("Computing block post state from the produced body", { slot, - bodyRoot: toHexString(bodyRoot), + bodyRoot: toRootHex(bodyRoot), blockType, }); @@ -691,7 +712,7 @@ export class BeaconChain implements IBeaconChain { blockType === BlockType.Full ? 
this.config.getForkTypes(slot).BeaconBlock.hashTreeRoot(block) : this.config.getExecutionForkTypes(slot).BlindedBeaconBlock.hashTreeRoot(block as BlindedBeaconBlock); - const blockRootHex = toHex(blockRoot); + const blockRootHex = toRootHex(blockRoot); // track the produced block for consensus broadcast validations if (blockType === BlockType.Full) { @@ -729,7 +750,7 @@ export class BeaconChain implements IBeaconChain { * ) */ getContents(beaconBlock: deneb.BeaconBlock): deneb.Contents { - const blockHash = toHex(beaconBlock.body.executionPayload.blockHash); + const blockHash = toRootHex(beaconBlock.body.executionPayload.blockHash); const contents = this.producedContentsCache.get(blockHash); if (!contents) { throw Error(`No contents for executionPayload.blockHash ${blockHash}`); @@ -758,7 +779,7 @@ export class BeaconChain implements IBeaconChain { finalizedRoot: finalizedCheckpoint.epoch === GENESIS_EPOCH ? ZERO_HASH : finalizedCheckpoint.root, finalizedEpoch: finalizedCheckpoint.epoch, // TODO: PERFORMANCE: Memoize to prevent re-computing every time - headRoot: fromHexString(head.blockRoot), + headRoot: fromHex(head.blockRoot), headSlot: head.slot, }; } @@ -892,8 +913,8 @@ export class BeaconChain implements IBeaconChain { state = await this.regen.getState(attHeadBlock.stateRoot, regenCaller); } - // resolve the promise to unblock other calls of the same epoch and dependent root - return this.shufflingCache.processState(state, attEpoch); + // should always be the current epoch of the active context so no need to await a result from the ShufflingCache + return state.epochCtx.getShufflingAtEpoch(attEpoch); } /** @@ -926,7 +947,7 @@ export class BeaconChain implements IBeaconChain { checkpointRoot: checkpoint.rootHex, stateId, stateSlot: state.slot, - stateRoot: toHex(state.hashTreeRoot()), + stateRoot: toRootHex(state.hashTreeRoot()), }); } @@ -997,7 +1018,7 @@ export class BeaconChain implements IBeaconChain { // by default store to lodestar_archive of current dir 
const dirpath = path.join(this.opts.persistInvalidSszObjectsDir ?? "invalid_ssz_objects", dateStr); - const filepath = path.join(dirpath, `${typeName}_${toHex(root)}.ssz`); + const filepath = path.join(dirpath, `${typeName}_${toRootHex(root)}.ssz`); await ensureDir(dirpath); @@ -1027,6 +1048,9 @@ export class BeaconChain implements IBeaconChain { metrics.forkChoice.balancesLength.set(forkChoiceMetrics.balancesLength); metrics.forkChoice.nodes.set(forkChoiceMetrics.nodes); metrics.forkChoice.indices.set(forkChoiceMetrics.indices); + + const headState = this.getHeadState(); + metrics.headState.unfinalizedPubkeyCacheSize.set(headState.epochCtx.unfinalizedPubkey2index.size); } private onClockSlot(slot: Slot): void { @@ -1107,7 +1131,7 @@ export class BeaconChain implements IBeaconChain { // TODO: Improve using regen here const {blockRoot, stateRoot, slot} = this.forkChoice.getHead(); const headState = this.regen.getStateSync(stateRoot); - const headBlock = await this.db.block.get(fromHexString(blockRoot)); + const headBlock = await this.db.block.get(fromHex(blockRoot)); if (headBlock == null) { throw Error(`Head block ${slot} ${headBlock} is not available in database`); } @@ -1115,6 +1139,39 @@ export class BeaconChain implements IBeaconChain { if (headState) { this.opPool.pruneAll(headBlock, headState); } + + const cpEpoch = cp.epoch; + + if (headState === null) { + this.logger.verbose("Head state is null"); + } else if (cpEpoch >= this.config.ELECTRA_FORK_EPOCH) { + // Get the validator.length from the state at cpEpoch + // We are confident the last element in the list is from headEpoch + // Thus we query from the end of the list. 
(cpEpoch - headEpoch - 1) is negative number + const pivotValidatorIndex = headState.epochCtx.getValidatorCountAtEpoch(cpEpoch); + + if (pivotValidatorIndex !== undefined) { + // Note EIP-6914 will break this logic + const newFinalizedValidators = headState.epochCtx.unfinalizedPubkey2index.filter( + (index, _pubkey) => index < pivotValidatorIndex + ); + + // Populate finalized pubkey cache and remove unfinalized pubkey cache + if (!newFinalizedValidators.isEmpty()) { + this.regen.updateUnfinalizedPubkeys(newFinalizedValidators); + } + } + } + + // TODO-Electra: Deprecating eth1Data poll requires a check on a finalized checkpoint state. + // Will resolve this later + // if (cpEpoch >= (this.config.ELECTRA_FORK_EPOCH ?? Infinity)) { + // // finalizedState can be safely casted to Electra state since cp is already post-Electra + // if (finalizedState.eth1DepositIndex >= (finalizedState as CachedBeaconStateElectra).depositRequestsStartIndex) { + // // Signal eth1 to stop polling eth1Data + // this.eth1.stopPollingEth1Data(); + // } + // } } async updateBeaconProposerData(epoch: Epoch, proposers: ProposerPreparationData[]): Promise { @@ -1152,10 +1209,10 @@ export class BeaconChain implements IBeaconChain { const preState = this.regen.getPreStateSync(block); if (preState === null) { - throw Error(`Pre-state is unavailable given block's parent root ${toHexString(block.parentRoot)}`); + throw Error(`Pre-state is unavailable given block's parent root ${toRootHex(block.parentRoot)}`); } - const postState = this.regen.getStateSync(toHexString(block.stateRoot)) ?? undefined; + const postState = this.regen.getStateSync(toRootHex(block.stateRoot)) ?? 
undefined; return computeBlockRewards(block, preState.clone(), postState?.clone()); } @@ -1173,7 +1230,7 @@ export class BeaconChain implements IBeaconChain { } const {executionOptimistic, finalized} = stateResult; - const stateRoot = toHexString(stateResult.state.hashTreeRoot()); + const stateRoot = toRootHex(stateResult.state.hashTreeRoot()); const cachedState = this.regen.getStateSync(stateRoot); @@ -1193,7 +1250,7 @@ export class BeaconChain implements IBeaconChain { const preState = this.regen.getPreStateSync(block); if (preState === null) { - throw Error(`Pre-state is unavailable given block's parent root ${toHexString(block.parentRoot)}`); + throw Error(`Pre-state is unavailable given block's parent root ${toRootHex(block.parentRoot)}`); } return computeSyncCommitteeRewards(block, preState.clone(), validatorIds); diff --git a/packages/beacon-node/src/chain/errors/attestationError.ts b/packages/beacon-node/src/chain/errors/attestationError.ts index 8e0dc925f32e..9f8e86cea1ab 100644 --- a/packages/beacon-node/src/chain/errors/attestationError.ts +++ b/packages/beacon-node/src/chain/errors/attestationError.ts @@ -1,5 +1,5 @@ -import {toHexString} from "@chainsafe/ssz"; import {Epoch, Slot, ValidatorIndex, RootHex} from "@lodestar/types"; +import {toRootHex} from "@lodestar/utils"; import {GossipActionError} from "./gossipValidation.js"; export enum AttestationErrorCode { @@ -127,6 +127,14 @@ export enum AttestationErrorCode { INVALID_SERIALIZED_BYTES = "ATTESTATION_ERROR_INVALID_SERIALIZED_BYTES", /** Too many skipped slots. */ TOO_MANY_SKIPPED_SLOTS = "ATTESTATION_ERROR_TOO_MANY_SKIPPED_SLOTS", + /** + * Electra: The aggregated attestation does not have exactly one committee bit set. 
+ */ + NOT_EXACTLY_ONE_COMMITTEE_BIT_SET = "ATTESTATION_ERROR_NOT_EXACTLY_ONE_COMMITTEE_BIT_SET", + /** + * Electra: Invalid attestationData index: is non-zero + */ + NON_ZERO_ATTESTATION_DATA_INDEX = "ATTESTATION_ERROR_NON_ZERO_ATTESTATION_DATA_INDEX", } export type AttestationErrorType = @@ -160,14 +168,16 @@ export type AttestationErrorType = | {code: AttestationErrorCode.INVALID_AGGREGATOR} | {code: AttestationErrorCode.INVALID_INDEXED_ATTESTATION} | {code: AttestationErrorCode.INVALID_SERIALIZED_BYTES} - | {code: AttestationErrorCode.TOO_MANY_SKIPPED_SLOTS; headBlockSlot: Slot; attestationSlot: Slot}; + | {code: AttestationErrorCode.TOO_MANY_SKIPPED_SLOTS; headBlockSlot: Slot; attestationSlot: Slot} + | {code: AttestationErrorCode.NOT_EXACTLY_ONE_COMMITTEE_BIT_SET} + | {code: AttestationErrorCode.NON_ZERO_ATTESTATION_DATA_INDEX}; export class AttestationError extends GossipActionError { getMetadata(): Record { const type = this.type; switch (type.code) { case AttestationErrorCode.UNKNOWN_TARGET_ROOT: - return {code: type.code, root: toHexString(type.root)}; + return {code: type.code, root: toRootHex(type.root)}; case AttestationErrorCode.MISSING_STATE_TO_VERIFY_ATTESTATION: // TODO: The stack trace gets lost here return {code: type.code, error: type.error.message}; diff --git a/packages/beacon-node/src/chain/errors/blockError.ts b/packages/beacon-node/src/chain/errors/blockError.ts index 5f12bd939342..6280533c7a68 100644 --- a/packages/beacon-node/src/chain/errors/blockError.ts +++ b/packages/beacon-node/src/chain/errors/blockError.ts @@ -1,6 +1,5 @@ -import {toHexString} from "@chainsafe/ssz"; import {RootHex, SignedBeaconBlock, Slot, ValidatorIndex} from "@lodestar/types"; -import {LodestarError} from "@lodestar/utils"; +import {LodestarError, toRootHex} from "@lodestar/utils"; import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; import {ExecutionPayloadStatus} from "../../execution/engine/interface.js"; import {QueueErrorCode} from 
"../../util/queue/index.js"; @@ -151,8 +150,8 @@ export function renderBlockErrorType(type: BlockErrorType): Record({ name: "lodestar_historical_state_stfn_balances_nodes_populated_miss_total", help: "Total count state.balances nodesPopulated is false on stfn", diff --git a/packages/beacon-node/src/chain/initState.ts b/packages/beacon-node/src/chain/initState.ts index aae03a07f50c..311806fb1be7 100644 --- a/packages/beacon-node/src/chain/initState.ts +++ b/packages/beacon-node/src/chain/initState.ts @@ -1,16 +1,13 @@ -import {toHexString} from "@chainsafe/ssz"; import { - blockToHeader, computeEpochAtSlot, BeaconStateAllForks, CachedBeaconStateAllForks, - computeCheckpointEpochAtStateSlot, computeStartSlotAtEpoch, } from "@lodestar/state-transition"; -import {SignedBeaconBlock, phase0, ssz} from "@lodestar/types"; +import {SignedBeaconBlock} from "@lodestar/types"; import {ChainForkConfig} from "@lodestar/config"; -import {Logger, toHex} from "@lodestar/utils"; -import {GENESIS_SLOT, ZERO_HASH} from "../constants/index.js"; +import {Logger, toHex, toRootHex} from "@lodestar/utils"; +import {GENESIS_SLOT} from "../constants/index.js"; import {IBeaconDb} from "../db/index.js"; import {Eth1Provider} from "../eth1/index.js"; import {Metrics} from "../metrics/index.js"; @@ -38,17 +35,18 @@ export async function persistGenesisResult( export async function persistAnchorState( config: ChainForkConfig, db: IBeaconDb, - anchorState: BeaconStateAllForks + anchorState: BeaconStateAllForks, + anchorStateBytes: Uint8Array ): Promise { if (anchorState.slot === GENESIS_SLOT) { const genesisBlock = createGenesisBlock(config, anchorState); await Promise.all([ db.blockArchive.add(genesisBlock), db.block.add(genesisBlock), - db.stateArchive.add(anchorState), + db.stateArchive.putBinary(anchorState.slot, anchorStateBytes), ]); } else { - await db.stateArchive.add(anchorState); + await db.stateArchive.putBinary(anchorState.slot, anchorStateBytes); } } @@ -103,8 +101,8 @@ export async 
function initStateFromEth1({ const blockRoot = types.BeaconBlock.hashTreeRoot(genesisBlock.message); logger.info("Initializing genesis state", { - stateRoot: toHexString(stateRoot), - blockRoot: toHexString(blockRoot), + stateRoot: toRootHex(stateRoot), + blockRoot: toRootHex(blockRoot), validatorCount: genesisResult.state.validators.length, }); @@ -146,7 +144,7 @@ export async function initStateFromDb( logger.info("Initializing beacon state from db", { slot: state.slot, epoch: computeEpochAtSlot(state.slot), - stateRoot: toHexString(state.hashTreeRoot()), + stateRoot: toRootHex(state.hashTreeRoot()), }); return state; @@ -155,16 +153,17 @@ export async function initStateFromDb( /** * Initialize and persist an anchor state (either weak subjectivity or genesis) */ -export async function initStateFromAnchorState( +export async function checkAndPersistAnchorState( config: ChainForkConfig, db: IBeaconDb, logger: Logger, anchorState: BeaconStateAllForks, + anchorStateBytes: Uint8Array, { isWithinWeakSubjectivityPeriod, isCheckpointState, }: {isWithinWeakSubjectivityPeriod: boolean; isCheckpointState: boolean} -): Promise { +): Promise { const expectedFork = config.getForkInfo(computeStartSlotAtEpoch(anchorState.fork.epoch)); const expectedForkVersion = toHex(expectedFork.version); const stateFork = toHex(anchorState.fork.currentVersion); @@ -179,22 +178,22 @@ export async function initStateFromAnchorState( logger.info(`Initializing beacon from a valid ${stateInfo} state`, { slot: anchorState.slot, epoch: computeEpochAtSlot(anchorState.slot), - stateRoot: toHexString(anchorState.hashTreeRoot()), + stateRoot: toRootHex(anchorState.hashTreeRoot()), isWithinWeakSubjectivityPeriod, }); } else { logger.warn(`Initializing from a stale ${stateInfo} state vulnerable to long range attacks`, { slot: anchorState.slot, epoch: computeEpochAtSlot(anchorState.slot), - stateRoot: toHexString(anchorState.hashTreeRoot()), + stateRoot: toRootHex(anchorState.hashTreeRoot()), 
isWithinWeakSubjectivityPeriod, }); logger.warn("Checkpoint sync recommended, please use --help to see checkpoint sync options"); } - await persistAnchorState(config, db, anchorState); - - return anchorState; + if (isCheckpointState || anchorState.slot === GENESIS_SLOT) { + await persistAnchorState(config, db, anchorState, anchorStateBytes); + } } export function initBeaconMetrics(metrics: Metrics, state: BeaconStateAllForks): void { @@ -203,35 +202,3 @@ export function initBeaconMetrics(metrics: Metrics, state: BeaconStateAllForks): metrics.currentJustifiedEpoch.set(state.currentJustifiedCheckpoint.epoch); metrics.finalizedEpoch.set(state.finalizedCheckpoint.epoch); } - -export function computeAnchorCheckpoint( - config: ChainForkConfig, - anchorState: BeaconStateAllForks -): {checkpoint: phase0.Checkpoint; blockHeader: phase0.BeaconBlockHeader} { - let blockHeader; - let root; - const blockTypes = config.getForkTypes(anchorState.latestBlockHeader.slot); - - if (anchorState.latestBlockHeader.slot === GENESIS_SLOT) { - const block = blockTypes.BeaconBlock.defaultValue(); - block.stateRoot = anchorState.hashTreeRoot(); - blockHeader = blockToHeader(config, block); - root = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blockHeader); - } else { - blockHeader = ssz.phase0.BeaconBlockHeader.clone(anchorState.latestBlockHeader); - if (ssz.Root.equals(blockHeader.stateRoot, ZERO_HASH)) { - blockHeader.stateRoot = anchorState.hashTreeRoot(); - } - root = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blockHeader); - } - - return { - checkpoint: { - root, - // the checkpoint epoch = computeEpochAtSlot(anchorState.slot) + 1 if slot is not at epoch boundary - // this is similar to a process_slots() call - epoch: computeCheckpointEpochAtStateSlot(anchorState.slot), - }, - blockHeader, - }; -} diff --git a/packages/beacon-node/src/chain/interface.ts b/packages/beacon-node/src/chain/interface.ts index ca13dc604ea0..531f60dc0e63 100644 --- a/packages/beacon-node/src/chain/interface.ts +++ 
b/packages/beacon-node/src/chain/interface.ts @@ -1,4 +1,5 @@ import {CompositeTypeAny, TreeView, Type} from "@chainsafe/ssz"; +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; import { UintNum64, Root, @@ -21,7 +22,6 @@ import { CachedBeaconStateAllForks, EpochShuffling, Index2PubkeyCache, - PubkeyIndexMap, } from "@lodestar/state-transition"; import {BeaconConfig} from "@lodestar/config"; import {Logger} from "@lodestar/utils"; @@ -30,6 +30,7 @@ import {IEth1ForBlockProduction} from "../eth1/index.js"; import {IExecutionEngine, IExecutionBuilder} from "../execution/index.js"; import {Metrics} from "../metrics/metrics.js"; import {IClock} from "../util/clock.js"; +import {BufferPool} from "../util/bufferPool.js"; import {ChainEventEmitter} from "./emitter.js"; import {IStateRegenerator, RegenCaller} from "./regen/index.js"; import {IBlsVerifier} from "./bls/index.js"; @@ -86,6 +87,7 @@ export interface IBeaconChain { readonly config: BeaconConfig; readonly logger: Logger; readonly metrics: Metrics | null; + readonly bufferPool: BufferPool | null; /** The initial slot that the chain is started with */ readonly anchorStateLatestBlockSlot: Slot; diff --git a/packages/beacon-node/src/chain/lightClient/index.ts b/packages/beacon-node/src/chain/lightClient/index.ts index 0230ca48c8be..30567b5d79b8 100644 --- a/packages/beacon-node/src/chain/lightClient/index.ts +++ b/packages/beacon-node/src/chain/lightClient/index.ts @@ -1,4 +1,4 @@ -import {BitArray, CompositeViewDU, toHexString} from "@chainsafe/ssz"; +import {BitArray, CompositeViewDU} from "@chainsafe/ssz"; import { altair, BeaconBlock, @@ -31,7 +31,7 @@ import { LightClientUpdateSummary, upgradeLightClientHeader, } from "@lodestar/light-client/spec"; -import {Logger, MapDef, pruneSetToMax} from "@lodestar/utils"; +import {Logger, MapDef, pruneSetToMax, toRootHex} from "@lodestar/utils"; import {routes} from "@lodestar/api"; import { MIN_SYNC_COMMITTEE_PARTICIPANTS, @@ -292,7 +292,7 @@ export class 
LightClientServer { if (!syncCommitteeWitness) { throw new LightClientServerError( {code: LightClientServerErrorCode.RESOURCE_UNAVAILABLE}, - `syncCommitteeWitness not available ${toHexString(blockRoot)}` + `syncCommitteeWitness not available ${toRootHex(blockRoot)}` ); } @@ -352,7 +352,7 @@ export class LightClientServer { if (!syncCommitteeWitness) { throw new LightClientServerError( {code: LightClientServerErrorCode.RESOURCE_UNAVAILABLE}, - `syncCommitteeWitness not available ${toHexString(blockRoot)} period ${period}` + `syncCommitteeWitness not available ${toRootHex(blockRoot)} period ${period}` ); } @@ -391,7 +391,7 @@ export class LightClientServer { const header = blockToLightClientHeader(this.config.getForkName(blockSlot), block); const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(header.beacon); - const blockRootHex = toHexString(blockRoot); + const blockRootHex = toRootHex(blockRoot); const syncCommitteeWitness = getSyncCommitteesWitness(postState); @@ -410,7 +410,7 @@ export class LightClientServer { const period = computeSyncPeriodAtSlot(blockSlot); if (parentBlockPeriod < period) { // If the parentBlock is in a previous epoch it must be the dependentRoot of this epoch transition - const dependentRoot = toHexString(block.parentRoot); + const dependentRoot = toRootHex(block.parentRoot); const periodDependentRoots = this.knownSyncCommittee.getOrDefault(period); if (!periodDependentRoots.has(dependentRoot)) { periodDependentRoots.add(dependentRoot); @@ -486,7 +486,7 @@ export class LightClientServer { ): Promise { this.metrics?.lightclientServer.onSyncAggregate.inc({event: "processed"}); - const signedBlockRootHex = toHexString(signedBlockRoot); + const signedBlockRootHex = toRootHex(signedBlockRoot); const attestedData = this.prevHeadData.get(signedBlockRootHex); if (!attestedData) { // Log cacheSize since at start this.prevHeadData will be empty @@ -574,7 +574,7 @@ export class LightClientServer { } catch (e) { this.logger.error( "Error 
updating best LightClientUpdate", - {syncPeriod, slot: attestedHeader.beacon.slot, blockRoot: toHexString(attestedData.blockRoot)}, + {syncPeriod, slot: attestedHeader.beacon.slot, blockRoot: toRootHex(attestedData.blockRoot)}, e as Error ); } @@ -619,7 +619,7 @@ export class LightClientServer { const syncCommitteeWitness = await this.db.syncCommitteeWitness.get(attestedData.blockRoot); if (!syncCommitteeWitness) { - throw Error(`syncCommitteeWitness not available at ${toHexString(attestedData.blockRoot)}`); + throw Error(`syncCommitteeWitness not available at ${toRootHex(attestedData.blockRoot)}`); } const nextSyncCommittee = await this.db.syncCommittee.get(syncCommitteeWitness.nextSyncCommitteeRoot); if (!nextSyncCommittee) { @@ -697,7 +697,7 @@ export class LightClientServer { * Get finalized header from db. Keeps a small in-memory cache to speed up most of the lookups */ private async getFinalizedHeader(finalizedBlockRoot: Uint8Array): Promise { - const finalizedBlockRootHex = toHexString(finalizedBlockRoot); + const finalizedBlockRootHex = toRootHex(finalizedBlockRoot); const cachedFinalizedHeader = this.checkpointHeaders.get(finalizedBlockRootHex); if (cachedFinalizedHeader) { return cachedFinalizedHeader; diff --git a/packages/beacon-node/src/chain/lightClient/proofs.ts b/packages/beacon-node/src/chain/lightClient/proofs.ts index 87ad4544ec69..8d273e30ae5c 100644 --- a/packages/beacon-node/src/chain/lightClient/proofs.ts +++ b/packages/beacon-node/src/chain/lightClient/proofs.ts @@ -1,6 +1,11 @@ import {Tree} from "@chainsafe/persistent-merkle-tree"; -import {BeaconStateAllForks} from "@lodestar/state-transition"; -import {FINALIZED_ROOT_GINDEX, BLOCK_BODY_EXECUTION_PAYLOAD_GINDEX, ForkExecution} from "@lodestar/params"; +import {BeaconStateAllForks, CachedBeaconStateAllForks} from "@lodestar/state-transition"; +import { + FINALIZED_ROOT_GINDEX, + BLOCK_BODY_EXECUTION_PAYLOAD_GINDEX, + ForkExecution, + FINALIZED_ROOT_GINDEX_ELECTRA, +} from 
"@lodestar/params"; import {BeaconBlockBody, SSZTypesFor, ssz} from "@lodestar/types"; import {SyncCommitteeWitness} from "./types.js"; @@ -40,9 +45,10 @@ export function getCurrentSyncCommitteeBranch(syncCommitteesWitness: SyncCommitt return [syncCommitteesWitness.nextSyncCommitteeRoot, ...syncCommitteesWitness.witness]; } -export function getFinalizedRootProof(state: BeaconStateAllForks): Uint8Array[] { +export function getFinalizedRootProof(state: CachedBeaconStateAllForks): Uint8Array[] { state.commit(); - return new Tree(state.node).getSingleProof(BigInt(FINALIZED_ROOT_GINDEX)); + const finalizedRootGindex = state.epochCtx.isPostElectra() ? FINALIZED_ROOT_GINDEX_ELECTRA : FINALIZED_ROOT_GINDEX; + return new Tree(state.node).getSingleProof(BigInt(finalizedRootGindex)); } export function getBlockBodyExecutionHeaderProof( diff --git a/packages/beacon-node/src/chain/opPools/aggregatedAttestationPool.ts b/packages/beacon-node/src/chain/opPools/aggregatedAttestationPool.ts index 556e1f397d60..9a3e6622d1ff 100644 --- a/packages/beacon-node/src/chain/opPools/aggregatedAttestationPool.ts +++ b/packages/beacon-node/src/chain/opPools/aggregatedAttestationPool.ts @@ -1,7 +1,26 @@ -import {toHexString} from "@chainsafe/ssz"; -import {aggregateSignatures} from "@chainsafe/blst"; -import {ForkName, ForkSeq, MAX_ATTESTATIONS, MIN_ATTESTATION_INCLUSION_DELAY, SLOTS_PER_EPOCH} from "@lodestar/params"; -import {phase0, Epoch, Slot, ssz, ValidatorIndex, RootHex} from "@lodestar/types"; +import {aggregateSignatures, Signature} from "@chainsafe/blst"; +import {BitArray} from "@chainsafe/ssz"; +import { + ForkName, + ForkSeq, + isForkPostElectra, + MAX_ATTESTATIONS, + MAX_ATTESTATIONS_ELECTRA, + MAX_COMMITTEES_PER_SLOT, + MIN_ATTESTATION_INCLUSION_DELAY, + SLOTS_PER_EPOCH, +} from "@lodestar/params"; +import { + phase0, + Epoch, + Slot, + ssz, + ValidatorIndex, + RootHex, + electra, + isElectraAttestation, + Attestation, +} from "@lodestar/types"; import { CachedBeaconStateAllForks, 
CachedBeaconStatePhase0, @@ -11,7 +30,8 @@ import { getBlockRootAtSlot, } from "@lodestar/state-transition"; import {IForkChoice, EpochDifference} from "@lodestar/fork-choice"; -import {toHex, MapDef} from "@lodestar/utils"; +import {MapDef, toRootHex, assert} from "@lodestar/utils"; +import {ChainForkConfig} from "@lodestar/config"; import {intersectUint8Arrays, IntersectResult} from "../../util/bitArray.js"; import {pruneBySlot, signatureFromBytesNoCheck} from "./utils.js"; import {InsertOutcome} from "./types.js"; @@ -20,13 +40,24 @@ type DataRootHex = string; type CommitteeIndex = number; -type AttestationWithScore = {attestation: phase0.Attestation; score: number}; +// for pre-electra +type AttestationWithScore = {attestation: Attestation; score: number}; +/** + * for electra, this is to consolidate aggregated attestations of the same attestation data into a single attestation to be included in block + * note that this is local definition in this file and it's NOT validator consolidation + */ +export type AttestationsConsolidation = { + byCommittee: Map; + attData: phase0.AttestationData; + totalNotSeenCount: number; + score: number; +}; /** - * This function returns not seen participation for a given epoch and committee. + * This function returns not seen participation for a given epoch and slot and committe index. * Return null if all validators are seen or no info to check. 
*/ -type GetNotSeenValidatorsFn = (epoch: Epoch, committee: Uint32Array) => Set | null; +type GetNotSeenValidatorsFn = (epoch: Epoch, slot: Slot, committeeIndex: number) => Set | null; type ValidateAttestationDataFn = (attData: phase0.AttestationData) => boolean; @@ -39,14 +70,21 @@ type ValidateAttestationDataFn = (attData: phase0.AttestationData) => boolean; const MAX_RETAINED_ATTESTATIONS_PER_GROUP = 4; /** - * On mainnet, each slot has 64 committees, and each block has 128 attestations max so in average + * Pre-electra, each slot has 64 committees, and each block has 128 attestations max so in average * we get 2 attestation per groups. * Starting from Jan 2024, we have a performance issue getting attestations for a block. Based on the - * fact that lot of groups will have only 1 attestation since it's full of participation increase this number + * fact that lot of groups will have only 1 full participation attestation, increase this number * a bit higher than average. This also help decrease number of slots to search for attestations. */ const MAX_ATTESTATIONS_PER_GROUP = 3; +/** + * For electra, each block has up to 8 aggregated attestations, assuming there are 3 for the "best" + * attestation data, there are still 5 for other attestation data so this constant is still good. + * We should separate to 2 constant based on conditions of different networks + */ +const MAX_ATTESTATIONS_PER_GROUP_ELECTRA = 3; + /** * Maintain a pool of aggregated attestations. Attestations can be retrieved for inclusion in a block * or api. The returned attestations are aggregated to maximise the number of validators that can be @@ -54,20 +92,27 @@ const MAX_ATTESTATIONS_PER_GROUP = 3; * Note that we want to remove attestations with attesters that were included in the chain. 
*/ export class AggregatedAttestationPool { - private readonly attestationGroupByDataHashByIndexBySlot = new MapDef< + /** + * post electra, different committees could have the same AttData and we have to consolidate attestations of the same + * data to be included in block, so we should group by data before index + * // TODO: make sure it does not affect performance for pre electra forks + */ + private readonly attestationGroupByIndexByDataHexBySlot = new MapDef< Slot, - Map> - >(() => new Map>()); + Map> + >(() => new Map>()); private lowestPermissibleSlot = 0; + constructor(private readonly config: ChainForkConfig) {} + /** For metrics to track size of the pool */ getAttestationCount(): {attestationCount: number; attestationDataCount: number} { let attestationCount = 0; let attestationDataCount = 0; - for (const attestationGroupByDataByIndex of this.attestationGroupByDataHashByIndexBySlot.values()) { - for (const attestationGroupByData of attestationGroupByDataByIndex.values()) { - attestationDataCount += attestationGroupByData.size; - for (const attestationGroup of attestationGroupByData.values()) { + for (const attestationGroupByIndexByDataHex of this.attestationGroupByIndexByDataHexBySlot.values()) { + for (const attestationGroupByIndex of attestationGroupByIndexByDataHex.values()) { + attestationDataCount += attestationGroupByIndex.size; + for (const attestationGroup of attestationGroupByIndex.values()) { attestationCount += attestationGroup.getAttestationCount(); } } @@ -76,7 +121,7 @@ export class AggregatedAttestationPool { } add( - attestation: phase0.Attestation, + attestation: Attestation, dataRootHex: RootHex, attestingIndicesCount: number, committee: Uint32Array @@ -89,16 +134,32 @@ export class AggregatedAttestationPool { return InsertOutcome.Old; } - const attestationGroupByDataHashByIndex = this.attestationGroupByDataHashByIndexBySlot.getOrDefault(slot); - let attestationGroupByDataHash = 
attestationGroupByDataHashByIndex.get(attestation.data.index); - if (!attestationGroupByDataHash) { - attestationGroupByDataHash = new Map(); - attestationGroupByDataHashByIndex.set(attestation.data.index, attestationGroupByDataHash); + const attestationGroupByIndexByDataHash = this.attestationGroupByIndexByDataHexBySlot.getOrDefault(slot); + let attestationGroupByIndex = attestationGroupByIndexByDataHash.get(dataRootHex); + if (!attestationGroupByIndex) { + attestationGroupByIndex = new Map(); + attestationGroupByIndexByDataHash.set(dataRootHex, attestationGroupByIndex); } - let attestationGroup = attestationGroupByDataHash.get(dataRootHex); + + let committeeIndex; + + if (isForkPostElectra(this.config.getForkName(slot))) { + if (!isElectraAttestation(attestation)) { + throw Error(`Attestation should be type electra.Attestation for slot ${slot}`); + } + committeeIndex = attestation.committeeBits.getSingleTrueBit(); + } else { + if (isElectraAttestation(attestation)) { + throw Error(`Attestation should be type phase0.Attestation for slot ${slot}`); + } + committeeIndex = attestation.data.index; + } + // this should not happen because attestation should be validated before reaching this + assert.notNull(committeeIndex, "Committee index should not be null in aggregated attestation pool"); + let attestationGroup = attestationGroupByIndex.get(committeeIndex); if (!attestationGroup) { attestationGroup = new MatchingDataAttestationGroup(committee, attestation.data); - attestationGroupByDataHash.set(dataRootHex, attestationGroup); + attestationGroupByIndex.set(committeeIndex, attestationGroup); } return attestationGroup.add({ @@ -110,14 +171,21 @@ export class AggregatedAttestationPool { /** Remove attestations which are too old to be included in a block. 
*/ prune(clockSlot: Slot): void { // Only retain SLOTS_PER_EPOCH slots - pruneBySlot(this.attestationGroupByDataHashByIndexBySlot, clockSlot, SLOTS_PER_EPOCH); + pruneBySlot(this.attestationGroupByIndexByDataHexBySlot, clockSlot, SLOTS_PER_EPOCH); this.lowestPermissibleSlot = Math.max(clockSlot - SLOTS_PER_EPOCH, 0); } + getAttestationsForBlock(fork: ForkName, forkChoice: IForkChoice, state: CachedBeaconStateAllForks): Attestation[] { + const forkSeq = ForkSeq[fork]; + return forkSeq >= ForkSeq.electra + ? this.getAttestationsForBlockElectra(fork, forkChoice, state) + : this.getAttestationsForBlockPreElectra(fork, forkChoice, state); + } + /** - * Get attestations to be included in a block. Returns $MAX_ATTESTATIONS items + * Get attestations to be included in a block pre-electra. Returns up to $MAX_ATTESTATIONS items */ - getAttestationsForBlock( + getAttestationsForBlockPreElectra( fork: ForkName, forkChoice: IForkChoice, state: CachedBeaconStateAllForks @@ -131,14 +199,14 @@ export class AggregatedAttestationPool { const attestationsByScore: AttestationWithScore[] = []; - const slots = Array.from(this.attestationGroupByDataHashByIndexBySlot.keys()).sort((a, b) => b - a); + const slots = Array.from(this.attestationGroupByIndexByDataHexBySlot.keys()).sort((a, b) => b - a); let minScore = Number.MAX_SAFE_INTEGER; let slotCount = 0; slot: for (const slot of slots) { slotCount++; - const attestationGroupByDataHashByIndex = this.attestationGroupByDataHashByIndexBySlot.get(slot); + const attestationGroupByIndexByDataHash = this.attestationGroupByIndexByDataHexBySlot.get(slot); // should not happen - if (!attestationGroupByDataHashByIndex) { + if (!attestationGroupByIndexByDataHash) { throw Error(`No aggregated attestation pool for slot=${slot}`); } @@ -159,35 +227,25 @@ export class AggregatedAttestationPool { } const slotDelta = stateSlot - slot; - const shuffling = state.epochCtx.getShufflingAtEpoch(epoch); - const slotCommittees = shuffling.committees[slot % 
SLOTS_PER_EPOCH]; - for (const [committeeIndex, attestationGroupByData] of attestationGroupByDataHashByIndex.entries()) { - // all attestations will be validated against the state in next step so we can get committee from the state - // this is an improvement to save the notSeenValidatorsFn call for the same slot/index instead of the same attestation data - if (committeeIndex > slotCommittees.length) { - // invalid index, should not happen - continue; - } - - const committee = slotCommittees[committeeIndex]; - const notSeenAttestingIndices = notSeenValidatorsFn(epoch, committee); - if (notSeenAttestingIndices === null || notSeenAttestingIndices.size === 0) { - continue; - } + for (const attestationGroupByIndex of attestationGroupByIndexByDataHash.values()) { + for (const [committeeIndex, attestationGroup] of attestationGroupByIndex.entries()) { + const notSeenAttestingIndices = notSeenValidatorsFn(epoch, slot, committeeIndex); + if (notSeenAttestingIndices === null || notSeenAttestingIndices.size === 0) { + continue; + } - if ( - slotCount > 2 && - attestationsByScore.length >= MAX_ATTESTATIONS && - notSeenAttestingIndices.size / slotDelta < minScore - ) { - // after 2 slots, there are a good chance that we have 2 * MAX_ATTESTATIONS attestations and break the for loop early - // if not, we may have to scan all slots in the pool - // if we have enough attestations and the max possible score is lower than scores of `attestationsByScore`, we should skip - // otherwise it takes time to check attestation, add it and remove it later after the sort by score - continue; - } + if ( + slotCount > 2 && + attestationsByScore.length >= MAX_ATTESTATIONS && + notSeenAttestingIndices.size / slotDelta < minScore + ) { + // after 2 slots, there are a good chance that we have 2 * MAX_ATTESTATIONS attestations and break the for loop early + // if not, we may have to scan all slots in the pool + // if we have enough attestations and the max possible score is lower than scores of 
`attestationsByScore`, we should skip + // otherwise it takes time to check attestation, add it and remove it later after the sort by score + continue; + } - for (const attestationGroup of attestationGroupByData.values()) { if (!validateAttestationDataFn(attestationGroup.data)) { continue; } @@ -200,6 +258,7 @@ export class AggregatedAttestationPool { // IF they have to be validated, do it only with one attestation per group since same data // The committeeCountPerSlot can be precomputed once per slot for (const {attestation, notSeenAttesterCount} of attestationGroup.getAttestationsForBlock( + fork, notSeenAttestingIndices )) { const score = notSeenAttesterCount / slotDelta; @@ -232,23 +291,138 @@ export class AggregatedAttestationPool { return attestationsForBlock; } + /** + * Get attestations to be included in an electra block. Returns up to $MAX_ATTESTATIONS_ELECTRA items + */ + getAttestationsForBlockElectra( + fork: ForkName, + forkChoice: IForkChoice, + state: CachedBeaconStateAllForks + ): electra.Attestation[] { + const stateSlot = state.slot; + const stateEpoch = state.epochCtx.epoch; + const statePrevEpoch = stateEpoch - 1; + + const notSeenValidatorsFn = getNotSeenValidatorsFn(state); + const validateAttestationDataFn = getValidateAttestationDataFn(forkChoice, state); + + const slots = Array.from(this.attestationGroupByIndexByDataHexBySlot.keys()).sort((a, b) => b - a); + const consolidations: AttestationsConsolidation[] = []; + let minScore = Number.MAX_SAFE_INTEGER; + let slotCount = 0; + slot: for (const slot of slots) { + slotCount++; + const attestationGroupByIndexByDataHash = this.attestationGroupByIndexByDataHexBySlot.get(slot); + // should not happen + if (!attestationGroupByIndexByDataHash) { + throw Error(`No aggregated attestation pool for slot=${slot}`); + } + + const epoch = computeEpochAtSlot(slot); + // validateAttestation condition: Attestation target epoch not in previous or current epoch + if (!(epoch === stateEpoch || epoch === 
statePrevEpoch)) { + continue; // Invalid attestations + } + // validateAttestation condition: Attestation slot not within inclusion window + if (!(slot + MIN_ATTESTATION_INCLUSION_DELAY <= stateSlot)) { + continue; // Invalid attestations + } + + const slotDelta = stateSlot - slot; + // CommitteeIndex 0 1 2 ... Consolidation + // Attestations att00 --- att10 --- att20 --- 0 (att 00 10 20) + // att01 --- - --- att21 --- 1 (att 01 __ 21) + // - --- - --- att22 --- 2 (att __ __ 22) + for (const attestationGroupByIndex of attestationGroupByIndexByDataHash.values()) { + // sameAttDataCons could be up to MAX_ATTESTATIONS_PER_GROUP_ELECTRA + const sameAttDataCons: AttestationsConsolidation[] = []; + for (const [committeeIndex, attestationGroup] of attestationGroupByIndex.entries()) { + const notSeenAttestingIndices = notSeenValidatorsFn(epoch, slot, committeeIndex); + if (notSeenAttestingIndices === null || notSeenAttestingIndices.size === 0) { + continue; + } + + if ( + slotCount > 2 && + consolidations.length >= MAX_ATTESTATIONS_ELECTRA && + notSeenAttestingIndices.size / slotDelta < minScore + ) { + // after 2 slots, there are a good chance that we have 2 * MAX_ATTESTATIONS_ELECTRA attestations and break the for loop early + // if not, we may have to scan all slots in the pool + // if we have enough attestations and the max possible score is lower than scores of `attestationsByScore`, we should skip + // otherwise it takes time to check attestation, add it and remove it later after the sort by score + continue; + } + + if (!validateAttestationDataFn(attestationGroup.data)) { + continue; + } + + // TODO: Is it necessary to validateAttestation for: + // - Attestation committee index not within current committee count + // - Attestation aggregation bits length does not match committee length + // + // These properties should not change after being validate in gossip + // IF they have to be validated, do it only with one attestation per group since same data + // The 
committeeCountPerSlot can be precomputed once per slot + for (const [i, attestationNonParticipation] of attestationGroup + .getAttestationsForBlock(fork, notSeenAttestingIndices) + .entries()) { + if (sameAttDataCons[i] === undefined) { + sameAttDataCons[i] = { + byCommittee: new Map(), + attData: attestationNonParticipation.attestation.data, + totalNotSeenCount: 0, + // only update score after we have full data + score: 0, + }; + } + sameAttDataCons[i].byCommittee.set(committeeIndex, attestationNonParticipation); + sameAttDataCons[i].totalNotSeenCount += attestationNonParticipation.notSeenAttesterCount; + } + for (const consolidation of sameAttDataCons) { + const score = consolidation.totalNotSeenCount / slotDelta; + if (score < minScore) { + minScore = score; + } + consolidations.push({...consolidation, score}); + // Stop accumulating attestations there are enough that may have good scoring + if (consolidations.length >= MAX_ATTESTATIONS_ELECTRA * 2) { + break slot; + } + } + } + } + } + + const sortedConsolidationsByScore = consolidations + .sort((a, b) => b.score - a.score) + .slice(0, MAX_ATTESTATIONS_ELECTRA); + // on chain aggregation is expensive, only do it after all + return sortedConsolidationsByScore.map(aggregateConsolidation); + } + /** * Get all attestations optionally filtered by `attestation.data.slot` + * Note this function is not fork aware and can potentially return a mix + * of phase0.Attestations and electra.Attestations. + * Caller of this function is expected to filtered result if they desire + * a homogenous array. 
* @param bySlot slot to filter, `bySlot === attestation.data.slot` */ - getAll(bySlot?: Slot): phase0.Attestation[] { - let attestationGroupsArr: Map[]; + getAll(bySlot?: Slot): Attestation[] { + let attestationGroupsArr: Map[]; if (bySlot === undefined) { - attestationGroupsArr = Array.from(this.attestationGroupByDataHashByIndexBySlot.values()).flatMap((byIndex) => + attestationGroupsArr = Array.from(this.attestationGroupByIndexByDataHexBySlot.values()).flatMap((byIndex) => Array.from(byIndex.values()) ); } else { - const attestationGroupsByIndex = this.attestationGroupByDataHashByIndexBySlot.get(bySlot); + const attestationGroupsByIndex = this.attestationGroupByIndexByDataHexBySlot.get(bySlot); if (!attestationGroupsByIndex) throw Error(`No attestations for slot ${bySlot}`); attestationGroupsArr = Array.from(attestationGroupsByIndex.values()); } - const attestations: phase0.Attestation[] = []; + const attestations: Attestation[] = []; for (const attestationGroups of attestationGroupsArr) { for (const attestationGroup of attestationGroups.values()) { attestations.push(...attestationGroup.getAttestations()); @@ -259,12 +433,12 @@ export class AggregatedAttestationPool { } interface AttestationWithIndex { - attestation: phase0.Attestation; + attestation: Attestation; trueBitsCount: number; } type AttestationNonParticipant = { - attestation: phase0.Attestation; + attestation: Attestation; // this is <= attestingIndices.count since some attesters may be seen by the chain // this is only updated and used in removeBySeenValidators function notSeenAttesterCount: number; @@ -346,9 +520,17 @@ export class MatchingDataAttestationGroup { * @param notSeenAttestingIndices not seen attestting indices, i.e. 
indices in the same committee * @returns an array of AttestationNonParticipant */ - getAttestationsForBlock(notSeenAttestingIndices: Set): AttestationNonParticipant[] { + getAttestationsForBlock(fork: ForkName, notSeenAttestingIndices: Set): AttestationNonParticipant[] { const attestations: AttestationNonParticipant[] = []; + const isPostElectra = isForkPostElectra(fork); for (const {attestation} of this.attestations) { + if ( + (isPostElectra && !isElectraAttestation(attestation)) || + (!isPostElectra && isElectraAttestation(attestation)) + ) { + continue; + } + let notSeenAttesterCount = 0; const {aggregationBits} = attestation; for (const notSeenIndex of notSeenAttestingIndices) { @@ -362,17 +544,16 @@ export class MatchingDataAttestationGroup { } } - if (attestations.length <= MAX_ATTESTATIONS_PER_GROUP) { + const maxAttestation = isPostElectra ? MAX_ATTESTATIONS_PER_GROUP_ELECTRA : MAX_ATTESTATIONS_PER_GROUP; + if (attestations.length <= maxAttestation) { return attestations; } else { - return attestations - .sort((a, b) => b.notSeenAttesterCount - a.notSeenAttesterCount) - .slice(0, MAX_ATTESTATIONS_PER_GROUP); + return attestations.sort((a, b) => b.notSeenAttesterCount - a.notSeenAttesterCount).slice(0, maxAttestation); } } /** Get attestations for API. 
*/ - getAttestations(): phase0.Attestation[] { + getAttestations(): Attestation[] { return this.attestations.map((attestation) => attestation.attestation); } } @@ -386,6 +567,34 @@ export function aggregateInto(attestation1: AttestationWithIndex, attestation2: attestation1.attestation.signature = aggregateSignatures([signature1, signature2]).toBytes(); } +/** + * Electra and after: Block proposer consolidates attestations with the same + * attestation data from different committee into a single attestation + * https://github.com/ethereum/consensus-specs/blob/aba6345776aa876dad368cab27fbbb23fae20455/specs/_features/eip7549/validator.md?plain=1#L39 + */ +export function aggregateConsolidation({byCommittee, attData}: AttestationsConsolidation): electra.Attestation { + const committeeBits = BitArray.fromBitLen(MAX_COMMITTEES_PER_SLOT); + // TODO: can we improve this? + let aggregationBits: boolean[] = []; + const signatures: Signature[] = []; + const sortedCommittees = Array.from(byCommittee.keys()).sort((a, b) => a - b); + for (const committeeIndex of sortedCommittees) { + const attestationNonParticipation = byCommittee.get(committeeIndex); + if (attestationNonParticipation !== undefined) { + const {attestation} = attestationNonParticipation; + committeeBits.set(committeeIndex, true); + aggregationBits = [...aggregationBits, ...attestation.aggregationBits.toBoolArray()]; + signatures.push(signatureFromBytesNoCheck(attestation.signature)); + } + } + return { + aggregationBits: BitArray.fromBoolArray(aggregationBits), + data: attData, + committeeBits, + signature: aggregateSignatures(signatures).toBytes(), + }; +} + /** * Pre-compute participation from a CachedBeaconStateAllForks, for use to check if an attestation's committee * has already attested or not. 
@@ -408,12 +617,13 @@ export function getNotSeenValidatorsFn(state: CachedBeaconStateAllForks): GetNot state ); - return (epoch: Epoch, committee: Uint32Array) => { + return (epoch: Epoch, slot: Slot, committeeIndex: number) => { const participants = epoch === stateEpoch ? currentEpochParticipants : epoch === stateEpoch - 1 ? previousEpochParticipants : null; if (participants === null) { return null; } + const committee = state.epochCtx.getBeaconCommittee(slot, committeeIndex); const notSeenAttestingIndices = new Set(); for (const [i, validatorIndex] of committee.entries()) { @@ -435,22 +645,32 @@ export function getNotSeenValidatorsFn(state: CachedBeaconStateAllForks): GetNot const previousParticipation = altairState.previousEpochParticipation.getAll(); const currentParticipation = altairState.currentEpochParticipation.getAll(); const stateEpoch = computeEpochAtSlot(state.slot); + // this function could be called multiple times with same slot + committeeIndex + const cachedNotSeenValidators = new Map>(); - return (epoch: Epoch, committee: Uint32Array) => { + return (epoch: Epoch, slot: Slot, committeeIndex: number) => { const participationStatus = epoch === stateEpoch ? currentParticipation : epoch === stateEpoch - 1 ? previousParticipation : null; if (participationStatus === null) { return null; } + const cacheKey = slot + "_" + committeeIndex; + let notSeenAttestingIndices = cachedNotSeenValidators.get(cacheKey); + if (notSeenAttestingIndices != null) { + // if all validators are seen then return null, we don't need to check for any attestations of same committee again + return notSeenAttestingIndices.size === 0 ? 
null : notSeenAttestingIndices; + } - const notSeenAttestingIndices = new Set(); + const committee = state.epochCtx.getBeaconCommittee(slot, committeeIndex); + notSeenAttestingIndices = new Set(); for (const [i, validatorIndex] of committee.entries()) { // no need to check flagIsTimelySource as if validator is not seen, it's participation status is 0 if (participationStatus[validatorIndex] === 0) { notSeenAttestingIndices.add(i); } } + cachedNotSeenValidators.set(cacheKey, notSeenAttestingIndices); // if all validators are seen then return null, we don't need to check for any attestations of same committee again return notSeenAttestingIndices.size === 0 ? null : notSeenAttestingIndices; }; @@ -515,7 +735,7 @@ export function getValidateAttestationDataFn( } // the isValidAttestationData does not depend on slot and index - const beaconBlockRootHex = toHex(attData.beaconBlockRoot); + const beaconBlockRootHex = toRootHex(attData.beaconBlockRoot); const cacheKey = beaconBlockRootHex + targetEpoch; let isValid = cachedValidatedAttestationData.get(cacheKey); if (isValid === undefined) { @@ -560,7 +780,7 @@ export function isValidAttestationData( if (stateEpoch < 2 || targetEpoch < 2) { return true; } - const beaconBlockRootHex = toHex(data.beaconBlockRoot); + const beaconBlockRootHex = toRootHex(data.beaconBlockRoot); return isValidShuffling(forkChoice, state, beaconBlockRootHex, targetEpoch); } @@ -573,7 +793,7 @@ function isValidShuffling( // Otherwise the shuffling is determined by the block at the end of the target epoch // minus the shuffling lookahead (usually 2). We call this the "pivot". const pivotSlot = computeStartSlotAtEpoch(targetEpoch - 1) - 1; - const stateDependentRoot = toHexString(getBlockRootAtSlot(state, pivotSlot)); + const stateDependentRoot = toRootHex(getBlockRootAtSlot(state, pivotSlot)); // Use fork choice's view of the block DAG to quickly evaluate whether the attestation's // pivot block is the same as the current state's pivot block. 
If it is, then the diff --git a/packages/beacon-node/src/chain/opPools/attestationPool.ts b/packages/beacon-node/src/chain/opPools/attestationPool.ts index 2b511598f9a4..887448b1e553 100644 --- a/packages/beacon-node/src/chain/opPools/attestationPool.ts +++ b/packages/beacon-node/src/chain/opPools/attestationPool.ts @@ -1,10 +1,12 @@ import {BitArray} from "@chainsafe/ssz"; import {Signature, aggregateSignatures} from "@chainsafe/blst"; -import {phase0, Slot, RootHex} from "@lodestar/types"; -import {MapDef} from "@lodestar/utils"; +import {Slot, RootHex, isElectraAttestation, Attestation} from "@lodestar/types"; +import {MapDef, assert} from "@lodestar/utils"; +import {isForkPostElectra} from "@lodestar/params"; +import {ChainForkConfig} from "@lodestar/config"; import {IClock} from "../../util/clock.js"; import {InsertOutcome, OpPoolError, OpPoolErrorCode} from "./types.js"; -import {pruneBySlot, signatureFromBytesNoCheck} from "./utils.js"; +import {isElectraAggregate, pruneBySlot, signatureFromBytesNoCheck} from "./utils.js"; /** * The number of slots that will be stored in the pool. @@ -22,15 +24,22 @@ const SLOTS_RETAINED = 3; */ const MAX_ATTESTATIONS_PER_SLOT = 16_384; -type AggregateFast = { - data: phase0.Attestation["data"]; +type AggregateFastPhase0 = { + data: Attestation["data"]; aggregationBits: BitArray; signature: Signature; }; +export type AggregateFastElectra = AggregateFastPhase0 & {committeeBits: BitArray}; + +export type AggregateFast = AggregateFastPhase0 | AggregateFastElectra; + /** Hex string of DataRoot `TODO` */ type DataRootHex = string; +/** CommitteeIndex must be null for pre-electra. Must not be null post-electra */ +type CommitteeIndex = number | null; + /** * A pool of `Attestation` that is specially designed to store "unaggregated" attestations from * the native aggregation scheme. @@ -55,12 +64,14 @@ type DataRootHex = string; * receives and it can be triggered manually. 
*/ export class AttestationPool { - private readonly attestationByRootBySlot = new MapDef>( - () => new Map() - ); + private readonly aggregateByIndexByRootBySlot = new MapDef< + Slot, + Map> + >(() => new Map>()); private lowestPermissibleSlot = 0; constructor( + private readonly config: ChainForkConfig, private readonly clock: IClock, private readonly cutOffSecFromSlot: number, private readonly preaggregateSlotDistance = 0 @@ -69,8 +80,10 @@ export class AttestationPool { /** Returns current count of pre-aggregated attestations with unique data */ getAttestationCount(): number { let attestationCount = 0; - for (const attestationByRoot of this.attestationByRootBySlot.values()) { - attestationCount += attestationByRoot.size; + for (const attestationByIndexByRoot of this.aggregateByIndexByRootBySlot.values()) { + for (const attestationByIndex of attestationByIndexByRoot.values()) { + attestationCount += attestationByIndex.size; + } } return attestationCount; } @@ -92,8 +105,9 @@ export class AttestationPool { * - Valid committeeIndex * - Valid data */ - add(attestation: phase0.Attestation, attDataRootHex: RootHex): InsertOutcome { + add(committeeIndex: CommitteeIndex, attestation: Attestation, attDataRootHex: RootHex): InsertOutcome { const slot = attestation.data.slot; + const fork = this.config.getForkName(slot); const lowestPermissibleSlot = this.lowestPermissibleSlot; // Reject any attestations that are too old. 
@@ -107,19 +121,33 @@ export class AttestationPool { } // Limit object per slot - const aggregateByRoot = this.attestationByRootBySlot.getOrDefault(slot); + const aggregateByRoot = this.aggregateByIndexByRootBySlot.getOrDefault(slot); if (aggregateByRoot.size >= MAX_ATTESTATIONS_PER_SLOT) { throw new OpPoolError({code: OpPoolErrorCode.REACHED_MAX_PER_SLOT}); } + if (isForkPostElectra(fork)) { + // Electra only: this should not happen because attestation should be validated before reaching this + assert.notNull(committeeIndex, "Committee index should not be null in attestation pool post-electra"); + assert.true(isElectraAttestation(attestation), "Attestation should be type electra.Attestation"); + } else { + assert.true(!isElectraAttestation(attestation), "Attestation should be type phase0.Attestation"); + committeeIndex = null; // For pre-electra, committee index info is encoded in attDataRootIndex + } + // Pre-aggregate the contribution with existing items - const aggregate = aggregateByRoot.get(attDataRootHex); + let aggregateByIndex = aggregateByRoot.get(attDataRootHex); + if (aggregateByIndex === undefined) { + aggregateByIndex = new Map(); + aggregateByRoot.set(attDataRootHex, aggregateByIndex); + } + const aggregate = aggregateByIndex.get(committeeIndex); if (aggregate) { // Aggregate mutating return aggregateAttestationInto(aggregate, attestation); } else { // Create new aggregate - aggregateByRoot.set(attDataRootHex, attestationToAggregate(attestation)); + aggregateByIndex.set(committeeIndex, attestationToAggregate(attestation)); return InsertOutcome.NewData; } } @@ -127,13 +155,23 @@ export class AttestationPool { /** * For validator API to get an aggregate */ - getAggregate(slot: Slot, dataRootHex: RootHex): phase0.Attestation | null { - const aggregate = this.attestationByRootBySlot.get(slot)?.get(dataRootHex); + getAggregate(slot: Slot, committeeIndex: CommitteeIndex, dataRootHex: RootHex): Attestation | null { + const fork = 
this.config.getForkName(slot); + const isPostElectra = isForkPostElectra(fork); + committeeIndex = isPostElectra ? committeeIndex : null; + + const aggregate = this.aggregateByIndexByRootBySlot.get(slot)?.get(dataRootHex)?.get(committeeIndex); if (!aggregate) { // TODO: Add metric for missing aggregates return null; } + if (isPostElectra) { + assert.true(isElectraAggregate(aggregate), "Aggregate should be type AggregateFastElectra"); + } else { + assert.true(!isElectraAggregate(aggregate), "Aggregate should be type AggregateFastPhase0"); + } + return fastToAttestation(aggregate); } @@ -142,7 +180,7 @@ export class AttestationPool { * By default, not interested in attestations in old slots, we only preaggregate attestations for the current slot. */ prune(clockSlot: Slot): void { - pruneBySlot(this.attestationByRootBySlot, clockSlot, SLOTS_RETAINED); + pruneBySlot(this.aggregateByIndexByRootBySlot, clockSlot, SLOTS_RETAINED); // by default preaggregateSlotDistance is 0, i.e only accept attestations in the same clock slot. this.lowestPermissibleSlot = Math.max(clockSlot - this.preaggregateSlotDistance, 0); } @@ -151,18 +189,20 @@ export class AttestationPool { * Get all attestations optionally filtered by `attestation.data.slot` * @param bySlot slot to filter, `bySlot === attestation.data.slot` */ - getAll(bySlot?: Slot): phase0.Attestation[] { - const attestations: phase0.Attestation[] = []; + getAll(bySlot?: Slot): Attestation[] { + const attestations: Attestation[] = []; const aggregateByRoots = bySlot === undefined - ? Array.from(this.attestationByRootBySlot.values()) - : [this.attestationByRootBySlot.get(bySlot)]; + ? 
Array.from(this.aggregateByIndexByRootBySlot.values()) + : [this.aggregateByIndexByRootBySlot.get(bySlot)]; for (const aggregateByRoot of aggregateByRoots) { if (aggregateByRoot) { - for (const aggFast of aggregateByRoot.values()) { - attestations.push(fastToAttestation(aggFast)); + for (const aggFastByIndex of aggregateByRoot.values()) { + for (const aggFast of aggFastByIndex.values()) { + attestations.push(fastToAttestation(aggFast)); + } } } } @@ -175,15 +215,13 @@ export class AttestationPool { // - Insert attestations coming from gossip and API /** - * Aggregate a new contribution into `aggregate` mutating it + * Aggregate a new attestation into `aggregate` mutating it */ -function aggregateAttestationInto(aggregate: AggregateFast, attestation: phase0.Attestation): InsertOutcome { +function aggregateAttestationInto(aggregate: AggregateFast, attestation: Attestation): InsertOutcome { const bitIndex = attestation.aggregationBits.getSingleTrueBit(); // Should never happen, attestations are verified against this exact condition before - if (bitIndex === null) { - throw Error("Invalid attestation not exactly one bit set"); - } + assert.notNull(bitIndex, "Invalid attestation in pool, not exactly one bit set"); if (aggregate.aggregationBits.get(bitIndex) === true) { return InsertOutcome.AlreadyKnown; @@ -197,7 +235,16 @@ function aggregateAttestationInto(aggregate: AggregateFast, attestation: phase0. 
/** * Format `contribution` into an efficient `aggregate` to add more contributions in with aggregateContributionInto() */ -function attestationToAggregate(attestation: phase0.Attestation): AggregateFast { +function attestationToAggregate(attestation: Attestation): AggregateFast { + if (isElectraAttestation(attestation)) { + return { + data: attestation.data, + // clone because it will be mutated + aggregationBits: attestation.aggregationBits.clone(), + committeeBits: attestation.committeeBits, + signature: signatureFromBytesNoCheck(attestation.signature), + }; + } return { data: attestation.data, // clone because it will be mutated @@ -207,12 +254,8 @@ function attestationToAggregate(attestation: phase0.Attestation): AggregateFast } /** - * Unwrap AggregateFast to phase0.Attestation + * Unwrap AggregateFast to Attestation */ -function fastToAttestation(aggFast: AggregateFast): phase0.Attestation { - return { - data: aggFast.data, - aggregationBits: aggFast.aggregationBits, - signature: aggFast.signature.toBytes(), - }; +function fastToAttestation(aggFast: AggregateFast): Attestation { + return {...aggFast, signature: aggFast.signature.toBytes()}; } diff --git a/packages/beacon-node/src/chain/opPools/opPool.ts b/packages/beacon-node/src/chain/opPools/opPool.ts index 69c331f6fd39..ee66591e9aef 100644 --- a/packages/beacon-node/src/chain/opPools/opPool.ts +++ b/packages/beacon-node/src/chain/opPools/opPool.ts @@ -1,4 +1,3 @@ -import {fromHexString, toHexString} from "@chainsafe/ssz"; import { CachedBeaconStateAllForks, computeEpochAtSlot, @@ -14,8 +13,10 @@ import { BLS_WITHDRAWAL_PREFIX, MAX_ATTESTER_SLASHINGS, ForkSeq, + MAX_ATTESTER_SLASHINGS_ELECTRA, } from "@lodestar/params"; -import {Epoch, phase0, capella, ssz, ValidatorIndex, SignedBeaconBlock} from "@lodestar/types"; +import {fromHex, toHex, toRootHex} from "@lodestar/utils"; +import {Epoch, phase0, capella, ssz, ValidatorIndex, SignedBeaconBlock, AttesterSlashing} from "@lodestar/types"; import {IBeaconDb} 
from "../../db/index.js"; import {SignedBLSToExecutionChangeVersioned} from "../../util/types.js"; import {BlockType} from "../interface.js"; @@ -83,10 +84,10 @@ export class OpPool { persistDiff( db.attesterSlashing, Array.from(this.attesterSlashings.entries()).map(([key, value]) => ({ - key: fromHexString(key), + key: fromHex(key), value: value.attesterSlashing, })), - toHexString + toHex ), persistDiff( db.proposerSlashing, @@ -135,7 +136,7 @@ export class OpPool { if (!rootHash) rootHash = ssz.phase0.AttesterSlashing.hashTreeRoot(attesterSlashing); // TODO: Do once and cache attached to the AttesterSlashing object const intersectingIndices = getAttesterSlashableIndices(attesterSlashing); - this.attesterSlashings.set(toHexString(rootHash), { + this.attesterSlashings.set(toRootHex(rootHash), { attesterSlashing, intersectingIndices, }); @@ -173,7 +174,7 @@ export class OpPool { blockType: BlockType, metrics: Metrics | null ): [ - phase0.AttesterSlashing[], + AttesterSlashing[], phase0.ProposerSlashing[], phase0.SignedVoluntaryExit[], capella.SignedBLSToExecutionChange[], @@ -207,7 +208,8 @@ export class OpPool { }); const endAttesterSlashings = stepsMetrics?.startTimer(); - const attesterSlashings: phase0.AttesterSlashing[] = []; + const attesterSlashings: AttesterSlashing[] = []; + const maxAttesterSlashings = stateFork >= ForkSeq.electra ? 
MAX_ATTESTER_SLASHINGS_ELECTRA : MAX_ATTESTER_SLASHINGS; attesterSlashing: for (const attesterSlashing of this.attesterSlashings.values()) { /** Indices slashable in this attester slashing */ const slashableIndices = new Set(); @@ -222,7 +224,7 @@ export class OpPool { if (isSlashableAtEpoch(validator, stateEpoch)) { slashableIndices.add(index); } - if (attesterSlashings.length >= MAX_ATTESTER_SLASHINGS) { + if (attesterSlashings.length >= maxAttesterSlashings) { break attesterSlashing; } } @@ -282,6 +284,7 @@ export class OpPool { } /** For beacon pool API */ + // TODO Electra: Update to adapt electra.AttesterSlashing getAllAttesterSlashings(): phase0.AttesterSlashing[] { return Array.from(this.attesterSlashings.values()).map((attesterSlashings) => attesterSlashings.attesterSlashing); } diff --git a/packages/beacon-node/src/chain/opPools/syncCommitteeMessagePool.ts b/packages/beacon-node/src/chain/opPools/syncCommitteeMessagePool.ts index 90a310841f01..bbaba1835dce 100644 --- a/packages/beacon-node/src/chain/opPools/syncCommitteeMessagePool.ts +++ b/packages/beacon-node/src/chain/opPools/syncCommitteeMessagePool.ts @@ -1,8 +1,8 @@ -import {BitArray, toHexString} from "@chainsafe/ssz"; +import {BitArray} from "@chainsafe/ssz"; import {Signature, aggregateSignatures} from "@chainsafe/blst"; import {SYNC_COMMITTEE_SIZE, SYNC_COMMITTEE_SUBNET_COUNT} from "@lodestar/params"; import {altair, Root, Slot, SubcommitteeIndex} from "@lodestar/types"; -import {MapDef} from "@lodestar/utils"; +import {MapDef, toRootHex} from "@lodestar/utils"; import {IClock} from "../../util/clock.js"; import {InsertOutcome, OpPoolError, OpPoolErrorCode} from "./types.js"; import {pruneBySlot, signatureFromBytesNoCheck} from "./utils.js"; @@ -64,7 +64,7 @@ export class SyncCommitteeMessagePool { // TODO: indexInSubcommittee: number should be indicesInSyncCommittee add(subnet: Subnet, signature: altair.SyncCommitteeMessage, indexInSubcommittee: number): InsertOutcome { const {slot, 
beaconBlockRoot} = signature; - const rootHex = toHexString(beaconBlockRoot); + const rootHex = toRootHex(beaconBlockRoot); const lowestPermissibleSlot = this.lowestPermissibleSlot; // Reject if too old. @@ -99,7 +99,7 @@ export class SyncCommitteeMessagePool { * This is for the aggregator to produce ContributionAndProof. */ getContribution(subnet: SubcommitteeIndex, slot: Slot, prevBlockRoot: Root): altair.SyncCommitteeContribution | null { - const contribution = this.contributionsByRootBySubnetBySlot.get(slot)?.get(subnet)?.get(toHexString(prevBlockRoot)); + const contribution = this.contributionsByRootBySubnetBySlot.get(slot)?.get(subnet)?.get(toRootHex(prevBlockRoot)); if (!contribution) { return null; } diff --git a/packages/beacon-node/src/chain/opPools/syncContributionAndProofPool.ts b/packages/beacon-node/src/chain/opPools/syncContributionAndProofPool.ts index 7834ae534501..ff0feea891e1 100644 --- a/packages/beacon-node/src/chain/opPools/syncContributionAndProofPool.ts +++ b/packages/beacon-node/src/chain/opPools/syncContributionAndProofPool.ts @@ -1,9 +1,9 @@ -import {BitArray, toHexString} from "@chainsafe/ssz"; +import {BitArray} from "@chainsafe/ssz"; import {Signature, aggregateSignatures} from "@chainsafe/blst"; import {SYNC_COMMITTEE_SIZE, SYNC_COMMITTEE_SUBNET_SIZE} from "@lodestar/params"; import {altair, Slot, Root, ssz} from "@lodestar/types"; import {G2_POINT_AT_INFINITY} from "@lodestar/state-transition"; -import {MapDef} from "@lodestar/utils"; +import {MapDef, toRootHex} from "@lodestar/utils"; import {InsertOutcome, OpPoolError, OpPoolErrorCode} from "./types.js"; import {pruneBySlot, signatureFromBytesNoCheck} from "./utils.js"; @@ -72,7 +72,7 @@ export class SyncContributionAndProofPool { add(contributionAndProof: altair.ContributionAndProof, syncCommitteeParticipants: number): InsertOutcome { const {contribution} = contributionAndProof; const {slot, beaconBlockRoot} = contribution; - const rootHex = toHexString(beaconBlockRoot); + const 
rootHex = toRootHex(beaconBlockRoot); // Reject if too old. if (slot < this.lowestPermissibleSlot) { @@ -100,7 +100,7 @@ export class SyncContributionAndProofPool { * This is for the block factory, the same to process_sync_committee_contributions in the spec. */ getAggregate(slot: Slot, prevBlockRoot: Root): altair.SyncAggregate { - const bestContributionBySubnet = this.bestContributionBySubnetRootBySlot.get(slot)?.get(toHexString(prevBlockRoot)); + const bestContributionBySubnet = this.bestContributionBySubnetRootBySlot.get(slot)?.get(toRootHex(prevBlockRoot)); if (!bestContributionBySubnet || bestContributionBySubnet.size === 0) { // TODO: Add metric for missing SyncAggregate // Must return signature as G2_POINT_AT_INFINITY when participating bits are empty diff --git a/packages/beacon-node/src/chain/opPools/utils.ts b/packages/beacon-node/src/chain/opPools/utils.ts index 039e95af6c9f..e136bf1d4094 100644 --- a/packages/beacon-node/src/chain/opPools/utils.ts +++ b/packages/beacon-node/src/chain/opPools/utils.ts @@ -2,6 +2,7 @@ import {Signature} from "@chainsafe/blst"; import {BLS_WITHDRAWAL_PREFIX} from "@lodestar/params"; import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; import {Slot, capella} from "@lodestar/types"; +import {AggregateFast, AggregateFastElectra} from "./attestationPool.js"; /** * Prune a Map indexed by slot to keep the most recent slots, up to `slotsRetained` @@ -58,3 +59,7 @@ export function isValidBlsToExecutionChangeForBlockInclusion( return true; } + +export function isElectraAggregate(aggregate: AggregateFast): aggregate is AggregateFastElectra { + return (aggregate as AggregateFastElectra).committeeBits !== undefined; +} diff --git a/packages/beacon-node/src/chain/options.ts b/packages/beacon-node/src/chain/options.ts index 7c7cfcdde75b..bc2b73256272 100644 --- a/packages/beacon-node/src/chain/options.ts +++ b/packages/beacon-node/src/chain/options.ts @@ -111,7 +111,7 @@ export const defaultChainOptions: IChainOptions 
= { // batching too much may block the I/O thread so if useWorker=false, suggest this value to be 32 // since this batch attestation work is designed to work with useWorker=true, make this the lowest value minSameMessageSignatureSetsToBatch: 2, - nHistoricalStates: false, + nHistoricalStates: true, nHistoricalStatesFileDataStore: false, maxBlockStates: DEFAULT_MAX_BLOCK_STATES, maxCPStateEpochsInMemory: DEFAULT_MAX_CP_STATE_EPOCHS_IN_MEMORY, diff --git a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts index 214fbdc890ec..ff8221a326e9 100644 --- a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts +++ b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts @@ -17,6 +17,7 @@ import { BlindedBeaconBlockBody, BlindedBeaconBlock, sszTypesFor, + electra, } from "@lodestar/types"; import { CachedBeaconStateAllForks, @@ -32,7 +33,7 @@ import { } from "@lodestar/state-transition"; import {ChainForkConfig} from "@lodestar/config"; import {ForkSeq, ForkExecution, isForkExecution} from "@lodestar/params"; -import {toHex, sleep, Logger} from "@lodestar/utils"; +import {toHex, sleep, Logger, toRootHex} from "@lodestar/utils"; import type {BeaconChain} from "../chain.js"; import {PayloadId, IExecutionEngine, IExecutionBuilder, PayloadAttributes} from "../../execution/index.js"; import {ZERO_HASH, ZERO_HASH_HEX} from "../../constants/index.js"; @@ -258,7 +259,7 @@ export async function produceBlockBody( } const engineRes = await this.executionEngine.getPayload(fork, payloadId); - const {executionPayload, blobsBundle} = engineRes; + const {executionPayload, blobsBundle, executionRequests} = engineRes; shouldOverrideBuilder = engineRes.shouldOverrideBuilder; (blockBody as BeaconBlockBody).executionPayload = executionPayload; @@ -273,7 +274,7 @@ export async function produceBlockBody( prepType, payloadId, fetchedTime, - executionHeadBlockHash: 
toHex(engineRes.executionPayload.blockHash), + executionHeadBlockHash: toRootHex(engineRes.executionPayload.blockHash), }); if (executionPayload.transactions.length === 0) { this.metrics?.blockPayload.emptyPayloads.inc({prepType}); @@ -290,7 +291,7 @@ export async function produceBlockBody( } (blockBody as deneb.BeaconBlockBody).blobKzgCommitments = blobsBundle.commitments; - const blockHash = toHex(executionPayload.blockHash); + const blockHash = toRootHex(executionPayload.blockHash); const contents = {kzgProofs: blobsBundle.proofs, blobs: blobsBundle.blobs}; blobsResult = {type: BlobsResultType.produced, contents, blockHash}; @@ -298,6 +299,13 @@ export async function produceBlockBody( } else { blobsResult = {type: BlobsResultType.preDeneb}; } + + if (ForkSeq[fork] >= ForkSeq.electra) { + if (executionRequests === undefined) { + throw Error(`Missing executionRequests response from getPayload at fork=${fork}`); + } + (blockBody as electra.BeaconBlockBody).executionRequests = executionRequests; + } } } catch (e) { this.metrics?.blockPayload.payloadFetchErrors.inc(); @@ -380,7 +388,7 @@ export async function prepareExecutionPayload( const prevRandao = getRandaoMix(state, state.epochCtx.epoch); const payloadIdCached = chain.executionEngine.payloadIdCache.get({ - headBlockHash: toHex(parentHash), + headBlockHash: toRootHex(parentHash), finalizedBlockHash, timestamp: numToQuantity(timestamp), prevRandao: toHex(prevRandao), @@ -414,7 +422,7 @@ export async function prepareExecutionPayload( payloadId = await chain.executionEngine.notifyForkchoiceUpdate( fork, - toHex(parentHash), + toRootHex(parentHash), safeBlockHash, finalizedBlockHash, attributes @@ -559,7 +567,9 @@ function preparePayloadAttributes( }; if (ForkSeq[fork] >= ForkSeq.capella) { + // withdrawals logic is now fork aware as it changes on electra fork post capella (payloadAttributes as capella.SSEPayloadAttributes["payloadAttributes"]).withdrawals = getExpectedWithdrawals( + ForkSeq[fork], prepareState as 
CachedBeaconStateCapella ).withdrawals; } diff --git a/packages/beacon-node/src/chain/regen/interface.ts b/packages/beacon-node/src/chain/regen/interface.ts index 341625c9ff1d..031d19860789 100644 --- a/packages/beacon-node/src/chain/regen/interface.ts +++ b/packages/beacon-node/src/chain/regen/interface.ts @@ -44,7 +44,7 @@ export interface IStateRegenerator extends IStateRegeneratorInternal { pruneOnFinalized(finalizedEpoch: Epoch): void; processState(blockRootHex: RootHex, postState: CachedBeaconStateAllForks): void; addCheckpointState(cp: phase0.Checkpoint, item: CachedBeaconStateAllForks): void; - updateHeadState(newHeadStateRoot: RootHex, maybeHeadState: CachedBeaconStateAllForks): void; + updateHeadState(newHead: ProtoBlock, maybeHeadState: CachedBeaconStateAllForks): void; updatePreComputedCheckpoint(rootHex: RootHex, epoch: Epoch): number | null; } diff --git a/packages/beacon-node/src/chain/regen/queued.ts b/packages/beacon-node/src/chain/regen/queued.ts index 358a37e6e638..57e64bd364ea 100644 --- a/packages/beacon-node/src/chain/regen/queued.ts +++ b/packages/beacon-node/src/chain/regen/queued.ts @@ -1,8 +1,7 @@ -import {toHexString} from "@chainsafe/ssz"; import {phase0, Slot, RootHex, Epoch, BeaconBlock} from "@lodestar/types"; import {IForkChoice, ProtoBlock} from "@lodestar/fork-choice"; -import {CachedBeaconStateAllForks, computeEpochAtSlot} from "@lodestar/state-transition"; -import {Logger} from "@lodestar/utils"; +import {CachedBeaconStateAllForks, UnfinalizedPubkeyIndexMap, computeEpochAtSlot} from "@lodestar/state-transition"; +import {Logger, toRootHex} from "@lodestar/utils"; import {routes} from "@lodestar/api"; import {CheckpointHex, toCheckpointHex} from "../stateCache/index.js"; import {Metrics} from "../../metrics/index.js"; @@ -89,7 +88,7 @@ export class QueuedStateRegenerator implements IStateRegenerator { block: BeaconBlock, opts: StateCloneOpts = {dontTransferCache: true} ): CachedBeaconStateAllForks | null { - const parentRoot = 
toHexString(block.parentRoot); + const parentRoot = toRootHex(block.parentRoot); const parentBlock = this.forkChoice.getBlockHex(parentRoot); if (!parentBlock) { throw new RegenError({ @@ -165,28 +164,40 @@ export class QueuedStateRegenerator implements IStateRegenerator { this.checkpointStateCache.add(cp, item); } - updateHeadState(newHeadStateRoot: RootHex, maybeHeadState: CachedBeaconStateAllForks): void { - // the resulting state will be added to block state cache so we transfer the cache in this flow - const cloneOpts = {dontTransferCache: true}; + updateHeadState(newHead: ProtoBlock, maybeHeadState: CachedBeaconStateAllForks): void { + const {stateRoot: newHeadStateRoot, blockRoot: newHeadBlockRoot, slot: newHeadSlot} = newHead; + const maybeHeadStateRoot = toRootHex(maybeHeadState.hashTreeRoot()); + const logCtx = { + newHeadSlot, + newHeadBlockRoot, + newHeadStateRoot, + maybeHeadSlot: maybeHeadState.slot, + maybeHeadStateRoot, + }; const headState = - newHeadStateRoot === toHexString(maybeHeadState.hashTreeRoot()) + newHeadStateRoot === maybeHeadStateRoot ? maybeHeadState - : this.blockStateCache.get(newHeadStateRoot, cloneOpts); + : // maybeHeadState was already in block state cache so we don't transfer the cache + this.blockStateCache.get(newHeadStateRoot, {dontTransferCache: true}); if (headState) { this.blockStateCache.setHeadState(headState); } else { // Trigger regen on head change if necessary - this.logger.warn("Head state not available, triggering regen", {stateRoot: newHeadStateRoot}); - // it's important to reload state to regen head state here - const allowDiskReload = true; - // head has changed, so the existing cached head state is no longer useful. Set strong reference to null to free - // up memory for regen step below. 
During regen, node won't be functional but eventually head will be available - // for legacy StateContextCache only + this.logger.warn("Head state not available, triggering regen", logCtx); + // for the old BlockStateCacheImpl only + // - head has changed, so the existing cached head state is no longer useful. Set strong reference to null to free + // up memory for regen step below. During regen, node won't be functional but eventually head will be available + // for the new FIFOBlockStateCache, this has no affect this.blockStateCache.setHeadState(null); + + // for the new FIFOBlockStateCache, it's important to reload state to regen head state here if needed + const allowDiskReload = true; + // transfer cache here because we want to regen state asap + const cloneOpts = {dontTransferCache: false}; this.regen.getState(newHeadStateRoot, RegenCaller.processBlock, cloneOpts, allowDiskReload).then( (headStateRegen) => this.blockStateCache.setHeadState(headStateRegen), - (e) => this.logger.error("Error on head state regen", {}, e) + (e) => this.logger.error("Error on head state regen", logCtx, e) ); } } @@ -195,6 +206,54 @@ export class QueuedStateRegenerator implements IStateRegenerator { return this.checkpointStateCache.updatePreComputedCheckpoint(rootHex, epoch); } + /** + * Remove `validators` from all unfinalized cache's epochCtx.UnfinalizedPubkey2Index, + * and add them to epochCtx.pubkey2index and epochCtx.index2pubkey + */ + updateUnfinalizedPubkeys(validators: UnfinalizedPubkeyIndexMap): void { + let numStatesUpdated = 0; + const states = this.blockStateCache.getStates(); + const cpStates = this.checkpointStateCache.getStates(); + + // Add finalized pubkeys to all states. + const addTimer = this.metrics?.regenFnAddPubkeyTime.startTimer(); + + // We only need to add pubkeys to any one of the states since the finalized caches is shared globally across all states + const firstState = (states.next().value ?? 
cpStates.next().value) as CachedBeaconStateAllForks | undefined; + + if (firstState !== undefined) { + firstState.epochCtx.addFinalizedPubkeys(validators, this.metrics?.epochCache ?? undefined); + } else { + this.logger.warn("Attempt to delete finalized pubkey from unfinalized pubkey cache. But no state is available"); + } + + addTimer?.(); + + // Delete finalized pubkeys from unfinalized pubkey cache for all states + const deleteTimer = this.metrics?.regenFnDeletePubkeyTime.startTimer(); + const pubkeysToDelete = Array.from(validators.keys()); + + for (const s of states) { + s.epochCtx.deleteUnfinalizedPubkeys(pubkeysToDelete); + numStatesUpdated++; + } + + for (const s of cpStates) { + s.epochCtx.deleteUnfinalizedPubkeys(pubkeysToDelete); + numStatesUpdated++; + } + + // Since first state is consumed from the iterator. Will need to perform delete explicitly + if (firstState !== undefined) { + firstState?.epochCtx.deleteUnfinalizedPubkeys(pubkeysToDelete); + numStatesUpdated++; + } + + deleteTimer?.(); + + this.metrics?.regenFnNumStatesUpdated.observe(numStatesUpdated); + } + /** * Get the state to run with `block`. 
* - State after `block.parentRoot` dialed forward to block.slot diff --git a/packages/beacon-node/src/chain/regen/regen.ts b/packages/beacon-node/src/chain/regen/regen.ts index 2b6fc835cf7c..7c663c6e0d3d 100644 --- a/packages/beacon-node/src/chain/regen/regen.ts +++ b/packages/beacon-node/src/chain/regen/regen.ts @@ -1,5 +1,4 @@ -import {fromHexString, toHexString} from "@chainsafe/ssz"; -import {phase0, Slot, RootHex, BeaconBlock} from "@lodestar/types"; +import {phase0, Slot, RootHex, BeaconBlock, SignedBeaconBlock} from "@lodestar/types"; import { CachedBeaconStateAllForks, computeEpochAtSlot, @@ -8,9 +7,10 @@ import { DataAvailableStatus, processSlots, stateTransition, + StateHashTreeRootSource, } from "@lodestar/state-transition"; import {IForkChoice, ProtoBlock} from "@lodestar/fork-choice"; -import {Logger} from "@lodestar/utils"; +import {Logger, fromHex, toRootHex} from "@lodestar/utils"; import {SLOTS_PER_EPOCH} from "@lodestar/params"; import {ChainForkConfig} from "@lodestar/config"; import {Metrics} from "../../metrics/index.js"; @@ -89,7 +89,7 @@ export class StateRegenerator implements IStateRegeneratorInternal { allowDiskReload = false ): Promise { const checkpointStartSlot = computeStartSlotAtEpoch(cp.epoch); - return this.getBlockSlotState(toHexString(cp.root), checkpointStartSlot, opts, regenCaller, allowDiskReload); + return this.getBlockSlotState(toRootHex(cp.root), checkpointStartSlot, opts, regenCaller, allowDiskReload); } /** @@ -145,7 +145,7 @@ export class StateRegenerator implements IStateRegeneratorInternal { */ async getState( stateRoot: RootHex, - _rCaller: RegenCaller, + caller: RegenCaller, opts?: StateCloneOpts, // internal option, don't want to expose to external caller allowDiskReload = false @@ -156,6 +156,13 @@ export class StateRegenerator implements IStateRegeneratorInternal { return cachedStateCtx; } + // in block gossip validation (getPreState() call), dontTransferCache is specified as true because we only want to transfer 
cache in verifyBlocksStateTransitionOnly() + // but here we want to process blocks as fast as possible so force to transfer cache in this case + if (opts && allowDiskReload) { + // if there is no `opts` specified, it already means "false" + opts.dontTransferCache = false; + } + // Otherwise we have to use the fork choice to traverse backwards, block by block, // searching the state caches // then replay blocks forward to the desired stateRoot @@ -166,6 +173,8 @@ export class StateRegenerator implements IStateRegeneratorInternal { const blocksToReplay = [block]; let state: CachedBeaconStateAllForks | null = null; const {checkpointStateCache} = this.modules; + + const getSeedStateTimer = this.modules.metrics?.regenGetState.getSeedState.startTimer({caller}); // iterateAncestorBlocks only returns ancestor blocks, not the block itself for (const b of this.modules.forkChoice.iterateAncestorBlocks(block.blockRoot)) { state = this.modules.blockStateCache.get(b.stateRoot, opts); @@ -181,6 +190,7 @@ export class StateRegenerator implements IStateRegeneratorInternal { } blocksToReplay.push(b); } + getSeedStateTimer?.(); if (state === null) { throw new RegenError({ @@ -188,19 +198,50 @@ export class StateRegenerator implements IStateRegeneratorInternal { }); } + const blockCount = blocksToReplay.length; const MAX_EPOCH_TO_PROCESS = 5; - if (blocksToReplay.length > MAX_EPOCH_TO_PROCESS * SLOTS_PER_EPOCH) { + if (blockCount > MAX_EPOCH_TO_PROCESS * SLOTS_PER_EPOCH) { throw new RegenError({ code: RegenErrorCode.TOO_MANY_BLOCK_PROCESSED, stateRoot, }); } - const replaySlots = blocksToReplay.map((b) => b.slot).join(","); - this.modules.logger.debug("Replaying blocks to get state", {stateRoot, replaySlots}); - for (const b of blocksToReplay.reverse()) { - const block = await this.modules.db.block.get(fromHexString(b.blockRoot)); - if (!block) { + this.modules.metrics?.regenGetState.blockCount.observe({caller}, blockCount); + + const replaySlots = new Array(blockCount); + const 
blockPromises = new Array>(blockCount); + + const protoBlocksAsc = blocksToReplay.reverse(); + for (const [i, protoBlock] of protoBlocksAsc.entries()) { + replaySlots[i] = protoBlock.slot; + blockPromises[i] = this.modules.db.block.get(fromHex(protoBlock.blockRoot)); + } + + const logCtx = {stateRoot, caller, replaySlots: replaySlots.join(",")}; + this.modules.logger.debug("Replaying blocks to get state", logCtx); + + const loadBlocksTimer = this.modules.metrics?.regenGetState.loadBlocks.startTimer({caller}); + const blockOrNulls = await Promise.all(blockPromises); + loadBlocksTimer?.(); + + const blocksByRoot = new Map(); + for (const [i, blockOrNull] of blockOrNulls.entries()) { + // checking early here helps prevent unneccessary state transition below + if (blockOrNull === null) { + throw new RegenError({ + code: RegenErrorCode.BLOCK_NOT_IN_DB, + blockRoot: protoBlocksAsc[i].blockRoot, + }); + } + blocksByRoot.set(protoBlocksAsc[i].blockRoot, blockOrNull); + } + + const stateTransitionTimer = this.modules.metrics?.regenGetState.stateTransition.startTimer({caller}); + for (const b of protoBlocksAsc) { + const block = blocksByRoot.get(b.blockRoot); + // just to make compiler happy, we checked in the above for loop already + if (block === undefined) { throw new RegenError({ code: RegenErrorCode.BLOCK_NOT_IN_DB, blockRoot: b.blockRoot, @@ -224,7 +265,12 @@ export class StateRegenerator implements IStateRegeneratorInternal { this.modules.metrics ); - const stateRoot = toHexString(state.hashTreeRoot()); + const hashTreeRootTimer = this.modules.metrics?.stateHashTreeRootTime.startTimer({ + source: StateHashTreeRootSource.regenState, + }); + const stateRoot = toRootHex(state.hashTreeRoot()); + hashTreeRootTimer?.(); + if (b.stateRoot !== stateRoot) { throw new RegenError({ slot: b.slot, @@ -238,9 +284,6 @@ export class StateRegenerator implements IStateRegeneratorInternal { // also with allowDiskReload flag, we "reload" it to the state cache too 
this.modules.blockStateCache.add(state); } - - // this avoids keeping our node busy processing blocks - await nextEventLoop(); } catch (e) { throw new RegenError({ code: RegenErrorCode.STATE_TRANSITION_ERROR, @@ -248,7 +291,9 @@ export class StateRegenerator implements IStateRegeneratorInternal { }); } } - this.modules.logger.debug("Replayed blocks to get state", {stateRoot, replaySlots}); + stateTransitionTimer?.(); + + this.modules.logger.debug("Replayed blocks to get state", {...logCtx, stateSlot: state.slot}); return state; } diff --git a/packages/beacon-node/src/chain/rewards/attestationsRewards.ts b/packages/beacon-node/src/chain/rewards/attestationsRewards.ts index e909e4b1b57e..70fb27de239a 100644 --- a/packages/beacon-node/src/chain/rewards/attestationsRewards.ts +++ b/packages/beacon-node/src/chain/rewards/attestationsRewards.ts @@ -24,6 +24,7 @@ import { isInInactivityLeak, } from "@lodestar/state-transition"; import {BeaconConfig} from "@lodestar/config"; +import {fromHex} from "@lodestar/utils"; export type AttestationsRewards = routes.beacon.AttestationsRewards; type IdealAttestationsReward = routes.beacon.IdealAttestationsReward; @@ -143,7 +144,7 @@ function computeTotalAttestationsRewardsAltair( const {flags} = transitionCache; const {epochCtx, config} = state; const validatorIndices = validatorIds - .map((id) => (typeof id === "number" ? id : epochCtx.pubkey2index.get(id))) + .map((id) => (typeof id === "number" ? 
id : epochCtx.pubkey2index.get(fromHex(id)))) .filter((index) => index !== undefined); // Validator indices to include in the result const inactivityPenaltyDenominator = config.INACTIVITY_SCORE_BIAS * INACTIVITY_PENALTY_QUOTIENT_ALTAIR; diff --git a/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts b/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts index 9312f3b517a7..a0aa6db35893 100644 --- a/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts +++ b/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts @@ -1,12 +1,21 @@ -import {phase0, RootHex, Slot} from "@lodestar/types"; +import {BitArray} from "@chainsafe/ssz"; +import {CommitteeIndex, phase0, RootHex, Slot} from "@lodestar/types"; import {MapDef} from "@lodestar/utils"; import {Metrics} from "../../metrics/metrics.js"; -import {AttDataBase64} from "../../util/sszBytes.js"; import {InsertOutcome} from "../opPools/types.js"; +export type SeenAttDataKey = AttDataBase64 | AttDataCommitteeBitsBase64; +// pre-electra, AttestationData is used to cache attestations +type AttDataBase64 = string; +// electra, AttestationData + CommitteeBits are used to cache attestations +type AttDataCommitteeBitsBase64 = string; + export type AttestationDataCacheEntry = { // part of shuffling data, so this does not take memory - committeeIndices: Uint32Array; + committeeValidatorIndices: Uint32Array; + // undefined for phase0 Attestation + committeeBits?: BitArray; + committeeIndex: CommitteeIndex; // IndexedAttestationData signing root, 32 bytes signingRoot: Uint8Array; // to be consumed by forkchoice and oppool @@ -38,12 +47,14 @@ const DEFAULT_MAX_CACHE_SIZE_PER_SLOT = 200; const DEFAULT_CACHE_SLOT_DISTANCE = 2; /** + * Cached seen AttestationData to improve gossip validation. For Electra, this still take into account attestationIndex + * even through it is moved outside of AttestationData. 
* As of April 2023, validating gossip attestation takes ~12% of cpu time for a node subscribing to all subnets on mainnet. * Having this cache help saves a lot of cpu time since most of the gossip attestations are on the same slot. */ export class SeenAttestationDatas { - private cacheEntryByAttDataBase64BySlot = new MapDef>( - () => new Map() + private cacheEntryByAttDataBase64BySlot = new MapDef>( + () => new Map() ); private lowestPermissibleSlot = 0; @@ -57,14 +68,14 @@ export class SeenAttestationDatas { } // TODO: Move InsertOutcome type definition to a common place - add(slot: Slot, attDataBase64: AttDataBase64, cacheEntry: AttestationDataCacheEntry): InsertOutcome { + add(slot: Slot, attDataKey: SeenAttDataKey, cacheEntry: AttestationDataCacheEntry): InsertOutcome { if (slot < this.lowestPermissibleSlot) { this.metrics?.seenCache.attestationData.reject.inc({reason: RejectReason.too_old}); return InsertOutcome.Old; } const cacheEntryByAttDataBase64 = this.cacheEntryByAttDataBase64BySlot.getOrDefault(slot); - if (cacheEntryByAttDataBase64.has(attDataBase64)) { + if (cacheEntryByAttDataBase64.has(attDataKey)) { this.metrics?.seenCache.attestationData.reject.inc({reason: RejectReason.already_known}); return InsertOutcome.AlreadyKnown; } @@ -74,11 +85,11 @@ export class SeenAttestationDatas { return InsertOutcome.ReachLimit; } - cacheEntryByAttDataBase64.set(attDataBase64, cacheEntry); + cacheEntryByAttDataBase64.set(attDataKey, cacheEntry); return InsertOutcome.NewData; } - get(slot: Slot, attDataBase64: AttDataBase64): AttestationDataCacheEntry | null { + get(slot: Slot, attDataBase64: SeenAttDataKey): AttestationDataCacheEntry | null { const cacheEntryByAttDataBase64 = this.cacheEntryByAttDataBase64BySlot.get(slot); const cacheEntry = cacheEntryByAttDataBase64?.get(attDataBase64); if (cacheEntry) { diff --git a/packages/beacon-node/src/chain/seenCache/seenCommitteeContribution.ts b/packages/beacon-node/src/chain/seenCache/seenCommitteeContribution.ts index 
fedaff8225d6..86ade618b1d1 100644 --- a/packages/beacon-node/src/chain/seenCache/seenCommitteeContribution.ts +++ b/packages/beacon-node/src/chain/seenCache/seenCommitteeContribution.ts @@ -1,7 +1,6 @@ -import {toHexString} from "@chainsafe/ssz"; import {Slot, ValidatorIndex} from "@lodestar/types"; import {ContributionAndProof, SyncCommitteeContribution} from "@lodestar/types/altair"; -import {MapDef} from "@lodestar/utils"; +import {MapDef, toRootHex} from "@lodestar/utils"; import {Metrics} from "../../metrics/index.js"; import {isSuperSetOrEqual} from "../../util/bitArray.js"; import {AggregationInfo, insertDesc} from "./seenAggregateAndProof.js"; @@ -101,5 +100,5 @@ function seenAggregatorKey(subcommitteeIndex: number, aggregatorIndex: Validator function toContributionDataKey(contribution: SyncCommitteeContribution): ContributionDataKey { const {slot, beaconBlockRoot, subcommitteeIndex} = contribution; - return `${slot} - ${toHexString(beaconBlockRoot)} - ${subcommitteeIndex}`; + return `${slot} - ${toRootHex(beaconBlockRoot)} - ${subcommitteeIndex}`; } diff --git a/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts b/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts index 6b51332353f2..3806668436d8 100644 --- a/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts +++ b/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts @@ -1,7 +1,6 @@ -import {toHexString} from "@chainsafe/ssz"; import {deneb, RootHex, SignedBeaconBlock, ssz} from "@lodestar/types"; import {ChainForkConfig} from "@lodestar/config"; -import {pruneSetToMax} from "@lodestar/utils"; +import {pruneSetToMax, toRootHex} from "@lodestar/utils"; import {BLOBSIDECAR_FIXED_SIZE, isForkBlobs, ForkName} from "@lodestar/params"; import { @@ -81,9 +80,7 @@ export class SeenGossipBlockInput { const {signedBlock, blockBytes} = gossipedInput; fork = config.getForkName(signedBlock.message.slot); - blockHex = toHexString( - 
config.getForkTypes(signedBlock.message.slot).BeaconBlock.hashTreeRoot(signedBlock.message) - ); + blockHex = toRootHex(config.getForkTypes(signedBlock.message.slot).BeaconBlock.hashTreeRoot(signedBlock.message)); blockCache = this.blockInputCache.get(blockHex) ?? getEmptyBlockInputCacheEntry(fork); blockCache.block = signedBlock; @@ -93,7 +90,7 @@ export class SeenGossipBlockInput { const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blobSidecar.signedBlockHeader.message); fork = config.getForkName(blobSidecar.signedBlockHeader.message.slot); - blockHex = toHexString(blockRoot); + blockHex = toRootHex(blockRoot); blockCache = this.blockInputCache.get(blockHex) ?? getEmptyBlockInputCacheEntry(fork); // TODO: freetheblobs check if its the same blob or a duplicate and throw/take actions diff --git a/packages/beacon-node/src/chain/serializeState.ts b/packages/beacon-node/src/chain/serializeState.ts new file mode 100644 index 000000000000..c6e796cd614c --- /dev/null +++ b/packages/beacon-node/src/chain/serializeState.ts @@ -0,0 +1,32 @@ +import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; +import {AllocSource, BufferPool} from "../util/bufferPool.js"; + +type ProcessStateBytesFn = (stateBytes: Uint8Array) => Promise; + +/* + * Serialize state using the BufferPool if provided. 
+ */ +export async function serializeState( + state: CachedBeaconStateAllForks, + source: AllocSource, + processFn: ProcessStateBytesFn, + bufferPool?: BufferPool | null +): Promise { + const size = state.type.tree_serializedSize(state.node); + let stateBytes: Uint8Array | null = null; + if (bufferPool) { + using bufferWithKey = bufferPool.alloc(size, source); + if (bufferWithKey) { + stateBytes = bufferWithKey.buffer; + const dataView = new DataView(stateBytes.buffer, stateBytes.byteOffset, stateBytes.byteLength); + state.serializeToBytes({uint8Array: stateBytes, dataView}, 0); + return processFn(stateBytes); + } + // release the buffer back to the pool automatically + } + + // we already have metrics in BufferPool so no need to do it here + stateBytes = state.serialize(); + + return processFn(stateBytes); +} diff --git a/packages/beacon-node/src/chain/shufflingCache.ts b/packages/beacon-node/src/chain/shufflingCache.ts index 23177142d846..6c42228b5356 100644 --- a/packages/beacon-node/src/chain/shufflingCache.ts +++ b/packages/beacon-node/src/chain/shufflingCache.ts @@ -1,10 +1,14 @@ -import {toHexString} from "@chainsafe/ssz"; -import {CachedBeaconStateAllForks, EpochShuffling, getShufflingDecisionBlock} from "@lodestar/state-transition"; -import {Epoch, RootHex, ssz} from "@lodestar/types"; -import {MapDef, pruneSetToMax} from "@lodestar/utils"; -import {GENESIS_SLOT} from "@lodestar/params"; +import { + BeaconStateAllForks, + EpochShuffling, + IShufflingCache, + ShufflingBuildProps, + computeEpochShuffling, +} from "@lodestar/state-transition"; +import {Epoch, RootHex} from "@lodestar/types"; +import {LodestarError, Logger, MapDef, pruneSetToMax} from "@lodestar/utils"; import {Metrics} from "../metrics/metrics.js"; -import {computeAnchorCheckpoint} from "./initState.js"; +import {callInNextEventLoop} from "../util/eventLoop.js"; /** * Same value to CheckpointBalancesCache, with the assumption that we don't have to use it for old epochs. 
In the worse case: @@ -32,6 +36,7 @@ type ShufflingCacheItem = { type PromiseCacheItem = { type: CacheItemType.promise; + timeInsertedMs: number; promise: Promise; resolveFn: (shuffling: EpochShuffling) => void; }; @@ -48,7 +53,7 @@ export type ShufflingCacheOpts = { * - if a shuffling is not available (which does not happen with default chain option of maxSkipSlots = 32), track a promise to make sure we don't compute the same shuffling twice * - skip computing shuffling when loading state bytes from disk */ -export class ShufflingCache { +export class ShufflingCache implements IShufflingCache { /** LRU cache implemented as a map, pruned every time we add an item */ private readonly itemsByDecisionRootByEpoch: MapDef> = new MapDef( () => new Map() @@ -57,8 +62,10 @@ export class ShufflingCache { private readonly maxEpochs: number; constructor( - private readonly metrics: Metrics | null = null, - opts: ShufflingCacheOpts = {} + readonly metrics: Metrics | null = null, + readonly logger: Logger | null = null, + opts: ShufflingCacheOpts = {}, + precalculatedShufflings?: {shuffling: EpochShuffling | null; decisionRoot: RootHex}[] ) { if (metrics) { metrics.shufflingCache.size.addCollect(() => @@ -69,66 +76,25 @@ export class ShufflingCache { } this.maxEpochs = opts.maxShufflingCacheEpochs ?? 
MAX_EPOCHS; - } - - /** - * Extract shuffling from state and add to cache - */ - processState(state: CachedBeaconStateAllForks, shufflingEpoch: Epoch): EpochShuffling { - const decisionBlockHex = getDecisionBlock(state, shufflingEpoch); - let shuffling: EpochShuffling; - switch (shufflingEpoch) { - case state.epochCtx.nextShuffling.epoch: - shuffling = state.epochCtx.nextShuffling; - break; - case state.epochCtx.currentShuffling.epoch: - shuffling = state.epochCtx.currentShuffling; - break; - case state.epochCtx.previousShuffling.epoch: - shuffling = state.epochCtx.previousShuffling; - break; - default: - throw new Error(`Shuffling not found from state ${state.slot} for epoch ${shufflingEpoch}`); - } - let cacheItem = this.itemsByDecisionRootByEpoch.getOrDefault(shufflingEpoch).get(decisionBlockHex); - if (cacheItem !== undefined) { - // update existing promise - if (isPromiseCacheItem(cacheItem)) { - // unblock consumers of this promise - cacheItem.resolveFn(shuffling); - // then update item type to shuffling - cacheItem = { - type: CacheItemType.shuffling, - shuffling, - }; - this.add(shufflingEpoch, decisionBlockHex, cacheItem); - // we updated type to CacheItemType.shuffling so the above fields are not used anyway - this.metrics?.shufflingCache.processStateUpdatePromise.inc(); - } else { - // ShufflingCacheItem, do nothing - this.metrics?.shufflingCache.processStateNoOp.inc(); + precalculatedShufflings?.map(({shuffling, decisionRoot}) => { + if (shuffling !== null) { + this.set(shuffling, decisionRoot); } - } else { - // not found, new shuffling - this.add(shufflingEpoch, decisionBlockHex, {type: CacheItemType.shuffling, shuffling}); - this.metrics?.shufflingCache.processStateInsertNew.inc(); - } - - return shuffling; + }); } /** * Insert a promise to make sure we don't regen state for the same shuffling. * Bound by MAX_SHUFFLING_PROMISE to make sure our node does not blow up. 
*/ - insertPromise(shufflingEpoch: Epoch, decisionRootHex: RootHex): void { + insertPromise(epoch: Epoch, decisionRoot: RootHex): void { const promiseCount = Array.from(this.itemsByDecisionRootByEpoch.values()) .flatMap((innerMap) => Array.from(innerMap.values())) .filter((item) => isPromiseCacheItem(item)).length; if (promiseCount >= MAX_PROMISES) { throw new Error( - `Too many shuffling promises: ${promiseCount}, shufflingEpoch: ${shufflingEpoch}, decisionRootHex: ${decisionRootHex}` + `Too many shuffling promises: ${promiseCount}, shufflingEpoch: ${epoch}, decisionRootHex: ${decisionRoot}` ); } let resolveFn: ((shuffling: EpochShuffling) => void) | null = null; @@ -141,10 +107,11 @@ export class ShufflingCache { const cacheItem: PromiseCacheItem = { type: CacheItemType.promise, + timeInsertedMs: Date.now(), promise, resolveFn, }; - this.add(shufflingEpoch, decisionRootHex, cacheItem); + this.itemsByDecisionRootByEpoch.getOrDefault(epoch).set(decisionRoot, cacheItem); this.metrics?.shufflingCache.insertPromiseCount.inc(); } @@ -153,39 +120,95 @@ export class ShufflingCache { * If there's a promise, it means we are computing the same shuffling, so we wait for the promise to resolve. * Return null if we don't have a shuffling for this epoch and dependentRootHex. */ - async get(shufflingEpoch: Epoch, decisionRootHex: RootHex): Promise { - const cacheItem = this.itemsByDecisionRootByEpoch.getOrDefault(shufflingEpoch).get(decisionRootHex); + async get(epoch: Epoch, decisionRoot: RootHex): Promise { + const cacheItem = this.itemsByDecisionRootByEpoch.getOrDefault(epoch).get(decisionRoot); if (cacheItem === undefined) { + this.metrics?.shufflingCache.miss.inc(); return null; } if (isShufflingCacheItem(cacheItem)) { + this.metrics?.shufflingCache.hit.inc(); return cacheItem.shuffling; } else { - // promise + this.metrics?.shufflingCache.shufflingPromiseNotResolved.inc(); return cacheItem.promise; } } /** - * Same to get() function but synchronous. 
+ * Gets a cached shuffling via the epoch and decision root. If the shuffling is not + * available it will build it synchronously and return the shuffling. + * + * NOTE: If a shuffling is already queued and not calculated it will build and resolve + * the promise but the already queued build will happen at some later time */ - getSync(shufflingEpoch: Epoch, decisionRootHex: RootHex): EpochShuffling | null { - const cacheItem = this.itemsByDecisionRootByEpoch.getOrDefault(shufflingEpoch).get(decisionRootHex); - if (cacheItem === undefined) { - return null; + getSync( + epoch: Epoch, + decisionRoot: RootHex, + buildProps?: T + ): T extends ShufflingBuildProps ? EpochShuffling : EpochShuffling | null { + const cacheItem = this.itemsByDecisionRootByEpoch.getOrDefault(epoch).get(decisionRoot); + if (!cacheItem) { + this.metrics?.shufflingCache.miss.inc(); + } else if (isShufflingCacheItem(cacheItem)) { + this.metrics?.shufflingCache.hit.inc(); + return cacheItem.shuffling; + } else if (buildProps) { + // TODO: (@matthewkeil) This should possible log a warning?? + this.metrics?.shufflingCache.shufflingPromiseNotResolvedAndThrownAway.inc(); + } else { + this.metrics?.shufflingCache.shufflingPromiseNotResolved.inc(); } - if (isShufflingCacheItem(cacheItem)) { - return cacheItem.shuffling; + let shuffling: EpochShuffling | null = null; + if (buildProps) { + const timer = this.metrics?.shufflingCache.shufflingCalculationTime.startTimer({source: "getSync"}); + shuffling = computeEpochShuffling(buildProps.state, buildProps.activeIndices, epoch); + timer?.(); + this.set(shuffling, decisionRoot); } + return shuffling as T extends ShufflingBuildProps ? 
EpochShuffling : EpochShuffling | null; + } - // ignore promise - return null; + /** + * Queue asynchronous build for an EpochShuffling, triggered from state-transition + */ + build(epoch: number, decisionRoot: string, state: BeaconStateAllForks, activeIndices: Uint32Array): void { + this.insertPromise(epoch, decisionRoot); + /** + * TODO: (@matthewkeil) This will get replaced by a proper build queue and a worker to do calculations + * on a NICE thread with a rust implementation + */ + callInNextEventLoop(() => { + const timer = this.metrics?.shufflingCache.shufflingCalculationTime.startTimer({source: "build"}); + const shuffling = computeEpochShuffling(state, activeIndices, epoch); + timer?.(); + this.set(shuffling, decisionRoot); + }); } - private add(shufflingEpoch: Epoch, decisionBlock: RootHex, cacheItem: CacheItem): void { - this.itemsByDecisionRootByEpoch.getOrDefault(shufflingEpoch).set(decisionBlock, cacheItem); + /** + * Add an EpochShuffling to the ShufflingCache. If a promise for the shuffling is present it will + * resolve the promise with the built shuffling + */ + private set(shuffling: EpochShuffling, decisionRoot: string): void { + const shufflingAtEpoch = this.itemsByDecisionRootByEpoch.getOrDefault(shuffling.epoch); + // if a pending shuffling promise exists, resolve it + const cacheItem = shufflingAtEpoch.get(decisionRoot); + if (cacheItem) { + if (isPromiseCacheItem(cacheItem)) { + cacheItem.resolveFn(shuffling); + this.metrics?.shufflingCache.shufflingPromiseResolutionTime.observe( + (Date.now() - cacheItem.timeInsertedMs) / 1000 + ); + } else { + this.metrics?.shufflingCache.shufflingBuiltMultipleTimes.inc(); + } + } + // set the shuffling + shufflingAtEpoch.set(decisionRoot, {type: CacheItemType.shuffling, shuffling}); + // prune the cache pruneSetToMax(this.itemsByDecisionRootByEpoch, this.maxEpochs); } } @@ -198,13 +221,14 @@ function isPromiseCacheItem(item: CacheItem): item is PromiseCacheItem { return item.type === 
CacheItemType.promise; } -/** - * Get the shuffling decision block root for the given epoch of given state - * - Special case close to genesis block, return the genesis block root - * - This is similar to forkchoice.getDependentRoot() function, otherwise we cannot get cached shuffing in attestation verification when syncing from genesis. - */ -function getDecisionBlock(state: CachedBeaconStateAllForks, epoch: Epoch): RootHex { - return state.slot > GENESIS_SLOT - ? getShufflingDecisionBlock(state, epoch) - : toHexString(ssz.phase0.BeaconBlockHeader.hashTreeRoot(computeAnchorCheckpoint(state.config, state).blockHeader)); +export enum ShufflingCacheErrorCode { + NO_SHUFFLING_FOUND = "SHUFFLING_CACHE_ERROR_NO_SHUFFLING_FOUND", } + +type ShufflingCacheErrorType = { + code: ShufflingCacheErrorCode.NO_SHUFFLING_FOUND; + epoch: Epoch; + decisionRoot: RootHex; +}; + +export class ShufflingCacheError extends LodestarError {} diff --git a/packages/beacon-node/src/chain/stateCache/blockStateCacheImpl.ts b/packages/beacon-node/src/chain/stateCache/blockStateCacheImpl.ts index fdeb3ed5a659..1cb67cd6cf09 100644 --- a/packages/beacon-node/src/chain/stateCache/blockStateCacheImpl.ts +++ b/packages/beacon-node/src/chain/stateCache/blockStateCacheImpl.ts @@ -1,7 +1,7 @@ -import {toHexString} from "@chainsafe/ssz"; import {Epoch, RootHex} from "@lodestar/types"; import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; import {routes} from "@lodestar/api"; +import {toRootHex} from "@lodestar/utils"; import {Metrics} from "../../metrics/index.js"; import {StateCloneOpts} from "../regen/interface.js"; import {MapTracker} from "./mapMetrics.js"; @@ -34,7 +34,7 @@ export class BlockStateCacheImpl implements BlockStateCache { this.maxStates = maxStates; this.cache = new MapTracker(metrics?.stateCache); if (metrics) { - this.metrics = metrics.stateCache; + this.metrics = {...metrics.stateCache, ...metrics.epochCache}; metrics.stateCache.size.addCollect(() => 
metrics.stateCache.size.set(this.cache.size)); } } @@ -53,7 +53,7 @@ export class BlockStateCacheImpl implements BlockStateCache { } add(item: CachedBeaconStateAllForks): void { - const key = toHexString(item.hashTreeRoot()); + const key = toRootHex(item.hashTreeRoot()); if (this.cache.get(key)) { return; } @@ -70,7 +70,7 @@ export class BlockStateCacheImpl implements BlockStateCache { setHeadState(item: CachedBeaconStateAllForks | null): void { if (item) { - const key = toHexString(item.hashTreeRoot()); + const key = toRootHex(item.hashTreeRoot()); this.head = {state: item, stateRoot: key}; } else { this.head = null; @@ -130,13 +130,17 @@ export class BlockStateCacheImpl implements BlockStateCache { dumpSummary(): routes.lodestar.StateCacheItem[] { return Array.from(this.cache.entries()).map(([key, state]) => ({ slot: state.slot, - root: toHexString(state.hashTreeRoot()), + root: toRootHex(state.hashTreeRoot()), reads: this.cache.readCount.get(key) ?? 0, lastRead: this.cache.lastRead.get(key) ?? 
0, checkpointState: false, })); } + getStates(): IterableIterator { + return this.cache.values(); + } + private deleteAllEpochItems(epoch: Epoch): void { for (const rootHex of this.epochIndex.get(epoch) || []) { this.cache.delete(rootHex); diff --git a/packages/beacon-node/src/chain/stateCache/datastore/file.ts b/packages/beacon-node/src/chain/stateCache/datastore/file.ts index 6529d12f84db..f487079ae443 100644 --- a/packages/beacon-node/src/chain/stateCache/datastore/file.ts +++ b/packages/beacon-node/src/chain/stateCache/datastore/file.ts @@ -1,6 +1,6 @@ import path from "node:path"; -import {toHexString, fromHexString} from "@chainsafe/ssz"; import {phase0, ssz} from "@lodestar/types"; +import {fromHex, toHex} from "@lodestar/utils"; import {ensureDir, readFile, readFileNames, removeFile, writeIfNotExist} from "../../../util/file.js"; import {CPStateDatastore, DatastoreKey} from "./types.js"; @@ -28,18 +28,18 @@ export class FileCPStateDatastore implements CPStateDatastore { async write(cpKey: phase0.Checkpoint, stateBytes: Uint8Array): Promise { const serializedCheckpoint = ssz.phase0.Checkpoint.serialize(cpKey); - const filePath = path.join(this.folderPath, toHexString(serializedCheckpoint)); + const filePath = path.join(this.folderPath, toHex(serializedCheckpoint)); await writeIfNotExist(filePath, stateBytes); return serializedCheckpoint; } async remove(serializedCheckpoint: DatastoreKey): Promise { - const filePath = path.join(this.folderPath, toHexString(serializedCheckpoint)); + const filePath = path.join(this.folderPath, toHex(serializedCheckpoint)); await removeFile(filePath); } async read(serializedCheckpoint: DatastoreKey): Promise { - const filePath = path.join(this.folderPath, toHexString(serializedCheckpoint)); + const filePath = path.join(this.folderPath, toHex(serializedCheckpoint)); return readFile(filePath); } @@ -47,6 +47,6 @@ export class FileCPStateDatastore implements CPStateDatastore { const fileNames = await readFileNames(this.folderPath); 
return fileNames .filter((fileName) => fileName.startsWith("0x") && fileName.length === CHECKPOINT_FILE_NAME_LENGTH) - .map((fileName) => fromHexString(fileName)); + .map((fileName) => fromHex(fileName)); } } diff --git a/packages/beacon-node/src/chain/stateCache/fifoBlockStateCache.ts b/packages/beacon-node/src/chain/stateCache/fifoBlockStateCache.ts index 93b581633c05..7766daf3c5b3 100644 --- a/packages/beacon-node/src/chain/stateCache/fifoBlockStateCache.ts +++ b/packages/beacon-node/src/chain/stateCache/fifoBlockStateCache.ts @@ -1,7 +1,7 @@ -import {toHexString} from "@chainsafe/ssz"; import {RootHex} from "@lodestar/types"; import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; import {routes} from "@lodestar/api"; +import {toRootHex} from "@lodestar/utils"; import {Metrics} from "../../metrics/index.js"; import {LinkedList} from "../../util/array.js"; import {StateCloneOpts} from "../regen/interface.js"; @@ -13,9 +13,14 @@ export type FIFOBlockStateCacheOpts = { }; /** - * Regen state if there's a reorg distance > 32 slots. + * Given `maxSkipSlots` = 32 and `DEFAULT_EARLIEST_PERMISSIBLE_SLOT_DISTANCE` = 32, lodestar doesn't need to + * reload states in order to process a gossip block. + * + * |-----------------------------------------------|-----------------------------------------------| + * maxSkipSlots DEFAULT_EARLIEST_PERMISSIBLE_SLOT_DISTANCE ^ + * clock slot */ -export const DEFAULT_MAX_BLOCK_STATES = 32; +export const DEFAULT_MAX_BLOCK_STATES = 64; /** * New implementation of BlockStateCache that keeps the most recent n states consistently @@ -107,7 +112,7 @@ export class FIFOBlockStateCache implements BlockStateCache { * In importBlock() steps, normally it'll call add() with isHead = false first. Then call setHeadState() to set the head. 
*/ add(item: CachedBeaconStateAllForks, isHead = false): void { - const key = toHexString(item.hashTreeRoot()); + const key = toRootHex(item.hashTreeRoot()); if (this.cache.get(key) != null) { if (!this.keyOrder.has(key)) { throw Error(`State exists but key not found in keyOrder: ${key}`); @@ -183,13 +188,17 @@ export class FIFOBlockStateCache implements BlockStateCache { dumpSummary(): routes.lodestar.StateCacheItem[] { return Array.from(this.cache.entries()).map(([key, state]) => ({ slot: state.slot, - root: toHexString(state.hashTreeRoot()), + root: toRootHex(state.hashTreeRoot()), reads: this.cache.readCount.get(key) ?? 0, lastRead: this.cache.lastRead.get(key) ?? 0, checkpointState: false, })); } + getStates(): IterableIterator { + return this.cache.values(); + } + /** * For unit test only. */ diff --git a/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts index 37c67c0d86b4..38aeabb97955 100644 --- a/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts +++ b/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts @@ -1,7 +1,6 @@ -import {toHexString} from "@chainsafe/ssz"; import {phase0, Epoch, RootHex} from "@lodestar/types"; import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; -import {MapDef} from "@lodestar/utils"; +import {MapDef, toRootHex} from "@lodestar/utils"; import {routes} from "@lodestar/api"; import {Metrics} from "../../metrics/index.js"; import {StateCloneOpts} from "../regen/interface.js"; @@ -144,7 +143,7 @@ export class InMemoryCheckpointStateCache implements CheckpointStateCache { delete(cp: phase0.Checkpoint): void { this.cache.delete(toCheckpointKey(toCheckpointHex(cp))); - const epochKey = toHexString(cp.root); + const epochKey = toRootHex(cp.root); const value = this.epochIndex.get(cp.epoch); if (value) { value.delete(epochKey); @@ -170,13 +169,17 @@ export class InMemoryCheckpointStateCache 
implements CheckpointStateCache { dumpSummary(): routes.lodestar.StateCacheItem[] { return Array.from(this.cache.entries()).map(([key, state]) => ({ slot: state.slot, - root: toHexString(state.hashTreeRoot()), + root: toRootHex(state.hashTreeRoot()), reads: this.cache.readCount.get(key) ?? 0, lastRead: this.cache.lastRead.get(key) ?? 0, checkpointState: true, })); } + getStates(): IterableIterator { + return this.cache.values(); + } + /** ONLY FOR DEBUGGING PURPOSES. For spec tests on error */ dumpCheckpointKeys(): string[] { return Array.from(this.cache.keys()); @@ -186,7 +189,7 @@ export class InMemoryCheckpointStateCache implements CheckpointStateCache { export function toCheckpointHex(checkpoint: phase0.Checkpoint): CheckpointHex { return { epoch: checkpoint.epoch, - rootHex: toHexString(checkpoint.root), + rootHex: toRootHex(checkpoint.root), }; } diff --git a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts index 58aeca061bc0..0719efcfd309 100644 --- a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts +++ b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts @@ -1,15 +1,15 @@ -import {fromHexString, toHexString} from "@chainsafe/ssz"; import {phase0, Epoch, RootHex} from "@lodestar/types"; import {CachedBeaconStateAllForks, computeStartSlotAtEpoch, getBlockRootAtSlot} from "@lodestar/state-transition"; -import {Logger, MapDef, sleep} from "@lodestar/utils"; +import {Logger, MapDef, fromHex, sleep, toHex, toRootHex} from "@lodestar/utils"; import {routes} from "@lodestar/api"; import {loadCachedBeaconState} from "@lodestar/state-transition"; import {INTERVALS_PER_SLOT} from "@lodestar/params"; import {Metrics} from "../../metrics/index.js"; import {IClock} from "../../util/clock.js"; import {ShufflingCache} from "../shufflingCache.js"; -import {BufferPool, BufferWithKey} from "../../util/bufferPool.js"; +import 
{AllocSource, BufferPool, BufferWithKey} from "../../util/bufferPool.js"; import {StateCloneOpts} from "../regen/interface.js"; +import {serializeState} from "../serializeState.js"; import {MapTracker} from "./mapMetrics.js"; import {CPStateDatastore, DatastoreKey, datastoreKeyToCheckpoint} from "./datastore/index.js"; import {CheckpointHex, CacheItemType, CheckpointStateCache, BlockStateCache} from "./types.js"; @@ -29,7 +29,7 @@ type PersistentCheckpointStateCacheModules = { shufflingCache: ShufflingCache; datastore: CPStateDatastore; blockStateCache: BlockStateCache; - bufferPool?: BufferPool; + bufferPool?: BufferPool | null; }; /** checkpoint serialized as a string */ @@ -53,10 +53,11 @@ type CacheItem = InMemoryCacheItem | PersistedCacheItem; type LoadedStateBytesData = {persistedKey: DatastoreKey; stateBytes: Uint8Array}; /** - * Before n-historical states, lodestar keeps mostly 3 states in memory with 1 finalized state - * Since Jan 2024, lodestar stores the finalized state in disk and keeps up to 2 epochs in memory + * Before n-historical states, lodestar keeps all checkpoint states since finalized + * Since Sep 2024, lodestar stores 3 most recent checkpoint states in memory and the rest on disk. The finalized state + * may not be available in memory, and stay on disk instead. 
*/ -export const DEFAULT_MAX_CP_STATE_EPOCHS_IN_MEMORY = 2; +export const DEFAULT_MAX_CP_STATE_EPOCHS_IN_MEMORY = 3; /** * An implementation of CheckpointStateCache that keep up to n epoch checkpoint states in memory and persist the rest to disk @@ -94,7 +95,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { private readonly cache: MapTracker; /** Epoch -> Set */ private readonly epochIndex = new MapDef>(() => new Set()); - private readonly metrics: Metrics["cpStateCache"] | null | undefined; + private readonly metrics: Metrics | null | undefined; private readonly logger: Logger; private readonly clock: IClock | null | undefined; private readonly signal: AbortSignal | undefined; @@ -106,7 +107,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { private readonly datastore: CPStateDatastore; private readonly shufflingCache: ShufflingCache; private readonly blockStateCache: BlockStateCache; - private readonly bufferPool?: BufferPool; + private readonly bufferPool?: BufferPool | null; constructor( { @@ -123,7 +124,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { ) { this.cache = new MapTracker(metrics?.cpStateCache); if (metrics) { - this.metrics = metrics.cpStateCache; + this.metrics = metrics; metrics.cpStateCache.size.addCollect(() => { let persistCount = 0; let inMemoryCount = 0; @@ -171,7 +172,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { for (const persistedKey of persistedKeys) { const cp = datastoreKeyToCheckpoint(persistedKey); this.cache.set(toCacheKey(cp), {type: CacheItemType.persisted, value: persistedKey}); - this.epochIndex.getOrDefault(cp.epoch).add(toHexString(cp.root)); + this.epochIndex.getOrDefault(cp.epoch).add(toRootHex(cp.root)); } this.logger.info("Loaded persisted checkpoint states from the last run", { count: persistedKeys.length, @@ -192,43 +193,36 @@ export class PersistentCheckpointStateCache implements 
CheckpointStateCache { return stateOrStateBytesData?.clone(opts?.dontTransferCache) ?? null; } const {persistedKey, stateBytes} = stateOrStateBytesData; - const logMeta = {persistedKey: toHexString(persistedKey)}; + const logMeta = {persistedKey: toHex(persistedKey)}; this.logger.debug("Reload: read state successful", logMeta); - this.metrics?.stateReloadSecFromSlot.observe(this.clock?.secFromSlot(this.clock?.currentSlot ?? 0) ?? 0); + this.metrics?.cpStateCache.stateReloadSecFromSlot.observe( + this.clock?.secFromSlot(this.clock?.currentSlot ?? 0) ?? 0 + ); const seedState = this.findSeedStateToReload(cp); - this.metrics?.stateReloadEpochDiff.observe(Math.abs(seedState.epochCtx.epoch - cp.epoch)); + this.metrics?.cpStateCache.stateReloadEpochDiff.observe(Math.abs(seedState.epochCtx.epoch - cp.epoch)); this.logger.debug("Reload: found seed state", {...logMeta, seedSlot: seedState.slot}); try { // 80% of validators serialization time comes from memory allocation, this is to avoid it - const sszTimer = this.metrics?.stateReloadValidatorsSerializeDuration.startTimer(); + const sszTimer = this.metrics?.cpStateCache.stateReloadValidatorsSerializeDuration.startTimer(); // automatically free the buffer pool after this scope using validatorsBytesWithKey = this.serializeStateValidators(seedState); let validatorsBytes = validatorsBytesWithKey?.buffer; if (validatorsBytes == null) { // fallback logic in case we can't use the buffer pool - this.metrics?.stateReloadValidatorsSerializeAllocCount.inc(); + this.metrics?.cpStateCache.stateReloadValidatorsSerializeAllocCount.inc(); validatorsBytes = seedState.validators.serialize(); } sszTimer?.(); - const timer = this.metrics?.stateReloadDuration.startTimer(); - const newCachedState = loadCachedBeaconState( - seedState, - stateBytes, - { - shufflingGetter: (shufflingEpoch, decisionRootHex) => { - const shuffling = this.shufflingCache.getSync(shufflingEpoch, decisionRootHex); - if (shuffling == null) { - 
this.metrics?.stateReloadShufflingCacheMiss.inc(); - } - return shuffling; - }, - }, - validatorsBytes - ); + const timer = this.metrics?.cpStateCache.stateReloadDuration.startTimer(); + const newCachedState = loadCachedBeaconState(seedState, stateBytes, {}, validatorsBytes); newCachedState.commit(); - const stateRoot = toHexString(newCachedState.hashTreeRoot()); + const stateRoot = toRootHex(newCachedState.hashTreeRoot()); timer?.(); + + // load all cache in order for consumers (usually regen.getState()) to process blocks faster + newCachedState.validators.getAllReadonlyValues(); + newCachedState.balances.getAll(); this.logger.debug("Reload: cached state load successful", { ...logMeta, stateSlot: newCachedState.slot, @@ -284,7 +278,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { } const persistedKey = cacheItem.value; - const dbReadTimer = this.metrics?.stateReloadDbReadTime.startTimer(); + const dbReadTimer = this.metrics?.cpStateCache.stateReloadDbReadTime.startTimer(); const stateBytes = await this.datastore.read(persistedKey); dbReadTimer?.(); @@ -298,7 +292,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { * Similar to get() api without reloading from disk */ get(cpOrKey: CheckpointHex | string, opts?: StateCloneOpts): CachedBeaconStateAllForks | null { - this.metrics?.lookups.inc(); + this.metrics?.cpStateCache.lookups.inc(); const cpKey = typeof cpOrKey === "string" ? cpOrKey : toCacheKey(cpOrKey); const cacheItem = this.cache.get(cpKey); @@ -306,7 +300,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { return null; } - this.metrics?.hits.inc(); + this.metrics?.cpStateCache.hits.inc(); if (cpKey === this.preComputedCheckpoint) { this.preComputedCheckpointHits = (this.preComputedCheckpointHits ?? 
0) + 1; @@ -314,7 +308,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { if (isInMemoryCacheItem(cacheItem)) { const {state} = cacheItem; - this.metrics?.stateClonedCount.observe(state.clonedCount); + this.metrics?.cpStateCache.stateClonedCount.observe(state.clonedCount); return state.clone(opts?.dontTransferCache); } @@ -328,7 +322,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { const cpHex = toCheckpointHex(cp); const key = toCacheKey(cpHex); const cacheItem = this.cache.get(key); - this.metrics?.adds.inc(); + this.metrics?.cpStateCache.adds.inc(); if (cacheItem !== undefined && isPersistedCacheItem(cacheItem)) { const persistedKey = cacheItem.value; // was persisted to disk, set back to memory @@ -336,7 +330,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { this.logger.verbose("Added checkpoint state to memory but a persisted key existed", { epoch: cp.epoch, rootHex: cpHex.rootHex, - persistedKey: toHexString(persistedKey), + persistedKey: toHex(persistedKey), }); } else { this.cache.set(key, {type: CacheItemType.inMemory, state}); @@ -561,7 +555,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { // amongst states of the same epoch, choose the one with the same view of reloadedCp if ( reloadedCpSlot < state.slot && - toHexString(getBlockRootAtSlot(state, reloadedCpSlot)) === reloadedCp.rootHex + toRootHex(getBlockRootAtSlot(state, reloadedCpSlot)) === reloadedCp.rootHex ) { return state; } @@ -594,6 +588,14 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { }); } + getStates(): IterableIterator { + const items = Array.from(this.cache.values()) + .filter(isInMemoryCacheItem) + .map((item) => item.state); + + return items.values(); + } + /** ONLY FOR DEBUGGING PURPOSES. 
For spec tests on error */ dumpCheckpointKeys(): string[] { return Array.from(this.cache.keys()); @@ -644,9 +646,9 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { let persistCount = 0; const epochBoundarySlot = computeStartSlotAtEpoch(epoch); const epochBoundaryRoot = - epochBoundarySlot === state.slot ? fromHexString(blockRootHex) : getBlockRootAtSlot(state, epochBoundarySlot); - const epochBoundaryHex = toHexString(epochBoundaryRoot); - const prevEpochRoot = toHexString(getBlockRootAtSlot(state, epochBoundarySlot - 1)); + epochBoundarySlot === state.slot ? fromHex(blockRootHex) : getBlockRootAtSlot(state, epochBoundarySlot); + const epochBoundaryHex = toRootHex(epochBoundaryRoot); + const prevEpochRoot = toRootHex(getBlockRootAtSlot(state, epochBoundarySlot - 1)); // for each epoch, usually there are 2 rootHexes respective to the 2 checkpoint states: Previous Root Checkpoint State and Current Root Checkpoint State const cpRootHexes = this.epochIndex.get(epoch) ?? []; @@ -675,7 +677,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { stateSlot: state.slot, rootHex, epochBoundaryHex, - persistedKey: persistedKey ? toHexString(persistedKey) : "", + persistedKey: persistedKey ? toHex(persistedKey) : "", }; if (persistedRootHexes.has(rootHex)) { @@ -684,25 +686,33 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { this.logger.verbose("Pruned checkpoint state from memory but no need to persist", logMeta); } else { // persist and do not update epochIndex - this.metrics?.statePersistSecFromSlot.observe(this.clock?.secFromSlot(this.clock?.currentSlot ?? 0) ?? 
0); - const cpPersist = {epoch: epoch, root: fromHexString(rootHex)}; - { - const timer = this.metrics?.stateSerializeDuration.startTimer(); - // automatically free the buffer pool after this scope - using stateBytesWithKey = this.serializeState(state); - let stateBytes = stateBytesWithKey?.buffer; - if (stateBytes == null) { - // fallback logic to use regular way to get state ssz bytes - this.metrics?.persistedStateAllocCount.inc(); - stateBytes = state.serialize(); - } - timer?.(); - persistedKey = await this.datastore.write(cpPersist, stateBytes); - } + this.metrics?.cpStateCache.statePersistSecFromSlot.observe( + this.clock?.secFromSlot(this.clock?.currentSlot ?? 0) ?? 0 + ); + const cpPersist = {epoch: epoch, root: fromHex(rootHex)}; + // It's not sustainable to allocate ~240MB for each state every epoch, so we use buffer pool to reuse the memory. + // As monitored on holesky as of Jan 2024: + // - This does not increase heap allocation while gc time is the same + // - It helps stabilize persist time and save ~300ms in average (1.5s vs 1.2s) + // - It also helps the state reload to save ~500ms in average (4.3s vs 3.8s) + // - Also `serializeState.test.ts` perf test shows a lot of differences allocating ~240MB once vs per state serialization + const timer = this.metrics?.stateSerializeDuration.startTimer({ + source: AllocSource.PERSISTENT_CHECKPOINTS_CACHE_STATE, + }); + persistedKey = await serializeState( + state, + AllocSource.PERSISTENT_CHECKPOINTS_CACHE_STATE, + (stateBytes) => { + timer?.(); + return this.datastore.write(cpPersist, stateBytes); + }, + this.bufferPool + ); + persistCount++; this.logger.verbose("Pruned checkpoint state from memory and persisted to disk", { ...logMeta, - persistedKey: toHexString(persistedKey), + persistedKey: toHex(persistedKey), }); } // overwrite cpKey, this means the state is deleted from memory @@ -718,7 +728,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { this.cache.delete(cpKey); 
this.epochIndex.get(epoch)?.delete(rootHex); } - this.metrics?.statePruneFromMemoryCount.inc(); + this.metrics?.cpStateCache.statePruneFromMemoryCount.inc(); this.logger.verbose("Pruned checkpoint state from memory", logMeta); } } @@ -742,7 +752,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { if (persistedKey) { await this.datastore.remove(persistedKey); persistCount++; - this.metrics?.persistedStateRemoveCount.inc(); + this.metrics?.cpStateCache.persistedStateRemoveCount.inc(); } } this.cache.delete(key); @@ -755,29 +765,6 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { }); } - /* - * It's not sustainable to allocate ~240MB for each state every epoch, so we use buffer pool to reuse the memory. - * As monitored on holesky as of Jan 2024: - * - This does not increase heap allocation while gc time is the same - * - It helps stabilize persist time and save ~300ms in average (1.5s vs 1.2s) - * - It also helps the state reload to save ~500ms in average (4.3s vs 3.8s) - * - Also `serializeState.test.ts` perf test shows a lot of differences allocating ~240MB once vs per state serialization - */ - private serializeState(state: CachedBeaconStateAllForks): BufferWithKey | null { - const size = state.type.tree_serializedSize(state.node); - if (this.bufferPool) { - const bufferWithKey = this.bufferPool.alloc(size); - if (bufferWithKey) { - const stateBytes = bufferWithKey.buffer; - const dataView = new DataView(stateBytes.buffer, stateBytes.byteOffset, stateBytes.byteLength); - state.serializeToBytes({uint8Array: stateBytes, dataView}, 0); - return bufferWithKey; - } - } - - return null; - } - /** * Serialize validators to bytes leveraging the buffer pool to save memory allocation. 
* - As monitored on holesky as of Jan 2024, it helps save ~500ms state reload time (4.3s vs 3.8s) @@ -788,7 +775,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { const type = state.type.fields.validators; const size = type.tree_serializedSize(state.validators.node); if (this.bufferPool) { - const bufferWithKey = this.bufferPool.alloc(size); + const bufferWithKey = this.bufferPool.alloc(size, AllocSource.PERSISTENT_CHECKPOINTS_CACHE_VALIDATORS); if (bufferWithKey) { const validatorsBytes = bufferWithKey.buffer; const dataView = new DataView(validatorsBytes.buffer, validatorsBytes.byteOffset, validatorsBytes.byteLength); @@ -804,7 +791,7 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { export function toCheckpointHex(checkpoint: phase0.Checkpoint): CheckpointHex { return { epoch: checkpoint.epoch, - rootHex: toHexString(checkpoint.root), + rootHex: toRootHex(checkpoint.root), }; } @@ -812,7 +799,7 @@ function toCacheKey(cp: CheckpointHex | phase0.Checkpoint): CacheKey { if (isCheckpointHex(cp)) { return `${cp.rootHex}_${cp.epoch}`; } - return `${toHexString(cp.root)}_${cp.epoch}`; + return `${toRootHex(cp.root)}_${cp.epoch}`; } function fromCacheKey(key: CacheKey): CheckpointHex { diff --git a/packages/beacon-node/src/chain/stateCache/types.ts b/packages/beacon-node/src/chain/stateCache/types.ts index 41e9b91aaa43..1e8d6bd1bd62 100644 --- a/packages/beacon-node/src/chain/stateCache/types.ts +++ b/packages/beacon-node/src/chain/stateCache/types.ts @@ -33,6 +33,8 @@ export interface BlockStateCache { prune(headStateRootHex: RootHex): void; deleteAllBeforeEpoch(finalizedEpoch: Epoch): void; dumpSummary(): routes.lodestar.StateCacheItem[]; + /** Expose beacon states stored in cache. 
Use with caution */ + getStates(): IterableIterator; } /** @@ -74,6 +76,8 @@ export interface CheckpointStateCache { processState(blockRootHex: RootHex, state: CachedBeaconStateAllForks): Promise; clear(): void; dumpSummary(): routes.lodestar.StateCacheItem[]; + /** Expose beacon states stored in cache. Use with caution */ + getStates(): IterableIterator; } export enum CacheItemType { diff --git a/packages/beacon-node/src/chain/validation/aggregateAndProof.ts b/packages/beacon-node/src/chain/validation/aggregateAndProof.ts index 430464683493..39a3700aacf9 100644 --- a/packages/beacon-node/src/chain/validation/aggregateAndProof.ts +++ b/packages/beacon-node/src/chain/validation/aggregateAndProof.ts @@ -1,26 +1,26 @@ -import {toHexString} from "@chainsafe/ssz"; -import {ForkName} from "@lodestar/params"; -import {phase0, RootHex, ssz} from "@lodestar/types"; +import {ForkName, ForkSeq} from "@lodestar/params"; +import {electra, phase0, RootHex, ssz, IndexedAttestation, SignedAggregateAndProof} from "@lodestar/types"; import { computeEpochAtSlot, isAggregatorFromCommitteeLength, createAggregateSignatureSetFromComponents, } from "@lodestar/state-transition"; +import {toRootHex} from "@lodestar/utils"; import {IBeaconChain} from ".."; import {AttestationError, AttestationErrorCode, GossipAction} from "../errors/index.js"; import {RegenCaller} from "../regen/index.js"; -import {getAttDataBase64FromSignedAggregateAndProofSerialized} from "../../util/sszBytes.js"; import {getSelectionProofSignatureSet, getAggregateAndProofSignatureSet} from "./signatureSets/index.js"; import { getAttestationDataSigningRoot, getCommitteeIndices, + getSeenAttDataKeyFromSignedAggregateAndProof, getShufflingForAttestationVerification, verifyHeadBlockAndTargetRoot, verifyPropagationSlotRange, } from "./attestation.js"; export type AggregateAndProofValidationResult = { - indexedAttestation: phase0.IndexedAttestation; + indexedAttestation: IndexedAttestation; committeeIndices: Uint32Array; 
attDataRootHex: RootHex; }; @@ -28,7 +28,7 @@ export type AggregateAndProofValidationResult = { export async function validateApiAggregateAndProof( fork: ForkName, chain: IBeaconChain, - signedAggregateAndProof: phase0.SignedAggregateAndProof + signedAggregateAndProof: SignedAggregateAndProof ): Promise { const skipValidationKnownAttesters = true; const prioritizeBls = true; @@ -41,7 +41,7 @@ export async function validateApiAggregateAndProof( export async function validateGossipAggregateAndProof( fork: ForkName, chain: IBeaconChain, - signedAggregateAndProof: phase0.SignedAggregateAndProof, + signedAggregateAndProof: SignedAggregateAndProof, serializedData: Uint8Array ): Promise { return validateAggregateAndProof(fork, chain, signedAggregateAndProof, serializedData); @@ -50,7 +50,7 @@ export async function validateGossipAggregateAndProof( async function validateAggregateAndProof( fork: ForkName, chain: IBeaconChain, - signedAggregateAndProof: phase0.SignedAggregateAndProof, + signedAggregateAndProof: SignedAggregateAndProof, serializedData: Uint8Array | null = null, opts: {skipValidationKnownAttesters: boolean; prioritizeBls: boolean} = { skipValidationKnownAttesters: false, @@ -71,10 +71,24 @@ async function validateAggregateAndProof( const attData = aggregate.data; const attSlot = attData.slot; - const attDataBase64 = serializedData ? getAttDataBase64FromSignedAggregateAndProofSerialized(serializedData) : null; - const cachedAttData = attDataBase64 ? chain.seenAttestationDatas.get(attSlot, attDataBase64) : null; + const seenAttDataKey = serializedData ? getSeenAttDataKeyFromSignedAggregateAndProof(fork, serializedData) : null; + const cachedAttData = seenAttDataKey ? 
chain.seenAttestationDatas.get(attSlot, seenAttDataKey) : null; + + let attIndex; + if (ForkSeq[fork] >= ForkSeq.electra) { + attIndex = (aggregate as electra.Attestation).committeeBits.getSingleTrueBit(); + // [REJECT] len(committee_indices) == 1, where committee_indices = get_committee_indices(aggregate) + if (attIndex === null) { + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.NOT_EXACTLY_ONE_COMMITTEE_BIT_SET}); + } + // [REJECT] aggregate.data.index == 0 + if (attData.index !== 0) { + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.NON_ZERO_ATTESTATION_DATA_INDEX}); + } + } else { + attIndex = attData.index; + } - const attIndex = attData.index; const attEpoch = computeEpochAtSlot(attSlot); const attTarget = attData.target; const targetEpoch = attTarget.epoch; @@ -111,7 +125,7 @@ async function validateAggregateAndProof( // is a non-strict superset has _not_ already been seen. const attDataRootHex = cachedAttData ? cachedAttData.attDataRootHex - : toHexString(ssz.phase0.AttestationData.hashTreeRoot(attData)); + : toRootHex(ssz.phase0.AttestationData.hashTreeRoot(attData)); if ( !skipValidationKnownAttesters && chain.seenAggregatedAttestations.isKnown(targetEpoch, attDataRootHex, aggregationBits) @@ -154,7 +168,7 @@ async function validateAggregateAndProof( // [REJECT] The committee index is within the expected range // -- i.e. data.index < get_committee_count_per_slot(state, data.target.epoch) const committeeIndices = cachedAttData - ? cachedAttData.committeeIndices + ? 
cachedAttData.committeeValidatorIndices : getCommitteeIndices(shuffling, attSlot, attIndex); // [REJECT] The number of aggregation bits matches the committee size @@ -163,11 +177,16 @@ async function validateAggregateAndProof( throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.WRONG_NUMBER_OF_AGGREGATION_BITS}); } const attestingIndices = aggregate.aggregationBits.intersectValues(committeeIndices); - const indexedAttestation: phase0.IndexedAttestation = { + + const indexedAttestationContent: IndexedAttestation = { attestingIndices, data: attData, signature: aggregate.signature, }; + const indexedAttestation = + ForkSeq[fork] >= ForkSeq.electra + ? (indexedAttestationContent as electra.IndexedAttestation) + : (indexedAttestationContent as phase0.IndexedAttestation); // TODO: Check this before regen // [REJECT] The attestation has participants -- that is, diff --git a/packages/beacon-node/src/chain/validation/attestation.ts b/packages/beacon-node/src/chain/validation/attestation.ts index fc39534b45e6..119a8ee1a899 100644 --- a/packages/beacon-node/src/chain/validation/attestation.ts +++ b/packages/beacon-node/src/chain/validation/attestation.ts @@ -1,7 +1,26 @@ -import {toHexString} from "@chainsafe/ssz"; -import {phase0, Epoch, Root, Slot, RootHex, ssz} from "@lodestar/types"; +import {BitArray} from "@chainsafe/ssz"; +import { + phase0, + Epoch, + Root, + Slot, + RootHex, + ssz, + electra, + isElectraAttestation, + CommitteeIndex, + Attestation, + IndexedAttestation, +} from "@lodestar/types"; import {ProtoBlock} from "@lodestar/fork-choice"; -import {ATTESTATION_SUBNET_COUNT, SLOTS_PER_EPOCH, ForkName, ForkSeq, DOMAIN_BEACON_ATTESTER} from "@lodestar/params"; +import { + ATTESTATION_SUBNET_COUNT, + SLOTS_PER_EPOCH, + ForkName, + ForkSeq, + DOMAIN_BEACON_ATTESTER, + isForkPostElectra, +} from "@lodestar/params"; import { computeEpochAtSlot, createSingleSignatureSetFromComponents, @@ -13,16 +32,19 @@ import { computeSigningRoot, } from 
"@lodestar/state-transition"; import {BeaconConfig} from "@lodestar/config"; +import {toRootHex} from "@lodestar/utils"; import {AttestationError, AttestationErrorCode, GossipAction} from "../errors/index.js"; import {MAXIMUM_GOSSIP_CLOCK_DISPARITY_SEC} from "../../constants/index.js"; import {RegenCaller} from "../regen/index.js"; import { - AttDataBase64, getAggregationBitsFromAttestationSerialized, - getAttDataBase64FromAttestationSerialized, + getAttDataFromSignedAggregateAndProofElectra, + getCommitteeBitsFromAttestationSerialized, + getCommitteeBitsFromSignedAggregateAndProofElectra, + getAttDataFromSignedAggregateAndProofPhase0, getSignatureFromAttestationSerialized, } from "../../util/sszBytes.js"; -import {AttestationDataCacheEntry} from "../seenCache/seenAttestationData.js"; +import {AttestationDataCacheEntry, SeenAttDataKey} from "../seenCache/seenAttestationData.js"; import {sszDeserializeAttestation} from "../../network/gossip/topic.js"; import {Result, wrapError} from "../../util/wrapError.js"; import {IBeaconChain} from "../interface.js"; @@ -34,16 +56,17 @@ export type BatchResult = { }; export type AttestationValidationResult = { - attestation: phase0.Attestation; - indexedAttestation: phase0.IndexedAttestation; + attestation: Attestation; + indexedAttestation: IndexedAttestation; subnet: number; attDataRootHex: RootHex; + committeeIndex: CommitteeIndex; }; export type AttestationOrBytes = ApiAttestation | GossipAttestation; /** attestation from api */ -export type ApiAttestation = {attestation: phase0.Attestation; serializedData: null}; +export type ApiAttestation = {attestation: Attestation; serializedData: null}; /** attestation from gossip */ export type GossipAttestation = { @@ -51,7 +74,8 @@ export type GossipAttestation = { serializedData: Uint8Array; // available in NetworkProcessor since we check for unknown block root attestations attSlot: Slot; - attDataBase64?: string | null; + // for indexed gossip queue we have attDataBase64 + 
attDataBase64: SeenAttDataKey; }; export type Step0Result = AttestationValidationResult & { @@ -59,20 +83,6 @@ export type Step0Result = AttestationValidationResult & { validatorIndex: number; }; -/** - * Validate a single gossip attestation, do not prioritize bls signature set - */ -export async function validateGossipAttestation( - fork: ForkName, - chain: IBeaconChain, - attestationOrBytes: GossipAttestation, - /** Optional, to allow verifying attestations through API with unknown subnet */ - subnet: number -): Promise { - const prioritizeBls = false; - return validateAttestation(fork, chain, attestationOrBytes, subnet, prioritizeBls); -} - /** * Verify gossip attestations of the same attestation data. The main advantage is we can batch verify bls signatures * through verifySignatureSetsSameMessage bls api to improve performance. @@ -82,10 +92,10 @@ export async function validateGossipAttestation( export async function validateGossipAttestationsSameAttData( fork: ForkName, chain: IBeaconChain, - attestationOrBytesArr: AttestationOrBytes[], + attestationOrBytesArr: GossipAttestation[], subnet: number, // for unit test, consumers do not need to pass this - step0ValidationFn = validateGossipAttestationNoSignatureCheck + step0ValidationFn = validateAttestationNoSignatureCheck ): Promise { if (attestationOrBytesArr.length === 0) { return {results: [], batchableBls: false}; @@ -187,22 +197,10 @@ export async function validateApiAttestation( attestationOrBytes: ApiAttestation ): Promise { const prioritizeBls = true; - return validateAttestation(fork, chain, attestationOrBytes, null, prioritizeBls); -} + const subnet = null; -/** - * Validate a single unaggregated attestation - * subnet is null for api attestations - */ -export async function validateAttestation( - fork: ForkName, - chain: IBeaconChain, - attestationOrBytes: AttestationOrBytes, - subnet: number | null, - prioritizeBls = false -): Promise { try { - const step0Result = await 
validateGossipAttestationNoSignatureCheck(fork, chain, attestationOrBytes, subnet); + const step0Result = await validateAttestationNoSignatureCheck(fork, chain, attestationOrBytes, subnet); const {attestation, signatureSet, validatorIndex} = step0Result; const isValid = await chain.bls.verifySignatureSets([signatureSet], {batchable: true, priority: prioritizeBls}); @@ -230,7 +228,7 @@ export async function validateAttestation( * Only deserialize the attestation if needed, use the cached AttestationData instead * This is to avoid deserializing similar attestation multiple times which could help the gc */ -async function validateGossipAttestationNoSignatureCheck( +async function validateAttestationNoSignatureCheck( fork: ForkName, chain: IBeaconChain, attestationOrBytes: AttestationOrBytes, @@ -248,19 +246,16 @@ async function validateGossipAttestationNoSignatureCheck( // Run the checks that happen before an indexed attestation is constructed. let attestationOrCache: - | {attestation: phase0.Attestation; cache: null} + | {attestation: Attestation; cache: null} | {attestation: null; cache: AttestationDataCacheEntry; serializedData: Uint8Array}; - let attDataBase64: AttDataBase64 | null = null; + let attDataKey: SeenAttDataKey | null = null; if (attestationOrBytes.serializedData) { // gossip const attSlot = attestationOrBytes.attSlot; - // for old LIFO linear gossip queue we don't have attDataBase64 - // for indexed gossip queue we have attDataBase64 - attDataBase64 = - attestationOrBytes.attDataBase64 ?? getAttDataBase64FromAttestationSerialized(attestationOrBytes.serializedData); - const cachedAttData = attDataBase64 !== null ? chain.seenAttestationDatas.get(attSlot, attDataBase64) : null; + attDataKey = getSeenAttDataKeyFromGossipAttestation(fork, attestationOrBytes); + const cachedAttData = attDataKey !== null ? 
chain.seenAttestationDatas.get(attSlot, attDataKey) : null; if (cachedAttData === null) { - const attestation = sszDeserializeAttestation(attestationOrBytes.serializedData); + const attestation = sszDeserializeAttestation(fork, attestationOrBytes.serializedData); // only deserialize on the first AttestationData that's not cached attestationOrCache = {attestation, cache: null}; } else { @@ -268,7 +263,7 @@ async function validateGossipAttestationNoSignatureCheck( } } else { // api - attDataBase64 = null; + attDataKey = null; attestationOrCache = {attestation: attestationOrBytes.attestation, cache: null}; } @@ -276,10 +271,37 @@ async function validateGossipAttestationNoSignatureCheck( ? attestationOrCache.attestation.data : attestationOrCache.cache.attestationData; const attSlot = attData.slot; - const attIndex = attData.index; const attEpoch = computeEpochAtSlot(attSlot); const attTarget = attData.target; const targetEpoch = attTarget.epoch; + let committeeIndex; + if (attestationOrCache.attestation) { + if (isElectraAttestation(attestationOrCache.attestation)) { + // api or first time validation of a gossip attestation + const {committeeBits} = attestationOrCache.attestation; + // throw in both in case of undefined and null + if (committeeBits == null) { + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.INVALID_SERIALIZED_BYTES}); + } + + committeeIndex = committeeBits.getSingleTrueBit(); + // [REJECT] len(committee_indices) == 1, where committee_indices = get_committee_indices(aggregate) + if (committeeIndex === null) { + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.NOT_EXACTLY_ONE_COMMITTEE_BIT_SET}); + } + + // [REJECT] aggregate.data.index == 0 + if (attData.index !== 0) { + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.NON_ZERO_ATTESTATION_DATA_INDEX}); + } + } else { + // phase0 attestation + committeeIndex = attData.index; + } + } else { + // found a seen 
AttestationData + committeeIndex = attestationOrCache.cache.committeeIndex; + } chain.metrics?.gossipAttestation.attestationSlotToClockSlot.observe( {caller: RegenCaller.validateGossipAttestation}, @@ -305,7 +327,7 @@ async function validateGossipAttestationNoSignatureCheck( // > TODO: Do this check **before** getting the target state but don't recompute zipIndexes const aggregationBits = attestationOrCache.attestation ? attestationOrCache.attestation.aggregationBits - : getAggregationBitsFromAttestationSerialized(attestationOrCache.serializedData); + : getAggregationBitsFromAttestationSerialized(fork, attestationOrCache.serializedData); if (aggregationBits === null) { throw new AttestationError(GossipAction.REJECT, { code: AttestationErrorCode.INVALID_SERIALIZED_BYTES, @@ -319,11 +341,11 @@ async function validateGossipAttestationNoSignatureCheck( }); } - let committeeIndices: Uint32Array; + let committeeValidatorIndices: Uint32Array; let getSigningRoot: () => Uint8Array; let expectedSubnet: number; if (attestationOrCache.cache) { - committeeIndices = attestationOrCache.cache.committeeIndices; + committeeValidatorIndices = attestationOrCache.cache.committeeValidatorIndices; const signingRoot = attestationOrCache.cache.signingRoot; getSigningRoot = () => signingRoot; expectedSubnet = attestationOrCache.cache.subnet; @@ -365,17 +387,17 @@ async function validateGossipAttestationNoSignatureCheck( // [REJECT] The committee index is within the expected range // -- i.e. 
data.index < get_committee_count_per_slot(state, data.target.epoch) - committeeIndices = getCommitteeIndices(shuffling, attSlot, attIndex); + committeeValidatorIndices = getCommitteeIndices(shuffling, attSlot, committeeIndex); getSigningRoot = () => getAttestationDataSigningRoot(chain.config, attData); - expectedSubnet = computeSubnetForSlot(shuffling, attSlot, attIndex); + expectedSubnet = computeSubnetForSlot(shuffling, attSlot, committeeIndex); } - const validatorIndex = committeeIndices[bitIndex]; + const validatorIndex = committeeValidatorIndices[bitIndex]; // [REJECT] The number of aggregation bits matches the committee size // -- i.e. len(attestation.aggregation_bits) == len(get_beacon_committee(state, data.slot, data.index)). // > TODO: Is this necessary? Lighthouse does not do this check. - if (aggregationBits.bitLen !== committeeIndices.length) { + if (aggregationBits.bitLen !== committeeValidatorIndices.length) { throw new AttestationError(GossipAction.REJECT, { code: AttestationErrorCode.WRONG_NUMBER_OF_AGGREGATION_BITS, }); @@ -421,6 +443,7 @@ async function validateGossipAttestationNoSignatureCheck( }); } + let committeeBits: BitArray | undefined = undefined; if (attestationOrCache.cache) { // there could be up to 6% of cpu time to compute signing root if we don't clone the signature set signatureSet = createSingleSignatureSetFromComponents( @@ -429,6 +452,7 @@ async function validateGossipAttestationNoSignatureCheck( signature ); attDataRootHex = attestationOrCache.cache.attDataRootHex; + committeeBits = attestationOrCache.cache.committeeBits; } else { signatureSet = createSingleSignatureSetFromComponents( chain.index2pubkey[validatorIndex], @@ -437,10 +461,16 @@ async function validateGossipAttestationNoSignatureCheck( ); // add cached attestation data before verifying signature - attDataRootHex = toHexString(ssz.phase0.AttestationData.hashTreeRoot(attData)); - if (attDataBase64) { - chain.seenAttestationDatas.add(attSlot, attDataBase64, { - 
committeeIndices, + attDataRootHex = toRootHex(ssz.phase0.AttestationData.hashTreeRoot(attData)); + // if attestation is phase0 the committeeBits is undefined anyway + committeeBits = isElectraAttestation(attestationOrCache.attestation) + ? attestationOrCache.attestation.committeeBits.clone() + : undefined; + if (attDataKey) { + chain.seenAttestationDatas.add(attSlot, attDataKey, { + committeeValidatorIndices, + committeeBits, + committeeIndex, signingRoot: signatureSet.signingRoot, subnet: expectedSubnet, // precompute this to be used in forkchoice @@ -452,20 +482,31 @@ async function validateGossipAttestationNoSignatureCheck( } // no signature check, leave that for step1 - const indexedAttestation: phase0.IndexedAttestation = { + const indexedAttestationContent = { attestingIndices, data: attData, signature, }; + const indexedAttestation = + ForkSeq[fork] >= ForkSeq.electra + ? (indexedAttestationContent as electra.IndexedAttestation) + : (indexedAttestationContent as phase0.IndexedAttestation); - const attestation: phase0.Attestation = attestationOrCache.attestation - ? attestationOrCache.attestation - : { - aggregationBits, - data: attData, - signature, - }; - return {attestation, indexedAttestation, subnet: expectedSubnet, attDataRootHex, signatureSet, validatorIndex}; + const attestation: Attestation = attestationOrCache.attestation ?? 
{ + aggregationBits, + data: attData, + committeeBits, + signature, + }; + return { + attestation, + indexedAttestation, + subnet: expectedSubnet, + attDataRootHex, + signatureSet, + validatorIndex, + committeeIndex, + }; } /** @@ -643,7 +684,7 @@ function verifyHeadBlockIsKnown(chain: IBeaconChain, beaconBlockRoot: Root): Pro if (headBlock === null) { throw new AttestationError(GossipAction.IGNORE, { code: AttestationErrorCode.UNKNOWN_OR_PREFINALIZED_BEACON_BLOCK_ROOT, - root: toHexString(beaconBlockRoot), + root: toRootHex(beaconBlockRoot), }); } @@ -671,7 +712,7 @@ function verifyAttestationTargetRoot(headBlock: ProtoBlock, targetRoot: Root, at // https://github.com/ethereum/consensus-specs/pull/2001#issuecomment-699246659 throw new AttestationError(GossipAction.REJECT, { code: AttestationErrorCode.INVALID_TARGET_ROOT, - targetRoot: toHexString(targetRoot), + targetRoot: toRootHex(targetRoot), expected: null, }); } else { @@ -687,17 +728,21 @@ function verifyAttestationTargetRoot(headBlock: ProtoBlock, targetRoot: Root, at headBlock.blockRoot; // TODO: Do a fast comparision to convert and compare byte by byte - if (expectedTargetRoot !== toHexString(targetRoot)) { + if (expectedTargetRoot !== toRootHex(targetRoot)) { // Reject any attestation with an invalid target root. 
throw new AttestationError(GossipAction.REJECT, { code: AttestationErrorCode.INVALID_TARGET_ROOT, - targetRoot: toHexString(targetRoot), + targetRoot: toRootHex(targetRoot), expected: expectedTargetRoot, }); } } } +/** + * Get a list of indices of validators in the given committee + * attestationIndex - Index of the committee in shuffling.committees + */ export function getCommitteeIndices( shuffling: EpochShuffling, attestationSlot: Slot, @@ -723,3 +768,41 @@ export function computeSubnetForSlot(shuffling: EpochShuffling, slot: number, co const committeesSinceEpochStart = shuffling.committeesPerSlot * slotsSinceEpochStart; return (committeesSinceEpochStart + committeeIndex) % ATTESTATION_SUBNET_COUNT; } + +/** + * Return fork-dependent seen attestation key + * - for pre-electra, it's the AttestationData base64 + * - for electra and later, it's the AttestationData base64 + committeeBits base64 + */ +export function getSeenAttDataKeyFromGossipAttestation( + fork: ForkName, + attestation: GossipAttestation +): SeenAttDataKey | null { + const {attDataBase64, serializedData} = attestation; + if (isForkPostElectra(fork)) { + const committeeBits = getCommitteeBitsFromAttestationSerialized(serializedData); + return attDataBase64 && committeeBits ? 
attDataBase64 + committeeBits : null; + } + + // pre-electra + return attDataBase64; +} + +/** + * Extract attestation data key from SignedAggregateAndProof Uint8Array to use cached data from SeenAttestationDatas + * - for pre-electra, it's the AttestationData base64 + * - for electra and later, it's the AttestationData base64 + committeeBits base64 + */ +export function getSeenAttDataKeyFromSignedAggregateAndProof( + fork: ForkName, + aggregateAndProof: Uint8Array +): SeenAttDataKey | null { + if (isForkPostElectra(fork)) { + const attData = getAttDataFromSignedAggregateAndProofElectra(aggregateAndProof); + const committeeBits = getCommitteeBitsFromSignedAggregateAndProofElectra(aggregateAndProof); + return attData && committeeBits ? attData + committeeBits : null; + } + + // pre-electra + return getAttDataFromSignedAggregateAndProofPhase0(aggregateAndProof); +} diff --git a/packages/beacon-node/src/chain/validation/attesterSlashing.ts b/packages/beacon-node/src/chain/validation/attesterSlashing.ts index 818812526fb3..11a499c9bb53 100644 --- a/packages/beacon-node/src/chain/validation/attesterSlashing.ts +++ b/packages/beacon-node/src/chain/validation/attesterSlashing.ts @@ -9,7 +9,7 @@ import {AttesterSlashingError, AttesterSlashingErrorCode, GossipAction} from ".. 
export async function validateApiAttesterSlashing( chain: IBeaconChain, - attesterSlashing: phase0.AttesterSlashing + attesterSlashing: phase0.AttesterSlashing // TODO Electra: Handle electra.AttesterSlashing ): Promise { const prioritizeBls = true; return validateAttesterSlashing(chain, attesterSlashing, prioritizeBls); diff --git a/packages/beacon-node/src/chain/validation/blobSidecar.ts b/packages/beacon-node/src/chain/validation/blobSidecar.ts index f1ea7bfa95c8..4c82d7be153d 100644 --- a/packages/beacon-node/src/chain/validation/blobSidecar.ts +++ b/packages/beacon-node/src/chain/validation/blobSidecar.ts @@ -1,5 +1,5 @@ import {deneb, Root, Slot, ssz} from "@lodestar/types"; -import {toHex, verifyMerkleBranch} from "@lodestar/utils"; +import {toRootHex, verifyMerkleBranch} from "@lodestar/utils"; import {computeStartSlotAtEpoch, getBlockHeaderProposerSignatureSet} from "@lodestar/state-transition"; import {KZG_COMMITMENT_INCLUSION_PROOF_DEPTH, KZG_COMMITMENT_SUBTREE_INDEX0} from "@lodestar/params"; @@ -57,7 +57,7 @@ export async function validateGossipBlobSidecar( // check, we will load the parent and state from disk only to find out later that we // already know this block. const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blobSidecar.signedBlockHeader.message); - const blockHex = toHex(blockRoot); + const blockHex = toRootHex(blockRoot); if (chain.forkChoice.getBlockHex(blockHex) !== null) { throw new BlobSidecarGossipError(GossipAction.IGNORE, {code: BlobSidecarErrorCode.ALREADY_KNOWN, root: blockHex}); } @@ -68,7 +68,7 @@ export async function validateGossipBlobSidecar( // _[IGNORE]_ The blob's block's parent (defined by `sidecar.block_parent_root`) has been seen (via both // gossip and non-gossip sources) (a client MAY queue blocks for processing once the parent block is // retrieved). 
- const parentRoot = toHex(blobSidecar.signedBlockHeader.message.parentRoot); + const parentRoot = toRootHex(blobSidecar.signedBlockHeader.message.parentRoot); const parentBlock = chain.forkChoice.getBlockHex(parentRoot); if (parentBlock === null) { // If fork choice does *not* consider the parent to be a descendant of the finalized block, @@ -183,9 +183,9 @@ export function validateBlobSidecars( !byteArrayEquals(expectedKzgCommitments[index], blobSidecar.kzgCommitment) ) { throw new Error( - `Invalid blob with slot=${blobBlockHeader.slot} blobBlockRoot=${toHex(blobBlockRoot)} index=${ + `Invalid blob with slot=${blobBlockHeader.slot} blobBlockRoot=${toRootHex(blobBlockRoot)} index=${ blobSidecar.index - } for the block blockRoot=${toHex(blockRoot)} slot=${blockSlot} index=${index}` + } for the block blockRoot=${toRootHex(blockRoot)} slot=${blockSlot} index=${index}` ); } blobs.push(blobSidecar.blob); diff --git a/packages/beacon-node/src/chain/validation/block.ts b/packages/beacon-node/src/chain/validation/block.ts index 214eeaf0ab4e..aabc1b14958a 100644 --- a/packages/beacon-node/src/chain/validation/block.ts +++ b/packages/beacon-node/src/chain/validation/block.ts @@ -1,4 +1,3 @@ -import {toHexString} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; import { computeStartSlotAtEpoch, @@ -8,7 +7,7 @@ import { isExecutionEnabled, getBlockProposerSignatureSet, } from "@lodestar/state-transition"; -import {sleep} from "@lodestar/utils"; +import {sleep, toRootHex} from "@lodestar/utils"; import {ForkName} from "@lodestar/params"; import {SignedBeaconBlock} from "@lodestar/types"; import {MAXIMUM_GOSSIP_CLOCK_DISPARITY} from "../../constants/index.js"; @@ -55,7 +54,7 @@ export async function validateGossipBlock( // reboot if the `observed_block_producers` cache is empty. In that case, without this // check, we will load the parent and state from disk only to find out later that we // already know this block. 
- const blockRoot = toHexString(config.getForkTypes(blockSlot).BeaconBlock.hashTreeRoot(block)); + const blockRoot = toRootHex(config.getForkTypes(blockSlot).BeaconBlock.hashTreeRoot(block)); if (chain.forkChoice.getBlockHex(blockRoot) !== null) { throw new BlockGossipError(GossipAction.IGNORE, {code: BlockErrorCode.ALREADY_KNOWN, root: blockRoot}); } @@ -71,7 +70,7 @@ export async function validateGossipBlock( // [REJECT] The current finalized_checkpoint is an ancestor of block -- i.e. // get_ancestor(store, block.parent_root, compute_start_slot_at_epoch(store.finalized_checkpoint.epoch)) == store.finalized_checkpoint.root - const parentRoot = toHexString(block.parentRoot); + const parentRoot = toRootHex(block.parentRoot); const parentBlock = chain.forkChoice.getBlockHex(parentRoot); if (parentBlock === null) { // If fork choice does *not* consider the parent to be a descendant of the finalized block, diff --git a/packages/beacon-node/src/chain/validation/signatureSets/aggregateAndProof.ts b/packages/beacon-node/src/chain/validation/signatureSets/aggregateAndProof.ts index 59787341cfb9..31d931818595 100644 --- a/packages/beacon-node/src/chain/validation/signatureSets/aggregateAndProof.ts +++ b/packages/beacon-node/src/chain/validation/signatureSets/aggregateAndProof.ts @@ -1,7 +1,7 @@ import {PublicKey} from "@chainsafe/blst"; -import {DOMAIN_AGGREGATE_AND_PROOF} from "@lodestar/params"; -import {ssz} from "@lodestar/types"; -import {Epoch, phase0} from "@lodestar/types"; +import {DOMAIN_AGGREGATE_AND_PROOF, ForkSeq} from "@lodestar/params"; +import {ssz, SignedAggregateAndProof} from "@lodestar/types"; +import {Epoch} from "@lodestar/types"; import { computeSigningRoot, computeStartSlotAtEpoch, @@ -13,7 +13,7 @@ import {BeaconConfig} from "@lodestar/config"; export function getAggregateAndProofSigningRoot( config: BeaconConfig, epoch: Epoch, - aggregateAndProof: phase0.SignedAggregateAndProof + aggregateAndProof: SignedAggregateAndProof ): Uint8Array { // 
previously, we call `const aggregatorDomain = state.config.getDomain(state.slot, DOMAIN_AGGREGATE_AND_PROOF, slot);` // at fork boundary, it's required to dial to target epoch https://github.com/ChainSafe/lodestar/blob/v1.11.3/packages/beacon-node/src/chain/validation/attestation.ts#L573 @@ -21,14 +21,15 @@ export function getAggregateAndProofSigningRoot( const slot = computeStartSlotAtEpoch(epoch); const fork = config.getForkName(slot); const aggregatorDomain = config.getDomainAtFork(fork, DOMAIN_AGGREGATE_AND_PROOF); - return computeSigningRoot(ssz.phase0.AggregateAndProof, aggregateAndProof.message, aggregatorDomain); + const sszType = ForkSeq[fork] >= ForkSeq.electra ? ssz.electra.AggregateAndProof : ssz.phase0.AggregateAndProof; + return computeSigningRoot(sszType, aggregateAndProof.message, aggregatorDomain); } export function getAggregateAndProofSignatureSet( config: BeaconConfig, epoch: Epoch, aggregator: PublicKey, - aggregateAndProof: phase0.SignedAggregateAndProof + aggregateAndProof: SignedAggregateAndProof ): ISignatureSet { return createSingleSignatureSetFromComponents( aggregator, diff --git a/packages/beacon-node/src/chain/validation/syncCommittee.ts b/packages/beacon-node/src/chain/validation/syncCommittee.ts index 43a4c95c59da..f47aa53a314e 100644 --- a/packages/beacon-node/src/chain/validation/syncCommittee.ts +++ b/packages/beacon-node/src/chain/validation/syncCommittee.ts @@ -1,7 +1,7 @@ -import {toHexString} from "@chainsafe/ssz"; import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; import {SYNC_COMMITTEE_SUBNET_SIZE, SYNC_COMMITTEE_SUBNET_COUNT} from "@lodestar/params"; import {altair} from "@lodestar/types"; +import {toRootHex} from "@lodestar/utils"; import {GossipAction, SyncCommitteeError, SyncCommitteeErrorCode} from "../errors/index.js"; import {IBeaconChain} from "../interface.js"; import {getSyncCommitteeSignatureSet} from "./signatureSets/index.js"; @@ -17,7 +17,7 @@ export async function validateGossipSyncCommittee( 
subnet: number ): Promise<{indexInSubcommittee: IndexInSubcommittee}> { const {slot, validatorIndex, beaconBlockRoot} = syncCommittee; - const messageRoot = toHexString(beaconBlockRoot); + const messageRoot = toRootHex(beaconBlockRoot); const headState = chain.getHeadState(); const indexInSubcommittee = validateGossipSyncCommitteeExceptSig(chain, headState, subnet, syncCommittee); diff --git a/packages/beacon-node/src/db/repositories/lightclientCheckpointHeader.ts b/packages/beacon-node/src/db/repositories/lightclientCheckpointHeader.ts index 78f165bb975c..22d6559792eb 100644 --- a/packages/beacon-node/src/db/repositories/lightclientCheckpointHeader.ts +++ b/packages/beacon-node/src/db/repositories/lightclientCheckpointHeader.ts @@ -25,4 +25,8 @@ export class CheckpointHeaderRepository extends Repository { + async getEth1DataAndDeposits(state: CachedBeaconStateAllForks): Promise { + if ( + state.epochCtx.isPostElectra() && + state.eth1DepositIndex >= (state as CachedBeaconStateElectra).depositRequestsStartIndex + ) { + // No need to poll eth1Data since Electra deprecates the mechanism after depositRequestsStartIndex is reached + return {eth1Data: state.eth1Data, deposits: []}; + } const eth1Data = this.forcedEth1DataVote ?? (await this.getEth1Data(state)); const deposits = await this.getDeposits(state, eth1Data); return {eth1Data, deposits}; @@ -141,7 +162,10 @@ export class Eth1DepositDataTracker { * Returns deposits to be included for a given state and eth1Data vote. 
* Requires internal caches to be updated regularly to return good results */ - private async getDeposits(state: BeaconStateAllForks, eth1DataVote: phase0.Eth1Data): Promise { + private async getDeposits( + state: CachedBeaconStateAllForks, + eth1DataVote: phase0.Eth1Data + ): Promise { // No new deposits have to be included, continue if (eth1DataVote.depositCount === state.eth1DepositIndex) { return []; @@ -162,7 +186,7 @@ export class Eth1DepositDataTracker { private async runAutoUpdate(): Promise { let lastRunMs = 0; - while (!this.signal.aborted) { + while (!this.signal.aborted && !this.stopPolling) { lastRunMs = Date.now(); try { diff --git a/packages/beacon-node/src/eth1/eth1MergeBlockTracker.ts b/packages/beacon-node/src/eth1/eth1MergeBlockTracker.ts index 8602b3058180..9ba4a09a5549 100644 --- a/packages/beacon-node/src/eth1/eth1MergeBlockTracker.ts +++ b/packages/beacon-node/src/eth1/eth1MergeBlockTracker.ts @@ -1,7 +1,6 @@ -import {toHexString} from "@chainsafe/ssz"; import {ChainConfig} from "@lodestar/config"; import {RootHex} from "@lodestar/types"; -import {Logger, pruneSetToMax} from "@lodestar/utils"; +import {Logger, pruneSetToMax, toRootHex} from "@lodestar/utils"; import {Metrics} from "../metrics/index.js"; import {ZERO_HASH_HEX} from "../constants/index.js"; import {enumToIndexMap} from "../util/enum.js"; @@ -239,7 +238,7 @@ export class Eth1MergeBlockTracker { private async internalGetTerminalPowBlockFromEth1(): Promise { // Search merge block by hash // Terminal block hash override takes precedence over terminal total difficulty - const terminalBlockHash = toHexString(this.config.TERMINAL_BLOCK_HASH); + const terminalBlockHash = toRootHex(this.config.TERMINAL_BLOCK_HASH); if (terminalBlockHash !== ZERO_HASH_HEX) { const block = await this.getPowBlock(terminalBlockHash); if (block) { diff --git a/packages/beacon-node/src/eth1/index.ts b/packages/beacon-node/src/eth1/index.ts index 9fdba90258a2..7b2ec17496d3 100644 --- 
a/packages/beacon-node/src/eth1/index.ts +++ b/packages/beacon-node/src/eth1/index.ts @@ -1,6 +1,6 @@ -import {fromHexString} from "@chainsafe/ssz"; import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; import {Root} from "@lodestar/types"; +import {fromHex} from "@lodestar/utils"; import {IEth1ForBlockProduction, Eth1DataAndDeposits, IEth1Provider, PowMergeBlock, TDProgress} from "./interface.js"; import {Eth1DepositDataTracker, Eth1DepositDataTrackerModules} from "./eth1DepositDataTracker.js"; import {Eth1MergeBlockTracker, Eth1MergeBlockTrackerModules} from "./eth1MergeBlockTracker.js"; @@ -92,7 +92,7 @@ export class Eth1ForBlockProduction implements IEth1ForBlockProduction { async getTerminalPowBlock(): Promise { const block = await this.eth1MergeBlockTracker.getTerminalPowBlock(); - return block && fromHexString(block.blockHash); + return block && fromHex(block.blockHash); } getPowBlock(powBlockHash: string): Promise { @@ -106,6 +106,10 @@ export class Eth1ForBlockProduction implements IEth1ForBlockProduction { startPollingMergeBlock(): void { return this.eth1MergeBlockTracker.startPollingMergeBlock(); } + + stopPollingEth1Data(): void { + return this.eth1DepositDataTracker?.stopPollingEth1Data(); + } } /** @@ -140,4 +144,8 @@ export class Eth1ForBlockProductionDisabled implements IEth1ForBlockProduction { startPollingMergeBlock(): void { // Ignore } + + stopPollingEth1Data(): void { + // Ignore + } } diff --git a/packages/beacon-node/src/eth1/interface.ts b/packages/beacon-node/src/eth1/interface.ts index fc9626eb5b8a..54fcdd12492f 100644 --- a/packages/beacon-node/src/eth1/interface.ts +++ b/packages/beacon-node/src/eth1/interface.ts @@ -62,6 +62,11 @@ export interface IEth1ForBlockProduction { * - head state not isMergeTransitionComplete */ startPollingMergeBlock(): void; + + /** + * Should stop polling eth1Data after a Electra block is finalized AND deposit_requests_start_index is reached + */ + stopPollingEth1Data(): void; } /** Different 
Eth1Block from phase0.Eth1Block with blockHash */ diff --git a/packages/beacon-node/src/eth1/provider/eth1Provider.ts b/packages/beacon-node/src/eth1/provider/eth1Provider.ts index 3af909cd132e..c86991eb7d15 100644 --- a/packages/beacon-node/src/eth1/provider/eth1Provider.ts +++ b/packages/beacon-node/src/eth1/provider/eth1Provider.ts @@ -1,7 +1,6 @@ -import {toHexString} from "@chainsafe/ssz"; import {phase0} from "@lodestar/types"; import {ChainConfig} from "@lodestar/config"; -import {fromHex, isErrorAborted, createElapsedTimeTracker, toPrintableUrl} from "@lodestar/utils"; +import {fromHex, isErrorAborted, createElapsedTimeTracker, toPrintableUrl, toHex} from "@lodestar/utils"; import {Logger} from "@lodestar/logger"; import {FetchError, isFetchError} from "@lodestar/api"; @@ -72,7 +71,7 @@ export class Eth1Provider implements IEth1Provider { ) { this.logger = opts.logger; this.deployBlock = opts.depositContractDeployBlock ?? 0; - this.depositContractAddress = toHexString(config.DEPOSIT_CONTRACT_ADDRESS); + this.depositContractAddress = toHex(config.DEPOSIT_CONTRACT_ADDRESS); const providerUrls = opts.providerUrls ?? 
DEFAULT_PROVIDER_URLS; this.rpc = new JsonRpcHttpClient(providerUrls, { diff --git a/packages/beacon-node/src/eth1/provider/utils.ts b/packages/beacon-node/src/eth1/provider/utils.ts index 506e4e48711a..096f1d7233c8 100644 --- a/packages/beacon-node/src/eth1/provider/utils.ts +++ b/packages/beacon-node/src/eth1/provider/utils.ts @@ -1,6 +1,5 @@ -import {fromHexString, toHexString} from "@chainsafe/ssz"; import {RootHex} from "@lodestar/types"; -import {bytesToBigInt, bigIntToBytes} from "@lodestar/utils"; +import {bytesToBigInt, bigIntToBytes, toHex, fromHex} from "@lodestar/utils"; import {ErrorParseJson} from "./jsonRpcHttpClient.js"; /** QUANTITY as defined in ethereum execution layer JSON RPC https://eth.wiki/json-rpc/API */ @@ -32,7 +31,7 @@ export function bytesToHex(bytes: Uint8Array): string { return "0x" + bytes[0].toString(16); } - return toHexString(bytes); + return toHex(bytes); } /** @@ -100,7 +99,7 @@ export function bytesToQuantity(bytes: Uint8Array): QUANTITY { * - WRONG: 004200 (must be prefixed 0x) */ export function bytesToData(bytes: Uint8Array): DATA { - return toHexString(bytes); + return toHex(bytes); } /** @@ -108,7 +107,7 @@ export function bytesToData(bytes: Uint8Array): DATA { */ export function dataToBytes(hex: DATA, fixedLength: number | null): Uint8Array { try { - const bytes = fromHexString(hex); + const bytes = fromHex(hex); if (fixedLength != null && bytes.length !== fixedLength) { throw Error(`Wrong data length ${bytes.length} expected ${fixedLength}`); } diff --git a/packages/beacon-node/src/eth1/utils/depositContract.ts b/packages/beacon-node/src/eth1/utils/depositContract.ts index 7247fe8cebaf..b576a3d5f61c 100644 --- a/packages/beacon-node/src/eth1/utils/depositContract.ts +++ b/packages/beacon-node/src/eth1/utils/depositContract.ts @@ -1,6 +1,6 @@ import {Interface} from "@ethersproject/abi"; -import {fromHexString} from "@chainsafe/ssz"; import {phase0, ssz} from "@lodestar/types"; +import {fromHex} from "@lodestar/utils"; 
const depositEventFragment = "event DepositEvent(bytes pubkey, bytes withdrawal_credentials, bytes amount, bytes signature, bytes index)"; @@ -23,15 +23,15 @@ export function parseDepositLog(log: {blockNumber: number; data: string; topics: blockNumber: log.blockNumber, index: parseHexNumLittleEndian(values.index), depositData: { - pubkey: fromHexString(values.pubkey), - withdrawalCredentials: fromHexString(values.withdrawal_credentials), + pubkey: fromHex(values.pubkey), + withdrawalCredentials: fromHex(values.withdrawal_credentials), amount: parseHexNumLittleEndian(values.amount), - signature: fromHexString(values.signature), + signature: fromHex(values.signature), }, }; } function parseHexNumLittleEndian(hex: string): number { // Can't use parseInt() because amount is a hex string in little endian - return ssz.UintNum64.deserialize(fromHexString(hex)); + return ssz.UintNum64.deserialize(fromHex(hex)); } diff --git a/packages/beacon-node/src/eth1/utils/deposits.ts b/packages/beacon-node/src/eth1/utils/deposits.ts index 19544917ffdc..8d0331fc01d6 100644 --- a/packages/beacon-node/src/eth1/utils/deposits.ts +++ b/packages/beacon-node/src/eth1/utils/deposits.ts @@ -1,9 +1,9 @@ import {toGindex, Tree} from "@chainsafe/persistent-merkle-tree"; -import {toHexString} from "@chainsafe/ssz"; -import {MAX_DEPOSITS} from "@lodestar/params"; -import {BeaconStateAllForks} from "@lodestar/state-transition"; +import {toRootHex} from "@lodestar/utils"; +import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; import {phase0, ssz} from "@lodestar/types"; import {FilterOptions} from "@lodestar/db"; +import {getEth1DepositCount} from "@lodestar/state-transition"; import {Eth1Error, Eth1ErrorCode} from "../errors.js"; import {DepositTree} from "../../db/repositories/depositDataRoot.js"; @@ -11,7 +11,7 @@ export type DepositGetter = (indexRange: FilterOptions, eth1Data: pha export async function getDeposits( // eth1_deposit_index represents the next deposit index to be 
added - state: BeaconStateAllForks, + state: CachedBeaconStateAllForks, eth1Data: phase0.Eth1Data, depositsGetter: DepositGetter ): Promise { @@ -22,9 +22,11 @@ export async function getDeposits( throw new Eth1Error({code: Eth1ErrorCode.DEPOSIT_INDEX_TOO_HIGH, depositIndex, depositCount}); } - // Spec v0.12.2 - // assert len(body.deposits) == min(MAX_DEPOSITS, state.eth1_data.deposit_count - state.eth1_deposit_index) - const depositsLen = Math.min(MAX_DEPOSITS, depositCount - depositIndex); + const depositsLen = getEth1DepositCount(state, eth1Data); + + if (depositsLen === 0) { + return []; // If depositsLen === 0, we can return early since no deposit with be returned from depositsGetter + } const indexRange = {gte: depositIndex, lt: depositIndex + depositsLen}; const deposits = await depositsGetter(indexRange, eth1Data); @@ -51,8 +53,8 @@ export function getDepositsWithProofs( if (!ssz.Root.equals(depositRoot, eth1Data.depositRoot)) { throw new Eth1Error({ code: Eth1ErrorCode.WRONG_DEPOSIT_ROOT, - root: toHexString(depositRoot), - expectedRoot: toHexString(eth1Data.depositRoot), + root: toRootHex(depositRoot), + expectedRoot: toRootHex(eth1Data.depositRoot), }); } diff --git a/packages/beacon-node/src/eth1/utils/eth1Vote.ts b/packages/beacon-node/src/eth1/utils/eth1Vote.ts index 3940ccb27bae..7a4e3ddca9b6 100644 --- a/packages/beacon-node/src/eth1/utils/eth1Vote.ts +++ b/packages/beacon-node/src/eth1/utils/eth1Vote.ts @@ -2,7 +2,7 @@ import {EPOCHS_PER_ETH1_VOTING_PERIOD, SLOTS_PER_EPOCH} from "@lodestar/params"; import {ChainForkConfig} from "@lodestar/config"; import {phase0, RootHex} from "@lodestar/types"; import {BeaconStateAllForks, computeTimeAtSlot} from "@lodestar/state-transition"; -import {toHex} from "@lodestar/utils"; +import {toRootHex} from "@lodestar/utils"; export type Eth1DataGetter = ({ timestampRange, @@ -120,7 +120,6 @@ function getKeysWithMaxValue(map: Map): T[] { * ✓ pickEth1Vote - max votes 37.89912 ops/s 26.38583 ms/op - 29 runs 1.27 s */ 
function getEth1DataKey(eth1Data: phase0.Eth1Data): string { - // return toHexString(ssz.phase0.Eth1Data.hashTreeRoot(eth1Data)); return fastSerializeEth1Data(eth1Data); } @@ -128,7 +127,7 @@ function getEth1DataKey(eth1Data: phase0.Eth1Data): string { * Serialize eth1Data types to a unique string ID. It is only used for comparison. */ export function fastSerializeEth1Data(eth1Data: phase0.Eth1Data): string { - return toHex(eth1Data.blockHash) + eth1Data.depositCount.toString(16) + toHex(eth1Data.depositRoot); + return toRootHex(eth1Data.blockHash) + eth1Data.depositCount.toString(16) + toRootHex(eth1Data.depositRoot); } export function votingPeriodStartTime(config: ChainForkConfig, state: BeaconStateAllForks): number { diff --git a/packages/beacon-node/src/execution/engine/http.ts b/packages/beacon-node/src/execution/engine/http.ts index c64a9715589f..b42424b28998 100644 --- a/packages/beacon-node/src/execution/engine/http.ts +++ b/packages/beacon-node/src/execution/engine/http.ts @@ -1,4 +1,4 @@ -import {ExecutionPayload, Root, RootHex, Wei} from "@lodestar/types"; +import {ExecutionPayload, ExecutionRequests, Root, RootHex, Wei} from "@lodestar/types"; import {SLOTS_PER_EPOCH, ForkName, ForkSeq} from "@lodestar/params"; import {Logger} from "@lodestar/logger"; import { @@ -37,6 +37,7 @@ import { ExecutionPayloadBody, assertReqSizeLimit, deserializeExecutionPayloadBody, + serializeExecutionRequests, } from "./types.js"; import {getExecutionEngineState} from "./utils.js"; @@ -195,14 +196,17 @@ export class ExecutionEngineHttp implements IExecutionEngine { fork: ForkName, executionPayload: ExecutionPayload, versionedHashes?: VersionedHashes, - parentBlockRoot?: Root + parentBlockRoot?: Root, + executionRequests?: ExecutionRequests ): Promise { const method = - ForkSeq[fork] >= ForkSeq.deneb - ? "engine_newPayloadV3" - : ForkSeq[fork] >= ForkSeq.capella - ? "engine_newPayloadV2" - : "engine_newPayloadV1"; + ForkSeq[fork] >= ForkSeq.electra + ? 
"engine_newPayloadV4" + : ForkSeq[fork] >= ForkSeq.deneb + ? "engine_newPayloadV3" + : ForkSeq[fork] >= ForkSeq.capella + ? "engine_newPayloadV2" + : "engine_newPayloadV1"; const serializedExecutionPayload = serializeExecutionPayload(fork, executionPayload); @@ -218,12 +222,28 @@ export class ExecutionEngineHttp implements IExecutionEngine { const serializedVersionedHashes = serializeVersionedHashes(versionedHashes); const parentBeaconBlockRoot = serializeBeaconBlockRoot(parentBlockRoot); - const method = "engine_newPayloadV3"; - engineRequest = { - method, - params: [serializedExecutionPayload, serializedVersionedHashes, parentBeaconBlockRoot], - methodOpts: notifyNewPayloadOpts, - }; + if (ForkSeq[fork] >= ForkSeq.electra) { + if (executionRequests === undefined) { + throw Error(`executionRequests required in notifyNewPayload for fork=${fork}`); + } + const serializedExecutionRequests = serializeExecutionRequests(executionRequests); + engineRequest = { + method: "engine_newPayloadV4", + params: [ + serializedExecutionPayload, + serializedVersionedHashes, + parentBeaconBlockRoot, + serializedExecutionRequests, + ], + methodOpts: notifyNewPayloadOpts, + }; + } else { + engineRequest = { + method: "engine_newPayloadV3", + params: [serializedExecutionPayload, serializedVersionedHashes, parentBeaconBlockRoot], + methodOpts: notifyNewPayloadOpts, + }; + } } else { const method = ForkSeq[fork] >= ForkSeq.capella ? "engine_newPayloadV2" : "engine_newPayloadV1"; engineRequest = { @@ -389,14 +409,17 @@ export class ExecutionEngineHttp implements IExecutionEngine { executionPayload: ExecutionPayload; executionPayloadValue: Wei; blobsBundle?: BlobsBundle; + executionRequests?: ExecutionRequests; shouldOverrideBuilder?: boolean; }> { const method = - ForkSeq[fork] >= ForkSeq.deneb - ? "engine_getPayloadV3" - : ForkSeq[fork] >= ForkSeq.capella - ? "engine_getPayloadV2" - : "engine_getPayloadV1"; + ForkSeq[fork] >= ForkSeq.electra + ? 
"engine_getPayloadV4" + : ForkSeq[fork] >= ForkSeq.deneb + ? "engine_getPayloadV3" + : ForkSeq[fork] >= ForkSeq.capella + ? "engine_getPayloadV2" + : "engine_getPayloadV1"; const payloadResponse = await this.rpc.fetchWithRetries< EngineApiRpcReturnTypes[typeof method], EngineApiRpcParamTypes[typeof method] @@ -414,7 +437,7 @@ export class ExecutionEngineHttp implements IExecutionEngine { this.payloadIdCache.prune(); } - async getPayloadBodiesByHash(blockHashes: RootHex[]): Promise<(ExecutionPayloadBody | null)[]> { + async getPayloadBodiesByHash(fork: ForkName, blockHashes: RootHex[]): Promise<(ExecutionPayloadBody | null)[]> { const method = "engine_getPayloadBodiesByHashV1"; assertReqSizeLimit(blockHashes.length, 32); const response = await this.rpc.fetchWithRetries< @@ -425,6 +448,7 @@ export class ExecutionEngineHttp implements IExecutionEngine { } async getPayloadBodiesByRange( + fork: ForkName, startBlockNumber: number, blockCount: number ): Promise<(ExecutionPayloadBody | null)[]> { diff --git a/packages/beacon-node/src/execution/engine/interface.ts b/packages/beacon-node/src/execution/engine/interface.ts index fa1da210cd12..e6f9cfee526b 100644 --- a/packages/beacon-node/src/execution/engine/interface.ts +++ b/packages/beacon-node/src/execution/engine/interface.ts @@ -1,6 +1,6 @@ import {ForkName} from "@lodestar/params"; import {KZGCommitment, Blob, KZGProof} from "@lodestar/types/deneb"; -import {Root, RootHex, capella, Wei, ExecutionPayload} from "@lodestar/types"; +import {Root, RootHex, capella, Wei, ExecutionPayload, ExecutionRequests} from "@lodestar/types"; import {DATA} from "../../eth1/provider/utils.js"; import {PayloadIdCache, PayloadId, WithdrawalV1} from "./payloadIdCache.js"; @@ -134,7 +134,8 @@ export interface IExecutionEngine { fork: ForkName, executionPayload: ExecutionPayload, versionedHashes?: VersionedHashes, - parentBeaconBlockRoot?: Root + parentBeaconBlockRoot?: Root, + executionRequests?: ExecutionRequests ): Promise; /** @@ -171,10 
+172,11 @@ export interface IExecutionEngine { executionPayload: ExecutionPayload; executionPayloadValue: Wei; blobsBundle?: BlobsBundle; + executionRequests?: ExecutionRequests; shouldOverrideBuilder?: boolean; }>; - getPayloadBodiesByHash(blockHash: DATA[]): Promise<(ExecutionPayloadBody | null)[]>; + getPayloadBodiesByHash(fork: ForkName, blockHash: DATA[]): Promise<(ExecutionPayloadBody | null)[]>; - getPayloadBodiesByRange(start: number, count: number): Promise<(ExecutionPayloadBody | null)[]>; + getPayloadBodiesByRange(fork: ForkName, start: number, count: number): Promise<(ExecutionPayloadBody | null)[]>; } diff --git a/packages/beacon-node/src/execution/engine/mock.ts b/packages/beacon-node/src/execution/engine/mock.ts index a99a76508df8..4cff2a8d00f4 100644 --- a/packages/beacon-node/src/execution/engine/mock.ts +++ b/packages/beacon-node/src/execution/engine/mock.ts @@ -35,6 +35,7 @@ export type ExecutionEngineMockOpts = { onlyPredefinedResponses?: boolean; capellaForkTimestamp?: number; denebForkTimestamp?: number; + electraForkTimestamp?: number; }; type ExecutionBlock = { @@ -88,12 +89,14 @@ export class ExecutionEngineMockBackend implements JsonRpcBackend { engine_newPayloadV1: this.notifyNewPayload.bind(this), engine_newPayloadV2: this.notifyNewPayload.bind(this), engine_newPayloadV3: this.notifyNewPayload.bind(this), + engine_newPayloadV4: this.notifyNewPayload.bind(this), engine_forkchoiceUpdatedV1: this.notifyForkchoiceUpdate.bind(this), engine_forkchoiceUpdatedV2: this.notifyForkchoiceUpdate.bind(this), engine_forkchoiceUpdatedV3: this.notifyForkchoiceUpdate.bind(this), engine_getPayloadV1: this.getPayload.bind(this), engine_getPayloadV2: this.getPayload.bind(this), engine_getPayloadV3: this.getPayload.bind(this), + engine_getPayloadV4: this.getPayload.bind(this), engine_getPayloadBodiesByHashV1: this.getPayloadBodiesByHash.bind(this), engine_getPayloadBodiesByRangeV1: this.getPayloadBodiesByRange.bind(this), engine_getClientVersionV1: 
this.getClientVersionV1.bind(this), @@ -394,6 +397,7 @@ export class ExecutionEngineMockBackend implements JsonRpcBackend { } private timestampToFork(timestamp: number): ForkExecution { + if (timestamp > (this.opts.electraForkTimestamp ?? Infinity)) return ForkName.electra; if (timestamp > (this.opts.denebForkTimestamp ?? Infinity)) return ForkName.deneb; if (timestamp > (this.opts.capellaForkTimestamp ?? Infinity)) return ForkName.capella; return ForkName.bellatrix; diff --git a/packages/beacon-node/src/execution/engine/types.ts b/packages/beacon-node/src/execution/engine/types.ts index 85f514c953b0..32fe4cb79d3d 100644 --- a/packages/beacon-node/src/execution/engine/types.ts +++ b/packages/beacon-node/src/execution/engine/types.ts @@ -1,4 +1,4 @@ -import {capella, deneb, Wei, bellatrix, Root, ExecutionPayload} from "@lodestar/types"; +import {capella, deneb, electra, Wei, bellatrix, Root, ExecutionPayload, ExecutionRequests} from "@lodestar/types"; import { BYTES_PER_LOGS_BLOOM, FIELD_ELEMENTS_PER_BLOB, @@ -28,6 +28,7 @@ export type EngineApiRpcParamTypes = { engine_newPayloadV1: [ExecutionPayloadRpc]; engine_newPayloadV2: [ExecutionPayloadRpc]; engine_newPayloadV3: [ExecutionPayloadRpc, VersionedHashesRpc, DATA]; + engine_newPayloadV4: [ExecutionPayloadRpc, VersionedHashesRpc, DATA, ExecutionRequestsRpc]; /** * 1. Object - Payload validity status with respect to the consensus rules: * - blockHash: DATA, 32 Bytes - block hash value of the payload @@ -51,6 +52,7 @@ export type EngineApiRpcParamTypes = { engine_getPayloadV1: [QUANTITY]; engine_getPayloadV2: [QUANTITY]; engine_getPayloadV3: [QUANTITY]; + engine_getPayloadV4: [QUANTITY]; /** * 1. 
Array of DATA - Array of block_hash field values of the ExecutionPayload structure @@ -83,6 +85,7 @@ export type EngineApiRpcReturnTypes = { engine_newPayloadV1: PayloadStatus; engine_newPayloadV2: PayloadStatus; engine_newPayloadV3: PayloadStatus; + engine_newPayloadV4: PayloadStatus; engine_forkchoiceUpdatedV1: { payloadStatus: PayloadStatus; payloadId: QUANTITY | null; @@ -101,6 +104,7 @@ export type EngineApiRpcReturnTypes = { engine_getPayloadV1: ExecutionPayloadRpc; engine_getPayloadV2: ExecutionPayloadResponse; engine_getPayloadV3: ExecutionPayloadResponse; + engine_getPayloadV4: ExecutionPayloadResponse; engine_getPayloadBodiesByHashV1: (ExecutionPayloadBodyRpc | null)[]; @@ -114,13 +118,20 @@ type ExecutionPayloadRpcWithValue = { // even though CL tracks this as executionPayloadValue, EL returns this as blockValue blockValue: QUANTITY; blobsBundle?: BlobsBundleRpc; + requests?: ExecutionRequestsRpc; shouldOverrideBuilder?: boolean; }; type ExecutionPayloadResponse = ExecutionPayloadRpc | ExecutionPayloadRpcWithValue; -export type ExecutionPayloadBodyRpc = {transactions: DATA[]; withdrawals: WithdrawalV1[] | null}; +export type ExecutionPayloadBodyRpc = { + transactions: DATA[]; + withdrawals: WithdrawalV1[] | null | undefined; +}; -export type ExecutionPayloadBody = {transactions: bellatrix.Transaction[]; withdrawals: capella.Withdrawals | null}; +export type ExecutionPayloadBody = { + transactions: bellatrix.Transaction[]; + withdrawals: capella.Withdrawals | null; +}; export type ExecutionPayloadRpc = { parentHash: DATA; // 32 bytes @@ -150,6 +161,30 @@ export type WithdrawalRpc = { amount: QUANTITY; }; +export type ExecutionRequestsRpc = { + deposits: DepositRequestRpc[]; + withdrawals: WithdrawalRequestRpc[]; + consolidations: ConsolidationRequestRpc[]; +}; + +export type DepositRequestRpc = { + pubkey: DATA; + withdrawalCredentials: DATA; + amount: QUANTITY; + signature: DATA; + index: QUANTITY; +}; +export type WithdrawalRequestRpc = { + 
sourceAddress: DATA; + validatorPubkey: DATA; + amount: QUANTITY; +}; +export type ConsolidationRequestRpc = { + sourceAddress: DATA; + sourcePubkey: DATA; + targetPubkey: DATA; +}; + export type VersionedHashesRpc = DATA[]; export type PayloadAttributesRpc = { @@ -212,6 +247,8 @@ export function serializeExecutionPayload(fork: ForkName, data: ExecutionPayload payload.excessBlobGas = numToQuantity(excessBlobGas); } + // No changes in Electra + return payload; } @@ -230,23 +267,27 @@ export function parseExecutionPayload( executionPayload: ExecutionPayload; executionPayloadValue: Wei; blobsBundle?: BlobsBundle; + executionRequests?: ExecutionRequests; shouldOverrideBuilder?: boolean; } { let data: ExecutionPayloadRpc; let executionPayloadValue: Wei; let blobsBundle: BlobsBundle | undefined; + let executionRequests: ExecutionRequests | undefined; let shouldOverrideBuilder: boolean; if (hasPayloadValue(response)) { executionPayloadValue = quantityToBigint(response.blockValue); data = response.executionPayload; blobsBundle = response.blobsBundle ? parseBlobsBundle(response.blobsBundle) : undefined; + executionRequests = response.requests ? deserializeExecutionRequests(response.requests) : undefined; shouldOverrideBuilder = response.shouldOverrideBuilder ?? 
false; } else { data = response; // Just set it to zero as default executionPayloadValue = BigInt(0); blobsBundle = undefined; + executionRequests = undefined; shouldOverrideBuilder = false; } @@ -297,7 +338,9 @@ export function parseExecutionPayload( (executionPayload as deneb.ExecutionPayload).excessBlobGas = quantityToBigint(excessBlobGas); } - return {executionPayload, executionPayloadValue, blobsBundle, shouldOverrideBuilder}; + // No changes in Electra + + return {executionPayload, executionPayloadValue, blobsBundle, executionRequests, shouldOverrideBuilder}; } export function serializePayloadAttributes(data: PayloadAttributes): PayloadAttributesRpc { @@ -363,6 +406,76 @@ export function deserializeWithdrawal(serialized: WithdrawalRpc): capella.Withdr } as capella.Withdrawal; } +function serializeDepositRequest(depositRequest: electra.DepositRequest): DepositRequestRpc { + return { + pubkey: bytesToData(depositRequest.pubkey), + withdrawalCredentials: bytesToData(depositRequest.withdrawalCredentials), + amount: numToQuantity(depositRequest.amount), + signature: bytesToData(depositRequest.signature), + index: numToQuantity(depositRequest.index), + }; +} + +function deserializeDepositRequest(serialized: DepositRequestRpc): electra.DepositRequest { + return { + pubkey: dataToBytes(serialized.pubkey, 48), + withdrawalCredentials: dataToBytes(serialized.withdrawalCredentials, 32), + amount: quantityToNum(serialized.amount), + signature: dataToBytes(serialized.signature, 96), + index: quantityToNum(serialized.index), + } as electra.DepositRequest; +} + +function serializeWithdrawalRequest(withdrawalRequest: electra.WithdrawalRequest): WithdrawalRequestRpc { + return { + sourceAddress: bytesToData(withdrawalRequest.sourceAddress), + validatorPubkey: bytesToData(withdrawalRequest.validatorPubkey), + amount: numToQuantity(withdrawalRequest.amount), + }; +} + +function deserializeWithdrawalRequest(withdrawalRequest: WithdrawalRequestRpc): electra.WithdrawalRequest { + 
return { + sourceAddress: dataToBytes(withdrawalRequest.sourceAddress, 20), + validatorPubkey: dataToBytes(withdrawalRequest.validatorPubkey, 48), + amount: quantityToBigint(withdrawalRequest.amount), + }; +} + +function serializeConsolidationRequest(consolidationRequest: electra.ConsolidationRequest): ConsolidationRequestRpc { + return { + sourceAddress: bytesToData(consolidationRequest.sourceAddress), + sourcePubkey: bytesToData(consolidationRequest.sourcePubkey), + targetPubkey: bytesToData(consolidationRequest.targetPubkey), + }; +} + +function deserializeConsolidationRequest(consolidationRequest: ConsolidationRequestRpc): electra.ConsolidationRequest { + return { + sourceAddress: dataToBytes(consolidationRequest.sourceAddress, 20), + sourcePubkey: dataToBytes(consolidationRequest.sourcePubkey, 48), + targetPubkey: dataToBytes(consolidationRequest.targetPubkey, 48), + }; +} + +export function serializeExecutionRequests(executionRequests: ExecutionRequests): ExecutionRequestsRpc { + const {deposits, withdrawals, consolidations} = executionRequests; + return { + deposits: deposits.map(serializeDepositRequest), + withdrawals: withdrawals.map(serializeWithdrawalRequest), + consolidations: consolidations.map(serializeConsolidationRequest), + }; +} + +export function deserializeExecutionRequests(executionRequests: ExecutionRequestsRpc): ExecutionRequests { + const {deposits, withdrawals, consolidations} = executionRequests; + return { + deposits: deposits.map(deserializeDepositRequest), + withdrawals: withdrawals.map(deserializeWithdrawalRequest), + consolidations: consolidations.map(deserializeConsolidationRequest), + }; +} + export function deserializeExecutionPayloadBody(data: ExecutionPayloadBodyRpc | null): ExecutionPayloadBody | null { return data ? 
{ diff --git a/packages/beacon-node/src/index.ts b/packages/beacon-node/src/index.ts index aa555a1ab0ca..723b56d0b488 100644 --- a/packages/beacon-node/src/index.ts +++ b/packages/beacon-node/src/index.ts @@ -1,4 +1,4 @@ -export {initStateFromAnchorState, initStateFromDb, initStateFromEth1} from "./chain/index.js"; +export {checkAndPersistAnchorState, initStateFromDb, initStateFromEth1} from "./chain/index.js"; export {BeaconDb, type IBeaconDb} from "./db/index.js"; export {Eth1Provider, type IEth1Provider} from "./eth1/index.js"; export {createNodeJsLibp2p, type NodeJsLibp2pOpts} from "./network/index.js"; @@ -20,4 +20,4 @@ export {RestApiServer} from "./api/rest/base.js"; export type {RestApiServerOpts, RestApiServerModules, RestApiServerMetrics} from "./api/rest/base.js"; // Export type util for CLI - TEMP move to lodestar-types eventually -export {getStateTypeFromBytes} from "./util/multifork.js"; +export {getStateTypeFromBytes, getStateSlotFromBytes} from "./util/multifork.js"; diff --git a/packages/beacon-node/src/metrics/metrics/beacon.ts b/packages/beacon-node/src/metrics/metrics/beacon.ts index 141121de9079..949999dbb1f4 100644 --- a/packages/beacon-node/src/metrics/metrics/beacon.ts +++ b/packages/beacon-node/src/metrics/metrics/beacon.ts @@ -120,6 +120,13 @@ export function createBeaconMetrics(register: RegistryMetricCreator) { }), }, + headState: { + unfinalizedPubkeyCacheSize: register.gauge({ + name: "beacon_head_state_unfinalized_pubkey_cache_size", + help: "Current size of the unfinalizedPubkey2Index cache in the head state", + }), + }, + parentBlockDistance: register.histogram({ name: "beacon_imported_block_parent_distance", help: "Histogram of distance to parent block of valid imported blocks", diff --git a/packages/beacon-node/src/metrics/metrics/lodestar.ts b/packages/beacon-node/src/metrics/metrics/lodestar.ts index f43a3f1cdbe6..bac740b7ee04 100644 --- a/packages/beacon-node/src/metrics/metrics/lodestar.ts +++ 
b/packages/beacon-node/src/metrics/metrics/lodestar.ts @@ -18,6 +18,7 @@ import {LodestarMetadata} from "../options.js"; import {RegistryMetricCreator} from "../utils/registryMetricCreator.js"; import {OpSource} from "../validatorMonitor.js"; import {CacheItemType} from "../../chain/stateCache/types.js"; +import {AllocSource} from "../../util/bufferPool.js"; export type LodestarMetrics = ReturnType; @@ -332,6 +333,10 @@ export function createLodestarMetrics( buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5], labelNames: ["source"], }), + numEffectiveBalanceUpdates: register.gauge({ + name: "lodestar_stfn_effective_balance_updates_count", + help: "Total count of effective balance updates", + }), preStateBalancesNodesPopulatedMiss: register.gauge<{source: StateCloneSource}>({ name: "lodestar_stfn_balances_nodes_populated_miss_total", help: "Total count state.balances nodesPopulated is false on stfn", @@ -374,6 +379,17 @@ export function createLodestarMetrics( help: "Total count state.validators nodesPopulated is false on stfn for post state", }), + epochCache: { + finalizedPubkeyDuplicateInsert: register.gauge({ + name: "lodestar_epoch_cache_finalized_pubkey_duplicate_insert_total", + help: "Total count of duplicate insert of finalized pubkeys", + }), + newUnFinalizedPubkey: register.gauge({ + name: "lodestar_epoch_cache_new_unfinalized_pubkey_total", + help: "Total count of unfinalized pubkeys added", + }), + }, + // BLS verifier thread pool and queue bls: { @@ -1150,13 +1166,15 @@ export function createLodestarMetrics( name: "lodestar_buffer_pool_length", help: "Buffer pool length", }), - hits: register.counter({ + hits: register.counter<{source: AllocSource}>({ name: "lodestar_buffer_pool_hits_total", help: "Total number of buffer pool hits", + labelNames: ["source"], }), - misses: register.counter({ + misses: register.counter<{source: AllocSource}>({ name: "lodestar_buffer_pool_misses_total", help: "Total number of buffer pool misses", + labelNames: ["source"], }), grows: 
register.counter({ name: "lodestar_buffer_pool_grows_total", @@ -1200,10 +1218,10 @@ export function createLodestarMetrics( help: "Histogram of cloned count per state every time state.clone() is called", buckets: [1, 2, 5, 10, 50, 250], }), - stateSerializeDuration: register.histogram({ - name: "lodestar_cp_state_cache_state_serialize_seconds", - help: "Histogram of time to serialize state to db", - buckets: [0.1, 0.5, 1, 2, 3, 4], + numStatesUpdated: register.histogram({ + name: "lodestar_cp_state_cache_state_updated_count", + help: "Histogram of number of state cache items updated every time removing and adding pubkeys to pubkey cache", + buckets: [1, 2, 5, 10, 50, 250], }), statePruneFromMemoryCount: register.gauge({ name: "lodestar_cp_state_cache_state_prune_from_memory_count", @@ -1251,10 +1269,6 @@ export function createLodestarMetrics( name: "lodestar_cp_state_cache_persisted_state_remove_count", help: "Total number of persisted states removed", }), - persistedStateAllocCount: register.counter({ - name: "lodestar_cp_state_cache_persisted_state_alloc_count", - help: "Total number time to allocate memory for persisted state", - }), }, balancesCache: { @@ -1278,22 +1292,45 @@ export function createLodestarMetrics( name: "lodestar_shuffling_cache_size", help: "Shuffling cache size", }), - processStateInsertNew: register.gauge({ - name: "lodestar_shuffling_cache_process_state_insert_new_total", - help: "Total number of times processState is called resulting a new shuffling", - }), - processStateUpdatePromise: register.gauge({ - name: "lodestar_shuffling_cache_process_state_update_promise_total", - help: "Total number of times processState is called resulting a promise being updated with shuffling", - }), - processStateNoOp: register.gauge({ - name: "lodestar_shuffling_cache_process_state_no_op_total", - help: "Total number of times processState is called resulting no changes", - }), insertPromiseCount: register.gauge({ name: 
"lodestar_shuffling_cache_insert_promise_count", help: "Total number of times insertPromise is called", }), + hit: register.gauge({ + name: "lodestar_shuffling_cache_hit_count", + help: "Count of shuffling cache hit", + }), + miss: register.gauge({ + name: "lodestar_shuffling_cache_miss_count", + help: "Count of shuffling cache miss", + }), + shufflingBuiltMultipleTimes: register.gauge({ + name: "lodestar_shuffling_cache_recalculated_shuffling_count", + help: "Count of shuffling that were build multiple times", + }), + shufflingPromiseNotResolvedAndThrownAway: register.gauge({ + name: "lodestar_shuffling_cache_promise_not_resolved_and_thrown_away_count", + help: "Count of shuffling cache promises that were discarded and the shuffling was built synchronously", + }), + shufflingPromiseNotResolved: register.gauge({ + name: "lodestar_shuffling_cache_promise_not_resolved_count", + help: "Count of shuffling cache promises that were requested before the promise was resolved", + }), + nextShufflingNotOnEpochCache: register.gauge({ + name: "lodestar_shuffling_cache_next_shuffling_not_on_epoch_cache", + help: "The next shuffling was not on the epoch cache before the epoch transition", + }), + shufflingPromiseResolutionTime: register.histogram({ + name: "lodestar_shuffling_cache_promise_resolution_time_seconds", + help: "Time from promise insertion until promise resolution when shuffling was ready in seconds", + buckets: [0.5, 1, 1.5, 2], + }), + shufflingCalculationTime: register.histogram<{source: "build" | "getSync"}>({ + name: "lodestar_shuffling_cache_shuffling_calculation_time_seconds", + help: "Run time of shuffling calculation", + buckets: [0.5, 0.75, 1, 1.25, 1.5], + labelNames: ["source"], + }), }, seenCache: { @@ -1373,10 +1410,59 @@ export function createLodestarMetrics( help: "regen function total errors", labelNames: ["entrypoint", "caller"], }), + regenFnAddPubkeyTime: register.histogram({ + name: "lodestar_regen_fn_add_pubkey_time_seconds", + help: "Historgram 
of time spent on adding pubkeys to all state cache items in seconds", + buckets: [0.01, 0.1, 0.5, 1, 2, 5], + }), + regenFnDeletePubkeyTime: register.histogram({ + name: "lodestar_regen_fn_delete_pubkey_time_seconds", + help: "Histrogram of time spent on deleting pubkeys from all state cache items in seconds", + buckets: [0.01, 0.1, 0.5, 1], + }), + regenFnNumStatesUpdated: register.histogram({ + name: "lodestar_regen_state_cache_state_updated_count", + help: "Histogram of number of state cache items updated every time removing pubkeys from unfinalized cache", + buckets: [1, 2, 5, 10, 50, 250], + }), unhandledPromiseRejections: register.gauge({ name: "lodestar_unhandled_promise_rejections_total", help: "UnhandledPromiseRejection total count", }), + stateSerializeDuration: register.histogram<{source: AllocSource}>({ + name: "lodestar_state_serialize_seconds", + help: "Histogram of time to serialize state", + labelNames: ["source"], + buckets: [0.1, 0.5, 1, 2, 3, 4], + }), + + // regen.getState metrics + regenGetState: { + blockCount: register.histogram<{caller: RegenCaller}>({ + name: "lodestar_regen_get_state_block_count", + help: "Block count in regen.getState", + labelNames: ["caller"], + buckets: [4, 8, 16, 32, 64], + }), + getSeedState: register.histogram<{caller: RegenCaller}>({ + name: "lodestar_regen_get_state_get_seed_state_seconds", + help: "Duration of get seed state in regen.getState", + labelNames: ["caller"], + buckets: [0.1, 0.5, 1, 2, 3, 4], + }), + loadBlocks: register.histogram<{caller: RegenCaller}>({ + name: "lodestar_regen_get_state_load_blocks_seconds", + help: "Duration of load blocks in regen.getState", + labelNames: ["caller"], + buckets: [0.1, 0.5, 1, 2, 3, 4], + }), + stateTransition: register.histogram<{caller: RegenCaller}>({ + name: "lodestar_regen_get_state_state_transition_seconds", + help: "Duration of state transition in regen.getState", + labelNames: ["caller"], + buckets: [0.1, 0.5, 1, 2, 3, 4], + }), + }, // Precompute next epoch 
transition precomputeNextEpochTransition: { diff --git a/packages/beacon-node/src/metrics/validatorMonitor.ts b/packages/beacon-node/src/metrics/validatorMonitor.ts index e5c8eef83679..34dfa6b72e03 100644 --- a/packages/beacon-node/src/metrics/validatorMonitor.ts +++ b/packages/beacon-node/src/metrics/validatorMonitor.ts @@ -8,12 +8,12 @@ import { getBlockRootAtSlot, ParticipationFlags, } from "@lodestar/state-transition"; -import {LogData, LogHandler, LogLevel, Logger, MapDef, MapDefMax, toHex} from "@lodestar/utils"; +import {LogData, LogHandler, LogLevel, Logger, MapDef, MapDefMax, toRootHex} from "@lodestar/utils"; import {BeaconBlock, RootHex, altair, deneb} from "@lodestar/types"; import {ChainConfig, ChainForkConfig} from "@lodestar/config"; import {ForkSeq, INTERVALS_PER_SLOT, MIN_ATTESTATION_INCLUSION_DELAY, SLOTS_PER_EPOCH} from "@lodestar/params"; import {Epoch, Slot, ValidatorIndex} from "@lodestar/types"; -import {IndexedAttestation, SignedAggregateAndProof} from "@lodestar/types/phase0"; +import {IndexedAttestation, SignedAggregateAndProof} from "@lodestar/types"; import {GENESIS_SLOT} from "../constants/constants.js"; import {LodestarMetrics} from "./metrics/lodestar.js"; @@ -392,7 +392,7 @@ export function createValidatorMonitor( const summary = getEpochSummary(validator, computeEpochAtSlot(block.slot)); summary.blockProposals.push({ - blockRoot: toHex(config.getForkTypes(block.slot).BeaconBlock.hashTreeRoot(block)), + blockRoot: toRootHex(config.getForkTypes(block.slot).BeaconBlock.hashTreeRoot(block)), blockSlot: block.slot, poolSubmitDelaySec: delaySec, successfullyImported: false, @@ -416,7 +416,7 @@ export function createValidatorMonitor( proposal.successfullyImported = true; } else { summary.blockProposals.push({ - blockRoot: toHex(config.getForkTypes(block.slot).BeaconBlock.hashTreeRoot(block)), + blockRoot: toRootHex(config.getForkTypes(block.slot).BeaconBlock.hashTreeRoot(block)), blockSlot: block.slot, poolSubmitDelaySec: null, 
successfullyImported: true, @@ -445,7 +445,7 @@ export function createValidatorMonitor( const attestationSummary = validator.attestations .getOrDefault(indexedAttestation.data.target.epoch) - .getOrDefault(toHex(indexedAttestation.data.target.root)); + .getOrDefault(toRootHex(indexedAttestation.data.target.root)); if ( attestationSummary.poolSubmitDelayMinSec === null || attestationSummary.poolSubmitDelayMinSec > delaySec @@ -494,7 +494,7 @@ export function createValidatorMonitor( validator.attestations .getOrDefault(indexedAttestation.data.target.epoch) - .getOrDefault(toHex(indexedAttestation.data.target.root)) + .getOrDefault(toRootHex(indexedAttestation.data.target.root)) .aggregateInclusionDelaysSec.push(delaySec); } } @@ -533,7 +533,7 @@ export function createValidatorMonitor( validator.attestations .getOrDefault(indexedAttestation.data.target.epoch) - .getOrDefault(toHex(indexedAttestation.data.target.root)) + .getOrDefault(toRootHex(indexedAttestation.data.target.root)) .aggregateInclusionDelaysSec.push(delaySec); } } @@ -577,7 +577,7 @@ export function createValidatorMonitor( validator.attestations .getOrDefault(indexedAttestation.data.target.epoch) - .getOrDefault(toHex(indexedAttestation.data.target.root)) + .getOrDefault(toRootHex(indexedAttestation.data.target.root)) .blockInclusions.push({ blockRoot: inclusionBlockRoot, blockSlot: inclusionBlockSlot, @@ -644,13 +644,20 @@ export function createValidatorMonitor( } // Compute summaries of previous epoch attestation performance - const prevEpoch = Math.max(0, computeEpochAtSlot(headState.slot) - 1); + const prevEpoch = computeEpochAtSlot(headState.slot) - 1; + + // During the end of first epoch, the prev epoch with be -1 + // Skip this as there is no attestation and block proposal summary in epoch -1 + if (prevEpoch === -1) { + return; + } + const rootCache = new RootHexCache(headState); if (config.getForkSeq(headState.slot) >= ForkSeq.altair) { const {previousEpochParticipation} = headState as 
CachedBeaconStateAltair; const prevEpochStartSlot = computeStartSlotAtEpoch(prevEpoch); - const prevEpochTargetRoot = toHex(getBlockRootAtSlot(headState, prevEpochStartSlot)); + const prevEpochTargetRoot = toRootHex(getBlockRootAtSlot(headState, prevEpochStartSlot)); // Check attestation performance for (const [index, validator] of validators.entries()) { @@ -1041,7 +1048,7 @@ export class RootHexCache { getBlockRootAtSlot(slot: Slot): RootHex { let root = this.blockRootSlotCache.get(slot); if (!root) { - root = toHex(getBlockRootAtSlot(this.state, slot)); + root = toRootHex(getBlockRootAtSlot(this.state, slot)); this.blockRootSlotCache.set(slot, root); } return root; diff --git a/packages/beacon-node/src/network/core/networkCore.ts b/packages/beacon-node/src/network/core/networkCore.ts index 07b346bc29e4..83ef4c4fb063 100644 --- a/packages/beacon-node/src/network/core/networkCore.ts +++ b/packages/beacon-node/src/network/core/networkCore.ts @@ -2,13 +2,12 @@ import {Connection, PeerId} from "@libp2p/interface"; import {multiaddr} from "@multiformats/multiaddr"; import {PublishOpts} from "@chainsafe/libp2p-gossipsub/types"; import {PeerScoreStatsDump} from "@chainsafe/libp2p-gossipsub/dist/src/score/peer-score.js"; -import {fromHexString} from "@chainsafe/ssz"; import {ENR} from "@chainsafe/enr"; import {routes} from "@lodestar/api"; import {BeaconConfig} from "@lodestar/config"; import type {LoggerNode} from "@lodestar/logger/node"; import {Epoch, phase0} from "@lodestar/types"; -import {withTimeout} from "@lodestar/utils"; +import {fromHex, withTimeout} from "@lodestar/utils"; import {ForkName} from "@lodestar/params"; import {ResponseIncoming} from "@lodestar/reqresp"; import {Libp2p} from "../interface.js"; @@ -195,7 +194,7 @@ export class NetworkCore implements INetworkCore { await gossip.start(); const enr = opts.discv5?.enr; - const nodeId = enr ? fromHexString(ENR.decodeTxt(enr).nodeId) : null; + const nodeId = enr ? 
fromHex(ENR.decodeTxt(enr).nodeId) : null; const attnetsService = new AttnetsService(config, clock, gossip, metadata, logger, metrics, nodeId, opts); const syncnetsService = new SyncnetsService(config, clock, gossip, metadata, logger, metrics, opts); diff --git a/packages/beacon-node/src/network/gossip/encoding.ts b/packages/beacon-node/src/network/gossip/encoding.ts index 24b9e1f69ee5..7a0c642d8ea4 100644 --- a/packages/beacon-node/src/network/gossip/encoding.ts +++ b/packages/beacon-node/src/network/gossip/encoding.ts @@ -4,7 +4,7 @@ import {digest} from "@chainsafe/as-sha256"; import {RPC} from "@chainsafe/libp2p-gossipsub/message"; import {DataTransform} from "@chainsafe/libp2p-gossipsub/types"; import snappyWasm from "@chainsafe/snappy-wasm"; -import {intToBytes, toHex} from "@lodestar/utils"; +import {intToBytes} from "@lodestar/utils"; import {ForkName} from "@lodestar/params"; import {MESSAGE_DOMAIN_VALID_SNAPPY} from "./constants.js"; import {getGossipSSZType, GossipTopicCache} from "./topic.js"; @@ -19,6 +19,9 @@ const h64Seed = BigInt(Math.floor(Math.random() * 1e9)); const encoder = new snappyWasm.Encoder(); const decoder = new snappyWasm.Decoder(); +// Shared buffer to convert msgId to string +const sharedMsgIdBuf = Buffer.alloc(20); + /** * The function used to generate a gossipsub message id * We use the first 8 bytes of SHA256(data) for content addressing @@ -32,7 +35,9 @@ export function fastMsgIdFn(rpcMsg: RPC.Message): string { } export function msgIdToStrFn(msgId: Uint8Array): string { - return toHex(msgId); + // this is the same logic to `toHex(msgId)` with better performance + sharedMsgIdBuf.set(msgId); + return `0x${sharedMsgIdBuf.toString("hex")}`; } /** diff --git a/packages/beacon-node/src/network/gossip/interface.ts b/packages/beacon-node/src/network/gossip/interface.ts index 25a871b4e2a0..af5e3888d04f 100644 --- a/packages/beacon-node/src/network/gossip/interface.ts +++ b/packages/beacon-node/src/network/gossip/interface.ts @@ -11,6 
+11,8 @@ import { phase0, SignedBeaconBlock, Slot, + Attestation, + SignedAggregateAndProof, } from "@lodestar/types"; import {BeaconConfig} from "@lodestar/config"; import {Logger} from "@lodestar/utils"; @@ -34,6 +36,9 @@ export enum GossipType { bls_to_execution_change = "bls_to_execution_change", } +export type SequentialGossipType = Exclude; +export type BatchGossipType = GossipType.beacon_attestation; + export enum GossipEncoding { ssz_snappy = "ssz_snappy", } @@ -80,8 +85,8 @@ export type SSZTypeOfGossipTopic = T extends {type: infer export type GossipTypeMap = { [GossipType.beacon_block]: SignedBeaconBlock; [GossipType.blob_sidecar]: deneb.BlobSidecar; - [GossipType.beacon_aggregate_and_proof]: phase0.SignedAggregateAndProof; - [GossipType.beacon_attestation]: phase0.Attestation; + [GossipType.beacon_aggregate_and_proof]: SignedAggregateAndProof; + [GossipType.beacon_attestation]: Attestation; [GossipType.voluntary_exit]: phase0.SignedVoluntaryExit; [GossipType.proposer_slashing]: phase0.ProposerSlashing; [GossipType.attester_slashing]: phase0.AttesterSlashing; @@ -95,8 +100,8 @@ export type GossipTypeMap = { export type GossipFnByType = { [GossipType.beacon_block]: (signedBlock: SignedBeaconBlock) => Promise | void; [GossipType.blob_sidecar]: (blobSidecar: deneb.BlobSidecar) => Promise | void; - [GossipType.beacon_aggregate_and_proof]: (aggregateAndProof: phase0.SignedAggregateAndProof) => Promise | void; - [GossipType.beacon_attestation]: (attestation: phase0.Attestation) => Promise | void; + [GossipType.beacon_aggregate_and_proof]: (aggregateAndProof: SignedAggregateAndProof) => Promise | void; + [GossipType.beacon_attestation]: (attestation: Attestation) => Promise | void; [GossipType.voluntary_exit]: (voluntaryExit: phase0.SignedVoluntaryExit) => Promise | void; [GossipType.proposer_slashing]: (proposerSlashing: phase0.ProposerSlashing) => Promise | void; [GossipType.attester_slashing]: (attesterSlashing: phase0.AttesterSlashing) => Promise | void; @@ 
-179,25 +184,25 @@ export type GossipHandlerParamGeneric = { }; export type GossipHandlers = { - [K in GossipType]: DefaultGossipHandler | BatchGossipHandler; + [K in GossipType]: SequentialGossipHandler | BatchGossipHandler; }; -export type DefaultGossipHandler = ( +export type SequentialGossipHandler = ( gossipHandlerParam: GossipHandlerParamGeneric ) => Promise; -export type DefaultGossipHandlers = { - [K in GossipType]: DefaultGossipHandler; +export type SequentialGossipHandlers = { + [K in SequentialGossipType]: SequentialGossipHandler; +}; + +export type BatchGossipHandlers = { + [K in BatchGossipType]: BatchGossipHandler; }; export type BatchGossipHandler = ( gossipHandlerParams: GossipHandlerParamGeneric[] ) => Promise<(null | GossipActionError)[]>; -export type BatchGossipHandlers = { - [K in GossipType]?: BatchGossipHandler; -}; - // eslint-disable-next-line @typescript-eslint/no-explicit-any export type ResolvedType Promise> = F extends (...args: any) => Promise ? T diff --git a/packages/beacon-node/src/network/gossip/topic.ts b/packages/beacon-node/src/network/gossip/topic.ts index 4923b71e6887..b7c7425584c5 100644 --- a/packages/beacon-node/src/network/gossip/topic.ts +++ b/packages/beacon-node/src/network/gossip/topic.ts @@ -1,4 +1,4 @@ -import {phase0, ssz, sszTypesFor} from "@lodestar/types"; +import {ssz, Attestation, sszTypesFor} from "@lodestar/types"; import {ForkDigestContext} from "@lodestar/config"; import { ATTESTATION_SUBNET_COUNT, @@ -87,13 +87,13 @@ export function getGossipSSZType(topic: GossipTopic) { case GossipType.blob_sidecar: return ssz.deneb.BlobSidecar; case GossipType.beacon_aggregate_and_proof: - return ssz.phase0.SignedAggregateAndProof; + return sszTypesFor(topic.fork).SignedAggregateAndProof; case GossipType.beacon_attestation: - return ssz.phase0.Attestation; + return sszTypesFor(topic.fork).Attestation; case GossipType.proposer_slashing: return ssz.phase0.ProposerSlashing; case GossipType.attester_slashing: - return 
ssz.phase0.AttesterSlashing; + return sszTypesFor(topic.fork).AttesterSlashing; case GossipType.voluntary_exit: return ssz.phase0.SignedVoluntaryExit; case GossipType.sync_committee_contribution_and_proof: @@ -128,9 +128,9 @@ export function sszDeserialize(topic: T, serializedData: /** * Deserialize a gossip serialized data into an Attestation object. */ -export function sszDeserializeAttestation(serializedData: Uint8Array): phase0.Attestation { +export function sszDeserializeAttestation(fork: ForkName, serializedData: Uint8Array): Attestation { try { - return ssz.phase0.Attestation.deserialize(serializedData); + return sszTypesFor(fork).Attestation.deserialize(serializedData); } catch (e) { throw new GossipActionError(GossipAction.REJECT, {code: GossipErrorCode.INVALID_SERIALIZED_BYTES_ERROR_CODE}); } diff --git a/packages/beacon-node/src/network/interface.ts b/packages/beacon-node/src/network/interface.ts index 5012650e229a..8d73379af221 100644 --- a/packages/beacon-node/src/network/interface.ts +++ b/packages/beacon-node/src/network/interface.ts @@ -26,6 +26,7 @@ import { capella, deneb, phase0, + SignedAggregateAndProof, } from "@lodestar/types"; import {PeerIdStr} from "../util/peerId.js"; import {INetworkEventBus} from "./events.js"; @@ -71,7 +72,7 @@ export interface INetwork extends INetworkCorePublic { // Gossip publishBeaconBlock(signedBlock: SignedBeaconBlock): Promise; publishBlobSidecar(blobSidecar: deneb.BlobSidecar): Promise; - publishBeaconAggregateAndProof(aggregateAndProof: phase0.SignedAggregateAndProof): Promise; + publishBeaconAggregateAndProof(aggregateAndProof: SignedAggregateAndProof): Promise; publishBeaconAttestation(attestation: phase0.Attestation, subnet: number): Promise; publishVoluntaryExit(voluntaryExit: phase0.SignedVoluntaryExit): Promise; publishBlsToExecutionChange(blsToExecutionChange: capella.SignedBLSToExecutionChange): Promise; diff --git a/packages/beacon-node/src/network/network.ts 
b/packages/beacon-node/src/network/network.ts index 52b9d85c0064..1b3ccaaaf75a 100644 --- a/packages/beacon-node/src/network/network.ts +++ b/packages/beacon-node/src/network/network.ts @@ -17,6 +17,7 @@ import { LightClientFinalityUpdate, LightClientOptimisticUpdate, LightClientUpdate, + SignedAggregateAndProof, } from "@lodestar/types"; import {routes} from "@lodestar/api"; import {ResponseIncoming} from "@lodestar/reqresp"; @@ -316,7 +317,7 @@ export class Network implements INetwork { }); } - async publishBeaconAggregateAndProof(aggregateAndProof: phase0.SignedAggregateAndProof): Promise { + async publishBeaconAggregateAndProof(aggregateAndProof: SignedAggregateAndProof): Promise { const fork = this.config.getForkName(aggregateAndProof.message.aggregate.data.slot); return this.publishGossip( {type: GossipType.beacon_aggregate_and_proof, fork}, diff --git a/packages/beacon-node/src/network/options.ts b/packages/beacon-node/src/network/options.ts index d2070873261b..ebb321584d12 100644 --- a/packages/beacon-node/src/network/options.ts +++ b/packages/beacon-node/src/network/options.ts @@ -40,8 +40,6 @@ export const defaultNetworkOptions: NetworkOptions = { maxYoungGenerationSizeMb: 152, // subscribe 2 slots before aggregator dutied slot to get stable mesh peers as monitored on goerli slotsToSubscribeBeforeAggregatorDuty: 2, - // this should only be set to true if useWorker is true - beaconAttestationBatchValidation: true, // This will enable the light client server by default disableLightClientServer: false, }; diff --git a/packages/beacon-node/src/network/peers/discover.ts b/packages/beacon-node/src/network/peers/discover.ts index 1cb084846f61..79603f780e3d 100644 --- a/packages/beacon-node/src/network/peers/discover.ts +++ b/packages/beacon-node/src/network/peers/discover.ts @@ -249,13 +249,21 @@ export class PeerDiscovery { } // Run a discv5 subnet query to try to discover new peers - if (subnetsToDiscoverPeers.length > 0 || cachedENRsToDial.size < 
peersToConnect) { + const shouldRunFindRandomNodeQuery = subnetsToDiscoverPeers.length > 0 || cachedENRsToDial.size < peersToConnect; + if (shouldRunFindRandomNodeQuery) { void this.runFindRandomNodeQuery(); } + + this.logger.debug("Discover peers outcome", { + peersToConnect, + peersAvailableToDial: cachedENRsToDial.size, + subnetsToDiscover: subnetsToDiscoverPeers.length, + shouldRunFindRandomNodeQuery, + }); } /** - * Request to find peers. First, looked at cached peers in peerStore + * Request discv5 to find peers if there is no query in progress */ private async runFindRandomNodeQuery(): Promise { // Delay the 1st query after starting discv5 @@ -305,6 +313,7 @@ export class PeerDiscovery { const attnets = zeroAttnets; const syncnets = zeroSyncnets; const status = this.handleDiscoveredPeer(id, multiaddrs[0], attnets, syncnets); + this.logger.debug("Discovered peer via libp2p", {peer: prettyPrintPeerId(id), status}); this.metrics?.discovery.discoveredStatus.inc({status}); }; @@ -336,6 +345,7 @@ export class PeerDiscovery { const syncnets = syncnetsBytes ? deserializeEnrSubnets(syncnetsBytes, SYNC_COMMITTEE_SUBNET_COUNT) : zeroSyncnets; const status = this.handleDiscoveredPeer(peerId, multiaddrTCP, attnets, syncnets); + this.logger.debug("Discovered peer via discv5", {peer: prettyPrintPeerId(peerId), status}); this.metrics?.discovery.discoveredStatus.inc({status}); }; diff --git a/packages/beacon-node/src/network/peers/utils/assertPeerRelevance.ts b/packages/beacon-node/src/network/peers/utils/assertPeerRelevance.ts index 95743331ab2a..e588b1ae0308 100644 --- a/packages/beacon-node/src/network/peers/utils/assertPeerRelevance.ts +++ b/packages/beacon-node/src/network/peers/utils/assertPeerRelevance.ts @@ -1,5 +1,5 @@ -import {toHexString} from "@chainsafe/ssz"; import {ForkDigest, Root, Slot, phase0, ssz} from "@lodestar/types"; +import {toHex, toRootHex} from "@lodestar/utils"; // TODO: Why this value? 
(From Lighthouse) const FUTURE_SLOT_TOLERANCE = 1; @@ -78,10 +78,10 @@ export function isZeroRoot(root: Root): boolean { export function renderIrrelevantPeerType(type: IrrelevantPeerType): string { switch (type.code) { case IrrelevantPeerCode.INCOMPATIBLE_FORKS: - return `INCOMPATIBLE_FORKS ours: ${toHexString(type.ours)} theirs: ${toHexString(type.theirs)}`; + return `INCOMPATIBLE_FORKS ours: ${toHex(type.ours)} theirs: ${toHex(type.theirs)}`; case IrrelevantPeerCode.DIFFERENT_CLOCKS: return `DIFFERENT_CLOCKS slotDiff: ${type.slotDiff}`; case IrrelevantPeerCode.DIFFERENT_FINALIZED: - return `DIFFERENT_FINALIZED root: ${toHexString(type.remoteRoot)} expected: ${toHexString(type.expectedRoot)}`; + return `DIFFERENT_FINALIZED root: ${toRootHex(type.remoteRoot)} expected: ${toRootHex(type.expectedRoot)}`; } } diff --git a/packages/beacon-node/src/network/processor/gossipHandlers.ts b/packages/beacon-node/src/network/processor/gossipHandlers.ts index 82fe7d8db358..90c131a943ed 100644 --- a/packages/beacon-node/src/network/processor/gossipHandlers.ts +++ b/packages/beacon-node/src/network/processor/gossipHandlers.ts @@ -1,7 +1,6 @@ -import {toHexString} from "@chainsafe/ssz"; import {BeaconConfig, ChainForkConfig} from "@lodestar/config"; -import {LogLevel, Logger, prettyBytes} from "@lodestar/utils"; -import {Root, Slot, ssz, deneb, UintNum64, SignedBeaconBlock} from "@lodestar/types"; +import {LogLevel, Logger, prettyBytes, toRootHex} from "@lodestar/utils"; +import {Root, Slot, ssz, deneb, UintNum64, SignedBeaconBlock, sszTypesFor} from "@lodestar/types"; import {ForkName, ForkSeq} from "@lodestar/params"; import {routes} from "@lodestar/api"; import {computeTimeAtSlot} from "@lodestar/state-transition"; @@ -21,7 +20,7 @@ import { } from "../../chain/errors/index.js"; import { BatchGossipHandlers, - DefaultGossipHandlers, + SequentialGossipHandlers, GossipHandlerParamGeneric, GossipHandlers, GossipType, @@ -37,9 +36,7 @@ import { validateGossipBlsToExecutionChange, 
AggregateAndProofValidationResult, validateGossipAttestationsSameAttData, - validateGossipAttestation, - AttestationOrBytes, - AttestationValidationResult, + GossipAttestation, } from "../../chain/validation/index.js"; import {NetworkEvent, NetworkEventBus} from "../events.js"; import {PeerAction} from "../peers/index.js"; @@ -65,8 +62,6 @@ import {AggregatorTracker} from "./aggregatorTracker.js"; export type GossipHandlerOpts = { /** By default pass gossip attestations to forkchoice */ dontSendGossipAttestationsToForkchoice?: boolean; - /** By default don't validate gossip attestations in batch */ - beaconAttestationBatchValidation?: boolean; }; export type ValidatorFnsModules = { @@ -97,20 +92,15 @@ const BLOCK_AVAILABILITY_CUTOFF_MS = 3_000; * - Ethereum Consensus gossipsub protocol strictly defined a single topic for message */ export function getGossipHandlers(modules: ValidatorFnsModules, options: GossipHandlerOpts): GossipHandlers { - const defaultHandlers = getDefaultHandlers(modules, options); - if (options.beaconAttestationBatchValidation) { - const batchHandlers = getBatchHandlers(modules, options); - return {...defaultHandlers, ...batchHandlers}; - } - return defaultHandlers; + return {...getSequentialHandlers(modules, options), ...getBatchHandlers(modules, options)}; } /** * Default handlers validate gossip messages one by one. * We only have a choice to do batch validation for beacon_attestation topic. 
*/ -function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandlerOpts): DefaultGossipHandlers { - const {chain, config, metrics, events, logger, core, aggregatorTracker} = modules; +function getSequentialHandlers(modules: ValidatorFnsModules, options: GossipHandlerOpts): SequentialGossipHandlers { + const {chain, config, metrics, events, logger, core} = modules; async function validateBeaconBlock( signedBlock: SignedBeaconBlock, @@ -145,6 +135,18 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler const blockInputMeta = config.getForkSeq(signedBlock.message.slot) >= ForkSeq.deneb ? blockInputRes.blockInputMeta : {}; + const logCtx = { + slot: slot, + root: blockHex, + currentSlot: chain.clock.currentSlot, + peerId: peerIdStr, + delaySec, + ...blockInputMeta, + recvToValLatency, + }; + + logger.debug("Received gossip block", {...logCtx}); + try { await validateGossipBlock(config, chain, signedBlock, fork); @@ -154,17 +156,7 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler metrics?.gossipBlock.gossipValidation.recvToValidation.observe(recvToValidation); metrics?.gossipBlock.gossipValidation.validationTime.observe(validationTime); - logger.debug("Received gossip block", { - slot: slot, - root: blockHex, - curentSlot: chain.clock.currentSlot, - peerId: peerIdStr, - delaySec, - ...blockInputMeta, - recvToValLatency, - recvToValidation, - validationTime, - }); + logger.debug("Validated gossip block", {...logCtx, recvToValidation, validationTime}); return blockInput; } catch (e) { @@ -298,7 +290,7 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler case BlockErrorCode.DATA_UNAVAILABLE: { const slot = signedBlock.message.slot; const forkTypes = config.getForkTypes(slot); - const rootHex = toHexString(forkTypes.BeaconBlock.hashTreeRoot(signedBlock.message)); + const rootHex = toRootHex(forkTypes.BeaconBlock.hashTreeRoot(signedBlock.message)); 
events.emit(NetworkEvent.unknownBlock, {rootHex, peer: peerIdStr}); @@ -422,7 +414,11 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler validationResult = await validateGossipAggregateAndProof(fork, chain, signedAggregateAndProof, serializedData); } catch (e) { if (e instanceof AttestationError && e.action === GossipAction.REJECT) { - chain.persistInvalidSszValue(ssz.phase0.SignedAggregateAndProof, signedAggregateAndProof, "gossip_reject"); + chain.persistInvalidSszValue( + sszTypesFor(fork).SignedAggregateAndProof, + signedAggregateAndProof, + "gossip_reject" + ); } throw e; } @@ -453,58 +449,6 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler chain.emitter.emit(routes.events.EventType.attestation, signedAggregateAndProof.message.aggregate); }, - [GossipType.beacon_attestation]: async ({ - gossipData, - topic, - seenTimestampSec, - }: GossipHandlerParamGeneric): Promise => { - const {serializedData, msgSlot} = gossipData; - if (msgSlot == undefined) { - throw Error("msgSlot is undefined for beacon_attestation topic"); - } - const {subnet, fork} = topic; - - // do not deserialize gossipSerializedData here, it's done in validateGossipAttestation only if needed - let validationResult: AttestationValidationResult; - try { - validationResult = await validateGossipAttestation( - fork, - chain, - {attestation: null, serializedData, attSlot: msgSlot}, - subnet - ); - } catch (e) { - if (e instanceof AttestationError && e.action === GossipAction.REJECT) { - chain.persistInvalidSszBytes(ssz.phase0.Attestation.typeName, serializedData, "gossip_reject"); - } - throw e; - } - - // Handler - const {indexedAttestation, attDataRootHex, attestation} = validationResult; - metrics?.registerGossipUnaggregatedAttestation(seenTimestampSec, indexedAttestation); - - try { - // Node may be subscribe to extra subnets (long-lived random subnets). 
For those, validate the messages - // but don't add to attestation pool, to save CPU and RAM - if (aggregatorTracker.shouldAggregate(subnet, indexedAttestation.data.slot)) { - const insertOutcome = chain.attestationPool.add(attestation, attDataRootHex); - metrics?.opPool.attestationPoolInsertOutcome.inc({insertOutcome}); - } - } catch (e) { - logger.error("Error adding unaggregated attestation to pool", {subnet}, e as Error); - } - - if (!options.dontSendGossipAttestationsToForkchoice) { - try { - chain.forkChoice.onAttestation(indexedAttestation, attDataRootHex); - } catch (e) { - logger.debug("Error adding gossip unaggregated attestation to forkchoice", {subnet}, e as Error); - } - } - - chain.emitter.emit(routes.events.EventType.attestation, attestation); - }, [GossipType.attester_slashing]: async ({ gossipData, @@ -655,7 +599,7 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler /** * For now, only beacon_attestation topic is batched. */ -function getBatchHandlers(modules: ValidatorFnsModules, options: GossipHandlerOpts): Partial { +function getBatchHandlers(modules: ValidatorFnsModules, options: GossipHandlerOpts): BatchGossipHandlers { const {chain, metrics, logger, aggregatorTracker} = modules; return { [GossipType.beacon_attestation]: async ( @@ -673,7 +617,7 @@ function getBatchHandlers(modules: ValidatorFnsModules, options: GossipHandlerOp serializedData: param.gossipData.serializedData, attSlot: param.gossipData.msgSlot, attDataBase64: param.gossipData.indexed, - })) as AttestationOrBytes[]; + })) as GossipAttestation[]; const {results: validationResults, batchableBls} = await validateGossipAttestationsSameAttData( fork, chain, @@ -689,14 +633,14 @@ function getBatchHandlers(modules: ValidatorFnsModules, options: GossipHandlerOp results.push(null); // Handler - const {indexedAttestation, attDataRootHex, attestation} = validationResult.result; + const {indexedAttestation, attDataRootHex, attestation, committeeIndex} = 
validationResult.result; metrics?.registerGossipUnaggregatedAttestation(gossipHandlerParams[i].seenTimestampSec, indexedAttestation); try { // Node may be subscribe to extra subnets (long-lived random subnets). For those, validate the messages // but don't add to attestation pool, to save CPU and RAM if (aggregatorTracker.shouldAggregate(subnet, indexedAttestation.data.slot)) { - const insertOutcome = chain.attestationPool.add(attestation, attDataRootHex); + const insertOutcome = chain.attestationPool.add(committeeIndex, attestation, attDataRootHex); metrics?.opPool.attestationPoolInsertOutcome.inc({insertOutcome}); } } catch (e) { @@ -747,12 +691,12 @@ export async function validateGossipFnRetryUnknownRoot( ) { if (unknownBlockRootRetries === 0) { // Trigger unknown block root search here - const rootHex = toHexString(blockRoot); + const rootHex = toRootHex(blockRoot); network.searchUnknownSlotRoot({slot, root: rootHex}); } if (unknownBlockRootRetries++ < MAX_UNKNOWN_BLOCK_ROOT_RETRIES) { - const foundBlock = await chain.waitForBlock(slot, toHexString(blockRoot)); + const foundBlock = await chain.waitForBlock(slot, toRootHex(blockRoot)); // Returns true if the block was found on time. In that case, try to get it from the fork-choice again. // Otherwise, throw the error below. 
if (foundBlock) { diff --git a/packages/beacon-node/src/network/processor/gossipQueues/index.ts b/packages/beacon-node/src/network/processor/gossipQueues/index.ts index 366b23b30679..12596a42b7a1 100644 --- a/packages/beacon-node/src/network/processor/gossipQueues/index.ts +++ b/packages/beacon-node/src/network/processor/gossipQueues/index.ts @@ -1,18 +1,10 @@ import {mapValues} from "@lodestar/utils"; -import {GossipType} from "../../gossip/interface.js"; +import {BatchGossipType, GossipType, SequentialGossipType} from "../../gossip/interface.js"; import {PendingGossipsubMessage} from "../types.js"; -import {getAttDataBase64FromAttestationSerialized} from "../../../util/sszBytes.js"; +import {getGossipAttestationIndex} from "../../../util/sszBytes.js"; import {LinearGossipQueue} from "./linear.js"; -import { - DropType, - GossipQueue, - GossipQueueOpts, - QueueType, - isIndexedGossipQueueAvgTimeOpts, - isIndexedGossipQueueMinSizeOpts, -} from "./types.js"; +import {DropType, GossipQueue, GossipQueueOpts, QueueType, isIndexedGossipQueueMinSizeOpts} from "./types.js"; import {IndexedGossipQueueMinSize} from "./indexed.js"; -import {IndexedGossipQueueAvgTime} from "./indexedAvgTime.js"; /** * In normal condition, the higher this value the more efficient the signature verification. 
@@ -28,8 +20,8 @@ export const MIN_SIGNATURE_SETS_TO_BATCH_VERIFY = 32; /** * Numbers from https://github.com/sigp/lighthouse/blob/b34a79dc0b02e04441ba01fd0f304d1e203d877d/beacon_node/network/src/beacon_processor/mod.rs#L69 */ -const defaultGossipQueueOpts: { - [K in GossipType]: GossipQueueOpts; +const linearGossipQueueOpts: { + [K in SequentialGossipType]: GossipQueueOpts; } = { // validation gossip block asap [GossipType.beacon_block]: {maxLength: 1024, type: QueueType.FIFO, dropOpts: {type: DropType.count, count: 1}}, @@ -45,15 +37,6 @@ const defaultGossipQueueOpts: { type: QueueType.LIFO, dropOpts: {type: DropType.count, count: 1}, }, - // lighthouse has attestation_queue 16384 and unknown_block_attestation_queue 8192, we use single queue - // this topic may cause node to be overload and drop 100% of lower priority queues - // so we want to drop it by ratio until node is stable enough (queue is empty) - // start with dropping 1% of the queue, then increase 1% more each time. Reset when queue is empty - [GossipType.beacon_attestation]: { - maxLength: 24576, - type: QueueType.LIFO, - dropOpts: {type: DropType.ratio, start: 0.01, step: 0.01}, - }, [GossipType.voluntary_exit]: {maxLength: 4096, type: QueueType.FIFO, dropOpts: {type: DropType.count, count: 1}}, [GossipType.proposer_slashing]: {maxLength: 4096, type: QueueType.FIFO, dropOpts: {type: DropType.count, count: 1}}, [GossipType.attester_slashing]: {maxLength: 4096, type: QueueType.FIFO, dropOpts: {type: DropType.count, count: 1}}, @@ -82,11 +65,16 @@ const defaultGossipQueueOpts: { }; const indexedGossipQueueOpts: { - [K in GossipType]?: GossipQueueOpts; + [K in BatchGossipType]: GossipQueueOpts; } = { [GossipType.beacon_attestation]: { + // lighthouse has attestation_queue 16384 and unknown_block_attestation_queue 8192, we use single queue + // this topic may cause node to be overload and drop 100% of lower priority queues maxLength: 24576, - indexFn: (item: PendingGossipsubMessage) => 
getAttDataBase64FromAttestationSerialized(item.msg.data), + indexFn: (item: PendingGossipsubMessage) => { + // Note indexFn is fork agnostic despite changes introduced in Electra + return getGossipAttestationIndex(item.msg.data); + }, minChunkSize: MIN_SIGNATURE_SETS_TO_BATCH_VERIFY, maxChunkSize: MAX_GOSSIP_ATTESTATION_BATCH_SIZE, }, @@ -108,17 +96,14 @@ const indexedGossipQueueOpts: { * By topic is too specific, so by type groups all similar objects in the same queue. All in the same won't allow * to customize different queue behaviours per object type (see `gossipQueueOpts`). */ -export function createGossipQueues(beaconAttestationBatchValidation = false): { +export function createGossipQueues(): { [K in GossipType]: GossipQueue; } { - const gossipQueueOpts = beaconAttestationBatchValidation - ? {...defaultGossipQueueOpts, ...indexedGossipQueueOpts} - : defaultGossipQueueOpts; + const gossipQueueOpts = {...linearGossipQueueOpts, ...indexedGossipQueueOpts}; + return mapValues(gossipQueueOpts, (opts) => { if (isIndexedGossipQueueMinSizeOpts(opts)) { return new IndexedGossipQueueMinSize(opts); - } else if (isIndexedGossipQueueAvgTimeOpts(opts)) { - return new IndexedGossipQueueAvgTime(opts); } else { return new LinearGossipQueue(opts); } diff --git a/packages/beacon-node/src/network/processor/gossipQueues/indexed.ts b/packages/beacon-node/src/network/processor/gossipQueues/indexed.ts index 4e29a52173f0..8edba7dfaadb 100644 --- a/packages/beacon-node/src/network/processor/gossipQueues/indexed.ts +++ b/packages/beacon-node/src/network/processor/gossipQueues/indexed.ts @@ -84,6 +84,7 @@ export class IndexedGossipQueueMinSize = { - items: T[]; - avgRecvTimestampMs: number; -}; - -function listScore(list: ItemList): number { - return list.items.length / Math.max(1000, Date.now() - list.avgRecvTimestampMs); -} - -/** - * An implementation of GossipQueue that tries to run the batch with highest score first. 
- * TODO: add unit tests - * - index items by indexFn using a map - * - compute avgRecvTimestampMs for each key every time we add new item - * - on next, pick the key with the highest score (check the score function above) - */ -export class IndexedGossipQueueAvgTime implements GossipQueue { - private _length = 0; - private indexedItems: Map> = new Map(); - - constructor(private readonly opts: IndexedGossipQueueOpts) {} - - get length(): number { - return this._length; - } - - get keySize(): number { - return this.indexedItems.size; - } - - clear(): void { - this.indexedItems = new Map(); - this._length = 0; - } - - // not implemented for this gossip queue - getDataAgeMs(): number[] { - return []; - } - - /** - * Add item to gossip queue. If queue is full, drop first item of first key. - * Return number of items dropped - */ - add(item: T): number { - const key = this.opts.indexFn(item); - if (key == null) { - // this comes from getAttDataBase64FromAttestationSerialized() return type - // should not happen - return 0; - } - item.indexed = key; - let list = this.indexedItems.get(key); - if (list == null) { - list = { - items: [], - avgRecvTimestampMs: Date.now(), - }; - this.indexedItems.set(key, list); - } else { - list.avgRecvTimestampMs = (list.avgRecvTimestampMs * list.items.length + Date.now()) / (list.items.length + 1); - list.items.push(item); - } - this._length++; - if (this._length <= this.opts.maxLength) { - return 0; - } - - // overload, need to drop more items - const firstKey = this.indexedItems.keys().next().value as string; - // there should be at least 1 key - if (firstKey == null) { - return 0; - } - const firstList = this.indexedItems.get(firstKey); - // should not happen - if (firstList == null) { - return 0; - } - - const deletedItem = firstList.items.shift(); - if (deletedItem != null) { - this._length--; - if (firstList.items.length === 0) { - this.indexedItems.delete(firstKey); - } - return 1; - } else { - return 0; - } - } - - /** - * Try to 
get list of items of the same key with highest score - */ - next(): T[] | null { - let maxScore = 0; - let maxScoreKey: string | undefined; - for (const [key, list] of this.indexedItems) { - const score = listScore(list); - if (score > maxScore) { - maxScore = score; - maxScoreKey = key; - } - } - - if (maxScoreKey == null) { - return null; - } - const items = this.indexedItems.get(maxScoreKey)?.items; - if (items == null) { - // should not happen - return null; - } - this.indexedItems.delete(maxScoreKey); - this._length = Math.max(0, this._length - items.length); - return items; - } - - getAll(): T[] { - const items: T[] = []; - for (const list of this.indexedItems.values()) { - items.push(...list.items); - } - return items; - } -} diff --git a/packages/beacon-node/src/network/processor/gossipQueues/types.ts b/packages/beacon-node/src/network/processor/gossipQueues/types.ts index 448f44c3f5d7..58034e82ed06 100644 --- a/packages/beacon-node/src/network/processor/gossipQueues/types.ts +++ b/packages/beacon-node/src/network/processor/gossipQueues/types.ts @@ -1,4 +1,4 @@ -export type GossipQueueOpts = LinearGossipQueueOpts | IndexedGossipQueueOpts | IndexedGossipQueueMinSizeOpts; +export type GossipQueueOpts = LinearGossipQueueOpts | IndexedGossipQueueMinSizeOpts; export type LinearGossipQueueOpts = { type: QueueType; @@ -25,15 +25,6 @@ export function isIndexedGossipQueueMinSizeOpts(opts: GossipQueueOpts): op ); } -export function isIndexedGossipQueueAvgTimeOpts(opts: GossipQueueOpts): opts is IndexedGossipQueueOpts { - const avgTimeOpts = opts as IndexedGossipQueueMinSizeOpts; - return ( - avgTimeOpts.indexFn !== undefined && - avgTimeOpts.minChunkSize === undefined && - avgTimeOpts.maxChunkSize === undefined - ); -} - export interface GossipQueue { length: number; keySize: number; diff --git a/packages/beacon-node/src/network/processor/index.ts b/packages/beacon-node/src/network/processor/index.ts index 9a1dcfb32fa0..5cfed6c20346 100644 --- 
a/packages/beacon-node/src/network/processor/index.ts +++ b/packages/beacon-node/src/network/processor/index.ts @@ -172,7 +172,7 @@ export class NetworkProcessor { this.metrics = metrics; this.logger = logger; this.events = events; - this.gossipQueues = createGossipQueues(this.opts.beaconAttestationBatchValidation); + this.gossipQueues = createGossipQueues(); this.gossipTopicConcurrency = mapValues(this.gossipQueues, () => 0); this.gossipValidatorFn = getGossipValidatorFn(modules.gossipHandlers ?? getGossipHandlers(modules, opts), modules); this.gossipValidatorBatchFn = getGossipValidatorBatchFn( diff --git a/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRoot.ts b/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRoot.ts index 2b802ab1edd9..b53addab9b43 100644 --- a/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRoot.ts +++ b/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRoot.ts @@ -1,7 +1,7 @@ -import {fromHexString} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; import {phase0, deneb} from "@lodestar/types"; import {ForkSeq} from "@lodestar/params"; +import {fromHex} from "@lodestar/utils"; import { BlockInput, BlockInputType, @@ -66,7 +66,7 @@ export async function unavailableBeaconBlobsByRoot( // resolve the block if thats unavailable let block, blobsCache, blockBytes, resolveAvailability, cachedData; if (unavailableBlockInput.block === null) { - const allBlocks = await network.sendBeaconBlocksByRoot(peerId, [fromHexString(unavailableBlockInput.blockRootHex)]); + const allBlocks = await network.sendBeaconBlocksByRoot(peerId, [fromHex(unavailableBlockInput.blockRootHex)]); block = allBlocks[0].data; blockBytes = allBlocks[0].bytes; cachedData = unavailableBlockInput.cachedData; diff --git a/packages/beacon-node/src/network/reqresp/handlers/beaconBlocksByRoot.ts b/packages/beacon-node/src/network/reqresp/handlers/beaconBlocksByRoot.ts index 0ed0e6a2d185..36d90256276e 
100644 --- a/packages/beacon-node/src/network/reqresp/handlers/beaconBlocksByRoot.ts +++ b/packages/beacon-node/src/network/reqresp/handlers/beaconBlocksByRoot.ts @@ -1,6 +1,6 @@ -import {toHexString} from "@chainsafe/ssz"; import {ResponseOutgoing} from "@lodestar/reqresp"; import {Slot, phase0} from "@lodestar/types"; +import {toRootHex} from "@lodestar/utils"; import {IBeaconChain} from "../../../chain/index.js"; import {IBeaconDb} from "../../../db/index.js"; import {getSlotFromSignedBeaconBlockSerialized} from "../../../util/sszBytes.js"; @@ -33,7 +33,7 @@ export async function* onBeaconBlocksByRoot( if (slot === undefined) { const slotFromBytes = getSlotFromSignedBeaconBlockSerialized(blockBytes); if (slotFromBytes === null) { - throw Error(`Invalid block bytes for block root ${toHexString(root)}`); + throw Error(`Invalid block bytes for block root ${toRootHex(root)}`); } slot = slotFromBytes; } diff --git a/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRoot.ts b/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRoot.ts index 6aa16a0c2629..144d470d7199 100644 --- a/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRoot.ts +++ b/packages/beacon-node/src/network/reqresp/handlers/blobSidecarsByRoot.ts @@ -1,7 +1,7 @@ import {ResponseError, ResponseOutgoing, RespStatus} from "@lodestar/reqresp"; import {BLOBSIDECAR_FIXED_SIZE} from "@lodestar/params"; import {deneb, RootHex} from "@lodestar/types"; -import {toHex, fromHex} from "@lodestar/utils"; +import {fromHex, toRootHex} from "@lodestar/utils"; import {IBeaconChain} from "../../../chain/index.js"; import {IBeaconDb} from "../../../db/index.js"; import {BLOB_SIDECARS_IN_WRAPPER_INDEX} from "../../../db/repositories/blobSidecars.js"; @@ -20,7 +20,7 @@ export async function* onBlobSidecarsByRoot( for (const blobIdentifier of requestBody) { const {blockRoot, index} = blobIdentifier; - const blockRootHex = toHex(blockRoot); + const blockRootHex = toRootHex(blockRoot); 
const block = chain.forkChoice.getBlockHex(blockRootHex); // NOTE: Only support non-finalized blocks. diff --git a/packages/beacon-node/src/network/reqresp/types.ts b/packages/beacon-node/src/network/reqresp/types.ts index 36fa0a4f2632..02d99dd86933 100644 --- a/packages/beacon-node/src/network/reqresp/types.ts +++ b/packages/beacon-node/src/network/reqresp/types.ts @@ -1,7 +1,20 @@ import {Type} from "@chainsafe/ssz"; import {ForkLightClient, ForkName, isForkLightClient} from "@lodestar/params"; import {Protocol, ProtocolHandler, ReqRespRequest} from "@lodestar/reqresp"; -import {Metadata, Root, SignedBeaconBlock, altair, deneb, phase0, ssz, sszTypesFor} from "@lodestar/types"; +import { + LightClientBootstrap, + LightClientFinalityUpdate, + LightClientOptimisticUpdate, + LightClientUpdate, + Metadata, + Root, + SignedBeaconBlock, + altair, + deneb, + phase0, + ssz, + sszTypesFor, +} from "@lodestar/types"; export type ProtocolNoHandler = Omit; @@ -48,10 +61,10 @@ type ResponseBodyByMethod = { [ReqRespMethod.BeaconBlocksByRoot]: SignedBeaconBlock; [ReqRespMethod.BlobSidecarsByRange]: deneb.BlobSidecar; [ReqRespMethod.BlobSidecarsByRoot]: deneb.BlobSidecar; - [ReqRespMethod.LightClientBootstrap]: altair.LightClientBootstrap; - [ReqRespMethod.LightClientUpdatesByRange]: altair.LightClientUpdate; - [ReqRespMethod.LightClientFinalityUpdate]: altair.LightClientFinalityUpdate; - [ReqRespMethod.LightClientOptimisticUpdate]: altair.LightClientOptimisticUpdate; + [ReqRespMethod.LightClientBootstrap]: LightClientBootstrap; + [ReqRespMethod.LightClientUpdatesByRange]: LightClientUpdate; + [ReqRespMethod.LightClientFinalityUpdate]: LightClientFinalityUpdate; + [ReqRespMethod.LightClientOptimisticUpdate]: LightClientOptimisticUpdate; }; /** Request SSZ type for each method and ForkName */ diff --git a/packages/beacon-node/src/node/utils/interop/state.ts b/packages/beacon-node/src/node/utils/interop/state.ts index 6528bd392bc7..fe26afef2013 100644 --- 
a/packages/beacon-node/src/node/utils/interop/state.ts +++ b/packages/beacon-node/src/node/utils/interop/state.ts @@ -20,6 +20,7 @@ export type InteropStateOpts = { withEth1Credentials?: boolean; }; +// TODO: (@matthewkeil) - Only used by initDevState. Consider combining into that function export function getInteropState( config: ChainForkConfig, { diff --git a/packages/beacon-node/src/node/utils/state.ts b/packages/beacon-node/src/node/utils/state.ts index 25bd77c82274..05da7042eef4 100644 --- a/packages/beacon-node/src/node/utils/state.ts +++ b/packages/beacon-node/src/node/utils/state.ts @@ -5,6 +5,9 @@ import {IBeaconDb} from "../../db/index.js"; import {interopDeposits} from "./interop/deposits.js"; import {getInteropState, InteropStateOpts} from "./interop/state.js"; +/** + * Builds state for `dev` command, for sim testing and some other tests + */ export function initDevState( config: ChainForkConfig, validatorCount: number, diff --git a/packages/beacon-node/src/sync/backfill/backfill.ts b/packages/beacon-node/src/sync/backfill/backfill.ts index 6d9716a37329..77d2836bdcc3 100644 --- a/packages/beacon-node/src/sync/backfill/backfill.ts +++ b/packages/beacon-node/src/sync/backfill/backfill.ts @@ -1,10 +1,9 @@ import {EventEmitter} from "events"; import {StrictEventEmitter} from "strict-event-emitter-types"; -import {toHexString} from "@chainsafe/ssz"; -import {BeaconStateAllForks, blockToHeader} from "@lodestar/state-transition"; +import {BeaconStateAllForks, blockToHeader, computeAnchorCheckpoint} from "@lodestar/state-transition"; import {BeaconConfig, ChainForkConfig} from "@lodestar/config"; import {phase0, Root, SignedBeaconBlock, Slot, ssz} from "@lodestar/types"; -import {ErrorAborted, Logger, sleep, toHex} from "@lodestar/utils"; +import {ErrorAborted, Logger, sleep, toRootHex} from "@lodestar/utils"; import {SLOTS_PER_EPOCH} from "@lodestar/params"; import {IBeaconChain} from "../../chain/index.js"; @@ -16,7 +15,6 @@ import {PeerIdStr} from 
"../../util/peerId.js"; import {shuffleOne} from "../../util/shuffle.js"; import {Metrics} from "../../metrics/metrics"; import {byteArrayEquals} from "../../util/bytes.js"; -import {computeAnchorCheckpoint} from "../../chain/initState.js"; import {verifyBlockProposerSignature, verifyBlockSequence, BackfillBlockHeader, BackfillBlock} from "./verify.js"; import {BackfillSyncError, BackfillSyncErrorCode} from "./errors.js"; /** @@ -251,7 +249,7 @@ export class BackfillSync extends (EventEmitter as {new (): BackfillSyncEmitter} lte: backfillStartFromSlot, }); modules.logger.info("Initializing from Checkpoint", { - root: toHexString(anchorCp.root), + root: toRootHex(anchorCp.root), epoch: anchorCp.epoch, backfillStartFromSlot, previousBackfilledRanges: JSON.stringify(previousBackfilledRanges), @@ -327,7 +325,7 @@ export class BackfillSync extends (EventEmitter as {new (): BackfillSyncEmitter} this.logger.error( `Backfilled till ${ this.syncAnchor.lastBackSyncedBlock.slot - } but not found previous saved finalized or wsCheckpoint with root=${toHexString( + } but not found previous saved finalized or wsCheckpoint with root=${toRootHex( this.prevFinalizedCheckpointBlock.root )}, slot=${this.prevFinalizedCheckpointBlock.slot}` ); @@ -341,7 +339,7 @@ export class BackfillSync extends (EventEmitter as {new (): BackfillSyncEmitter} this.logger.error( `Invalid root synced at a previous finalized or wsCheckpoint, slot=${ this.prevFinalizedCheckpointBlock.slot - }: expected=${toHexString(this.prevFinalizedCheckpointBlock.root)}, actual=${toHexString( + }: expected=${toRootHex(this.prevFinalizedCheckpointBlock.root)}, actual=${toRootHex( this.syncAnchor.lastBackSyncedBlock.root )}` ); @@ -349,7 +347,7 @@ export class BackfillSync extends (EventEmitter as {new (): BackfillSyncEmitter} break; } this.logger.verbose("Validated current prevFinalizedCheckpointBlock", { - root: toHexString(this.prevFinalizedCheckpointBlock.root), + root: 
toRootHex(this.prevFinalizedCheckpointBlock.root), slot: this.prevFinalizedCheckpointBlock.slot, }); @@ -379,7 +377,7 @@ export class BackfillSync extends (EventEmitter as {new (): BackfillSyncEmitter} if (this.syncAnchor.lastBackSyncedBlock.slot === GENESIS_SLOT) { if (!byteArrayEquals(this.syncAnchor.lastBackSyncedBlock.block.message.parentRoot, ZERO_HASH)) { Error( - `Invalid Gensis Block with non zero parentRoot=${toHexString( + `Invalid Gensis Block with non zero parentRoot=${toRootHex( this.syncAnchor.lastBackSyncedBlock.block.message.parentRoot )}` ); @@ -528,7 +526,7 @@ export class BackfillSync extends (EventEmitter as {new (): BackfillSyncEmitter} ) // TODO: explode and stop the entire node throw new Error( - `InvalidWsCheckpoint root=${toHex(this.wsCheckpointHeader.root)}, epoch=${ + `InvalidWsCheckpoint root=${toRootHex(this.wsCheckpointHeader.root)}, epoch=${ this.wsCheckpointHeader.slot / SLOTS_PER_EPOCH }, ${ wsDbCheckpointBlock @@ -537,7 +535,7 @@ export class BackfillSync extends (EventEmitter as {new (): BackfillSyncEmitter} }` ); this.logger.info("wsCheckpoint validated!", { - root: toHexString(this.wsCheckpointHeader.root), + root: toRootHex(this.wsCheckpointHeader.root), epoch: this.wsCheckpointHeader.slot / SLOTS_PER_EPOCH, }); this.wsValidated = true; @@ -584,13 +582,13 @@ export class BackfillSync extends (EventEmitter as {new (): BackfillSyncEmitter} this.prevFinalizedCheckpointBlock.slot === prevBackfillCpBlock.message.slot ) { this.logger.verbose("Validated current prevFinalizedCheckpointBlock", { - root: toHexString(this.prevFinalizedCheckpointBlock.root), + root: toRootHex(this.prevFinalizedCheckpointBlock.root), slot: prevBackfillCpBlock.message.slot, }); } else { validSequence = false; this.logger.warn( - `Invalid backfill sequence: previous finalized or checkpoint block root=${toHex( + `Invalid backfill sequence: previous finalized or checkpoint block root=${toRootHex( this.prevFinalizedCheckpointBlock.root )}, 
slot=${this.prevFinalizedCheckpointBlock.slot} ${ prevBackfillCpBlock ? "found at slot=" + prevBackfillCpBlock.message.slot : "not found" @@ -644,7 +642,7 @@ export class BackfillSync extends (EventEmitter as {new (): BackfillSyncEmitter} if (cleanupSeqs.length > 0) { await this.db.backfilledRanges.batchDelete(cleanupSeqs.map((entry) => entry.key)); this.logger.debug( - `Cleaned up the old sequences between ${this.backfillStartFromSlot},${toHex( + `Cleaned up the old sequences between ${this.backfillStartFromSlot},${toRootHex( this.syncAnchor.lastBackSyncedBlock.root )}`, {cleanupSeqs: JSON.stringify(cleanupSeqs)} @@ -670,7 +668,7 @@ export class BackfillSync extends (EventEmitter as {new (): BackfillSyncEmitter} if (expectedSlot !== null && anchorBlock.message.slot !== expectedSlot) throw Error( - `Invalid slot of anchorBlock read from DB with root=${toHex( + `Invalid slot of anchorBlock read from DB with root=${toRootHex( anchorBlockRoot )}, expected=${expectedSlot}, actual=${anchorBlock.message.slot}` ); @@ -687,9 +685,9 @@ export class BackfillSync extends (EventEmitter as {new (): BackfillSyncEmitter} // Before moving anchorBlock back, we need check for prevFinalizedCheckpointBlock if (anchorBlock.message.slot < this.prevFinalizedCheckpointBlock.slot) { throw Error( - `Skipped a prevFinalizedCheckpointBlock with slot=${toHex( + `Skipped a prevFinalizedCheckpointBlock with slot=${toRootHex( this.prevFinalizedCheckpointBlock.root - )}, root=${toHexString(this.prevFinalizedCheckpointBlock.root)}` + )}, root=${toRootHex(this.prevFinalizedCheckpointBlock.root)}` ); } if (anchorBlock.message.slot === this.prevFinalizedCheckpointBlock.slot) { @@ -700,7 +698,7 @@ export class BackfillSync extends (EventEmitter as {new (): BackfillSyncEmitter} throw Error( `Invalid root for prevFinalizedCheckpointBlock at slot=${ this.prevFinalizedCheckpointBlock.slot - }, expected=${toHexString(this.prevFinalizedCheckpointBlock.root)}, found=${toHex(anchorBlockRoot)}` + }, 
expected=${toRootHex(this.prevFinalizedCheckpointBlock.root)}, found=${toRootHex(anchorBlockRoot)}` ); } @@ -770,7 +768,7 @@ export class BackfillSync extends (EventEmitter as {new (): BackfillSyncEmitter} this.metrics?.backfillSync.totalBlocks.inc({method: BackfillSyncMethod.blockbyroot}); this.logger.verbose("Fetched new anchorBlock", { - root: toHexString(anchorBlockRoot), + root: toRootHex(anchorBlockRoot), slot: anchorBlock.data.message.slot, }); @@ -883,7 +881,7 @@ async function extractPreviousFinOrWsCheckpoint( const root = ssz.phase0.BeaconBlockHeader.hashTreeRoot(header); prevFinalizedCheckpointBlock = {root, slot: nextPrevFinOrWsBlock.message.slot}; logger?.debug("Extracted new prevFinalizedCheckpointBlock as potential previous finalized or wsCheckpoint", { - root: toHexString(prevFinalizedCheckpointBlock.root), + root: toRootHex(prevFinalizedCheckpointBlock.root), slot: prevFinalizedCheckpointBlock.slot, }); } else { diff --git a/packages/beacon-node/src/sync/range/chain.ts b/packages/beacon-node/src/sync/range/chain.ts index 41bbce3da820..0a2fc1a3f7c3 100644 --- a/packages/beacon-node/src/sync/range/chain.ts +++ b/packages/beacon-node/src/sync/range/chain.ts @@ -1,6 +1,5 @@ -import {toHexString} from "@chainsafe/ssz"; import {Epoch, Root, Slot, phase0} from "@lodestar/types"; -import {ErrorAborted, Logger} from "@lodestar/utils"; +import {ErrorAborted, Logger, toRootHex} from "@lodestar/utils"; import {ChainForkConfig} from "@lodestar/config"; import {BlockInput, BlockInputType} from "../../chain/blocks/types.js"; import {PeerAction} from "../../network/index.js"; @@ -234,7 +233,7 @@ export class SyncChain { /** Full debug state for lodestar API */ getDebugState(): SyncChainDebugState { return { - targetRoot: toHexString(this.target.root), + targetRoot: toRootHex(this.target.root), targetSlot: this.target.slot, syncType: this.syncType, status: this.status, diff --git a/packages/beacon-node/src/sync/range/range.ts 
b/packages/beacon-node/src/sync/range/range.ts index d20e0c3690cd..887a86a3aaf2 100644 --- a/packages/beacon-node/src/sync/range/range.ts +++ b/packages/beacon-node/src/sync/range/range.ts @@ -3,7 +3,7 @@ import {StrictEventEmitter} from "strict-event-emitter-types"; import {computeStartSlotAtEpoch} from "@lodestar/state-transition"; import {BeaconConfig} from "@lodestar/config"; import {Epoch, phase0} from "@lodestar/types"; -import {Logger, toHex} from "@lodestar/utils"; +import {Logger, toRootHex} from "@lodestar/utils"; import {IBeaconChain} from "../../chain/index.js"; import {INetwork} from "../../network/index.js"; import {Metrics} from "../../metrics/index.js"; @@ -119,7 +119,7 @@ export class RangeSync extends (EventEmitter as {new (): RangeSyncEmitter}) { syncType, startEpoch, targetSlot: target.slot, - targetRoot: toHex(target.root), + targetRoot: toRootHex(target.root), }); // If the peer existed in any other chain, remove it. @@ -242,7 +242,7 @@ export class RangeSync extends (EventEmitter as {new (): RangeSyncEmitter}) { syncType, firstEpoch: syncChain.firstBatchEpoch, targetSlot: syncChain.target.slot, - targetRoot: toHex(syncChain.target.root), + targetRoot: toRootHex(syncChain.target.root), }); } @@ -274,7 +274,7 @@ export class RangeSync extends (EventEmitter as {new (): RangeSyncEmitter}) { lastValidatedSlot: syncChain.lastValidatedSlot, firstEpoch: syncChain.firstBatchEpoch, targetSlot: syncChain.target.slot, - targetRoot: toHex(syncChain.target.root), + targetRoot: toRootHex(syncChain.target.root), validatedEpochs: syncChain.validatedEpochs, }); diff --git a/packages/beacon-node/src/sync/range/utils/chainTarget.ts b/packages/beacon-node/src/sync/range/utils/chainTarget.ts index 221b6e1be68f..15076a028818 100644 --- a/packages/beacon-node/src/sync/range/utils/chainTarget.ts +++ b/packages/beacon-node/src/sync/range/utils/chainTarget.ts @@ -1,5 +1,5 @@ -import {toHexString} from "@chainsafe/ssz"; import {Root, Slot} from "@lodestar/types"; 
+import {toRootHex} from "@lodestar/utils"; /** * Sync this up to this target. Uses slot instead of epoch to re-use logic for finalized sync @@ -21,7 +21,7 @@ export function computeMostCommonTarget(targets: ChainTarget[]): ChainTarget { let mostCommonCount = 0; for (const target of targets) { - const targetId = `${target.slot}-${toHexString(target.root)}`; + const targetId = `${target.slot}-${toRootHex(target.root)}`; const count = 1 + (countById.get(targetId) ?? 0); countById.set(targetId, count); if (count > mostCommonCount) { diff --git a/packages/beacon-node/src/sync/range/utils/hashBlocks.ts b/packages/beacon-node/src/sync/range/utils/hashBlocks.ts index 522242924a3c..986d023d9ca1 100644 --- a/packages/beacon-node/src/sync/range/utils/hashBlocks.ts +++ b/packages/beacon-node/src/sync/range/utils/hashBlocks.ts @@ -1,6 +1,6 @@ import {RootHex} from "@lodestar/types"; import {ChainForkConfig} from "@lodestar/config"; -import {toHex} from "@lodestar/utils"; +import {toRootHex} from "@lodestar/utils"; import {BlockInput} from "../../../chain/blocks/types.js"; /** @@ -12,14 +12,14 @@ export function hashBlocks(blocks: BlockInput[], config: ChainForkConfig): RootH return "0x"; case 1: { const block0 = blocks[0].block; - return toHex(config.getForkTypes(block0.message.slot).SignedBeaconBlock.hashTreeRoot(block0)); + return toRootHex(config.getForkTypes(block0.message.slot).SignedBeaconBlock.hashTreeRoot(block0)); } default: { const block0 = blocks[0].block; const blockN = blocks[blocks.length - 1].block; return ( - toHex(config.getForkTypes(block0.message.slot).SignedBeaconBlock.hashTreeRoot(block0)) + - toHex(config.getForkTypes(blockN.message.slot).SignedBeaconBlock.hashTreeRoot(blockN)) + toRootHex(config.getForkTypes(block0.message.slot).SignedBeaconBlock.hashTreeRoot(block0)) + + toRootHex(config.getForkTypes(blockN.message.slot).SignedBeaconBlock.hashTreeRoot(blockN)) ); } } diff --git a/packages/beacon-node/src/sync/sync.ts 
b/packages/beacon-node/src/sync/sync.ts index c7f01e1eae78..cc8ddc6eb499 100644 --- a/packages/beacon-node/src/sync/sync.ts +++ b/packages/beacon-node/src/sync/sync.ts @@ -93,8 +93,8 @@ export class BeaconSync implements IBeaconSync { // If we are pre/at genesis, signal ready if (currentSlot <= GENESIS_SLOT) { return { - headSlot: "0", - syncDistance: "0", + headSlot: 0, + syncDistance: 0, isSyncing: false, isOptimistic: false, elOffline, @@ -107,16 +107,16 @@ export class BeaconSync implements IBeaconSync { case SyncState.SyncingHead: case SyncState.Stalled: return { - headSlot: String(head.slot), - syncDistance: String(currentSlot - head.slot), + headSlot: head.slot, + syncDistance: currentSlot - head.slot, isSyncing: true, isOptimistic: isOptimisticBlock(head), elOffline, }; case SyncState.Synced: return { - headSlot: String(head.slot), - syncDistance: "0", + headSlot: head.slot, + syncDistance: 0, isSyncing: false, isOptimistic: isOptimisticBlock(head), elOffline, diff --git a/packages/beacon-node/src/sync/unknownBlock.ts b/packages/beacon-node/src/sync/unknownBlock.ts index 3c15b32eb8d8..a172b5d723db 100644 --- a/packages/beacon-node/src/sync/unknownBlock.ts +++ b/packages/beacon-node/src/sync/unknownBlock.ts @@ -1,6 +1,5 @@ -import {fromHexString, toHexString} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; -import {Logger, pruneSetToMax} from "@lodestar/utils"; +import {Logger, fromHex, pruneSetToMax, toRootHex} from "@lodestar/utils"; import {Root, RootHex, deneb} from "@lodestar/types"; import {INTERVALS_PER_SLOT} from "@lodestar/params"; import {sleep} from "@lodestar/utils"; @@ -139,8 +138,8 @@ export class UnknownBlockSync { private addUnknownParent(blockInput: BlockInput, peerIdStr: string): void { const block = blockInput.block.message; const blockRoot = this.config.getForkTypes(block.slot).BeaconBlock.hashTreeRoot(block); - const blockRootHex = toHexString(blockRoot); - const parentBlockRootHex = 
toHexString(block.parentRoot); + const blockRootHex = toRootHex(blockRoot); + const parentBlockRootHex = toRootHex(block.parentRoot); // add 1 pending block with status downloaded let pendingBlock = this.pendingBlocks.get(blockRootHex); @@ -180,9 +179,7 @@ export class UnknownBlockSync { } else { if (blockInputOrRootHex.block !== null) { const {block} = blockInputOrRootHex; - blockRootHex = toHexString( - this.config.getForkTypes(block.message.slot).BeaconBlock.hashTreeRoot(block.message) - ); + blockRootHex = toRootHex(this.config.getForkTypes(block.message.slot).BeaconBlock.hashTreeRoot(block.message)); unknownBlockType = PendingBlockType.UNKNOWN_BLOBS; } else { unknownBlockType = PendingBlockType.UNKNOWN_BLOCKINPUT; @@ -290,7 +287,7 @@ export class UnknownBlockSync { let res; if (block.blockInput === null) { - res = await wrapError(this.fetchUnknownBlockRoot(fromHexString(block.blockRootHex), connectedPeers)); + res = await wrapError(this.fetchUnknownBlockRoot(fromHex(block.blockRootHex), connectedPeers)); } else { res = await wrapError(this.fetchUnavailableBlockInput(block.blockInput, connectedPeers)); } @@ -304,7 +301,7 @@ export class UnknownBlockSync { ...block, status: PendingBlockStatus.downloaded, blockInput, - parentBlockRootHex: toHexString(blockInput.block.message.parentRoot), + parentBlockRootHex: toRootHex(blockInput.block.message.parentRoot), }; this.pendingBlocks.set(block.blockRootHex, block); const blockSlot = blockInput.block.message.slot; @@ -336,7 +333,7 @@ export class UnknownBlockSync { this.logger.debug("Downloaded block is before finalized slot", { finalizedSlot, blockSlot, - parentRoot: toHexString(blockRoot), + parentRoot: toRootHex(blockRoot), unknownBlockType, }); this.removeAndDownscoreAllDescendants(block); @@ -384,7 +381,7 @@ export class UnknownBlockSync { .BeaconBlock.hashTreeRoot(pendingBlock.blockInput.block.message); this.logger.verbose("Avoid proposer boost for this block of known proposer", { blockSlot, - blockRoot: 
toHexString(blockRoot), + blockRoot: toRootHex(blockRoot), proposerIndex, }); await sleep(this.proposerBoostSecWindow * 1000); @@ -466,7 +463,7 @@ export class UnknownBlockSync { connectedPeers: PeerIdStr[] ): Promise<{blockInput: BlockInput; peerIdStr: string}> { const shuffledPeers = shuffle(connectedPeers); - const blockRootHex = toHexString(blockRoot); + const blockRootHex = toRootHex(blockRoot); let lastError: Error | null = null; for (let i = 0; i < MAX_ATTEMPTS_PER_BLOCK; i++) { @@ -483,7 +480,7 @@ export class UnknownBlockSync { const block = blockInput.block.message; const receivedBlockRoot = this.config.getForkTypes(block.slot).BeaconBlock.hashTreeRoot(block); if (!byteArrayEquals(receivedBlockRoot, blockRoot)) { - throw Error(`Wrong block received by peer, got ${toHexString(receivedBlockRoot)} expected ${blockRootHex}`); + throw Error(`Wrong block received by peer, got ${toRootHex(receivedBlockRoot)} expected ${blockRootHex}`); } return {blockInput, peerIdStr: peer}; @@ -521,13 +518,13 @@ export class UnknownBlockSync { if (unavailableBlockInput.block === null) { blockRootHex = unavailableBlockInput.blockRootHex; - blockRoot = fromHexString(blockRootHex); + blockRoot = fromHex(blockRootHex); } else { const unavailableBlock = unavailableBlockInput.block; blockRoot = this.config .getForkTypes(unavailableBlock.message.slot) .BeaconBlock.hashTreeRoot(unavailableBlock.message); - blockRootHex = toHexString(blockRoot); + blockRootHex = toRootHex(blockRoot); blobKzgCommitmentsLen = (unavailableBlock.message.body as deneb.BeaconBlockBody).blobKzgCommitments.length; pendingBlobs = blobKzgCommitmentsLen - unavailableBlockInput.cachedData.blobsCache.size; } @@ -554,7 +551,7 @@ export class UnknownBlockSync { const receivedBlockRoot = this.config.getForkTypes(block.slot).BeaconBlock.hashTreeRoot(block); if (!byteArrayEquals(receivedBlockRoot, blockRoot)) { - throw Error(`Wrong block received by peer, got ${toHexString(receivedBlockRoot)} expected ${blockRootHex}`); 
+ throw Error(`Wrong block received by peer, got ${toRootHex(receivedBlockRoot)} expected ${blockRootHex}`); } if (unavailableBlockInput.block === null) { this.logger.debug("Fetched NullBlockInput", {attempts: i, blockRootHex}); diff --git a/packages/beacon-node/src/util/bufferPool.ts b/packages/beacon-node/src/util/bufferPool.ts index f9e18a6d64a5..e3cf10fa88b3 100644 --- a/packages/beacon-node/src/util/bufferPool.ts +++ b/packages/beacon-node/src/util/bufferPool.ts @@ -5,6 +5,12 @@ import {Metrics} from "../metrics/metrics.js"; */ const GROW_RATIO = 1.1; +export enum AllocSource { + PERSISTENT_CHECKPOINTS_CACHE_VALIDATORS = "persistent_checkpoints_cache_validators", + PERSISTENT_CHECKPOINTS_CACHE_STATE = "persistent_checkpoints_cache_state", + ARCHIVE_STATE = "archive_state", +} + /** * A simple implementation to manage a single buffer. * This is initially used for state serialization at every epoch and for state reload. @@ -36,24 +42,24 @@ export class BufferPool { * If the buffer is already in use, return null. * Grow the buffer if the requested size is larger than the current buffer. */ - alloc(size: number): BufferWithKey | null { - return this.doAlloc(size, false); + alloc(size: number, source: AllocSource): BufferWithKey | null { + return this.doAlloc(size, source, false); } /** * Same to alloc() but the buffer is not zeroed. 
*/ - allocUnsafe(size: number): BufferWithKey | null { - return this.doAlloc(size, true); + allocUnsafe(size: number, source: AllocSource): BufferWithKey | null { + return this.doAlloc(size, source, true); } - private doAlloc(size: number, isUnsafe = false): BufferWithKey | null { + private doAlloc(size: number, source: AllocSource, isUnsafe = false): BufferWithKey | null { if (this.inUse) { - this.metrics?.misses.inc(); + this.metrics?.misses.inc({source}); return null; } this.inUse = true; - this.metrics?.hits.inc(); + this.metrics?.hits.inc({source}); this.currentKey += 1; if (size > this.buffer.length) { this.metrics?.grows.inc(); diff --git a/packages/beacon-node/src/util/sszBytes.ts b/packages/beacon-node/src/util/sszBytes.ts index f87b899e9591..c27df1a0fbf3 100644 --- a/packages/beacon-node/src/util/sszBytes.ts +++ b/packages/beacon-node/src/util/sszBytes.ts @@ -1,16 +1,33 @@ import {BitArray, deserializeUint8ArrayBitListFromBytes} from "@chainsafe/ssz"; import {BLSSignature, RootHex, Slot} from "@lodestar/types"; -import {toHex} from "@lodestar/utils"; -import {BYTES_PER_FIELD_ELEMENT, FIELD_ELEMENTS_PER_BLOB} from "@lodestar/params"; +import { + BYTES_PER_FIELD_ELEMENT, + FIELD_ELEMENTS_PER_BLOB, + ForkName, + ForkSeq, + MAX_COMMITTEES_PER_SLOT, +} from "@lodestar/params"; export type BlockRootHex = RootHex; +// pre-electra, AttestationData is used to cache attestations export type AttDataBase64 = string; +// electra, CommitteeBits +export type CommitteeBitsBase64 = string; +// pre-electra // class Attestation(Container): // aggregation_bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE] - offset 4 // data: AttestationData - target data - 128 // signature: BLSSignature - 96 + +// electra +// class Attestation(Container): +// aggregation_bits: BitList[MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT] - offset 4 +// data: AttestationData - target data - 128 +// signature: BLSSignature - 96 +// committee_bits: BitVector[MAX_COMMITTEES_PER_SLOT] // +// for all 
forks // class AttestationData(Container): 128 bytes fixed size // slot: Slot - data 8 // index: CommitteeIndex - data 8 @@ -23,8 +40,15 @@ const ATTESTATION_BEACON_BLOCK_ROOT_OFFSET = VARIABLE_FIELD_OFFSET + 8 + 8; const ROOT_SIZE = 32; const SLOT_SIZE = 8; const ATTESTATION_DATA_SIZE = 128; +// MAX_COMMITTEES_PER_SLOT is in bit, need to convert to byte +const COMMITTEE_BITS_SIZE = Math.max(Math.ceil(MAX_COMMITTEES_PER_SLOT / 8), 1); const SIGNATURE_SIZE = 96; +// shared Buffers to convert bytes to hex/base64 +const blockRootBuf = Buffer.alloc(ROOT_SIZE); +const attDataBuf = Buffer.alloc(ATTESTATION_DATA_SIZE); +const committeeBitsDataBuf = Buffer.alloc(COMMITTEE_BITS_SIZE); + /** * Extract slot from attestation serialized bytes. * Return null if data is not long enough to extract slot. @@ -46,36 +70,48 @@ export function getBlockRootFromAttestationSerialized(data: Uint8Array): BlockRo return null; } - return toHex(data.subarray(ATTESTATION_BEACON_BLOCK_ROOT_OFFSET, ATTESTATION_BEACON_BLOCK_ROOT_OFFSET + ROOT_SIZE)); + blockRootBuf.set( + data.subarray(ATTESTATION_BEACON_BLOCK_ROOT_OFFSET, ATTESTATION_BEACON_BLOCK_ROOT_OFFSET + ROOT_SIZE) + ); + return "0x" + blockRootBuf.toString("hex"); } /** - * Extract attestation data base64 from attestation serialized bytes. + * Extract attestation data base64 from all forks' attestation serialized bytes. * Return null if data is not long enough to extract attestation data. 
*/ -export function getAttDataBase64FromAttestationSerialized(data: Uint8Array): AttDataBase64 | null { +export function getAttDataFromAttestationSerialized(data: Uint8Array): AttDataBase64 | null { if (data.length < VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE) { return null; } // base64 is a bit efficient than hex - return toBase64(data.slice(VARIABLE_FIELD_OFFSET, VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE)); + attDataBuf.set(data.subarray(VARIABLE_FIELD_OFFSET, VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE)); + return attDataBuf.toString("base64"); +} + +/** + * Alias of `getAttDataFromAttestationSerialized` specifically for batch handling indexing in gossip queue + */ +export function getGossipAttestationIndex(data: Uint8Array): AttDataBase64 | null { + return getAttDataFromAttestationSerialized(data); } /** * Extract aggregation bits from attestation serialized bytes. * Return null if data is not long enough to extract aggregation bits. */ -export function getAggregationBitsFromAttestationSerialized(data: Uint8Array): BitArray | null { - if (data.length < VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE + SIGNATURE_SIZE) { +export function getAggregationBitsFromAttestationSerialized(fork: ForkName, data: Uint8Array): BitArray | null { + const aggregationBitsStartIndex = + ForkSeq[fork] >= ForkSeq.electra + ? 
VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE + SIGNATURE_SIZE + COMMITTEE_BITS_SIZE + : VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE + SIGNATURE_SIZE; + + if (data.length < aggregationBitsStartIndex) { return null; } - const {uint8Array, bitLen} = deserializeUint8ArrayBitListFromBytes( - data, - VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE + SIGNATURE_SIZE, - data.length - ); + const {uint8Array, bitLen} = deserializeUint8ArrayBitListFromBytes(data, aggregationBitsStartIndex, data.length); return new BitArray(uint8Array, bitLen); } @@ -84,14 +120,28 @@ export function getAggregationBitsFromAttestationSerialized(data: Uint8Array): B * Return null if data is not long enough to extract signature. */ export function getSignatureFromAttestationSerialized(data: Uint8Array): BLSSignature | null { - if (data.length < VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE + SIGNATURE_SIZE) { + const signatureStartIndex = VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE; + + if (data.length < signatureStartIndex + SIGNATURE_SIZE) { return null; } - return data.subarray( - VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE, - VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE + SIGNATURE_SIZE - ); + return data.subarray(signatureStartIndex, signatureStartIndex + SIGNATURE_SIZE); +} + +/** + * Extract committee bits from Electra attestation serialized bytes. + * Return null if data is not long enough to extract committee bits. 
+ */ +export function getCommitteeBitsFromAttestationSerialized(data: Uint8Array): CommitteeBitsBase64 | null { + const committeeBitsStartIndex = VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE + SIGNATURE_SIZE; + + if (data.length < committeeBitsStartIndex + COMMITTEE_BITS_SIZE) { + return null; + } + + committeeBitsDataBuf.set(data.subarray(committeeBitsStartIndex, committeeBitsStartIndex + COMMITTEE_BITS_SIZE)); + return committeeBitsDataBuf.toString("base64"); } // @@ -110,8 +160,9 @@ const SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET = AGGREGATE_OFFSET + VARIABLE_FIELD const SIGNED_AGGREGATE_AND_PROOF_BLOCK_ROOT_OFFSET = SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET + 8 + 8; /** - * Extract slot from signed aggregate and proof serialized bytes. - * Return null if data is not long enough to extract slot. + * Extract slot from signed aggregate and proof serialized bytes + * Return null if data is not long enough to extract slot + * This works for both phase + electra */ export function getSlotFromSignedAggregateAndProofSerialized(data: Uint8Array): Slot | null { if (data.length < SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET + SLOT_SIZE) { @@ -122,35 +173,72 @@ export function getSlotFromSignedAggregateAndProofSerialized(data: Uint8Array): } /** - * Extract block root from signed aggregate and proof serialized bytes. - * Return null if data is not long enough to extract block root. 
+ * Extract block root from signed aggregate and proof serialized bytes + * Return null if data is not long enough to extract block root + * This works for both phase + electra */ export function getBlockRootFromSignedAggregateAndProofSerialized(data: Uint8Array): BlockRootHex | null { if (data.length < SIGNED_AGGREGATE_AND_PROOF_BLOCK_ROOT_OFFSET + ROOT_SIZE) { return null; } - return toHex( + blockRootBuf.set( data.subarray( SIGNED_AGGREGATE_AND_PROOF_BLOCK_ROOT_OFFSET, SIGNED_AGGREGATE_AND_PROOF_BLOCK_ROOT_OFFSET + ROOT_SIZE ) ); + return "0x" + blockRootBuf.toString("hex"); +} + +/** + * Extract AttestationData base64 from SignedAggregateAndProof for electra + * Return null if data is not long enough + */ +export function getAttDataFromSignedAggregateAndProofElectra(data: Uint8Array): AttDataBase64 | null { + const startIndex = SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET; + const endIndex = startIndex + ATTESTATION_DATA_SIZE; + + if (data.length < endIndex + SIGNATURE_SIZE + COMMITTEE_BITS_SIZE) { + return null; + } + attDataBuf.set(data.subarray(startIndex, endIndex)); + return attDataBuf.toString("base64"); +} + +/** + * Extract CommitteeBits base64 from SignedAggregateAndProof for electra + * Return null if data is not long enough + */ +export function getCommitteeBitsFromSignedAggregateAndProofElectra(data: Uint8Array): CommitteeBitsBase64 | null { + const startIndex = SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET + ATTESTATION_DATA_SIZE + SIGNATURE_SIZE; + const endIndex = startIndex + COMMITTEE_BITS_SIZE; + + if (data.length < endIndex) { + return null; + } + + committeeBitsDataBuf.set(data.subarray(startIndex, endIndex)); + return committeeBitsDataBuf.toString("base64"); } /** * Extract attestation data base64 from signed aggregate and proof serialized bytes. * Return null if data is not long enough to extract attestation data. 
*/ -export function getAttDataBase64FromSignedAggregateAndProofSerialized(data: Uint8Array): AttDataBase64 | null { +export function getAttDataFromSignedAggregateAndProofPhase0(data: Uint8Array): AttDataBase64 | null { if (data.length < SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET + ATTESTATION_DATA_SIZE) { return null; } // base64 is a bit efficient than hex - return toBase64( - data.slice(SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET, SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET + ATTESTATION_DATA_SIZE) + attDataBuf.set( + data.subarray( + SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET, + SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET + ATTESTATION_DATA_SIZE + ) ); + return attDataBuf.toString("base64"); } /** @@ -217,7 +305,3 @@ function getSlotFromOffsetTrusted(data: Uint8Array, offset: number): Slot { function checkSlotHighBytes(data: Uint8Array, offset: number): boolean { return (data[offset + 4] | data[offset + 5] | data[offset + 6] | data[offset + 7]) === 0; } - -function toBase64(data: Uint8Array): string { - return Buffer.from(data.buffer, data.byteOffset, data.byteLength).toString("base64"); -} diff --git a/packages/beacon-node/test/e2e/api/impl/beacon/node/endpoints.test.ts b/packages/beacon-node/test/e2e/api/impl/beacon/node/endpoints.test.ts index d85fdb80720f..2d2a0a37c59e 100644 --- a/packages/beacon-node/test/e2e/api/impl/beacon/node/endpoints.test.ts +++ b/packages/beacon-node/test/e2e/api/impl/beacon/node/endpoints.test.ts @@ -1,6 +1,7 @@ import {describe, beforeAll, afterAll, it, expect, vi} from "vitest"; import {createBeaconConfig} from "@lodestar/config"; import {chainConfig as chainConfigDef} from "@lodestar/config/default"; +import {routes} from "@lodestar/api"; import {ApiClient, getClient} from "@lodestar/api/beacon"; import {sleep} from "@lodestar/utils"; import {LogLevel, testLogger} from "../../../../../utils/logger.js"; @@ -46,9 +47,9 @@ describe("beacon node api", function () { it("should return valid syncing status", async () => { const res = await 
client.node.getSyncingStatus(); - expect(res.value()).toEqual({ - headSlot: "0", - syncDistance: "0", + expect(res.value()).toEqual({ + headSlot: 0, + syncDistance: 0, isSyncing: false, isOptimistic: false, elOffline: false, diff --git a/packages/beacon-node/test/e2e/api/impl/lightclient/endpoint.test.ts b/packages/beacon-node/test/e2e/api/impl/lightclient/endpoint.test.ts index 81154005af68..c746746741cf 100644 --- a/packages/beacon-node/test/e2e/api/impl/lightclient/endpoint.test.ts +++ b/packages/beacon-node/test/e2e/api/impl/lightclient/endpoint.test.ts @@ -122,7 +122,7 @@ describe("lightclient api", function () { const committeeRes = await lightclient.getLightClientCommitteeRoot({startPeriod: 0, count: 1}); committeeRes.assertOk(); const client = getClient({baseUrl: `http://127.0.0.1:${restPort}`}, {config}).beacon; - const validators = (await client.getStateValidators({stateId: "head"})).value(); + const validators = (await client.postStateValidators({stateId: "head"})).value(); const pubkeys = validators.map((v) => v.validator.pubkey); expect(pubkeys.length).toBe(validatorCount); // only 2 validators spreading to 512 committee slots diff --git a/packages/beacon-node/test/e2e/chain/stateCache/nHistoricalStates.test.ts b/packages/beacon-node/test/e2e/chain/stateCache/nHistoricalStates.test.ts index 005b28baeefc..fd14b7b8b8c0 100644 --- a/packages/beacon-node/test/e2e/chain/stateCache/nHistoricalStates.test.ts +++ b/packages/beacon-node/test/e2e/chain/stateCache/nHistoricalStates.test.ts @@ -412,11 +412,10 @@ describe("regen/reload states with n-historical states configuration", function )?.value ).toEqual(reloadCount); - const stateSszMetricValues = await (followupBn.metrics?.cpStateCache.stateSerializeDuration as Histogram).get(); + const stateSszMetricValues = await (followupBn.metrics?.stateSerializeDuration as Histogram).get(); expect( - stateSszMetricValues?.values.find( - (value) => value.metricName === 
"lodestar_cp_state_cache_state_serialize_seconds_count" - )?.value + stateSszMetricValues?.values.find((value) => value.metricName === "lodestar_state_serialize_seconds_count") + ?.value ).toEqual(persistCount); // assert number of persisted/in-memory states diff --git a/packages/beacon-node/test/memory/unfinalizedPubkey2Index.ts b/packages/beacon-node/test/memory/unfinalizedPubkey2Index.ts new file mode 100644 index 000000000000..b37967d16ca4 --- /dev/null +++ b/packages/beacon-node/test/memory/unfinalizedPubkey2Index.ts @@ -0,0 +1,54 @@ +import crypto from "node:crypto"; +import {Map} from "immutable"; +import {ValidatorIndex} from "@lodestar/types"; +import {toMemoryEfficientHexStr} from "@lodestar/state-transition/src/cache/pubkeyCache.js"; +import {testRunnerMemory} from "./testRunnerMemory.js"; + +// Results in MacOS Nov 2023 +// +// UnfinalizedPubkey2Index 1000 keys - 274956.5 bytes / instance +// UnfinalizedPubkey2Index 10000 keys - 2591129.3 bytes / instance +// UnfinalizedPubkey2Index 100000 keys - 27261443.4 bytes / instance + +testRunnerMemoryBpi([ + { + id: "UnfinalizedPubkey2Index 1000 keys", + getInstance: () => getRandomMap(1000, () => toMemoryEfficientHexStr(crypto.randomBytes(48))), + }, + { + id: "UnfinalizedPubkey2Index 10000 keys", + getInstance: () => getRandomMap(10000, () => toMemoryEfficientHexStr(crypto.randomBytes(48))), + }, + { + id: "UnfinalizedPubkey2Index 100000 keys", + getInstance: () => getRandomMap(100000, () => toMemoryEfficientHexStr(crypto.randomBytes(48))), + }, +]); + +function getRandomMap(n: number, getKey: (i: number) => string): Map { + const map = Map(); + + return map.withMutations((m) => { + for (let i = 0; i < n; i++) { + m.set(getKey(i), i); + } + }); +} + +/** + * Test bytes per instance in different representations of raw binary data + */ +function testRunnerMemoryBpi(testCases: {getInstance: (bytes: number) => unknown; id: string}[]): void { + const longestId = Math.max(...testCases.map(({id}) => id.length)); + + 
for (const {id, getInstance} of testCases) { + const bpi = testRunnerMemory({ + getInstance, + convergeFactor: 1 / 100, + sampleEvery: 5, + }); + + // eslint-disable-next-line no-console + console.log(`${id.padEnd(longestId)} - ${bpi.toFixed(1)} bytes / instance`); + } +} diff --git a/packages/beacon-node/test/mocks/clock.ts b/packages/beacon-node/test/mocks/clock.ts index c38794bf16d4..6f09bd292491 100644 --- a/packages/beacon-node/test/mocks/clock.ts +++ b/packages/beacon-node/test/mocks/clock.ts @@ -74,5 +74,6 @@ export function getMockedClock(): Mocked { }, currentSlotWithGossipDisparity: undefined, isCurrentSlotGivenGossipDisparity: vi.fn(), + secFromSlot: vi.fn(), } as unknown as Mocked; } diff --git a/packages/beacon-node/test/mocks/mockedBeaconChain.ts b/packages/beacon-node/test/mocks/mockedBeaconChain.ts index cc85cfd7d553..addeacf26a89 100644 --- a/packages/beacon-node/test/mocks/mockedBeaconChain.ts +++ b/packages/beacon-node/test/mocks/mockedBeaconChain.ts @@ -124,7 +124,7 @@ vi.mock("../../src/chain/chain.js", async (importActual) => { // @ts-expect-error eth1: new Eth1ForBlockProduction(), opPool: new OpPool(), - aggregatedAttestationPool: new AggregatedAttestationPool(), + aggregatedAttestationPool: new AggregatedAttestationPool(config), // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-expect-error beaconProposerCache: new BeaconProposerCache(), diff --git a/packages/beacon-node/test/perf/chain/opPools/aggregatedAttestationPool.test.ts b/packages/beacon-node/test/perf/chain/opPools/aggregatedAttestationPool.test.ts index 60ff6ce48302..45fc07281c3b 100644 --- a/packages/beacon-node/test/perf/chain/opPools/aggregatedAttestationPool.test.ts +++ b/packages/beacon-node/test/perf/chain/opPools/aggregatedAttestationPool.test.ts @@ -2,6 +2,7 @@ import {itBench} from "@dapplion/benchmark"; import {BitArray, toHexString} from "@chainsafe/ssz"; import { CachedBeaconStateAltair, + computeAnchorCheckpoint, computeEpochAtSlot, 
computeStartSlotAtEpoch, getBlockRootAtSlot, @@ -10,10 +11,11 @@ import { import {HISTORICAL_ROOTS_LIMIT, SLOTS_PER_EPOCH} from "@lodestar/params"; import {ExecutionStatus, ForkChoice, IForkChoiceStore, ProtoArray, DataAvailabilityStatus} from "@lodestar/fork-choice"; import {ssz} from "@lodestar/types"; + +import {createChainForkConfig, defaultChainConfig} from "@lodestar/config"; // eslint-disable-next-line import/no-relative-packages import {generatePerfTestCachedStateAltair} from "../../../../../state-transition/test/perf/util.js"; import {AggregatedAttestationPool} from "../../../../src/chain/opPools/aggregatedAttestationPool.js"; -import {computeAnchorCheckpoint} from "../../../../src/chain/initState.js"; const vc = 1_500_000; @@ -230,7 +232,9 @@ function getAggregatedAttestationPool( numMissedVotes: number, numBadVotes: number ): AggregatedAttestationPool { - const pool = new AggregatedAttestationPool(); + const config = createChainForkConfig(defaultChainConfig); + + const pool = new AggregatedAttestationPool(config); for (let epochSlot = 0; epochSlot < SLOTS_PER_EPOCH; epochSlot++) { const slot = state.slot - 1 - epochSlot; const epoch = computeEpochAtSlot(slot); diff --git a/packages/beacon-node/test/perf/chain/stateCache/updateUnfinalizedPubkeys.test.ts b/packages/beacon-node/test/perf/chain/stateCache/updateUnfinalizedPubkeys.test.ts new file mode 100644 index 000000000000..eab66d2bee53 --- /dev/null +++ b/packages/beacon-node/test/perf/chain/stateCache/updateUnfinalizedPubkeys.test.ts @@ -0,0 +1,114 @@ +import {itBench, setBenchOpts} from "@dapplion/benchmark"; +import {Map as ImmutableMap} from "immutable"; +import {toBufferBE} from "bigint-buffer"; +import {digest} from "@chainsafe/as-sha256"; +import {SecretKey} from "@chainsafe/blst"; +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; +import {ValidatorIndex, ssz} from "@lodestar/types"; +import {type CachedBeaconStateAllForks, toMemoryEfficientHexStr} from "@lodestar/state-transition"; 
+import {bytesToBigInt, intToBytes} from "@lodestar/utils"; +import {InMemoryCheckpointStateCache, BlockStateCacheImpl} from "../../../../src/chain/stateCache/index.js"; +import {BlockStateCache} from "../../../../src/chain/stateCache/types.js"; +import {generateCachedElectraState} from "../../../utils/state.js"; + +// Benchmark date from Mon Nov 21 2023 - Intel Core i7-9750H @ 2.60Ghz +// ✔ updateUnfinalizedPubkeys - updating 10 pubkeys 1444.173 ops/s 692.4380 us/op - 1057 runs 6.03 s +// ✔ updateUnfinalizedPubkeys - updating 100 pubkeys 189.5965 ops/s 5.274358 ms/op - 57 runs 1.15 s +// ✔ updateUnfinalizedPubkeys - updating 1000 pubkeys 12.90495 ops/s 77.48967 ms/op - 13 runs 1.62 s +describe("updateUnfinalizedPubkeys perf tests", function () { + setBenchOpts({noThreshold: true}); + + const numPubkeysToBeFinalizedCases = [10, 100, 1000]; + const numCheckpointStateCache = 8; + const numStateCache = 3 * 32; + + let checkpointStateCache: InMemoryCheckpointStateCache; + let stateCache: BlockStateCache; + + const unfinalizedPubkey2Index = generatePubkey2Index(0, Math.max.apply(null, numPubkeysToBeFinalizedCases)); + const baseState = generateCachedElectraState(); + + for (const numPubkeysToBeFinalized of numPubkeysToBeFinalizedCases) { + itBench({ + id: `updateUnfinalizedPubkeys - updating ${numPubkeysToBeFinalized} pubkeys`, + beforeEach: async () => { + baseState.epochCtx.unfinalizedPubkey2index = ImmutableMap(unfinalizedPubkey2Index); + baseState.epochCtx.pubkey2index = new PubkeyIndexMap(); + baseState.epochCtx.index2pubkey = []; + + checkpointStateCache = new InMemoryCheckpointStateCache({}); + stateCache = new BlockStateCacheImpl({}); + + for (let i = 0; i < numCheckpointStateCache; i++) { + const clonedState = baseState.clone(); + const checkpoint = ssz.phase0.Checkpoint.defaultValue(); + + clonedState.slot = i; + checkpoint.epoch = i; // Assigning arbitrary non-duplicate values to ensure checkpointStateCache correctly saves all the states + + 
checkpointStateCache.add(checkpoint, clonedState); + } + + for (let i = 0; i < numStateCache; i++) { + const clonedState = baseState.clone(); + clonedState.slot = i; + stateCache.add(clonedState); + } + }, + fn: async () => { + const newFinalizedValidators = baseState.epochCtx.unfinalizedPubkey2index.filter( + (index, _pubkey) => index < numPubkeysToBeFinalized + ); + + const states = stateCache.getStates(); + const cpStates = checkpointStateCache.getStates(); + + const firstState = states.next().value as CachedBeaconStateAllForks; + firstState.epochCtx.addFinalizedPubkeys(newFinalizedValidators); + + const pubkeysToDelete = Array.from(newFinalizedValidators.keys()); + + firstState.epochCtx.deleteUnfinalizedPubkeys(pubkeysToDelete); + + for (const s of states) { + s.epochCtx.deleteUnfinalizedPubkeys(pubkeysToDelete); + } + + for (const s of cpStates) { + s.epochCtx.deleteUnfinalizedPubkeys(pubkeysToDelete); + } + }, + }); + } + + type PubkeyHex = string; + + function generatePubkey2Index(startIndex: number, endIndex: number): Map { + const pubkey2Index = new Map(); + const pubkeys = generatePubkeys(endIndex - startIndex); + + for (let i = startIndex; i < endIndex; i++) { + pubkey2Index.set(toMemoryEfficientHexStr(pubkeys[i]), i); + } + + return pubkey2Index; + } + + function generatePubkeys(validatorCount: number): Uint8Array[] { + const keys = []; + + for (let i = 0; i < validatorCount; i++) { + const sk = generatePrivateKey(i); + const pk = sk.toPublicKey().toBytes(); + keys.push(pk); + } + + return keys; + } + + function generatePrivateKey(index: number): SecretKey { + const secretKeyBytes = toBufferBE(bytesToBigInt(digest(intToBytes(index, 32))) % BigInt("38581184513"), 32); + const secret: SecretKey = SecretKey.fromBytes(secretKeyBytes); + return secret; + } +}); diff --git a/packages/beacon-node/test/perf/chain/validation/attestation.test.ts b/packages/beacon-node/test/perf/chain/validation/attestation.test.ts index 5fce9a342509..e942e1ea17d0 100644 --- 
a/packages/beacon-node/test/perf/chain/validation/attestation.test.ts +++ b/packages/beacon-node/test/perf/chain/validation/attestation.test.ts @@ -3,9 +3,9 @@ import {expect} from "chai"; import {ssz} from "@lodestar/types"; // eslint-disable-next-line import/no-relative-packages import {generateTestCachedBeaconStateOnlyValidators} from "../../../../../state-transition/test/perf/util.js"; -import {validateAttestation, validateGossipAttestationsSameAttData} from "../../../../src/chain/validation/index.js"; +import {validateGossipAttestationsSameAttData} from "../../../../src/chain/validation/index.js"; import {getAttestationValidData} from "../../../utils/validationData/attestation.js"; -import {getAttDataBase64FromAttestationSerialized} from "../../../../src/util/sszBytes.js"; +import {getAttDataFromAttestationSerialized} from "../../../../src/util/sszBytes.js"; describe("validate gossip attestation", () => { setBenchOpts({ @@ -29,25 +29,7 @@ describe("validate gossip attestation", () => { }); const attSlot = attestation0.data.slot; - const serializedData = ssz.phase0.Attestation.serialize(attestation0); const fork = chain.config.getForkName(stateSlot); - itBench({ - id: `validate gossip attestation - vc ${vc}`, - beforeEach: () => chain.seenAttesters["validatorIndexesByEpoch"].clear(), - fn: async () => { - await validateAttestation( - fork, - chain, - { - attestation: null, - serializedData, - attSlot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), - }, - subnet0 - ); - }, - }); for (const chunkSize of [32, 64, 128, 256]) { const attestations = [attestation0]; @@ -67,7 +49,7 @@ describe("validate gossip attestation", () => { attestation: null, serializedData, attSlot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData) as string, }; }); diff --git a/packages/beacon-node/test/perf/network/gossip/encoding.test.ts 
b/packages/beacon-node/test/perf/network/gossip/encoding.test.ts new file mode 100644 index 000000000000..693c91f59249 --- /dev/null +++ b/packages/beacon-node/test/perf/network/gossip/encoding.test.ts @@ -0,0 +1,46 @@ +import {itBench} from "@dapplion/benchmark"; +import {toHex} from "@lodestar/utils"; + +/** + * This is a benchmark for different ways of converting a gossipsub message id to a hex string using Mac M1 + * encoding + ✔ toHex 6463330 ops/s 154.7190 ns/op - 7170 runs 1.26 s + ✔ Buffer.from 6696982 ops/s 149.3210 ns/op - 2023 runs 0.454 s + ✔ shared Buffer 1.013911e+7 ops/s 98.62800 ns/op - 3083 runs 0.404 s + */ +describe("encoding", function () { + const msgId = Uint8Array.from(Array.from({length: 20}, (_, i) => i)); + + const runsFactor = 1000; + itBench({ + id: "toHex", + fn: () => { + for (let i = 0; i < runsFactor; i++) { + toHex(msgId); + } + }, + runsFactor, + }); + + itBench({ + id: "Buffer.from", + fn: () => { + for (let i = 0; i < runsFactor; i++) { + "0x" + Buffer.from(msgId.buffer, msgId.byteOffset, msgId.byteLength).toString("hex"); + } + }, + runsFactor, + }); + + const sharedBuf = Buffer.from(msgId); + itBench({ + id: "shared Buffer", + fn: () => { + for (let i = 0; i < runsFactor; i++) { + sharedBuf.set(msgId); + "0x" + sharedBuf.toString("hex"); + } + }, + runsFactor, + }); +}); diff --git a/packages/beacon-node/test/scripts/el-interop/besu/common-setup.sh b/packages/beacon-node/test/scripts/el-interop/besu/common-setup.sh new file mode 100755 index 000000000000..f211f5d0714b --- /dev/null +++ b/packages/beacon-node/test/scripts/el-interop/besu/common-setup.sh @@ -0,0 +1,19 @@ +#!/bin/bash -x + +echo $TTD +echo $DATA_DIR +echo $EL_BINARY_DIR +echo $JWT_SECRET_HEX +echo $TEMPLATE_FILE + +echo $scriptDir +echo $currentDir + + +env TTD=$TTD envsubst < $scriptDir/$TEMPLATE_FILE > $DATA_DIR/genesis.json +echo "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8" > $DATA_DIR/sk.json +echo "12345678" > $DATA_DIR/password.txt 
+pubKey="0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" + +# echo a hex encoded 256 bit secret into a file +echo $JWT_SECRET_HEX> $DATA_DIR/jwtsecret diff --git a/packages/beacon-node/test/scripts/el-interop/besu/electra.tmpl b/packages/beacon-node/test/scripts/el-interop/besu/electra.tmpl new file mode 100644 index 000000000000..7a63bfbe36d6 --- /dev/null +++ b/packages/beacon-node/test/scripts/el-interop/besu/electra.tmpl @@ -0,0 +1,77 @@ +{ + "config": { + "chainId":6110, + "homesteadBlock":0, + "eip150Block":0, + "eip155Block":0, + "eip158Block":0, + "byzantiumBlock":0, + "constantinopleBlock":0, + "petersburgBlock":0, + "istanbulBlock":0, + "muirGlacierBlock":0, + "berlinBlock":0, + "londonBlock":0, + "terminalTotalDifficulty":0, + "cancunTime":0, + "experimentalEipsTime":10, + "clique": { + "period": 5, + "epoch": 30000 + }, + "depositContractAddress": "0x4242424242424242424242424242424242424242" + }, + "nonce":"0x42", + "timestamp":"0x0", + "extraData":"0x0000000000000000000000000000000000000000000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "gasLimit":"0x1C9C380", + "difficulty":"0x400000000", + "mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000", + "coinbase":"0x0000000000000000000000000000000000000000", + "alloc":{ + "0xa4664C40AACeBD82A2Db79f0ea36C06Bc6A19Adb": { + "balance": "1000000000000000000000000000" + }, + "0x4242424242424242424242424242424242424242": { + "balance": "0", + "code": 
"0x60806040526004361061003f5760003560e01c806301ffc9a71461004457806322895118146100a4578063621fd130146101ba578063c5f2892f14610244575b600080fd5b34801561005057600080fd5b506100906004803603602081101561006757600080fd5b50357fffffffff000000000000000000000000000000000000000000000000000000001661026b565b604080519115158252519081900360200190f35b6101b8600480360360808110156100ba57600080fd5b8101906020810181356401000000008111156100d557600080fd5b8201836020820111156100e757600080fd5b8035906020019184600183028401116401000000008311171561010957600080fd5b91939092909160208101903564010000000081111561012757600080fd5b82018360208201111561013957600080fd5b8035906020019184600183028401116401000000008311171561015b57600080fd5b91939092909160208101903564010000000081111561017957600080fd5b82018360208201111561018b57600080fd5b803590602001918460018302840111640100000000831117156101ad57600080fd5b919350915035610304565b005b3480156101c657600080fd5b506101cf6110b5565b6040805160208082528351818301528351919283929083019185019080838360005b838110156102095781810151838201526020016101f1565b50505050905090810190601f1680156102365780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b34801561025057600080fd5b506102596110c7565b60408051918252519081900360200190f35b60007fffffffff0000000000000000000000000000000000000000000000000000000082167f01ffc9a70000000000000000000000000000000000000000000000000000000014806102fe57507fffffffff0000000000000000000000000000000000000000000000000000000082167f8564090700000000000000000000000000000000000000000000000000000000145b92915050565b6030861461035d576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118056026913960400191505060405180910390fd5b602084146103b6576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252603681526020018061179c6036913960400191505060405180910390fd5b6060821461040f576040517f08c379a0000000000000000000000000000000000000000000000000000000008152600
4018080602001828103825260298152602001806118786029913960400191505060405180910390fd5b670de0b6b3a7640000341015610470576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118526026913960400191505060405180910390fd5b633b9aca003406156104cd576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260338152602001806117d26033913960400191505060405180910390fd5b633b9aca00340467ffffffffffffffff811115610535576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252602781526020018061182b6027913960400191505060405180910390fd5b6060610540826114ba565b90507f649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c589898989858a8a6105756020546114ba565b6040805160a0808252810189905290819060208201908201606083016080840160c085018e8e80828437600083820152601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690910187810386528c815260200190508c8c808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690920188810386528c5181528c51602091820193918e019250908190849084905b83811015610648578181015183820152602001610630565b50505050905090810190601f1680156106755780820380516001836020036101000a031916815260200191505b5086810383528881526020018989808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0169092018881038452895181528951602091820193918b019250908190849084905b838110156106ef5781810151838201526020016106d7565b50505050905090810190601f16801561071c5780820380516001836020036101000a031916815260200191505b509d505050505050505050505050505060405180910390a1600060028a8a600060801b604051602001808484808284377fffffffffffffffffffffffffffffffff0000000000000000000000000000000090941691909301908152604080517ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0818403018152601090920190819052815191955093508392506020850191508083835b602083106107fc57805182527ffff
fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016107bf565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610859573d6000803e3d6000fd5b5050506040513d602081101561086e57600080fd5b5051905060006002806108846040848a8c6116fe565b6040516020018083838082843780830192505050925050506040516020818303038152906040526040518082805190602001908083835b602083106108f857805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016108bb565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610955573d6000803e3d6000fd5b5050506040513d602081101561096a57600080fd5b5051600261097b896040818d6116fe565b60405160009060200180848480828437919091019283525050604080518083038152602092830191829052805190945090925082918401908083835b602083106109f457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016109b7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610a51573d6000803e3d6000fd5b5050506040513d6020811015610a6657600080fd5b5051604080516020818101949094528082019290925280518083038201815260609092019081905281519192909182918401908083835b60208310610ada57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610a9d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610b37573d6000803e3d6000fd5b5050506040513d6020811015610b4c57600080fd5b50516040805160208101858152929350600092600292839287928f928f92018383808284378083019250505093505050506040516020818303038152906040526040518082805190602001908083835b60208310610bd957805182527ffffffffffffffffffffffffff
fffffffffffffffffffffffffffffffffffffe09092019160209182019101610b9c565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610c36573d6000803e3d6000fd5b5050506040513d6020811015610c4b57600080fd5b50516040518651600291889160009188916020918201918291908601908083835b60208310610ca957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610c6c565b6001836020036101000a0380198251168184511680821785525050505050509050018367ffffffffffffffff191667ffffffffffffffff1916815260180182815260200193505050506040516020818303038152906040526040518082805190602001908083835b60208310610d4e57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610d11565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610dab573d6000803e3d6000fd5b5050506040513d6020811015610dc057600080fd5b5051604080516020818101949094528082019290925280518083038201815260609092019081905281519192909182918401908083835b60208310610e3457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610df7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610e91573d6000803e3d6000fd5b5050506040513d6020811015610ea657600080fd5b50519050858114610f02576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260548152602001806117486054913960600191505060405180910390fd5b60205463ffffffff11610f60576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260218152602001806117276021913960400191505060405180910390fd5b602080546001019081905560005b60208110156110a9578160011660011415610fa0578260008260208110610f9157fe5b0155506110ac9550505050505
0565b600260008260208110610faf57fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061102557805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610fe8565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015611082573d6000803e3d6000fd5b5050506040513d602081101561109757600080fd5b50519250600282049150600101610f6e565b50fe5b50505050505050565b60606110c26020546114ba565b905090565b6020546000908190815b60208110156112f05781600116600114156111e6576002600082602081106110f557fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061116b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161112e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156111c8573d6000803e3d6000fd5b5050506040513d60208110156111dd57600080fd5b505192506112e2565b600283602183602081106111f657fe5b015460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061126b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161122e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156112c8573d6000803e3d6000fd5b5050506040513d60208110156112dd57600080fd5b505192505b6002820491506001016110d1565b506002826112ff6020546114ba565b600060401b6040516020018084815260200183805190602001908083835b6020831061135a57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161131d565b51815160209384036101000a7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
fff01801990921691161790527fffffffffffffffffffffffffffffffffffffffffffffffff000000000000000095909516920191825250604080518083037ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8018152601890920190819052815191955093508392850191508083835b6020831061143f57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101611402565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa15801561149c573d6000803e3d6000fd5b5050506040513d60208110156114b157600080fd5b50519250505090565b60408051600880825281830190925260609160208201818036833701905050905060c082901b8060071a60f81b826000815181106114f457fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060061a60f81b8260018151811061153757fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060051a60f81b8260028151811061157a57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060041a60f81b826003815181106115bd57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060031a60f81b8260048151811061160057fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060021a60f81b8260058151811061164357fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060011a60f81b8260068151811061168657fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060001a60f81b826007815181106116c957fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a90535050919050565b6000808585111561170d578182fd5b83861115611719578182fd5b505082019391909203915056fe4465706f736974436f6e74726163743a206d65726b6c6520747265652066756c6c4465706f736974436f6e74726163743a207265636f6e7374727563746564204465706f7369744
461746120646f6573206e6f74206d6174636820737570706c696564206465706f7369745f646174615f726f6f744465706f736974436f6e74726163743a20696e76616c6964207769746864726177616c5f63726564656e7469616c73206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c7565206e6f74206d756c7469706c65206f6620677765694465706f736974436f6e74726163743a20696e76616c6964207075626b6579206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f20686967684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f206c6f774465706f736974436f6e74726163743a20696e76616c6964207369676e6174757265206c656e677468a26469706673582212201dd26f37a621703009abf16e77e69c93dc50c79db7f6cc37543e3e0e3decdc9764736f6c634300060b0033", + "storage": { + "0x0000000000000000000000000000000000000000000000000000000000000022": "0xf5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb4b", + "0x0000000000000000000000000000000000000000000000000000000000000023": "0xdb56114e00fdd4c1f85c892bf35ac9a89289aaecb1ebd0a96cde606a748b5d71", + "0x0000000000000000000000000000000000000000000000000000000000000024": "0xc78009fdf07fc56a11f122370658a353aaa542ed63e44c4bc15ff4cd105ab33c", + "0x0000000000000000000000000000000000000000000000000000000000000025": "0x536d98837f2dd165a55d5eeae91485954472d56f246df256bf3cae19352a123c", + "0x0000000000000000000000000000000000000000000000000000000000000026": "0x9efde052aa15429fae05bad4d0b1d7c64da64d03d7a1854a588c2cb8430c0d30", + "0x0000000000000000000000000000000000000000000000000000000000000027": "0xd88ddfeed400a8755596b21942c1497e114c302e6118290f91e6772976041fa1", + "0x0000000000000000000000000000000000000000000000000000000000000028": "0x87eb0ddba57e35f6d286673802a4af5975e22506c7cf4c64bb6be5ee11527f2c", + "0x0000000000000000000000000000000000000000000000000000000000000029": "0x26846476fd5fc54a5d43385167c95144f2643f533cc85bb9d16b782f8d7db193", + "0x000000000000000000000000000000000000000000000000000000000000002a": 
"0x506d86582d252405b840018792cad2bf1259f1ef5aa5f887e13cb2f0094f51e1", + "0x000000000000000000000000000000000000000000000000000000000000002b": "0xffff0ad7e659772f9534c195c815efc4014ef1e1daed4404c06385d11192e92b", + "0x000000000000000000000000000000000000000000000000000000000000002c": "0x6cf04127db05441cd833107a52be852868890e4317e6a02ab47683aa75964220", + "0x000000000000000000000000000000000000000000000000000000000000002d": "0xb7d05f875f140027ef5118a2247bbb84ce8f2f0f1123623085daf7960c329f5f", + "0x000000000000000000000000000000000000000000000000000000000000002e": "0xdf6af5f5bbdb6be9ef8aa618e4bf8073960867171e29676f8b284dea6a08a85e", + "0x000000000000000000000000000000000000000000000000000000000000002f": "0xb58d900f5e182e3c50ef74969ea16c7726c549757cc23523c369587da7293784", + "0x0000000000000000000000000000000000000000000000000000000000000030": "0xd49a7502ffcfb0340b1d7885688500ca308161a7f96b62df9d083b71fcc8f2bb", + "0x0000000000000000000000000000000000000000000000000000000000000031": "0x8fe6b1689256c0d385f42f5bbe2027a22c1996e110ba97c171d3e5948de92beb", + "0x0000000000000000000000000000000000000000000000000000000000000032": "0x8d0d63c39ebade8509e0ae3c9c3876fb5fa112be18f905ecacfecb92057603ab", + "0x0000000000000000000000000000000000000000000000000000000000000033": "0x95eec8b2e541cad4e91de38385f2e046619f54496c2382cb6cacd5b98c26f5a4", + "0x0000000000000000000000000000000000000000000000000000000000000034": "0xf893e908917775b62bff23294dbbe3a1cd8e6cc1c35b4801887b646a6f81f17f", + "0x0000000000000000000000000000000000000000000000000000000000000035": "0xcddba7b592e3133393c16194fac7431abf2f5485ed711db282183c819e08ebaa", + "0x0000000000000000000000000000000000000000000000000000000000000036": "0x8a8d7fe3af8caa085a7639a832001457dfb9128a8061142ad0335629ff23ff9c", + "0x0000000000000000000000000000000000000000000000000000000000000037": "0xfeb3c337d7a51a6fbf00b9e34c52e1c9195c969bd4e7a0bfd51d5c5bed9c1167", + "0x0000000000000000000000000000000000000000000000000000000000000038": 
"0xe71f0aa83cc32edfbefa9f4d3e0174ca85182eec9f3a09f6a6c0df6377a510d7", + "0x0000000000000000000000000000000000000000000000000000000000000039": "0x31206fa80a50bb6abe29085058f16212212a60eec8f049fecb92d8c8e0a84bc0", + "0x000000000000000000000000000000000000000000000000000000000000003a": "0x21352bfecbeddde993839f614c3dac0a3ee37543f9b412b16199dc158e23b544", + "0x000000000000000000000000000000000000000000000000000000000000003b": "0x619e312724bb6d7c3153ed9de791d764a366b389af13c58bf8a8d90481a46765", + "0x000000000000000000000000000000000000000000000000000000000000003c": "0x7cdd2986268250628d0c10e385c58c6191e6fbe05191bcc04f133f2cea72c1c4", + "0x000000000000000000000000000000000000000000000000000000000000003d": "0x848930bd7ba8cac54661072113fb278869e07bb8587f91392933374d017bcbe1", + "0x000000000000000000000000000000000000000000000000000000000000003e": "0x8869ff2c22b28cc10510d9853292803328be4fb0e80495e8bb8d271f5b889636", + "0x000000000000000000000000000000000000000000000000000000000000003f": "0xb5fe28e79f1b850f8658246ce9b6a1e7b49fc06db7143e8fe0b4f2b0c5523a5c", + "0x0000000000000000000000000000000000000000000000000000000000000040": "0x985e929f70af28d0bdd1a90a808f977f597c7c778c489e98d3bd8910d31ac0f7" + } + } + }, + "number":"0x0", + "gasUsed":"0x0", + "parentHash":"0x0000000000000000000000000000000000000000000000000000000000000000", + "baseFeePerGas":"0x7" +} diff --git a/packages/beacon-node/test/scripts/el-interop/besu/post-merge.sh b/packages/beacon-node/test/scripts/el-interop/besu/post-merge.sh new file mode 100755 index 000000000000..d864814ece7a --- /dev/null +++ b/packages/beacon-node/test/scripts/el-interop/besu/post-merge.sh @@ -0,0 +1,8 @@ +#!/bin/bash -x + +scriptDir=$(dirname $0) +currentDir=$(pwd) + +. 
$scriptDir/common-setup.sh + +$EL_BINARY_DIR/besu --engine-rpc-enabled --rpc-http-enabled --rpc-http-api ADMIN,ETH,MINER,NET --rpc-http-port $ETH_PORT --engine-rpc-port $ENGINE_PORT --engine-jwt-secret $currentDir/$DATA_DIR/jwtsecret --data-path $DATA_DIR --data-storage-format BONSAI --genesis-file $DATA_DIR/genesis.json diff --git a/packages/beacon-node/test/scripts/el-interop/besudocker/common-setup.sh b/packages/beacon-node/test/scripts/el-interop/besudocker/common-setup.sh new file mode 100644 index 000000000000..b3d93190ef2d --- /dev/null +++ b/packages/beacon-node/test/scripts/el-interop/besudocker/common-setup.sh @@ -0,0 +1,22 @@ +#!/bin/bash -x + +echo $TTD +echo $DATA_DIR +echo $EL_BINARY_DIR +echo $JWT_SECRET_HEX +echo $TEMPLATE_FILE + +echo $scriptDir +echo $currentDir + + +env TTD=$TTD envsubst < $scriptDir/$TEMPLATE_FILE > $DATA_DIR/genesis.json +echo "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8" > $DATA_DIR/sk.json +echo "12345678" > $DATA_DIR/password.txt +pubKey="0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" + +# echo a hex encoded 256 bit secret into a file +echo $JWT_SECRET_HEX> $DATA_DIR/jwtsecret +# clear any previous docker dangling docker run +docker rm -f custom-execution +rm -rf $DATA_DIR/besu diff --git a/packages/beacon-node/test/scripts/el-interop/besudocker/electra.tmpl b/packages/beacon-node/test/scripts/el-interop/besudocker/electra.tmpl new file mode 100644 index 000000000000..7a63bfbe36d6 --- /dev/null +++ b/packages/beacon-node/test/scripts/el-interop/besudocker/electra.tmpl @@ -0,0 +1,77 @@ +{ + "config": { + "chainId":6110, + "homesteadBlock":0, + "eip150Block":0, + "eip155Block":0, + "eip158Block":0, + "byzantiumBlock":0, + "constantinopleBlock":0, + "petersburgBlock":0, + "istanbulBlock":0, + "muirGlacierBlock":0, + "berlinBlock":0, + "londonBlock":0, + "terminalTotalDifficulty":0, + "cancunTime":0, + "experimentalEipsTime":10, + "clique": { + "period": 5, + "epoch": 30000 + }, + "depositContractAddress": 
"0x4242424242424242424242424242424242424242" + }, + "nonce":"0x42", + "timestamp":"0x0", + "extraData":"0x0000000000000000000000000000000000000000000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "gasLimit":"0x1C9C380", + "difficulty":"0x400000000", + "mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000", + "coinbase":"0x0000000000000000000000000000000000000000", + "alloc":{ + "0xa4664C40AACeBD82A2Db79f0ea36C06Bc6A19Adb": { + "balance": "1000000000000000000000000000" + }, + "0x4242424242424242424242424242424242424242": { + "balance": "0", + "code": "0x60806040526004361061003f5760003560e01c806301ffc9a71461004457806322895118146100a4578063621fd130146101ba578063c5f2892f14610244575b600080fd5b34801561005057600080fd5b506100906004803603602081101561006757600080fd5b50357fffffffff000000000000000000000000000000000000000000000000000000001661026b565b604080519115158252519081900360200190f35b6101b8600480360360808110156100ba57600080fd5b8101906020810181356401000000008111156100d557600080fd5b8201836020820111156100e757600080fd5b8035906020019184600183028401116401000000008311171561010957600080fd5b91939092909160208101903564010000000081111561012757600080fd5b82018360208201111561013957600080fd5b8035906020019184600183028401116401000000008311171561015b57600080fd5b91939092909160208101903564010000000081111561017957600080fd5b82018360208201111561018b57600080fd5b803590602001918460018302840111640100000000831117156101ad57600080fd5b919350915035610304565b005b3480156101c657600080fd5b506101cf6110b5565b6040805160208082528351818301528351919283929083019185019080838360005b838110156102095781810151838201526020016101f1565b50505050905090810190601f1680156102365780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b34801561025057600080fd5b506102596110c7565b60408051918252519081900360200190f35b60007fffffffff00000000000000
00000000000000000000000000000000000000000082167f01ffc9a70000000000000000000000000000000000000000000000000000000014806102fe57507fffffffff0000000000000000000000000000000000000000000000000000000082167f8564090700000000000000000000000000000000000000000000000000000000145b92915050565b6030861461035d576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118056026913960400191505060405180910390fd5b602084146103b6576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252603681526020018061179c6036913960400191505060405180910390fd5b6060821461040f576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260298152602001806118786029913960400191505060405180910390fd5b670de0b6b3a7640000341015610470576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118526026913960400191505060405180910390fd5b633b9aca003406156104cd576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260338152602001806117d26033913960400191505060405180910390fd5b633b9aca00340467ffffffffffffffff811115610535576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252602781526020018061182b6027913960400191505060405180910390fd5b6060610540826114ba565b90507f649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c589898989858a8a6105756020546114ba565b6040805160a0808252810189905290819060208201908201606083016080840160c085018e8e80828437600083820152601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690910187810386528c815260200190508c8c808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690920188810386528c5181528c51602091820193918e019250908190849084905b83811015610648578181015183820152602001610630565b50505050905090810190601f16801561067557808203805160
01836020036101000a031916815260200191505b5086810383528881526020018989808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0169092018881038452895181528951602091820193918b019250908190849084905b838110156106ef5781810151838201526020016106d7565b50505050905090810190601f16801561071c5780820380516001836020036101000a031916815260200191505b509d505050505050505050505050505060405180910390a1600060028a8a600060801b604051602001808484808284377fffffffffffffffffffffffffffffffff0000000000000000000000000000000090941691909301908152604080517ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0818403018152601090920190819052815191955093508392506020850191508083835b602083106107fc57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016107bf565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610859573d6000803e3d6000fd5b5050506040513d602081101561086e57600080fd5b5051905060006002806108846040848a8c6116fe565b6040516020018083838082843780830192505050925050506040516020818303038152906040526040518082805190602001908083835b602083106108f857805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016108bb565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610955573d6000803e3d6000fd5b5050506040513d602081101561096a57600080fd5b5051600261097b896040818d6116fe565b60405160009060200180848480828437919091019283525050604080518083038152602092830191829052805190945090925082918401908083835b602083106109f457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016109b7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610a51573d60
00803e3d6000fd5b5050506040513d6020811015610a6657600080fd5b5051604080516020818101949094528082019290925280518083038201815260609092019081905281519192909182918401908083835b60208310610ada57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610a9d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610b37573d6000803e3d6000fd5b5050506040513d6020811015610b4c57600080fd5b50516040805160208101858152929350600092600292839287928f928f92018383808284378083019250505093505050506040516020818303038152906040526040518082805190602001908083835b60208310610bd957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610b9c565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610c36573d6000803e3d6000fd5b5050506040513d6020811015610c4b57600080fd5b50516040518651600291889160009188916020918201918291908601908083835b60208310610ca957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610c6c565b6001836020036101000a0380198251168184511680821785525050505050509050018367ffffffffffffffff191667ffffffffffffffff1916815260180182815260200193505050506040516020818303038152906040526040518082805190602001908083835b60208310610d4e57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610d11565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610dab573d6000803e3d6000fd5b5050506040513d6020811015610dc057600080fd5b5051604080516020818101949094528082019290925280518083038201815260609092019081905281519192909182918401908083835b60208310610e3457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610d
f7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610e91573d6000803e3d6000fd5b5050506040513d6020811015610ea657600080fd5b50519050858114610f02576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260548152602001806117486054913960600191505060405180910390fd5b60205463ffffffff11610f60576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260218152602001806117276021913960400191505060405180910390fd5b602080546001019081905560005b60208110156110a9578160011660011415610fa0578260008260208110610f9157fe5b0155506110ac95505050505050565b600260008260208110610faf57fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061102557805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610fe8565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015611082573d6000803e3d6000fd5b5050506040513d602081101561109757600080fd5b50519250600282049150600101610f6e565b50fe5b50505050505050565b60606110c26020546114ba565b905090565b6020546000908190815b60208110156112f05781600116600114156111e6576002600082602081106110f557fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061116b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161112e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156111c8573d6000803e3d6000fd5b5050506040513d60208110156111dd57600080fd5b505192506112e2565b600283602183602081106111f657fe5b0154604051602001808381526020018281526020019250505060405160208183030381
52906040526040518082805190602001908083835b6020831061126b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161122e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156112c8573d6000803e3d6000fd5b5050506040513d60208110156112dd57600080fd5b505192505b6002820491506001016110d1565b506002826112ff6020546114ba565b600060401b6040516020018084815260200183805190602001908083835b6020831061135a57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161131d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790527fffffffffffffffffffffffffffffffffffffffffffffffff000000000000000095909516920191825250604080518083037ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8018152601890920190819052815191955093508392850191508083835b6020831061143f57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101611402565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa15801561149c573d6000803e3d6000fd5b5050506040513d60208110156114b157600080fd5b50519250505090565b60408051600880825281830190925260609160208201818036833701905050905060c082901b8060071a60f81b826000815181106114f457fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060061a60f81b8260018151811061153757fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060051a60f81b8260028151811061157a57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060041a60f81b826003815181106115bd57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060031a60f81b8260048151811061160057fe5b60
200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060021a60f81b8260058151811061164357fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060011a60f81b8260068151811061168657fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060001a60f81b826007815181106116c957fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a90535050919050565b6000808585111561170d578182fd5b83861115611719578182fd5b505082019391909203915056fe4465706f736974436f6e74726163743a206d65726b6c6520747265652066756c6c4465706f736974436f6e74726163743a207265636f6e7374727563746564204465706f7369744461746120646f6573206e6f74206d6174636820737570706c696564206465706f7369745f646174615f726f6f744465706f736974436f6e74726163743a20696e76616c6964207769746864726177616c5f63726564656e7469616c73206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c7565206e6f74206d756c7469706c65206f6620677765694465706f736974436f6e74726163743a20696e76616c6964207075626b6579206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f20686967684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f206c6f774465706f736974436f6e74726163743a20696e76616c6964207369676e6174757265206c656e677468a26469706673582212201dd26f37a621703009abf16e77e69c93dc50c79db7f6cc37543e3e0e3decdc9764736f6c634300060b0033", + "storage": { + "0x0000000000000000000000000000000000000000000000000000000000000022": "0xf5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb4b", + "0x0000000000000000000000000000000000000000000000000000000000000023": "0xdb56114e00fdd4c1f85c892bf35ac9a89289aaecb1ebd0a96cde606a748b5d71", + "0x0000000000000000000000000000000000000000000000000000000000000024": "0xc78009fdf07fc56a11f122370658a353aaa542ed63e44c4bc15ff4cd105ab33c", + "0x0000000000000000000000000000000000000000000000000000000000000025": 
"0x536d98837f2dd165a55d5eeae91485954472d56f246df256bf3cae19352a123c", + "0x0000000000000000000000000000000000000000000000000000000000000026": "0x9efde052aa15429fae05bad4d0b1d7c64da64d03d7a1854a588c2cb8430c0d30", + "0x0000000000000000000000000000000000000000000000000000000000000027": "0xd88ddfeed400a8755596b21942c1497e114c302e6118290f91e6772976041fa1", + "0x0000000000000000000000000000000000000000000000000000000000000028": "0x87eb0ddba57e35f6d286673802a4af5975e22506c7cf4c64bb6be5ee11527f2c", + "0x0000000000000000000000000000000000000000000000000000000000000029": "0x26846476fd5fc54a5d43385167c95144f2643f533cc85bb9d16b782f8d7db193", + "0x000000000000000000000000000000000000000000000000000000000000002a": "0x506d86582d252405b840018792cad2bf1259f1ef5aa5f887e13cb2f0094f51e1", + "0x000000000000000000000000000000000000000000000000000000000000002b": "0xffff0ad7e659772f9534c195c815efc4014ef1e1daed4404c06385d11192e92b", + "0x000000000000000000000000000000000000000000000000000000000000002c": "0x6cf04127db05441cd833107a52be852868890e4317e6a02ab47683aa75964220", + "0x000000000000000000000000000000000000000000000000000000000000002d": "0xb7d05f875f140027ef5118a2247bbb84ce8f2f0f1123623085daf7960c329f5f", + "0x000000000000000000000000000000000000000000000000000000000000002e": "0xdf6af5f5bbdb6be9ef8aa618e4bf8073960867171e29676f8b284dea6a08a85e", + "0x000000000000000000000000000000000000000000000000000000000000002f": "0xb58d900f5e182e3c50ef74969ea16c7726c549757cc23523c369587da7293784", + "0x0000000000000000000000000000000000000000000000000000000000000030": "0xd49a7502ffcfb0340b1d7885688500ca308161a7f96b62df9d083b71fcc8f2bb", + "0x0000000000000000000000000000000000000000000000000000000000000031": "0x8fe6b1689256c0d385f42f5bbe2027a22c1996e110ba97c171d3e5948de92beb", + "0x0000000000000000000000000000000000000000000000000000000000000032": "0x8d0d63c39ebade8509e0ae3c9c3876fb5fa112be18f905ecacfecb92057603ab", + "0x0000000000000000000000000000000000000000000000000000000000000033": 
"0x95eec8b2e541cad4e91de38385f2e046619f54496c2382cb6cacd5b98c26f5a4", + "0x0000000000000000000000000000000000000000000000000000000000000034": "0xf893e908917775b62bff23294dbbe3a1cd8e6cc1c35b4801887b646a6f81f17f", + "0x0000000000000000000000000000000000000000000000000000000000000035": "0xcddba7b592e3133393c16194fac7431abf2f5485ed711db282183c819e08ebaa", + "0x0000000000000000000000000000000000000000000000000000000000000036": "0x8a8d7fe3af8caa085a7639a832001457dfb9128a8061142ad0335629ff23ff9c", + "0x0000000000000000000000000000000000000000000000000000000000000037": "0xfeb3c337d7a51a6fbf00b9e34c52e1c9195c969bd4e7a0bfd51d5c5bed9c1167", + "0x0000000000000000000000000000000000000000000000000000000000000038": "0xe71f0aa83cc32edfbefa9f4d3e0174ca85182eec9f3a09f6a6c0df6377a510d7", + "0x0000000000000000000000000000000000000000000000000000000000000039": "0x31206fa80a50bb6abe29085058f16212212a60eec8f049fecb92d8c8e0a84bc0", + "0x000000000000000000000000000000000000000000000000000000000000003a": "0x21352bfecbeddde993839f614c3dac0a3ee37543f9b412b16199dc158e23b544", + "0x000000000000000000000000000000000000000000000000000000000000003b": "0x619e312724bb6d7c3153ed9de791d764a366b389af13c58bf8a8d90481a46765", + "0x000000000000000000000000000000000000000000000000000000000000003c": "0x7cdd2986268250628d0c10e385c58c6191e6fbe05191bcc04f133f2cea72c1c4", + "0x000000000000000000000000000000000000000000000000000000000000003d": "0x848930bd7ba8cac54661072113fb278869e07bb8587f91392933374d017bcbe1", + "0x000000000000000000000000000000000000000000000000000000000000003e": "0x8869ff2c22b28cc10510d9853292803328be4fb0e80495e8bb8d271f5b889636", + "0x000000000000000000000000000000000000000000000000000000000000003f": "0xb5fe28e79f1b850f8658246ce9b6a1e7b49fc06db7143e8fe0b4f2b0c5523a5c", + "0x0000000000000000000000000000000000000000000000000000000000000040": "0x985e929f70af28d0bdd1a90a808f977f597c7c778c489e98d3bd8910d31ac0f7" + } + } + }, + "number":"0x0", + "gasUsed":"0x0", + 
"parentHash":"0x0000000000000000000000000000000000000000000000000000000000000000", + "baseFeePerGas":"0x7" +} diff --git a/packages/beacon-node/test/scripts/el-interop/besudocker/post-merge.sh b/packages/beacon-node/test/scripts/el-interop/besudocker/post-merge.sh new file mode 100755 index 000000000000..d26307ee3d24 --- /dev/null +++ b/packages/beacon-node/test/scripts/el-interop/besudocker/post-merge.sh @@ -0,0 +1,8 @@ +#!/bin/bash -x + +scriptDir=$(dirname $0) +currentDir=$(pwd) + +. $scriptDir/common-setup.sh + +docker run --rm -u $(id -u ${USER}):$(id -g ${USER}) --name custom-execution -p $ETH_PORT:$ETH_PORT -p $ENGINE_PORT:$ENGINE_PORT -v $currentDir/$DATA_DIR:/data $EL_BINARY_DIR --engine-rpc-enabled --rpc-http-enabled --rpc-http-api ADMIN,ETH,MINER,NET --rpc-http-port $ETH_PORT --engine-rpc-port $ENGINE_PORT --engine-jwt-secret /data/jwtsecret --data-path /data/besu --data-storage-format BONSAI --genesis-file /data/genesis.json diff --git a/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/electra.tmpl b/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/electra.tmpl new file mode 100644 index 000000000000..3a06b75cd000 --- /dev/null +++ b/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/electra.tmpl @@ -0,0 +1,94 @@ +{ +"config": { +"chainId":1, +"homesteadBlock":0, +"eip150Block":0, +"eip155Block":0, +"eip158Block":0, +"byzantiumBlock":0, +"constantinopleBlock":0, +"petersburgBlock":0, +"istanbulBlock":0, +"muirGlacierBlock":0, +"berlinBlock":0, +"londonBlock":0, +"shanghaiTime":0, +"cancunTime": 0, +"pragueTime": 0, +"clique": { +"blockperiodseconds": 5, +"epochlength": 30000 +}, +"terminalTotalDifficulty":${TTD} +}, +"nonce":"0x42", +"timestamp":"0x0", +"extraData":"0x0000000000000000000000000000000000000000000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", 
+"gasLimit":"0x1C9C380", +"difficulty":"0x400000000", +"mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000", +"coinbase":"0x0000000000000000000000000000000000000000", +"alloc":{ + "0x610adc49ecd66cbf176a8247ebd59096c031bd9f": { + "balance": "0x6d6172697573766477000000" + }, + "0xa4664C40AACeBD82A2Db79f0ea36C06Bc6A19Adb": { + "balance": "1000000000000000000000000000" + }, + "0x00000000219ab540356cBB839Cbe05303d7705Fa": { + "balance": "0", + "code": "0x60806040526004361061003f5760003560e01c806301ffc9a71461004457806322895118146100a4578063621fd130146101ba578063c5f2892f14610244575b600080fd5b34801561005057600080fd5b506100906004803603602081101561006757600080fd5b50357fffffffff000000000000000000000000000000000000000000000000000000001661026b565b604080519115158252519081900360200190f35b6101b8600480360360808110156100ba57600080fd5b8101906020810181356401000000008111156100d557600080fd5b8201836020820111156100e757600080fd5b8035906020019184600183028401116401000000008311171561010957600080fd5b91939092909160208101903564010000000081111561012757600080fd5b82018360208201111561013957600080fd5b8035906020019184600183028401116401000000008311171561015b57600080fd5b91939092909160208101903564010000000081111561017957600080fd5b82018360208201111561018b57600080fd5b803590602001918460018302840111640100000000831117156101ad57600080fd5b919350915035610304565b005b3480156101c657600080fd5b506101cf6110b5565b6040805160208082528351818301528351919283929083019185019080838360005b838110156102095781810151838201526020016101f1565b50505050905090810190601f1680156102365780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b34801561025057600080fd5b506102596110c7565b60408051918252519081900360200190f35b60007fffffffff0000000000000000000000000000000000000000000000000000000082167f01ffc9a70000000000000000000000000000000000000000000000000000000014806102fe57507fffffffff0000000000000000000000000000000000000000000000000000000082167f85640907000000000000000000000000000000000000000000000
00000000000145b92915050565b6030861461035d576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118056026913960400191505060405180910390fd5b602084146103b6576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252603681526020018061179c6036913960400191505060405180910390fd5b6060821461040f576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260298152602001806118786029913960400191505060405180910390fd5b670de0b6b3a7640000341015610470576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118526026913960400191505060405180910390fd5b633b9aca003406156104cd576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260338152602001806117d26033913960400191505060405180910390fd5b633b9aca00340467ffffffffffffffff811115610535576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252602781526020018061182b6027913960400191505060405180910390fd5b6060610540826114ba565b90507f649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c589898989858a8a6105756020546114ba565b6040805160a0808252810189905290819060208201908201606083016080840160c085018e8e80828437600083820152601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690910187810386528c815260200190508c8c808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690920188810386528c5181528c51602091820193918e019250908190849084905b83811015610648578181015183820152602001610630565b50505050905090810190601f1680156106755780820380516001836020036101000a031916815260200191505b5086810383528881526020018989808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0169092018881038452895181528951602091820193918b019250908190849084905b838110156106ef5
781810151838201526020016106d7565b50505050905090810190601f16801561071c5780820380516001836020036101000a031916815260200191505b509d505050505050505050505050505060405180910390a1600060028a8a600060801b604051602001808484808284377fffffffffffffffffffffffffffffffff0000000000000000000000000000000090941691909301908152604080517ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0818403018152601090920190819052815191955093508392506020850191508083835b602083106107fc57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016107bf565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610859573d6000803e3d6000fd5b5050506040513d602081101561086e57600080fd5b5051905060006002806108846040848a8c6116fe565b6040516020018083838082843780830192505050925050506040516020818303038152906040526040518082805190602001908083835b602083106108f857805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016108bb565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610955573d6000803e3d6000fd5b5050506040513d602081101561096a57600080fd5b5051600261097b896040818d6116fe565b60405160009060200180848480828437919091019283525050604080518083038152602092830191829052805190945090925082918401908083835b602083106109f457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016109b7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610a51573d6000803e3d6000fd5b5050506040513d6020811015610a6657600080fd5b5051604080516020818101949094528082019290925280518083038201815260609092019081905281519192909182918401908083835b60208310610ada57805182527ffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
fffffe09092019160209182019101610a9d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610b37573d6000803e3d6000fd5b5050506040513d6020811015610b4c57600080fd5b50516040805160208101858152929350600092600292839287928f928f92018383808284378083019250505093505050506040516020818303038152906040526040518082805190602001908083835b60208310610bd957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610b9c565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610c36573d6000803e3d6000fd5b5050506040513d6020811015610c4b57600080fd5b50516040518651600291889160009188916020918201918291908601908083835b60208310610ca957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610c6c565b6001836020036101000a0380198251168184511680821785525050505050509050018367ffffffffffffffff191667ffffffffffffffff1916815260180182815260200193505050506040516020818303038152906040526040518082805190602001908083835b60208310610d4e57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610d11565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610dab573d6000803e3d6000fd5b5050506040513d6020811015610dc057600080fd5b5051604080516020818101949094528082019290925280518083038201815260609092019081905281519192909182918401908083835b60208310610e3457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610df7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610e91573d6000803e3d6000fd5b5050506040513d6020811015610ea657600080fd5b50519050858114610
f02576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260548152602001806117486054913960600191505060405180910390fd5b60205463ffffffff11610f60576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260218152602001806117276021913960400191505060405180910390fd5b602080546001019081905560005b60208110156110a9578160011660011415610fa0578260008260208110610f9157fe5b0155506110ac95505050505050565b600260008260208110610faf57fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061102557805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610fe8565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015611082573d6000803e3d6000fd5b5050506040513d602081101561109757600080fd5b50519250600282049150600101610f6e565b50fe5b50505050505050565b60606110c26020546114ba565b905090565b6020546000908190815b60208110156112f05781600116600114156111e6576002600082602081106110f557fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061116b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161112e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156111c8573d6000803e3d6000fd5b5050506040513d60208110156111dd57600080fd5b505192506112e2565b600283602183602081106111f657fe5b015460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061126b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161122e565b51815160209384036101000a7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
fff01801990921691161790526040519190930194509192505080830381855afa1580156112c8573d6000803e3d6000fd5b5050506040513d60208110156112dd57600080fd5b505192505b6002820491506001016110d1565b506002826112ff6020546114ba565b600060401b6040516020018084815260200183805190602001908083835b6020831061135a57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161131d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790527fffffffffffffffffffffffffffffffffffffffffffffffff000000000000000095909516920191825250604080518083037ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8018152601890920190819052815191955093508392850191508083835b6020831061143f57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101611402565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa15801561149c573d6000803e3d6000fd5b5050506040513d60208110156114b157600080fd5b50519250505090565b60408051600880825281830190925260609160208201818036833701905050905060c082901b8060071a60f81b826000815181106114f457fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060061a60f81b8260018151811061153757fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060051a60f81b8260028151811061157a57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060041a60f81b826003815181106115bd57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060031a60f81b8260048151811061160057fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060021a60f81b8260058151811061164357fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060011a60f81b82600681518
11061168657fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060001a60f81b826007815181106116c957fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a90535050919050565b6000808585111561170d578182fd5b83861115611719578182fd5b505082019391909203915056fe4465706f736974436f6e74726163743a206d65726b6c6520747265652066756c6c4465706f736974436f6e74726163743a207265636f6e7374727563746564204465706f7369744461746120646f6573206e6f74206d6174636820737570706c696564206465706f7369745f646174615f726f6f744465706f736974436f6e74726163743a20696e76616c6964207769746864726177616c5f63726564656e7469616c73206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c7565206e6f74206d756c7469706c65206f6620677765694465706f736974436f6e74726163743a20696e76616c6964207075626b6579206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f20686967684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f206c6f774465706f736974436f6e74726163743a20696e76616c6964207369676e6174757265206c656e677468a2646970667358221220dceca8706b29e917dacf25fceef95acac8d90d765ac926663ce4096195952b6164736f6c634300060b0033", + "storage": { + "0x0000000000000000000000000000000000000000000000000000000000000022": "0xf5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb4b", + "0x0000000000000000000000000000000000000000000000000000000000000023": "0xdb56114e00fdd4c1f85c892bf35ac9a89289aaecb1ebd0a96cde606a748b5d71", + "0x0000000000000000000000000000000000000000000000000000000000000024": "0xc78009fdf07fc56a11f122370658a353aaa542ed63e44c4bc15ff4cd105ab33c", + "0x0000000000000000000000000000000000000000000000000000000000000025": "0x536d98837f2dd165a55d5eeae91485954472d56f246df256bf3cae19352a123c", + "0x0000000000000000000000000000000000000000000000000000000000000026": "0x9efde052aa15429fae05bad4d0b1d7c64da64d03d7a1854a588c2cb8430c0d30", + "0x0000000000000000000000000000000000000000000000000000000000000027": 
"0xd88ddfeed400a8755596b21942c1497e114c302e6118290f91e6772976041fa1", + "0x0000000000000000000000000000000000000000000000000000000000000028": "0x87eb0ddba57e35f6d286673802a4af5975e22506c7cf4c64bb6be5ee11527f2c", + "0x0000000000000000000000000000000000000000000000000000000000000029": "0x26846476fd5fc54a5d43385167c95144f2643f533cc85bb9d16b782f8d7db193", + "0x000000000000000000000000000000000000000000000000000000000000002a": "0x506d86582d252405b840018792cad2bf1259f1ef5aa5f887e13cb2f0094f51e1", + "0x000000000000000000000000000000000000000000000000000000000000002b": "0xffff0ad7e659772f9534c195c815efc4014ef1e1daed4404c06385d11192e92b", + "0x000000000000000000000000000000000000000000000000000000000000002c": "0x6cf04127db05441cd833107a52be852868890e4317e6a02ab47683aa75964220", + "0x000000000000000000000000000000000000000000000000000000000000002d": "0xb7d05f875f140027ef5118a2247bbb84ce8f2f0f1123623085daf7960c329f5f", + "0x000000000000000000000000000000000000000000000000000000000000002e": "0xdf6af5f5bbdb6be9ef8aa618e4bf8073960867171e29676f8b284dea6a08a85e", + "0x000000000000000000000000000000000000000000000000000000000000002f": "0xb58d900f5e182e3c50ef74969ea16c7726c549757cc23523c369587da7293784", + "0x0000000000000000000000000000000000000000000000000000000000000030": "0xd49a7502ffcfb0340b1d7885688500ca308161a7f96b62df9d083b71fcc8f2bb", + "0x0000000000000000000000000000000000000000000000000000000000000031": "0x8fe6b1689256c0d385f42f5bbe2027a22c1996e110ba97c171d3e5948de92beb", + "0x0000000000000000000000000000000000000000000000000000000000000032": "0x8d0d63c39ebade8509e0ae3c9c3876fb5fa112be18f905ecacfecb92057603ab", + "0x0000000000000000000000000000000000000000000000000000000000000033": "0x95eec8b2e541cad4e91de38385f2e046619f54496c2382cb6cacd5b98c26f5a4", + "0x0000000000000000000000000000000000000000000000000000000000000034": "0xf893e908917775b62bff23294dbbe3a1cd8e6cc1c35b4801887b646a6f81f17f", + "0x0000000000000000000000000000000000000000000000000000000000000035": 
"0xcddba7b592e3133393c16194fac7431abf2f5485ed711db282183c819e08ebaa", + "0x0000000000000000000000000000000000000000000000000000000000000036": "0x8a8d7fe3af8caa085a7639a832001457dfb9128a8061142ad0335629ff23ff9c", + "0x0000000000000000000000000000000000000000000000000000000000000037": "0xfeb3c337d7a51a6fbf00b9e34c52e1c9195c969bd4e7a0bfd51d5c5bed9c1167", + "0x0000000000000000000000000000000000000000000000000000000000000038": "0xe71f0aa83cc32edfbefa9f4d3e0174ca85182eec9f3a09f6a6c0df6377a510d7", + "0x0000000000000000000000000000000000000000000000000000000000000039": "0x31206fa80a50bb6abe29085058f16212212a60eec8f049fecb92d8c8e0a84bc0", + "0x000000000000000000000000000000000000000000000000000000000000003a": "0x21352bfecbeddde993839f614c3dac0a3ee37543f9b412b16199dc158e23b544", + "0x000000000000000000000000000000000000000000000000000000000000003b": "0x619e312724bb6d7c3153ed9de791d764a366b389af13c58bf8a8d90481a46765", + "0x000000000000000000000000000000000000000000000000000000000000003c": "0x7cdd2986268250628d0c10e385c58c6191e6fbe05191bcc04f133f2cea72c1c4", + "0x000000000000000000000000000000000000000000000000000000000000003d": "0x848930bd7ba8cac54661072113fb278869e07bb8587f91392933374d017bcbe1", + "0x000000000000000000000000000000000000000000000000000000000000003e": "0x8869ff2c22b28cc10510d9853292803328be4fb0e80495e8bb8d271f5b889636", + "0x000000000000000000000000000000000000000000000000000000000000003f": "0xb5fe28e79f1b850f8658246ce9b6a1e7b49fc06db7143e8fe0b4f2b0c5523a5c", + "0x0000000000000000000000000000000000000000000000000000000000000040": "0x985e929f70af28d0bdd1a90a808f977f597c7c778c489e98d3bd8910d31ac0f7" + } + }, + "0x25a219378dad9b3503c8268c9ca836a52427a4fb": { + "balance": "0", + "nonce": "1", + "code": "0x60203611603157600143035f35116029575f356120000143116029576120005f3506545f5260205ff35b5f5f5260205ff35b5f5ffd00" + }, + "0x00A3ca265EBcb825B45F985A16CEFB49958cE017": { + "balance": "0", + "nonce": "1", + "code": 
"0x3373fffffffffffffffffffffffffffffffffffffffe146090573615156028575f545f5260205ff35b366038141561012e5760115f54600182026001905f5b5f82111560595781019083028483029004916001019190603e565b90939004341061012e57600154600101600155600354806003026004013381556001015f3581556001016020359055600101600355005b6003546002548082038060101160a4575060105b5f5b81811460dd5780604c02838201600302600401805490600101805490600101549160601b83528260140152906034015260010160a6565b910180921460ed579060025560f8565b90505f6002555f6003555b5f548061049d141561010757505f5b60015460028282011161011c5750505f610122565b01600290035b5f555f600155604c025ff35b5f5ffd", + "storage": { + "0x0000000000000000000000000000000000000000000000000000000000000000": + "0x000000000000000000000000000000000000000000000000000000000000049d" + } + } +}, +"number":"0x0", +"gasUsed":"0x0", +"parentHash":"0x0000000000000000000000000000000000000000000000000000000000000000", +"baseFeePerGas":"0x7" +} diff --git a/packages/beacon-node/test/sim/electra-interop.test.ts b/packages/beacon-node/test/sim/electra-interop.test.ts new file mode 100644 index 000000000000..2d08428df558 --- /dev/null +++ b/packages/beacon-node/test/sim/electra-interop.test.ts @@ -0,0 +1,457 @@ +import fs from "node:fs"; +import assert from "node:assert"; +import {describe, it, vi, afterAll, afterEach} from "vitest"; + +import {LogLevel, sleep} from "@lodestar/utils"; +import {ForkName, SLOTS_PER_EPOCH, UNSET_DEPOSIT_REQUESTS_START_INDEX} from "@lodestar/params"; +import {electra, Epoch, Slot} from "@lodestar/types"; +import {ValidatorProposerConfig} from "@lodestar/validator"; + +import {ChainConfig} from "@lodestar/config"; +import {TimestampFormatCode} from "@lodestar/logger"; +import {CachedBeaconStateElectra} from "@lodestar/state-transition"; +import {initializeExecutionEngine} from "../../src/execution/index.js"; +import {ExecutionPayloadStatus, PayloadAttributes} from "../../src/execution/engine/interface.js"; + +import {testLogger, TestLoggerOpts} from 
"../utils/logger.js"; +import {runEL, ELStartMode, ELClient, sendRawTransactionBig} from "../utils/runEl.js"; +import {defaultExecutionEngineHttpOpts} from "../../src/execution/engine/http.js"; +import {getDevBeaconNode} from "../utils/node/beacon.js"; +import {BeaconRestApiServerOpts} from "../../src/api/index.js"; +import {simTestInfoTracker} from "../utils/node/simTest.js"; +import {getAndInitDevValidators} from "../utils/node/validator.js"; +import {ClockEvent} from "../../src/util/clock.js"; +import {dataToBytes} from "../../src/eth1/provider/utils.js"; +import {bytesToData} from "../../lib/eth1/provider/utils.js"; +import {BeaconNode} from "../../src/index.js"; +import {logFilesDir} from "./params.js"; +import {shell} from "./shell.js"; + +// NOTE: How to run +// DEV_RUN=true EL_BINARY_DIR=ethpandaops/ethereumjs:master-0e06ddf EL_SCRIPT_DIR=ethereumjsdocker yarn vitest --run test/sim/electra-interop.test.ts +// ``` + +/* eslint-disable no-console, @typescript-eslint/naming-convention */ + +const jwtSecretHex = "0xdc6457099f127cf0bac78de8b297df04951281909db4f58b43def7c7151e765d"; +const retries = defaultExecutionEngineHttpOpts.retries; +const retryDelay = defaultExecutionEngineHttpOpts.retryDelay; +describe("executionEngine / ExecutionEngineHttp", function () { + if (!process.env.EL_BINARY_DIR || !process.env.EL_SCRIPT_DIR) { + throw Error( + `EL ENV must be provided, EL_BINARY_DIR: ${process.env.EL_BINARY_DIR}, EL_SCRIPT_DIR: ${process.env.EL_SCRIPT_DIR}` + ); + } + vi.setConfig({testTimeout: 1000 * 60 * 10, hookTimeout: 1000 * 60 * 10}); + + const dataPath = fs.mkdtempSync("lodestar-test-electra"); + const elSetupConfig = { + elScriptDir: process.env.EL_SCRIPT_DIR, + elBinaryDir: process.env.EL_BINARY_DIR, + }; + const elRunOptions = { + dataPath, + jwtSecretHex, + enginePort: parseInt(process.env.ENGINE_PORT ?? "8551"), + ethPort: parseInt(process.env.ETH_PORT ?? 
"8545"), + }; + + const controller = new AbortController(); + afterAll(async () => { + controller?.abort(); + await shell(`sudo rm -rf ${dataPath}`); + }); + + const afterEachCallbacks: (() => Promise | void)[] = []; + afterEach(async () => { + while (afterEachCallbacks.length > 0) { + const callback = afterEachCallbacks.pop(); + if (callback) await callback(); + } + }); + + it("Send and get payloads with depositRequests to/from EL", async () => { + const {elClient, tearDownCallBack} = await runEL( + {...elSetupConfig, mode: ELStartMode.PostMerge, genesisTemplate: "electra.tmpl"}, + {...elRunOptions, ttd: BigInt(0)}, + controller.signal + ); + afterEachCallbacks.push(() => tearDownCallBack()); + const {genesisBlockHash, engineRpcUrl, ethRpcUrl} = elClient; + console.log({genesisBlockHash}); + + const loggerExecutionEngine = testLogger("executionEngine"); + + const executionEngine = initializeExecutionEngine( + {mode: "http", urls: [engineRpcUrl], jwtSecretHex, retries, retryDelay}, + {signal: controller.signal, logger: loggerExecutionEngine} + ); + + // 1. Prepare payload + const preparePayloadParams: PayloadAttributes = { + // Note: this is created with a pre-defined genesis.json + timestamp: 10, + prevRandao: dataToBytes("0x0000000000000000000000000000000000000000000000000000000000000000", 32), + suggestedFeeRecipient: "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", + withdrawals: [], + parentBeaconBlockRoot: dataToBytes("0x0000000000000000000000000000000000000000000000000000000000000000", 32), + }; + const payloadId = await executionEngine.notifyForkchoiceUpdate( + ForkName.electra, + genesisBlockHash, + //use finalizedBlockHash as safeBlockHash + genesisBlockHash, + genesisBlockHash, + preparePayloadParams + ); + if (!payloadId) throw Error("InvalidPayloadId"); + + // 2. Send raw deposit transaction A and B. 
tx A is to be imported via newPayload, tx B is to be included in payload via getPayload + const depositTransactionA = + "0x02f90213018080648401c9c3809400000000219ab540356cbb839cbe05303d7705fa8901bc16d674ec800000b901a422895118000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000001208cd4e5a69709cf8ee5b1b73d6efbf3f33bcac92fb7e4ce62b2467542fb50a72d0000000000000000000000000000000000000000000000000000000000000030ac842878bb70009552a4cfcad801d6e659c50bd50d7d03306790cb455ce7363c5b6972f0159d170f625a99b2064dbefc000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020010000000000000000000000818ccb1c4eda80270b04d6df822b1e72dd83c3030000000000000000000000000000000000000000000000000000000000000060a747f75c72d0cf0d2b52504c7385b516f0523e2f0842416399f42b4aee5c6384a5674f6426b1cc3d0827886fa9b909e616f5c9f61f986013ed2b9bf37071cbae951136265b549f44e3c8e26233c0433e9124b7fd0dc86e82f9fedfc0a179d769c080a067c9857d27a42f8fde4d5cf2d6c324af94469ac93ec867eacdd9002e1297835fa07927224866e03d51fb1ae94390e7aec453cad8df9e048892e98f945178eab254"; + const depositRequestA = { + amount: 32000000000, + index: 0, + pubkey: dataToBytes( + "0xac842878bb70009552a4cfcad801d6e659c50bd50d7d03306790cb455ce7363c5b6972f0159d170f625a99b2064dbefc", + 48 + ), + signature: dataToBytes( + "0xa747f75c72d0cf0d2b52504c7385b516f0523e2f0842416399f42b4aee5c6384a5674f6426b1cc3d0827886fa9b909e616f5c9f61f986013ed2b9bf37071cbae951136265b549f44e3c8e26233c0433e9124b7fd0dc86e82f9fedfc0a179d769", + 96 + ), + withdrawalCredentials: dataToBytes("0x010000000000000000000000818ccb1c4eda80270b04d6df822b1e72dd83c303", 32), + }; + + const depositTransactionB = + 
"0x02f90213010180648401c9c3809400000000219ab540356cbb839cbe05303d7705fa8901bc16d674ec800000b901a422895118000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000120a7ec6a3459bf9389265f62abbdffcd0ef20924bd03e4856d3b964edf565bd8e80000000000000000000000000000000000000000000000000000000000000030a5290ddb9abd6a7fb8bac3414c6c7ff093a18ff297c1eada20464de388b14aafa505bfc98847ca7e6f7ca3aa9d4ca769000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020010000000000000000000000da628fed218cbe3a9e684a9f51c49dd63a229a1d000000000000000000000000000000000000000000000000000000000000006080e12262f94795ce3453f17eea2dd44843ff7977d303b192c1d2a4ce0dbebc8856c398d6445cbf244ba9e99307ead1e30b2544a5e9693cdd5196a33c46e2dd8a8b83afc8278c1ea79cd5c13cac2b96a62257b3636787d0f1e0f881c50a4667ddc080a0b653aad27e504d4fcd19b7c317ffbd2a26a81d6ac14ecea6a891a63dcf7816dfa02953273b4cddc93b2a9ba21aaeb0db988cb1086319dd0b91f79bc101adfe32e4"; + const depositRequestB = { + amount: 32000000000, + index: 1, + pubkey: dataToBytes( + "0xa5290ddb9abd6a7fb8bac3414c6c7ff093a18ff297c1eada20464de388b14aafa505bfc98847ca7e6f7ca3aa9d4ca769", + 48 + ), + signature: dataToBytes( + "0x80e12262f94795ce3453f17eea2dd44843ff7977d303b192c1d2a4ce0dbebc8856c398d6445cbf244ba9e99307ead1e30b2544a5e9693cdd5196a33c46e2dd8a8b83afc8278c1ea79cd5c13cac2b96a62257b3636787d0f1e0f881c50a4667dd", + 96 + ), + withdrawalCredentials: dataToBytes("0x010000000000000000000000da628fed218cbe3a9e684a9f51c49dd63a229a1d", 32), + }; + + sendRawTransactionBig(ethRpcUrl, depositTransactionA, `${dataPath}/deposit.json`).catch((e: Error) => { + loggerExecutionEngine.error("Fail to send raw deposit transaction A", undefined, e); + }); + + sendRawTransactionBig(ethRpcUrl, depositTransactionB, `${dataPath}/deposit.json`).catch((e: Error) => { + loggerExecutionEngine.error("Fail to send raw deposit 
transaction B", undefined, e); + }); + + // 3. Import new payload with tx A and deposit receipt A + const newPayloadBlockHash = "0x4cec1852552239cf78e8bd2db35ff9396acb6b40c3ce486e6e3028bc75c9faec"; + const newPayload = { + parentHash: dataToBytes("0xeb86e5aca89ea5477a6e169a389efbbe7e5a3d5f5c5296bcde3a4b032ea9bae8", 32), + feeRecipient: dataToBytes("0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", 20), + stateRoot: dataToBytes("0x686ce0478cabce79b298712fefee4aefd2fac1ab4a4813936d2c1ccca788bbc3", 32), + logsBloom: dataToBytes( + "0x00000000000000000000400000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000020000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000", + 256 + ), + prevRandao: dataToBytes("0x0000000000000000000000000000000000000000000000000000000000000000", 32), + gasLimit: 30000000, + gasUsed: 84714, + timestamp: 16, + extraData: dataToBytes("0x", 0), + baseFeePerGas: 7n, + excessBlobGas: 0n, + transactions: [dataToBytes(depositTransactionA, null)], + withdrawals: [], + depositRequests: [depositRequestA], + blockNumber: 1, + blockHash: dataToBytes(newPayloadBlockHash, 32), + receiptsRoot: dataToBytes("0x0b67bea29f17eeb290685e01e9a2e4cd77a83471d9985a8ce27997a7ed3ee3f8", 32), + blobGasUsed: 0n, + }; + const parentBeaconBlockRoot = dataToBytes("0x0000000000000000000000000000000000000000000000000000000000000000", 32); + const payloadResult = await executionEngine.notifyNewPayload( + ForkName.electra, + newPayload, + [], + parentBeaconBlockRoot + ); + if (payloadResult.status !== ExecutionPayloadStatus.VALID) { + throw Error("getPayload returned payload that notifyNewPayload deems 
invalid"); + } + + // 4. Update fork choice + const preparePayloadParams2: PayloadAttributes = { + timestamp: 48, + prevRandao: dataToBytes("0x0000000000000000000000000000000000000000000000000000000000000000", 32), + suggestedFeeRecipient: "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", + withdrawals: [], + parentBeaconBlockRoot: dataToBytes("0x0000000000000000000000000000000000000000000000000000000000000000", 32), + }; + + const payloadId2 = await executionEngine.notifyForkchoiceUpdate( + ForkName.electra, + newPayloadBlockHash, + //use finalizedBlockHash as safeBlockHash + newPayloadBlockHash, + newPayloadBlockHash, + preparePayloadParams2 + ); + if (!payloadId2) throw Error("InvalidPayloadId"); + + // 5. Get the payload. Check depositRequests field contains deposit + // Wait a bit first for besu to pick up tx from the tx pool. + await sleep(1000); + const payloadAndBlockValue = await executionEngine.getPayload(ForkName.electra, payloadId2); + const payload = payloadAndBlockValue.executionPayload as electra.ExecutionPayload; + const depositRequests = payloadAndBlockValue.executionRequests?.deposits; + + if (payload.transactions.length !== 1) { + throw Error(`Number of transactions mismatched. Expected: 1, actual: ${payload.transactions.length}`); + } else { + const actualTransaction = bytesToData(payload.transactions[0]); + + if (actualTransaction !== depositTransactionB) { + throw Error(`Transaction mismatched. Expected: ${depositTransactionB}, actual: ${actualTransaction}`); + } + } + + if (depositRequests === undefined || depositRequests.length !== 1) { + throw Error(`Number of depositRequests mismatched. Expected: 1, actual: ${depositRequests?.length}`); + } + + const actualDepositRequest = depositRequests[0]; + assert.deepStrictEqual( + actualDepositRequest, + depositRequestB, + `Deposit receipts mismatched. 
Expected: ${JSON.stringify(depositRequestB)}, actual: ${JSON.stringify( + actualDepositRequest + )}` + ); + }); + + // TODO: get this post merge run working + it.skip("Post-merge, run for a few blocks", async function () { + console.log("\n\nPost-merge, run for a few blocks\n\n"); + const {elClient, tearDownCallBack} = await runEL( + {...elSetupConfig, mode: ELStartMode.PostMerge, genesisTemplate: "electra.tmpl"}, + {...elRunOptions, ttd: BigInt(0)}, + controller.signal + ); + afterEachCallbacks.push(() => tearDownCallBack()); + + await runNodeWithEL({ + elClient, + electraEpoch: 0, + testName: "post-merge", + }); + }); + + /** + * Want to test two things: + * 1) Send two raw deposit transactions, and see if two new validators with correct balances show up in the state.validators and unfinalized cache + * 2) Upon state-transition, see if the two new validators move from unfinalized cache to finalized cache + */ + async function runNodeWithEL({ + elClient, + electraEpoch, + testName, + }: { + elClient: ELClient; + electraEpoch: Epoch; + testName: string; + }): Promise { + const {genesisBlockHash, ttd, engineRpcUrl, ethRpcUrl} = elClient; + const validatorClientCount = 1; + const validatorsPerClient = 32; + + const testParams: Pick = { + SECONDS_PER_SLOT: 2, + }; + + // Just enough to have a checkpoint finalized + const expectedEpochsToFinish = 4; + // 1 epoch of margin of error + const epochsOfMargin = 1; + const timeoutSetupMargin = 30 * 1000; // Give extra 30 seconds of margin + + // delay a bit so regular sync sees it's up to date and sync is completed from the beginning + const genesisSlotsDelay = 8; + + const timeout = + ((epochsOfMargin + expectedEpochsToFinish) * SLOTS_PER_EPOCH + genesisSlotsDelay) * + testParams.SECONDS_PER_SLOT * + 1000; + + vi.setConfig({testTimeout: timeout + 2 * timeoutSetupMargin}); + + const genesisTime = Math.floor(Date.now() / 1000) + genesisSlotsDelay * testParams.SECONDS_PER_SLOT; + + const testLoggerOpts: TestLoggerOpts = { + 
level: LogLevel.info, + file: { + filepath: `${logFilesDir}/mergemock-${testName}.log`, + level: LogLevel.debug, + }, + timestampFormat: { + format: TimestampFormatCode.EpochSlot, + genesisTime, + slotsPerEpoch: SLOTS_PER_EPOCH, + secondsPerSlot: testParams.SECONDS_PER_SLOT, + }, + }; + const loggerNodeA = testLogger("Node-A", testLoggerOpts); + + const bn = await getDevBeaconNode({ + params: { + ...testParams, + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + CAPELLA_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + ELECTRA_FORK_EPOCH: electraEpoch, + TERMINAL_TOTAL_DIFFICULTY: ttd, + }, + options: { + api: {rest: {enabled: true} as BeaconRestApiServerOpts}, + sync: {isSingleNode: true}, + network: {allowPublishToZeroPeers: true, discv5: null}, + // Now eth deposit/merge tracker methods directly available on engine endpoints + eth1: {enabled: false, providerUrls: [engineRpcUrl], jwtSecretHex}, + executionEngine: {urls: [engineRpcUrl], jwtSecretHex}, + chain: {suggestedFeeRecipient: "0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"}, + }, + validatorCount: validatorClientCount * validatorsPerClient, + logger: loggerNodeA, + genesisTime, + eth1BlockHash: dataToBytes(genesisBlockHash, 32), + withEth1Credentials: true, + }); + + afterEachCallbacks.push(async function () { + await bn.close(); + await sleep(1000); + }); + + const stopInfoTracker = simTestInfoTracker(bn, loggerNodeA); + const valProposerConfig = { + defaultConfig: { + feeRecipient: "0xcccccccccccccccccccccccccccccccccccccccc", + }, + } as ValidatorProposerConfig; + + const {validators} = await getAndInitDevValidators({ + node: bn, + logPrefix: "Node-A", + validatorsPerClient, + validatorClientCount, + startIndex: 0, + // At least one sim test must use the REST API for beacon <-> validator comms + useRestApi: true, + testLoggerOpts, + valProposerConfig, + }); + + afterEachCallbacks.push(async function () { + await Promise.all(validators.map((v) => v.close())); + }); + + await waitForSlot(bn, 1); + + // send raw tx at 
slot 1 + const depositTransaction = + "0x02f90213018080648401c9c3809400000000219ab540356cbb839cbe05303d7705fa8901bc16d674ec800000b901a422895118000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000001208cd4e5a69709cf8ee5b1b73d6efbf3f33bcac92fb7e4ce62b2467542fb50a72d0000000000000000000000000000000000000000000000000000000000000030ac842878bb70009552a4cfcad801d6e659c50bd50d7d03306790cb455ce7363c5b6972f0159d170f625a99b2064dbefc000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020010000000000000000000000818ccb1c4eda80270b04d6df822b1e72dd83c3030000000000000000000000000000000000000000000000000000000000000060a747f75c72d0cf0d2b52504c7385b516f0523e2f0842416399f42b4aee5c6384a5674f6426b1cc3d0827886fa9b909e616f5c9f61f986013ed2b9bf37071cbae951136265b549f44e3c8e26233c0433e9124b7fd0dc86e82f9fedfc0a179d769c080a067c9857d27a42f8fde4d5cf2d6c324af94469ac93ec867eacdd9002e1297835fa07927224866e03d51fb1ae94390e7aec453cad8df9e048892e98f945178eab254"; + sendRawTransactionBig(ethRpcUrl, depositTransaction, `${dataPath}/deposit.json`).catch((e: Error) => { + loggerNodeA.error("Fail to send raw deposit transaction", undefined, e); + }); + + await waitForSlot(bn, 5); + // Expect new validator to be in unfinalized cache, in state.validators and not in finalized cache + let headState = bn.chain.getHeadState(); + let epochCtx = headState.epochCtx; + if (headState.validators.length !== 33 || headState.balances.length !== 33) { + throw Error("New validator is not reflected in the beacon state at slot 5"); + } + if (epochCtx.index2pubkey.length !== 32 || epochCtx.pubkey2index.size !== 32) { + throw Error("Finalized cache is modified."); + } + if (epochCtx.unfinalizedPubkey2index.size !== 1) { + throw Error( + `Unfinalized cache is missing the expected validator. 
Size: ${epochCtx.unfinalizedPubkey2index.size}` + ); + } + // validator count at epoch 1 should be empty at this point since no epoch transition has happened. + if (epochCtx.getValidatorCountAtEpoch(1) !== undefined) { + throw Error("Historical validator lengths is modified"); + } + + await new Promise((resolve, _reject) => { + bn.chain.clock.on(ClockEvent.epoch, (epoch) => { + // Resolve only if the finalized checkpoint includes execution payload + if (epoch >= expectedEpochsToFinish) { + console.log("\nGot event epoch, stopping validators and nodes\n"); + resolve(); + } + }); + }); + + // Stop chain and un-subscribe events so the execution engine won't update it's head + // Allow some time to broadcast finalized events and complete the importBlock routine + await Promise.all(validators.map((v) => v.close())); + await bn.close(); + await sleep(500); + + // Check if new validator is in finalized cache + headState = bn.chain.getHeadState() as CachedBeaconStateElectra; + epochCtx = headState.epochCtx; + + if (headState.validators.length !== 33 || headState.balances.length !== 33) { + throw Error("New validator is not reflected in the beacon state."); + } + if (epochCtx.index2pubkey.length !== 33 || epochCtx.pubkey2index.size !== 33) { + throw Error("New validator is not in finalized cache"); + } + if (!epochCtx.unfinalizedPubkey2index.isEmpty()) { + throw Error("Unfinalized cache still contains new validator"); + } + // After 4 epochs, headState's finalized cp epoch should be 2 + // epochCtx should only have validator count for epoch 3 and 4. 
+ if (epochCtx.getValidatorCountAtEpoch(4) === undefined || epochCtx.getValidatorCountAtEpoch(3) === undefined) { + throw Error("Missing historical validator length for epoch 3 or 4"); + } + + if (epochCtx.getValidatorCountAtEpoch(4) !== 33 || epochCtx.getValidatorCountAtEpoch(3) !== 33) { + throw Error("Incorrect historical validator length for epoch 3 or 4"); + } + + if (epochCtx.getValidatorCountAtEpoch(2) !== undefined || epochCtx.getValidatorCountAtEpoch(1) !== undefined) { + throw Error("Historical validator length for epoch 1 or 2 is not dropped properly"); + } + + if (headState.depositRequestsStartIndex === UNSET_DEPOSIT_REQUESTS_START_INDEX) { + throw Error("state.depositRequestsStartIndex is not set upon processing new deposit receipt"); + } + + // wait for 1 slot to print current epoch stats + await sleep(1 * bn.config.SECONDS_PER_SLOT * 1000); + stopInfoTracker(); + console.log("\n\nDone\n\n"); + } +}); + +async function waitForSlot(bn: BeaconNode, targetSlot: Slot): Promise { + await new Promise((resolve, reject) => { + bn.chain.clock.on(ClockEvent.slot, (currentSlot) => { + if (currentSlot === targetSlot) { + resolve(); + return; + } + if (currentSlot > targetSlot) { + reject(Error(`Beacon node has passed target slot ${targetSlot}. 
Current slot ${currentSlot}`)); + } + }); + }); +} diff --git a/packages/beacon-node/test/spec/presets/epoch_processing.test.ts b/packages/beacon-node/test/spec/presets/epoch_processing.test.ts index a244762143f3..604243400aa0 100644 --- a/packages/beacon-node/test/spec/presets/epoch_processing.test.ts +++ b/packages/beacon-node/test/spec/presets/epoch_processing.test.ts @@ -5,6 +5,7 @@ import { EpochTransitionCache, BeaconStateAllForks, beforeProcessEpoch, + CachedBeaconStateAltair, } from "@lodestar/state-transition"; import * as epochFns from "@lodestar/state-transition/epoch"; import {ssz} from "@lodestar/types"; @@ -22,7 +23,10 @@ export type EpochTransitionFn = (state: CachedBeaconStateAllForks, epochTransiti /* eslint-disable @typescript-eslint/naming-convention */ const epochTransitionFns: Record = { - effective_balance_updates: epochFns.processEffectiveBalanceUpdates, + effective_balance_updates: (state, epochTransitionCache) => { + const fork = state.config.getForkSeq(state.slot); + epochFns.processEffectiveBalanceUpdates(fork, state, epochTransitionCache); + }, eth1_data_reset: epochFns.processEth1DataReset, historical_roots_update: epochFns.processHistoricalRootsUpdate, inactivity_updates: epochFns.processInactivityUpdates as EpochTransitionFn, @@ -30,12 +34,20 @@ const epochTransitionFns: Record = { participation_flag_updates: epochFns.processParticipationFlagUpdates as EpochTransitionFn, participation_record_updates: epochFns.processParticipationRecordUpdates as EpochTransitionFn, randao_mixes_reset: epochFns.processRandaoMixesReset, - registry_updates: epochFns.processRegistryUpdates, + registry_updates: (state, epochTransitionCache) => { + const fork = state.config.getForkSeq(state.slot); + epochFns.processRegistryUpdates(fork, state, epochTransitionCache); + }, rewards_and_penalties: epochFns.processRewardsAndPenalties, slashings: epochFns.processSlashings, slashings_reset: epochFns.processSlashingsReset, - sync_committee_updates: 
epochFns.processSyncCommitteeUpdates as EpochTransitionFn, + sync_committee_updates: (state, _) => { + const fork = state.config.getForkSeq(state.slot); + epochFns.processSyncCommitteeUpdates(fork, state as CachedBeaconStateAltair); + }, historical_summaries_update: epochFns.processHistoricalSummariesUpdate as EpochTransitionFn, + pending_balance_deposits: epochFns.processPendingBalanceDeposits as EpochTransitionFn, + pending_consolidations: epochFns.processPendingConsolidations as EpochTransitionFn, }; /** diff --git a/packages/beacon-node/test/spec/presets/fork.test.ts b/packages/beacon-node/test/spec/presets/fork.test.ts index 228ab6a38935..c121e651fcea 100644 --- a/packages/beacon-node/test/spec/presets/fork.test.ts +++ b/packages/beacon-node/test/spec/presets/fork.test.ts @@ -5,6 +5,7 @@ import { CachedBeaconStateAltair, CachedBeaconStatePhase0, CachedBeaconStateCapella, + CachedBeaconStateDeneb, } from "@lodestar/state-transition"; import * as slotFns from "@lodestar/state-transition/slot"; import {phase0, ssz} from "@lodestar/types"; @@ -35,6 +36,8 @@ const fork: TestRunnerFn = (forkNext) => { return slotFns.upgradeStateToCapella(preState as CachedBeaconStateBellatrix); case ForkName.deneb: return slotFns.upgradeStateToDeneb(preState as CachedBeaconStateCapella); + case ForkName.electra: + return slotFns.upgradeStateToElectra(preState as CachedBeaconStateDeneb); } }, options: { diff --git a/packages/beacon-node/test/spec/presets/fork_choice.test.ts b/packages/beacon-node/test/spec/presets/fork_choice.test.ts index 92862c6cb03b..7cb6e3c3d692 100644 --- a/packages/beacon-node/test/spec/presets/fork_choice.test.ts +++ b/packages/beacon-node/test/spec/presets/fork_choice.test.ts @@ -4,7 +4,17 @@ import {toHexString} from "@chainsafe/ssz"; import {BeaconStateAllForks, isExecutionStateType, signedBlockToSignedHeader} from "@lodestar/state-transition"; import {InputType} from "@lodestar/spec-test-util"; import {CheckpointWithHex, ForkChoice} from 
"@lodestar/fork-choice"; -import {phase0, bellatrix, ssz, RootHex, deneb, BeaconBlock, SignedBeaconBlock} from "@lodestar/types"; +import { + bellatrix, + ssz, + RootHex, + deneb, + BeaconBlock, + SignedBeaconBlock, + sszTypesFor, + Attestation, + AttesterSlashing, +} from "@lodestar/types"; import {bnToNum, fromHex} from "@lodestar/utils"; import {createBeaconConfig} from "@lodestar/config"; import {ACTIVE_PRESET, ForkSeq, isForkBlobs, ForkName} from "@lodestar/params"; @@ -136,8 +146,11 @@ const forkChoiceTest = const attestation = testcase.attestations.get(step.attestation); if (!attestation) throw Error(`No attestation ${step.attestation}`); const headState = chain.getHeadState(); - const attDataRootHex = toHexString(ssz.phase0.AttestationData.hashTreeRoot(attestation.data)); - chain.forkChoice.onAttestation(headState.epochCtx.getIndexedAttestation(attestation), attDataRootHex); + const attDataRootHex = toHexString(sszTypesFor(fork).AttestationData.hashTreeRoot(attestation.data)); + chain.forkChoice.onAttestation( + headState.epochCtx.getIndexedAttestation(ForkSeq[fork], attestation), + attDataRootHex + ); } // attester slashing step @@ -340,16 +353,16 @@ const forkChoiceTest = [BLOCK_FILE_NAME]: ssz[fork].SignedBeaconBlock, [BLOBS_FILE_NAME]: ssz.deneb.Blobs, [POW_BLOCK_FILE_NAME]: ssz.bellatrix.PowBlock, - [ATTESTATION_FILE_NAME]: ssz.phase0.Attestation, - [ATTESTER_SLASHING_FILE_NAME]: ssz.phase0.AttesterSlashing, + [ATTESTATION_FILE_NAME]: sszTypesFor(fork).Attestation, + [ATTESTER_SLASHING_FILE_NAME]: sszTypesFor(fork).AttesterSlashing, }, mapToTestCase: (t: Record) => { // t has input file name as key const blocks = new Map(); const blobs = new Map(); const powBlocks = new Map(); - const attestations = new Map(); - const attesterSlashings = new Map(); + const attestations = new Map(); + const attesterSlashings = new Map(); for (const key in t) { const blockMatch = key.match(BLOCK_FILE_NAME); if (blockMatch) { @@ -492,8 +505,8 @@ type ForkChoiceTestCase = 
{ blocks: Map; blobs: Map; powBlocks: Map; - attestations: Map; - attesterSlashings: Map; + attestations: Map; + attesterSlashings: Map; }; function isTick(step: Step): step is OnTick { diff --git a/packages/beacon-node/test/spec/presets/genesis.test.ts b/packages/beacon-node/test/spec/presets/genesis.test.ts index f03f2595a566..773debe3bb19 100644 --- a/packages/beacon-node/test/spec/presets/genesis.test.ts +++ b/packages/beacon-node/test/spec/presets/genesis.test.ts @@ -60,9 +60,7 @@ const genesisInitialization: TestRunnerFn - ) + executionPayloadHeaderType.toViewDU(testcase["execution_payload_header"]) ); }, // eth1.yaml diff --git a/packages/beacon-node/test/spec/presets/operations.test.ts b/packages/beacon-node/test/spec/presets/operations.test.ts index 4c1c10e0cb66..7e2e9c1e9c5d 100644 --- a/packages/beacon-node/test/spec/presets/operations.test.ts +++ b/packages/beacon-node/test/spec/presets/operations.test.ts @@ -4,11 +4,12 @@ import { CachedBeaconStateAllForks, CachedBeaconStateBellatrix, CachedBeaconStateCapella, + CachedBeaconStateElectra, ExecutionPayloadStatus, getBlockRootAtSlot, } from "@lodestar/state-transition"; import * as blockFns from "@lodestar/state-transition/block"; -import {ssz, phase0, altair, bellatrix, capella, sszTypesFor} from "@lodestar/types"; +import {ssz, phase0, altair, bellatrix, capella, electra, sszTypesFor} from "@lodestar/types"; import {InputType} from "@lodestar/spec-test-util"; import {ACTIVE_PRESET, ForkName} from "@lodestar/params"; @@ -65,7 +66,8 @@ const operationFns: Record> = sync_aggregate_random: sync_aggregate, voluntary_exit: (state, testCase: {voluntary_exit: phase0.SignedVoluntaryExit}) => { - blockFns.processVoluntaryExit(state, testCase.voluntary_exit); + const fork = state.config.getForkSeq(state.slot); + blockFns.processVoluntaryExit(fork, state, testCase.voluntary_exit); }, execution_payload: (state, testCase: {body: bellatrix.BeaconBlockBody; execution: {execution_valid: boolean}}) => { @@ -82,7 +84,22 
@@ const operationFns: Record> = }, withdrawals: (state, testCase: {execution_payload: capella.ExecutionPayload}) => { - blockFns.processWithdrawals(state as CachedBeaconStateCapella, testCase.execution_payload); + const fork = state.config.getForkSeq(state.slot); + blockFns.processWithdrawals(fork, state as CachedBeaconStateCapella, testCase.execution_payload); + }, + + withdrawal_request: (state, testCase: {withdrawal_request: electra.WithdrawalRequest}) => { + const fork = state.config.getForkSeq(state.slot); + blockFns.processWithdrawalRequest(fork, state as CachedBeaconStateElectra, testCase.withdrawal_request); + }, + + deposit_request: (state, testCase: {deposit_request: electra.DepositRequest}) => { + const fork = state.config.getForkSeq(state.slot); + blockFns.processDepositRequest(fork, state as CachedBeaconStateElectra, testCase.deposit_request); + }, + + consolidation_request: (state, testCase: {consolidation_request: electra.ConsolidationRequest}) => { + blockFns.processConsolidationRequest(state as CachedBeaconStateElectra, testCase.consolidation_request); }, }; @@ -116,8 +133,8 @@ const operations: TestRunnerFn = (fork, sszTypes: { pre: ssz[fork].BeaconState, post: ssz[fork].BeaconState, - attestation: ssz.phase0.Attestation, - attester_slashing: ssz.phase0.AttesterSlashing, + attestation: sszTypesFor(fork).Attestation, + attester_slashing: sszTypesFor(fork).AttesterSlashing, block: ssz[fork].BeaconBlock, body: ssz[fork].BeaconBlockBody, deposit: ssz.phase0.Deposit, @@ -132,6 +149,10 @@ const operations: TestRunnerFn = (fork, : ssz.bellatrix.ExecutionPayload, // Capella address_change: ssz.capella.SignedBLSToExecutionChange, + // Electra + withdrawal_request: ssz.electra.WithdrawalRequest, + deposit_request: ssz.electra.DepositRequest, + consolidation_request: ssz.electra.ConsolidationRequest, }, shouldError: (testCase) => testCase.post === undefined, getExpected: (testCase) => testCase.post, diff --git 
a/packages/beacon-node/test/spec/presets/ssz_static.test.ts b/packages/beacon-node/test/spec/presets/ssz_static.test.ts index d81b9dee0098..6e43d851ef66 100644 --- a/packages/beacon-node/test/spec/presets/ssz_static.test.ts +++ b/packages/beacon-node/test/spec/presets/ssz_static.test.ts @@ -1,6 +1,6 @@ import fs from "node:fs"; import path from "node:path"; -import {it, vi} from "vitest"; +import {expect, it, vi} from "vitest"; import {Type} from "@chainsafe/ssz"; import {ssz, sszTypesFor} from "@lodestar/types"; import {ACTIVE_PRESET, ForkName} from "@lodestar/params"; @@ -45,12 +45,20 @@ const sszStatic = /* eslint-disable @typescript-eslint/strict-boolean-expressions */ const sszType = (sszTypesFor(fork) as Types)[typeName] || + (ssz.electra as Types)[typeName] || + (ssz.deneb as Types)[typeName] || (ssz.capella as Types)[typeName] || (ssz.bellatrix as Types)[typeName] || (ssz.altair as Types)[typeName] || (ssz.phase0 as Types)[typeName]; + + it(`${fork} - ${typeName} type exists`, function () { + expect(sszType).toEqualWithMessage(expect.any(Type), `SSZ type ${typeName} for fork ${fork} is not defined`); + }); + if (!sszType) { - throw Error(`No type for ${typeName}`); + // Return instead of throwing an error to only skip ssz_static tests associated to missing type + return; } const sszTypeNoUint = replaceUintTypeWithUintBigintType(sszType); diff --git a/packages/beacon-node/test/spec/presets/transition.test.ts b/packages/beacon-node/test/spec/presets/transition.test.ts index d9925f292677..cae7c667b590 100644 --- a/packages/beacon-node/test/spec/presets/transition.test.ts +++ b/packages/beacon-node/test/spec/presets/transition.test.ts @@ -102,6 +102,14 @@ function getTransitionConfig(fork: ForkName, forkEpoch: number): Partial testId.startsWith(skippedPrefix))) { + if (opts?.skippedTestSuites?.some((skippedMatch) => testId.match(skippedMatch))) { displaySkipTest(testId); } else if (fork === undefined) { displayFailTest(testId, `Unknown fork ${forkStr}`); @@ 
-150,7 +156,11 @@ export function specTestIterator( // Generic testRunner else { const {testFunction, options} = testRunner.fn(fork, testHandler, testSuite); - + if (opts.skippedTests && options.shouldSkip === undefined) { + options.shouldSkip = (_testCase: any, name: string, _index: number): boolean => { + return opts?.skippedTests?.some((skippedMatch) => name.match(skippedMatch)) ?? false; + }; + } describeDirectorySpecTest(testId, testSuiteDirpath, testFunction, options); } } diff --git a/packages/beacon-node/test/unit/api/impl/beacon/state/utils.test.ts b/packages/beacon-node/test/unit/api/impl/beacon/state/utils.test.ts index a6020c0a3c13..39c936c0d025 100644 --- a/packages/beacon-node/test/unit/api/impl/beacon/state/utils.test.ts +++ b/packages/beacon-node/test/unit/api/impl/beacon/state/utils.test.ts @@ -1,107 +1,9 @@ import {describe, it, expect} from "vitest"; import {toHexString} from "@chainsafe/ssz"; -import {phase0} from "@lodestar/types"; -import {getValidatorStatus, getStateValidatorIndex} from "../../../../../../src/api/impl/beacon/state/utils.js"; +import {getStateValidatorIndex} from "../../../../../../src/api/impl/beacon/state/utils.js"; import {generateCachedAltairState} from "../../../../../utils/state.js"; describe("beacon state api utils", function () { - describe("getValidatorStatus", function () { - it("should return PENDING_INITIALIZED", function () { - const validator = { - activationEpoch: 1, - activationEligibilityEpoch: Infinity, - } as phase0.Validator; - const currentEpoch = 0; - const status = getValidatorStatus(validator, currentEpoch); - expect(status).toBe("pending_initialized"); - }); - it("should return PENDING_QUEUED", function () { - const validator = { - activationEpoch: 1, - activationEligibilityEpoch: 101010101101010, - } as phase0.Validator; - const currentEpoch = 0; - const status = getValidatorStatus(validator, currentEpoch); - expect(status).toBe("pending_queued"); - }); - it("should return ACTIVE_ONGOING", function () 
{ - const validator = { - activationEpoch: 1, - exitEpoch: Infinity, - } as phase0.Validator; - const currentEpoch = 1; - const status = getValidatorStatus(validator, currentEpoch); - expect(status).toBe("active_ongoing"); - }); - it("should return ACTIVE_SLASHED", function () { - const validator = { - activationEpoch: 1, - exitEpoch: 101010101101010, - slashed: true, - } as phase0.Validator; - const currentEpoch = 1; - const status = getValidatorStatus(validator, currentEpoch); - expect(status).toBe("active_slashed"); - }); - it("should return ACTIVE_EXITING", function () { - const validator = { - activationEpoch: 1, - exitEpoch: 101010101101010, - slashed: false, - } as phase0.Validator; - const currentEpoch = 1; - const status = getValidatorStatus(validator, currentEpoch); - expect(status).toBe("active_exiting"); - }); - it("should return EXITED_SLASHED", function () { - const validator = { - exitEpoch: 1, - withdrawableEpoch: 3, - slashed: true, - } as phase0.Validator; - const currentEpoch = 2; - const status = getValidatorStatus(validator, currentEpoch); - expect(status).toBe("exited_slashed"); - }); - it("should return EXITED_UNSLASHED", function () { - const validator = { - exitEpoch: 1, - withdrawableEpoch: 3, - slashed: false, - } as phase0.Validator; - const currentEpoch = 2; - const status = getValidatorStatus(validator, currentEpoch); - expect(status).toBe("exited_unslashed"); - }); - it("should return WITHDRAWAL_POSSIBLE", function () { - const validator = { - withdrawableEpoch: 1, - effectiveBalance: 32, - } as phase0.Validator; - const currentEpoch = 1; - const status = getValidatorStatus(validator, currentEpoch); - expect(status).toBe("withdrawal_possible"); - }); - it("should return WITHDRAWAL_DONE", function () { - const validator = { - withdrawableEpoch: 1, - effectiveBalance: 0, - } as phase0.Validator; - const currentEpoch = 1; - const status = getValidatorStatus(validator, currentEpoch); - expect(status).toBe("withdrawal_done"); - }); - 
it("should error", function () { - const validator = {} as phase0.Validator; - const currentEpoch = 0; - try { - getValidatorStatus(validator, currentEpoch); - } catch (error) { - expect(error).toHaveProperty("message", "ValidatorStatus unknown"); - } - }); - }); - describe("getStateValidatorIndex", () => { const state = generateCachedAltairState(); const pubkey2index = state.epochCtx.pubkey2index; @@ -117,7 +19,13 @@ describe("beacon state api utils", function () { // "validator id not in state" expect(getStateValidatorIndex(String(state.validators.length), state, pubkey2index).valid).toBe(false); // "validator pubkey not in state" - expect(getStateValidatorIndex("0xabcd", state, pubkey2index).valid).toBe(false); + expect( + getStateValidatorIndex( + "0xa99af0913a2834ef4959637e8d7c4e17f0b63adc587d36ab43510452db3102d0771a4554ea4118a33913827d5ee80b76", + state, + pubkey2index + ).valid + ).toBe(false); }); it("should return valid: true on validator indices / pubkeys in the state", () => { diff --git a/packages/beacon-node/test/unit/chain/forkChoice/forkChoice.test.ts b/packages/beacon-node/test/unit/chain/forkChoice/forkChoice.test.ts index 6b96a0d1172f..611673086ce5 100644 --- a/packages/beacon-node/test/unit/chain/forkChoice/forkChoice.test.ts +++ b/packages/beacon-node/test/unit/chain/forkChoice/forkChoice.test.ts @@ -5,12 +5,13 @@ import {CheckpointWithHex, ExecutionStatus, ForkChoice, DataAvailabilityStatus} import {FAR_FUTURE_EPOCH, MAX_EFFECTIVE_BALANCE} from "@lodestar/params"; import { CachedBeaconStateAllForks, + computeAnchorCheckpoint, computeEpochAtSlot, getEffectiveBalanceIncrementsZeroed, } from "@lodestar/state-transition"; import {phase0, Slot, ssz, ValidatorIndex} from "@lodestar/types"; import {getTemporaryBlockHeader, processSlots} from "@lodestar/state-transition"; -import {ChainEventEmitter, computeAnchorCheckpoint, initializeForkChoice} from "../../../../src/chain/index.js"; +import {ChainEventEmitter, initializeForkChoice} from 
"../../../../src/chain/index.js"; import {generateSignedBlockAtSlot} from "../../../utils/typeGenerator.js"; import {createCachedBeaconStateTest} from "../../../utils/cachedBeaconState.js"; import {generateState} from "../../../utils/state.js"; diff --git a/packages/beacon-node/test/unit/chain/lightclient/upgradeLightClientHeader.test.ts b/packages/beacon-node/test/unit/chain/lightclient/upgradeLightClientHeader.test.ts index a9a5edc9ec0e..6da728be46e9 100644 --- a/packages/beacon-node/test/unit/chain/lightclient/upgradeLightClientHeader.test.ts +++ b/packages/beacon-node/test/unit/chain/lightclient/upgradeLightClientHeader.test.ts @@ -15,6 +15,7 @@ describe("UpgradeLightClientHeader", function () { BELLATRIX_FORK_EPOCH: 2, CAPELLA_FORK_EPOCH: 3, DENEB_FORK_EPOCH: 4, + ELECTRA_FORK_EPOCH: 5, }); const genesisValidatorsRoot = Buffer.alloc(32, 0xaa); @@ -27,6 +28,7 @@ describe("UpgradeLightClientHeader", function () { capella: ssz.capella.LightClientHeader.defaultValue(), bellatrix: ssz.altair.LightClientHeader.defaultValue(), deneb: ssz.deneb.LightClientHeader.defaultValue(), + electra: ssz.deneb.LightClientHeader.defaultValue(), }; testSlots = { @@ -35,6 +37,7 @@ describe("UpgradeLightClientHeader", function () { bellatrix: 17, capella: 25, deneb: 33, + electra: 41, }; }); diff --git a/packages/beacon-node/test/unit/chain/opPools/aggregatedAttestationPool.test.ts b/packages/beacon-node/test/unit/chain/opPools/aggregatedAttestationPool.test.ts index 800984fa84bc..f00a300bbe4d 100644 --- a/packages/beacon-node/test/unit/chain/opPools/aggregatedAttestationPool.test.ts +++ b/packages/beacon-node/test/unit/chain/opPools/aggregatedAttestationPool.test.ts @@ -1,14 +1,23 @@ import {BitArray, fromHexString, toHexString} from "@chainsafe/ssz"; import {describe, it, expect, beforeEach, beforeAll, afterEach, vi} from "vitest"; -import {SecretKey, Signature, fastAggregateVerify} from "@chainsafe/blst"; +import {SecretKey, Signature, fastAggregateVerify, aggregateSignatures} 
from "@chainsafe/blst"; import {CachedBeaconStateAllForks, newFilledArray} from "@lodestar/state-transition"; -import {FAR_FUTURE_EPOCH, ForkName, MAX_EFFECTIVE_BALANCE, SLOTS_PER_EPOCH} from "@lodestar/params"; +import { + FAR_FUTURE_EPOCH, + ForkName, + MAX_COMMITTEES_PER_SLOT, + MAX_EFFECTIVE_BALANCE, + SLOTS_PER_EPOCH, +} from "@lodestar/params"; import {ssz, phase0} from "@lodestar/types"; import {CachedBeaconStateAltair} from "@lodestar/state-transition/src/types.js"; +import {createChainForkConfig, defaultChainConfig} from "@lodestar/config"; import {MockedForkChoice, getMockedForkChoice} from "../../../mocks/mockedBeaconChain.js"; import { + aggregateConsolidation, AggregatedAttestationPool, aggregateInto, + AttestationsConsolidation, getNotSeenValidatorsFn, MatchingDataAttestationGroup, } from "../../../../src/chain/opPools/aggregatedAttestationPool.js"; @@ -28,6 +37,9 @@ const validSignature = fromHexString( describe("AggregatedAttestationPool", function () { let pool: AggregatedAttestationPool; const fork = ForkName.altair; + const config = createChainForkConfig({ + ...defaultChainConfig, + }); const altairForkEpoch = 2020; const currentEpoch = altairForkEpoch + 10; const currentSlot = SLOTS_PER_EPOCH * currentEpoch; @@ -71,7 +83,7 @@ describe("AggregatedAttestationPool", function () { let forkchoiceStub: MockedForkChoice; beforeEach(() => { - pool = new AggregatedAttestationPool(); + pool = new AggregatedAttestationPool(config); altairState = originalState.clone(); forkchoiceStub = getMockedForkChoice(); }); @@ -80,11 +92,11 @@ describe("AggregatedAttestationPool", function () { vi.clearAllMocks(); }); - it("getParticipationFn", () => { + it("getNotSeenValidatorsFn", () => { // previousEpochParticipation and currentEpochParticipation is created inside generateCachedState // 0 and 1 are fully participated const notSeenValidatorFn = getNotSeenValidatorsFn(altairState); - const participation = notSeenValidatorFn(currentEpoch, committee); + const 
participation = notSeenValidatorFn(currentEpoch, currentSlot, committeeIndex); // seen attesting indices are 0, 1 => not seen are 2, 3 expect(participation).toEqual( // { @@ -279,6 +291,7 @@ describe("MatchingDataAttestationGroup.getAttestationsForBlock", () => { } } const attestationsForBlock = attestationGroup.getAttestationsForBlock( + ForkName.phase0, // notSeenValidatorIndices, notSeenAttestingIndices ); @@ -320,3 +333,75 @@ describe("MatchingDataAttestationGroup aggregateInto", function () { ); }); }); + +describe("aggregateConsolidation", function () { + const sk0 = SecretKey.fromBytes(Buffer.alloc(32, 1)); + const sk1 = SecretKey.fromBytes(Buffer.alloc(32, 2)); + const sk2 = SecretKey.fromBytes(Buffer.alloc(32, 3)); + const skArr = [sk0, sk1, sk2]; + const testCases: { + name: string; + committeeIndices: number[]; + aggregationBitsArr: Array[]; + expectedAggregationBits: Array; + expectedCommitteeBits: Array; + }[] = [ + // note that bit index starts from the right + { + name: "test case 0", + committeeIndices: [0, 1, 2], + aggregationBitsArr: [[0b111], [0b011], [0b111]], + expectedAggregationBits: [0b11011111, 0b1], + expectedCommitteeBits: [true, true, true, false], + }, + { + name: "test case 1", + committeeIndices: [2, 3, 1], + aggregationBitsArr: [[0b100], [0b010], [0b001]], + expectedAggregationBits: [0b10100001, 0b0], + expectedCommitteeBits: [false, true, true, true], + }, + ]; + for (const { + name, + committeeIndices, + aggregationBitsArr, + expectedAggregationBits, + expectedCommitteeBits, + } of testCases) { + it(name, () => { + const attData = ssz.phase0.AttestationData.defaultValue(); + const consolidation: AttestationsConsolidation = { + byCommittee: new Map(), + attData: attData, + totalNotSeenCount: 0, + score: 0, + }; + // to simplify, instead of signing the signingRoot, just sign the attData root + const sigArr = skArr.map((sk) => sk.sign(ssz.phase0.AttestationData.hashTreeRoot(attData))); + const attestationSeed = 
ssz.electra.Attestation.defaultValue(); + for (let i = 0; i < committeeIndices.length; i++) { + const committeeIndex = committeeIndices[i]; + const commiteeBits = BitArray.fromBoolArray( + Array.from({length: MAX_COMMITTEES_PER_SLOT}, (_, i) => i === committeeIndex) + ); + const aggAttestation = { + ...attestationSeed, + aggregationBits: new BitArray(new Uint8Array(aggregationBitsArr[i]), 3), + committeeBits: commiteeBits, + signature: sigArr[i].toBytes(), + }; + consolidation.byCommittee.set(committeeIndex, { + attestation: aggAttestation, + notSeenAttesterCount: aggregationBitsArr[i].filter((item) => item).length, + }); + } + + const finalAttestation = aggregateConsolidation(consolidation); + expect(finalAttestation.aggregationBits.uint8Array).toEqual(new Uint8Array(expectedAggregationBits)); + expect(finalAttestation.committeeBits.toBoolArray()).toEqual(expectedCommitteeBits); + expect(finalAttestation.data).toEqual(attData); + expect(finalAttestation.signature).toEqual(aggregateSignatures(sigArr).toBytes()); + }); + } +}); diff --git a/packages/beacon-node/test/unit/chain/opPools/attestationPool.test.ts b/packages/beacon-node/test/unit/chain/opPools/attestationPool.test.ts new file mode 100644 index 000000000000..68efd0751585 --- /dev/null +++ b/packages/beacon-node/test/unit/chain/opPools/attestationPool.test.ts @@ -0,0 +1,120 @@ +import {fromHexString, toHexString} from "@chainsafe/ssz"; +import {describe, it, expect, beforeEach, vi} from "vitest"; +import {GENESIS_SLOT, SLOTS_PER_EPOCH} from "@lodestar/params"; +import {ssz} from "@lodestar/types"; +import {createChainForkConfig, defaultChainConfig} from "@lodestar/config"; +import {InsertOutcome} from "../../../../src/chain/opPools/types.js"; +import {AttestationPool} from "../../../../src/chain/opPools/attestationPool.js"; +import {getMockedClock} from "../../../mocks/clock.js"; + +/** Valid signature of random data to prevent BLS errors */ +export const validSignature = fromHexString( + 
"0xb2afb700f6c561ce5e1b4fedaec9d7c06b822d38c720cf588adfda748860a940adf51634b6788f298c552de40183b5a203b2bbe8b7dd147f0bb5bc97080a12efbb631c8888cb31a99cc4706eb3711865b8ea818c10126e4d818b542e9dbf9ae8" +); + +describe("AttestationPool", function () { + /* eslint-disable @typescript-eslint/naming-convention */ + const config = createChainForkConfig({ + ...defaultChainConfig, + ELECTRA_FORK_EPOCH: 5, + DENEB_FORK_EPOCH: 4, + CAPELLA_FORK_EPOCH: 3, + BELLATRIX_FORK_EPOCH: 2, + ALTAIR_FORK_EPOCH: 1, + }); + const clockStub = getMockedClock(); + vi.spyOn(clockStub, "secFromSlot").mockReturnValue(0); + + const cutOffSecFromSlot = (2 / 3) * config.SECONDS_PER_SLOT; + + // Mock attestations + const electraAttestationData = { + ...ssz.phase0.AttestationData.defaultValue(), + slot: config.ELECTRA_FORK_EPOCH * SLOTS_PER_EPOCH, + }; + const electraAttestation = { + ...ssz.electra.Attestation.defaultValue(), + data: electraAttestationData, + signature: validSignature, + }; + const phase0AttestationData = {...ssz.phase0.AttestationData.defaultValue(), slot: GENESIS_SLOT}; + const phase0Attestation = { + ...ssz.phase0.Attestation.defaultValue(), + data: phase0AttestationData, + signature: validSignature, + }; + + let pool: AttestationPool; + + beforeEach(() => { + pool = new AttestationPool(config, clockStub, cutOffSecFromSlot); + }); + + it("add correct electra attestation", () => { + const committeeIndex = 0; + const attDataRootHex = toHexString(ssz.phase0.AttestationData.hashTreeRoot(electraAttestation.data)); + const outcome = pool.add(committeeIndex, electraAttestation, attDataRootHex); + + expect(outcome).equal(InsertOutcome.NewData); + expect(pool.getAggregate(electraAttestationData.slot, committeeIndex, attDataRootHex)).toEqual(electraAttestation); + }); + + it("add correct phase0 attestation", () => { + const committeeIndex = null; + const attDataRootHex = toHexString(ssz.phase0.AttestationData.hashTreeRoot(phase0Attestation.data)); + const outcome = pool.add(committeeIndex, 
phase0Attestation, attDataRootHex); + + expect(outcome).equal(InsertOutcome.NewData); + expect(pool.getAggregate(phase0AttestationData.slot, committeeIndex, attDataRootHex)).toEqual(phase0Attestation); + expect(pool.getAggregate(phase0AttestationData.slot, 10, attDataRootHex)).toEqual(phase0Attestation); + expect(pool.getAggregate(phase0AttestationData.slot, 42, attDataRootHex)).toEqual(phase0Attestation); + expect(pool.getAggregate(phase0AttestationData.slot, null, attDataRootHex)).toEqual(phase0Attestation); + }); + + it("add electra attestation without committee index", () => { + const committeeIndex = null; + const attDataRootHex = toHexString(ssz.phase0.AttestationData.hashTreeRoot(electraAttestation.data)); + + expect(() => pool.add(committeeIndex, electraAttestation, attDataRootHex)).toThrow(); + expect(pool.getAggregate(electraAttestationData.slot, committeeIndex, attDataRootHex)).toBeNull(); + }); + + it("add phase0 attestation with committee index", () => { + const committeeIndex = 0; + const attDataRootHex = toHexString(ssz.phase0.AttestationData.hashTreeRoot(phase0Attestation.data)); + const outcome = pool.add(committeeIndex, phase0Attestation, attDataRootHex); + + expect(outcome).equal(InsertOutcome.NewData); + expect(pool.getAggregate(phase0AttestationData.slot, committeeIndex, attDataRootHex)).toEqual(phase0Attestation); + expect(pool.getAggregate(phase0AttestationData.slot, 123, attDataRootHex)).toEqual(phase0Attestation); + expect(pool.getAggregate(phase0AttestationData.slot, 456, attDataRootHex)).toEqual(phase0Attestation); + expect(pool.getAggregate(phase0AttestationData.slot, null, attDataRootHex)).toEqual(phase0Attestation); + }); + + it("add electra attestation with phase0 slot", () => { + const electraAttestationDataWithPhase0Slot = {...ssz.phase0.AttestationData.defaultValue(), slot: GENESIS_SLOT}; + const attestation = { + ...ssz.electra.Attestation.defaultValue(), + data: electraAttestationDataWithPhase0Slot, + signature: validSignature, + 
}; + const attDataRootHex = toHexString(ssz.phase0.AttestationData.hashTreeRoot(electraAttestationDataWithPhase0Slot)); + + expect(() => pool.add(0, attestation, attDataRootHex)).toThrow(); + }); + + it("add phase0 attestation with electra slot", () => { + const phase0AttestationDataWithElectraSlot = { + ...ssz.phase0.AttestationData.defaultValue(), + slot: config.ELECTRA_FORK_EPOCH * SLOTS_PER_EPOCH, + }; + const attestation = { + ...ssz.phase0.Attestation.defaultValue(), + data: phase0AttestationDataWithElectraSlot, + signature: validSignature, + }; + const attDataRootHex = toHexString(ssz.phase0.AttestationData.hashTreeRoot(phase0AttestationDataWithElectraSlot)); + + expect(() => pool.add(0, attestation, attDataRootHex)).toThrow(); + }); +}); diff --git a/packages/beacon-node/test/unit/chain/shufflingCache.test.ts b/packages/beacon-node/test/unit/chain/shufflingCache.test.ts index 6295a993c072..62b02cbf2b12 100644 --- a/packages/beacon-node/test/unit/chain/shufflingCache.test.ts +++ b/packages/beacon-node/test/unit/chain/shufflingCache.test.ts @@ -1,6 +1,4 @@ import {describe, it, expect, beforeEach} from "vitest"; - -import {getShufflingDecisionBlock} from "@lodestar/state-transition"; // eslint-disable-next-line import/no-relative-packages import {generateTestCachedBeaconStateOnlyValidators} from "../../../../state-transition/test/perf/util.js"; import {ShufflingCache} from "../../../src/chain/shufflingCache.js"; @@ -9,39 +7,43 @@ describe("ShufflingCache", function () { const vc = 64; const stateSlot = 100; const state = generateTestCachedBeaconStateOnlyValidators({vc, slot: stateSlot}); - const currentEpoch = state.epochCtx.currentShuffling.epoch; + const currentEpoch = state.epochCtx.epoch; + const currentDecisionRoot = state.epochCtx.currentDecisionRoot; let shufflingCache: ShufflingCache; beforeEach(() => { - shufflingCache = new ShufflingCache(null, {maxShufflingCacheEpochs: 1}); - shufflingCache.processState(state, currentEpoch); + shufflingCache = new 
ShufflingCache(null, null, {maxShufflingCacheEpochs: 1}, [ + { + shuffling: state.epochCtx.currentShuffling, + decisionRoot: currentDecisionRoot, + }, + ]); }); it("should get shuffling from cache", async function () { - const decisionRoot = getShufflingDecisionBlock(state, currentEpoch); - expect(await shufflingCache.get(currentEpoch, decisionRoot)).toEqual(state.epochCtx.currentShuffling); + expect(await shufflingCache.get(currentEpoch, currentDecisionRoot)).toEqual(state.epochCtx.currentShuffling); }); it("should bound by maxSize(=1)", async function () { - const decisionRoot = getShufflingDecisionBlock(state, currentEpoch); - expect(await shufflingCache.get(currentEpoch, decisionRoot)).toEqual(state.epochCtx.currentShuffling); + expect(await shufflingCache.get(currentEpoch, currentDecisionRoot)).toEqual(state.epochCtx.currentShuffling); // insert promises at the same epoch does not prune the cache shufflingCache.insertPromise(currentEpoch, "0x00"); - expect(await shufflingCache.get(currentEpoch, decisionRoot)).toEqual(state.epochCtx.currentShuffling); - // insert shufflings at other epochs does prune the cache - shufflingCache.processState(state, currentEpoch + 1); + expect(await shufflingCache.get(currentEpoch, currentDecisionRoot)).toEqual(state.epochCtx.currentShuffling); + // insert shuffling at other epochs does prune the cache + shufflingCache["set"](state.epochCtx.previousShuffling, state.epochCtx.previousDecisionRoot); // the current shuffling is not available anymore - expect(await shufflingCache.get(currentEpoch, decisionRoot)).toBeNull(); + expect(await shufflingCache.get(currentEpoch, currentDecisionRoot)).toBeNull(); }); it("should return shuffling from promise", async function () { - const nextDecisionRoot = getShufflingDecisionBlock(state, currentEpoch + 1); - shufflingCache.insertPromise(currentEpoch + 1, nextDecisionRoot); - const shufflingRequest0 = shufflingCache.get(currentEpoch + 1, nextDecisionRoot); - const shufflingRequest1 = 
shufflingCache.get(currentEpoch + 1, nextDecisionRoot); - shufflingCache.processState(state, currentEpoch + 1); - expect(await shufflingRequest0).toEqual(state.epochCtx.nextShuffling); - expect(await shufflingRequest1).toEqual(state.epochCtx.nextShuffling); + const previousEpoch = state.epochCtx.epoch - 1; + const previousDecisionRoot = state.epochCtx.previousDecisionRoot; + shufflingCache.insertPromise(previousEpoch, previousDecisionRoot); + const shufflingRequest0 = shufflingCache.get(previousEpoch, previousDecisionRoot); + const shufflingRequest1 = shufflingCache.get(previousEpoch, previousDecisionRoot); + shufflingCache["set"](state.epochCtx.previousShuffling, previousDecisionRoot); + expect(await shufflingRequest0).toEqual(state.epochCtx.previousShuffling); + expect(await shufflingRequest1).toEqual(state.epochCtx.previousShuffling); }); it("should support up to 2 promises at a time", async function () { diff --git a/packages/beacon-node/test/unit/chain/stateCache/fifoBlockStateCache.test.ts b/packages/beacon-node/test/unit/chain/stateCache/fifoBlockStateCache.test.ts index 7d3f34ddac36..b4aac92dd9bb 100644 --- a/packages/beacon-node/test/unit/chain/stateCache/fifoBlockStateCache.test.ts +++ b/packages/beacon-node/test/unit/chain/stateCache/fifoBlockStateCache.test.ts @@ -1,8 +1,7 @@ import {describe, it, expect, beforeEach} from "vitest"; import {toHexString} from "@chainsafe/ssz"; -import {EpochShuffling} from "@lodestar/state-transition"; +import {EpochShuffling, CachedBeaconStateAllForks} from "@lodestar/state-transition"; import {SLOTS_PER_EPOCH} from "@lodestar/params"; -import {CachedBeaconStateAllForks} from "@lodestar/state-transition/src/types.js"; import {FIFOBlockStateCache} from "../../../../src/chain/stateCache/index.js"; import {generateCachedState} from "../../../utils/state.js"; diff --git a/packages/beacon-node/test/unit/chain/validation/attestation/validateAttestation.test.ts 
b/packages/beacon-node/test/unit/chain/validation/attestation/validateAttestation.test.ts index 56aab699f4f7..90d37a74289d 100644 --- a/packages/beacon-node/test/unit/chain/validation/attestation/validateAttestation.test.ts +++ b/packages/beacon-node/test/unit/chain/validation/attestation/validateAttestation.test.ts @@ -1,7 +1,8 @@ import {BitArray} from "@chainsafe/ssz"; -import {describe, it} from "vitest"; -import {SLOTS_PER_EPOCH} from "@lodestar/params"; +import {describe, expect, it} from "vitest"; +import {ForkName, SLOTS_PER_EPOCH} from "@lodestar/params"; import {ssz} from "@lodestar/types"; +import {LodestarError} from "@lodestar/utils"; // eslint-disable-next-line import/no-relative-packages import {generateTestCachedBeaconStateOnlyValidators} from "../../../../../../state-transition/test/perf/util.js"; import {AttestationErrorCode, GossipErrorCode} from "../../../../../src/chain/errors/index.js"; @@ -9,14 +10,17 @@ import {IBeaconChain} from "../../../../../src/chain/index.js"; import { ApiAttestation, GossipAttestation, + getSeenAttDataKeyFromGossipAttestation, + getSeenAttDataKeyFromSignedAggregateAndProof, validateApiAttestation, - validateAttestation, + validateGossipAttestationsSameAttData, } from "../../../../../src/chain/validation/index.js"; -import {getAttDataBase64FromAttestationSerialized} from "../../../../../src/util/sszBytes.js"; +import {getAttDataFromAttestationSerialized} from "../../../../../src/util/sszBytes.js"; import {memoOnce} from "../../../../utils/cache.js"; import {expectRejectedWithLodestarError} from "../../../../utils/errors.js"; import {AttestationValidDataOpts, getAttestationValidData} from "../../../../utils/validationData/attestation.js"; +// TODO: more tests for electra describe("validateAttestation", () => { const vc = 64; const stateSlot = 100; @@ -72,7 +76,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: 
getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData) as string, }, subnet, AttestationErrorCode.BAD_TARGET_EPOCH @@ -91,7 +95,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData) as string, }, subnet, AttestationErrorCode.PAST_SLOT @@ -110,7 +114,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData) as string, }, subnet, AttestationErrorCode.FUTURE_SLOT @@ -135,7 +139,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData) as string, }, subnet, AttestationErrorCode.NOT_EXACTLY_ONE_AGGREGATION_BIT_SET @@ -155,7 +159,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData) as string, }, subnet, AttestationErrorCode.NOT_EXACTLY_ONE_AGGREGATION_BIT_SET @@ -179,7 +183,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData) as string, }, subnet, AttestationErrorCode.UNKNOWN_OR_PREFINALIZED_BEACON_BLOCK_ROOT @@ -199,7 +203,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, 
attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData) as string, }, subnet, AttestationErrorCode.INVALID_TARGET_ROOT @@ -226,7 +230,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData) as string, }, subnet, AttestationErrorCode.WRONG_NUMBER_OF_AGGREGATION_BITS @@ -245,7 +249,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData) as string, }, invalidSubnet, AttestationErrorCode.INVALID_SUBNET_ID @@ -265,7 +269,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData) as string, }, subnet, AttestationErrorCode.ATTESTATION_ALREADY_KNOWN @@ -287,7 +291,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData) as string, }, subnet, AttestationErrorCode.INVALID_SIGNATURE @@ -311,6 +315,58 @@ describe("validateAttestation", () => { errorCode: string ): Promise { const fork = chain.config.getForkName(stateSlot); - await expectRejectedWithLodestarError(validateAttestation(fork, chain, attestationOrBytes, subnet), errorCode); + const {results} = await validateGossipAttestationsSameAttData(fork, chain, [attestationOrBytes], subnet); + 
expect(results.length).toEqual(1); + expect((results[0].err as LodestarError<{code: string}>).type.code).toEqual(errorCode); } }); + +describe("getSeenAttDataKey", () => { + const slot = 100; + const index = 0; + const blockRoot = Buffer.alloc(32, 1); + + it("phase0", () => { + const attestationData = ssz.phase0.AttestationData.defaultValue(); + attestationData.slot = slot; + attestationData.index = index; + attestationData.beaconBlockRoot = blockRoot; + const attestation = ssz.phase0.Attestation.defaultValue(); + attestation.data = attestationData; + const attDataBase64 = Buffer.from(ssz.phase0.AttestationData.serialize(attestationData)).toString("base64"); + const attestationBytes = ssz.phase0.Attestation.serialize(attestation); + const gossipAttestation = {attDataBase64, serializedData: attestationBytes, attSlot: slot} as GossipAttestation; + + const signedAggregateAndProof = ssz.phase0.SignedAggregateAndProof.defaultValue(); + signedAggregateAndProof.message.aggregate.data.slot = slot; + signedAggregateAndProof.message.aggregate.data.index = index; + signedAggregateAndProof.message.aggregate.data.beaconBlockRoot = blockRoot; + const aggregateAndProofBytes = ssz.phase0.SignedAggregateAndProof.serialize(signedAggregateAndProof); + + expect(getSeenAttDataKeyFromGossipAttestation(ForkName.phase0, gossipAttestation)).toEqual( + getSeenAttDataKeyFromSignedAggregateAndProof(ForkName.phase0, aggregateAndProofBytes) + ); + }); + + it("electra", () => { + const attestationData = ssz.phase0.AttestationData.defaultValue(); + attestationData.slot = slot; + attestationData.index = index; + attestationData.beaconBlockRoot = blockRoot; + const attestation = ssz.electra.Attestation.defaultValue(); + attestation.data = attestationData; + const attDataBase64 = Buffer.from(ssz.phase0.AttestationData.serialize(attestationData)).toString("base64"); + const attestationBytes = ssz.electra.Attestation.serialize(attestation); + const gossipAttestation = {attDataBase64, serializedData: 
attestationBytes, attSlot: slot} as GossipAttestation; + + const signedAggregateAndProof = ssz.electra.SignedAggregateAndProof.defaultValue(); + signedAggregateAndProof.message.aggregate.data.slot = slot; + signedAggregateAndProof.message.aggregate.data.index = index; + signedAggregateAndProof.message.aggregate.data.beaconBlockRoot = blockRoot; + const aggregateAndProofBytes = ssz.electra.SignedAggregateAndProof.serialize(signedAggregateAndProof); + + expect(getSeenAttDataKeyFromGossipAttestation(ForkName.electra, gossipAttestation)).toEqual( + getSeenAttDataKeyFromSignedAggregateAndProof(ForkName.electra, aggregateAndProofBytes) + ); + }); +}); diff --git a/packages/beacon-node/test/unit/eth1/eth1MergeBlockTracker.test.ts b/packages/beacon-node/test/unit/eth1/eth1MergeBlockTracker.test.ts index 938c272b316a..151f3931cfde 100644 --- a/packages/beacon-node/test/unit/eth1/eth1MergeBlockTracker.test.ts +++ b/packages/beacon-node/test/unit/eth1/eth1MergeBlockTracker.test.ts @@ -32,7 +32,7 @@ describe("eth1 / Eth1MergeBlockTracker", () => { }); it("Should find terminal pow block through TERMINAL_BLOCK_HASH", async () => { - config.TERMINAL_BLOCK_HASH = Buffer.alloc(1, 32); + config.TERMINAL_BLOCK_HASH = Buffer.alloc(32, 1); const block: EthJsonRpcBlockRaw = { number: toHex(10), hash: toRootHex(11), diff --git a/packages/beacon-node/test/unit/eth1/utils/deposits.test.ts b/packages/beacon-node/test/unit/eth1/utils/deposits.test.ts index ce0d7fae1fad..316f75efc5ce 100644 --- a/packages/beacon-node/test/unit/eth1/utils/deposits.test.ts +++ b/packages/beacon-node/test/unit/eth1/utils/deposits.test.ts @@ -1,13 +1,15 @@ import {describe, it, expect} from "vitest"; import {phase0, ssz} from "@lodestar/types"; -import {MAX_DEPOSITS} from "@lodestar/params"; +import {MAX_DEPOSITS, SLOTS_PER_EPOCH} from "@lodestar/params"; import {verifyMerkleBranch} from "@lodestar/utils"; +import {createChainForkConfig} from "@lodestar/config"; import {filterBy} from "../../../utils/db.js"; 
import {Eth1ErrorCode} from "../../../../src/eth1/errors.js"; import {generateState} from "../../../utils/state.js"; import {expectRejectedWithLodestarError} from "../../../utils/errors.js"; import {getDeposits, getDepositsWithProofs, DepositGetter} from "../../../../src/eth1/utils/deposits.js"; import {DepositTree} from "../../../../src/db/repositories/depositDataRoot.js"; +import {createCachedBeaconStateTest} from "../../../utils/cachedBeaconState.js"; describe("eth1 / util / deposits", function () { describe("getDeposits", () => { @@ -18,6 +20,7 @@ describe("eth1 / util / deposits", function () { depositIndexes: number[]; expectedReturnedIndexes?: number[]; error?: Eth1ErrorCode; + postElectra?: boolean; }; const testCases: TestCase[] = [ @@ -70,18 +73,59 @@ describe("eth1 / util / deposits", function () { depositIndexes: [], expectedReturnedIndexes: [], }, + { + id: "No deposits to be included post Electra after deposit_requests_start_index", + depositCount: 2030, + eth1DepositIndex: 2025, + depositIndexes: Array.from({length: 2030}, (_, i) => i), + expectedReturnedIndexes: [], + postElectra: true, + }, + { + id: "Should return deposits post Electra before deposit_requests_start_index", + depositCount: 2022, + eth1DepositIndex: 2018, + depositIndexes: Array.from({length: 2022}, (_, i) => i), + expectedReturnedIndexes: [2018, 2019, 2020, 2021], + postElectra: true, + }, + { + id: "Should return deposits less than MAX_DEPOSITS post Electra before deposit_requests_start_index", + depositCount: 10 * MAX_DEPOSITS, + eth1DepositIndex: 0, + depositIndexes: Array.from({length: 10 * MAX_DEPOSITS}, (_, i) => i), + expectedReturnedIndexes: Array.from({length: MAX_DEPOSITS}, (_, i) => i), + postElectra: true, + }, ]; + /* eslint-disable @typescript-eslint/naming-convention */ + const postElectraConfig = createChainForkConfig({ + ALTAIR_FORK_EPOCH: 1, + BELLATRIX_FORK_EPOCH: 2, + CAPELLA_FORK_EPOCH: 3, + DENEB_FORK_EPOCH: 4, + ELECTRA_FORK_EPOCH: 5, + }); + const 
postElectraSlot = postElectraConfig.ELECTRA_FORK_EPOCH * SLOTS_PER_EPOCH + 1; + for (const testCase of testCases) { - const {id, depositIndexes, eth1DepositIndex, depositCount, expectedReturnedIndexes, error} = testCase; + const {id, depositIndexes, eth1DepositIndex, depositCount, expectedReturnedIndexes, error, postElectra} = + testCase; it(id, async function () { - const state = generateState({eth1DepositIndex}); + const state = postElectra + ? generateState({slot: postElectraSlot, eth1DepositIndex}, postElectraConfig) + : generateState({eth1DepositIndex}); + const cachedState = createCachedBeaconStateTest( + state, + postElectra ? postElectraConfig : createChainForkConfig({}) + ); const eth1Data = generateEth1Data(depositCount); const deposits = depositIndexes.map((index) => generateDepositEvent(index)); const depositsGetter: DepositGetter = async (indexRange) => filterBy(deposits, indexRange, (deposit) => deposit.index); - const resultPromise = getDeposits(state, eth1Data, depositsGetter); + const resultPromise = getDeposits(cachedState, eth1Data, depositsGetter); if (expectedReturnedIndexes) { const result = await resultPromise; diff --git a/packages/beacon-node/test/unit/executionEngine/http.test.ts b/packages/beacon-node/test/unit/executionEngine/http.test.ts index aa33c7dbbc40..c9f4ae671e53 100644 --- a/packages/beacon-node/test/unit/executionEngine/http.test.ts +++ b/packages/beacon-node/test/unit/executionEngine/http.test.ts @@ -219,7 +219,7 @@ describe("ExecutionEngine / http", () => { returnValue = response; - const res = await executionEngine.getPayloadBodiesByHash(reqBlockHashes); + const res = await executionEngine.getPayloadBodiesByHash(ForkName.bellatrix, reqBlockHashes); expect(reqJsonRpcPayload).toEqual(request); expect(res.map(serializeExecutionPayloadBody)).toEqual(response.result); @@ -268,7 +268,7 @@ describe("ExecutionEngine / http", () => { returnValue = response; - const res = await executionEngine.getPayloadBodiesByRange(startBlockNumber, 
blockCount); + const res = await executionEngine.getPayloadBodiesByRange(ForkName.bellatrix, startBlockNumber, blockCount); expect(reqJsonRpcPayload).toEqual(request); expect(res.map(serializeExecutionPayloadBody)).toEqual(response.result); diff --git a/packages/beacon-node/test/unit/network/beaconBlocksMaybeBlobsByRange.test.ts b/packages/beacon-node/test/unit/network/beaconBlocksMaybeBlobsByRange.test.ts index 3fb9cb8e1c79..d1dc7ba57fa9 100644 --- a/packages/beacon-node/test/unit/network/beaconBlocksMaybeBlobsByRange.test.ts +++ b/packages/beacon-node/test/unit/network/beaconBlocksMaybeBlobsByRange.test.ts @@ -24,6 +24,7 @@ describe("beaconBlocksMaybeBlobsByRange", () => { BELLATRIX_FORK_EPOCH: 0, CAPELLA_FORK_EPOCH: 0, DENEB_FORK_EPOCH: 0, + ELECTRA_FORK_EPOCH: 0, }); const genesisValidatorsRoot = Buffer.alloc(32, 0xaa); const config = createBeaconConfig(chainConfig, genesisValidatorsRoot); @@ -101,7 +102,7 @@ describe("beaconBlocksMaybeBlobsByRange", () => { const expectedResponse = blocksWithBlobs.map(([block, blobSidecars]) => { const blobs = blobSidecars !== undefined ? 
blobSidecars : []; return getBlockInput.availableData(config, block, BlockSource.byRange, null, { - fork: ForkName.deneb, + fork: ForkName.electra, blobs, blobsSource: BlobsSource.byRange, blobsBytes: blobs.map(() => null), diff --git a/packages/beacon-node/test/unit/network/fork.test.ts b/packages/beacon-node/test/unit/network/fork.test.ts index be748d2e8185..bbe1c0870d30 100644 --- a/packages/beacon-node/test/unit/network/fork.test.ts +++ b/packages/beacon-node/test/unit/network/fork.test.ts @@ -9,12 +9,14 @@ function getForkConfig({ bellatrix, capella, deneb, + electra, }: { phase0: number; altair: number; bellatrix: number; capella: number; deneb: number; + electra: number; }): BeaconConfig { const forks: Record = { phase0: { @@ -57,6 +59,14 @@ function getForkConfig({ prevVersion: Buffer.from([0, 0, 0, 3]), prevForkName: ForkName.capella, }, + electra: { + name: ForkName.electra, + seq: ForkSeq.electra, + epoch: electra, + version: Buffer.from([0, 0, 0, 5]), + prevVersion: Buffer.from([0, 0, 0, 4]), + prevForkName: ForkName.deneb, + }, }; const forksAscendingEpochOrder = Object.values(forks); const forksDescendingEpochOrder = Object.values(forks).reverse(); @@ -133,9 +143,10 @@ const testScenarios = [ for (const testScenario of testScenarios) { const {phase0, altair, bellatrix, capella, testCases} = testScenario; const deneb = Infinity; + const electra = Infinity; describe(`network / fork: phase0: ${phase0}, altair: ${altair}, bellatrix: ${bellatrix} capella: ${capella}`, () => { - const forkConfig = getForkConfig({phase0, altair, bellatrix, capella, deneb}); + const forkConfig = getForkConfig({phase0, altair, bellatrix, capella, deneb, electra}); const forks = forkConfig.forks; for (const testCase of testCases) { const {epoch, currentFork, nextFork, activeForks} = testCase; diff --git a/packages/beacon-node/test/unit/util/bufferPool.test.ts b/packages/beacon-node/test/unit/util/bufferPool.test.ts index 2c789c19f74d..ff66504ae65f 100644 --- 
a/packages/beacon-node/test/unit/util/bufferPool.test.ts +++ b/packages/beacon-node/test/unit/util/bufferPool.test.ts @@ -1,12 +1,12 @@ import {describe, it, expect} from "vitest"; -import {BufferPool} from "../../../src/util/bufferPool.js"; +import {AllocSource, BufferPool} from "../../../src/util/bufferPool.js"; describe("BufferPool", () => { const pool = new BufferPool(100); it("should increase length", () => { expect(pool.length).toEqual(110); - using mem = pool.alloc(200); + using mem = pool.alloc(200, AllocSource.PERSISTENT_CHECKPOINTS_CACHE_STATE); if (mem === null) { throw Error("Expected non-null mem"); } @@ -15,15 +15,15 @@ describe("BufferPool", () => { it("should not allow alloc if in use", () => { { - using mem = pool.alloc(20); + using mem = pool.alloc(20, AllocSource.PERSISTENT_CHECKPOINTS_CACHE_STATE); if (mem === null) { throw Error("Expected non-null mem"); } // in the same scope we can't allocate again - expect(pool.alloc(20)).toEqual(null); + expect(pool.alloc(20, AllocSource.PERSISTENT_CHECKPOINTS_CACHE_STATE)).toEqual(null); } // out of the scope we can allocate again - expect(pool.alloc(20)).not.toEqual(null); + expect(pool.alloc(20, AllocSource.PERSISTENT_CHECKPOINTS_CACHE_STATE)).not.toEqual(null); }); }); diff --git a/packages/beacon-node/test/unit/util/sszBytes.test.ts b/packages/beacon-node/test/unit/util/sszBytes.test.ts index 4285f4ca88b5..8b72c31df6c8 100644 --- a/packages/beacon-node/test/unit/util/sszBytes.test.ts +++ b/packages/beacon-node/test/unit/util/sszBytes.test.ts @@ -1,9 +1,11 @@ import {describe, it, expect} from "vitest"; -import {deneb, Epoch, phase0, RootHex, Slot, ssz} from "@lodestar/types"; +import {BitArray} from "@chainsafe/ssz"; +import {deneb, electra, Epoch, isElectraAttestation, phase0, RootHex, Slot, ssz} from "@lodestar/types"; import {fromHex, toHex} from "@lodestar/utils"; +import {ForkName, MAX_COMMITTEES_PER_SLOT} from "@lodestar/params"; import { - getAttDataBase64FromAttestationSerialized, - 
getAttDataBase64FromSignedAggregateAndProofSerialized, + getAttDataFromAttestationSerialized, + getAttDataFromSignedAggregateAndProofPhase0, getAggregationBitsFromAttestationSerialized, getBlockRootFromAttestationSerialized, getBlockRootFromSignedAggregateAndProofSerialized, @@ -12,10 +14,13 @@ import { getSignatureFromAttestationSerialized, getSlotFromSignedBeaconBlockSerialized, getSlotFromBlobSidecarSerialized, + getCommitteeBitsFromAttestationSerialized, + getCommitteeBitsFromSignedAggregateAndProofElectra, + getAttDataFromSignedAggregateAndProofElectra, } from "../../../src/util/sszBytes.js"; describe("attestation SSZ serialized picking", () => { - const testCases: phase0.Attestation[] = [ + const testCases: (phase0.Attestation | electra.Attestation)[] = [ ssz.phase0.Attestation.defaultValue(), attestationFromValues( 4_000_000, @@ -23,21 +28,45 @@ describe("attestation SSZ serialized picking", () => { 200_00, "eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeffffffffffffffffffffffffffffffff" ), + ssz.electra.Attestation.defaultValue(), + { + ...attestationFromValues( + 4_000_000, + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + 200_00, + "eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeffffffffffffffffffffffffffffffff" + ), + committeeBits: BitArray.fromSingleBit(MAX_COMMITTEES_PER_SLOT, 3), + }, ]; for (const [i, attestation] of testCases.entries()) { it(`attestation ${i}`, () => { - const bytes = ssz.phase0.Attestation.serialize(attestation); + const isElectra = isElectraAttestation(attestation); + const bytes = isElectra + ? 
ssz.electra.Attestation.serialize(attestation) + : ssz.phase0.Attestation.serialize(attestation); expect(getSlotFromAttestationSerialized(bytes)).toBe(attestation.data.slot); expect(getBlockRootFromAttestationSerialized(bytes)).toBe(toHex(attestation.data.beaconBlockRoot)); - expect(getAggregationBitsFromAttestationSerialized(bytes)?.toBoolArray()).toEqual( - attestation.aggregationBits.toBoolArray() - ); - expect(getSignatureFromAttestationSerialized(bytes)).toEqual(attestation.signature); + + if (isElectra) { + expect(getAggregationBitsFromAttestationSerialized(ForkName.electra, bytes)?.toBoolArray()).toEqual( + attestation.aggregationBits.toBoolArray() + ); + expect(getCommitteeBitsFromAttestationSerialized(bytes)).toEqual( + Buffer.from(attestation.committeeBits.uint8Array).toString("base64") + ); + expect(getSignatureFromAttestationSerialized(bytes)).toEqual(attestation.signature); + } else { + expect(getAggregationBitsFromAttestationSerialized(ForkName.phase0, bytes)?.toBoolArray()).toEqual( + attestation.aggregationBits.toBoolArray() + ); + expect(getSignatureFromAttestationSerialized(bytes)).toEqual(attestation.signature); + } const attDataBase64 = ssz.phase0.AttestationData.serialize(attestation.data); - expect(getAttDataBase64FromAttestationSerialized(bytes)).toBe(Buffer.from(attDataBase64).toString("base64")); + expect(getAttDataFromAttestationSerialized(bytes)).toBe(Buffer.from(attDataBase64).toString("base64")); }); } @@ -55,17 +84,18 @@ describe("attestation SSZ serialized picking", () => { } }); - it("getAttDataBase64FromAttestationSerialized - invalid data", () => { + it("getAttDataFromAttestationSerialized - invalid data", () => { const invalidAttDataBase64DataSizes = [0, 4, 100, 128, 131]; for (const size of invalidAttDataBase64DataSizes) { - expect(getAttDataBase64FromAttestationSerialized(Buffer.alloc(size))).toBeNull(); + expect(getAttDataFromAttestationSerialized(Buffer.alloc(size))).toBeNull(); } }); - 
it("getAggregateionBitsFromAttestationSerialized - invalid data", () => { + it("getAggregationBitsFromAttestationSerialized - invalid data", () => { const invalidAggregationBitsDataSizes = [0, 4, 100, 128, 227]; for (const size of invalidAggregationBitsDataSizes) { - expect(getAggregationBitsFromAttestationSerialized(Buffer.alloc(size))).toBeNull(); + expect(getAggregationBitsFromAttestationSerialized(ForkName.phase0, Buffer.alloc(size))).toBeNull(); + expect(getAggregationBitsFromAttestationSerialized(ForkName.electra, Buffer.alloc(size))).toBeNull(); } }); @@ -73,14 +103,15 @@ describe("attestation SSZ serialized picking", () => { const invalidSignatureDataSizes = [0, 4, 100, 128, 227]; for (const size of invalidSignatureDataSizes) { expect(getSignatureFromAttestationSerialized(Buffer.alloc(size))).toBeNull(); + expect(getSignatureFromAttestationSerialized(Buffer.alloc(size))).toBeNull(); } }); }); -describe("aggregateAndProof SSZ serialized picking", () => { +describe("phase0 SignedAggregateAndProof SSZ serialized picking", () => { const testCases: phase0.SignedAggregateAndProof[] = [ ssz.phase0.SignedAggregateAndProof.defaultValue(), - signedAggregateAndProofFromValues( + phase0SignedAggregateAndProofFromValues( 4_000_000, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", 200_00, @@ -100,8 +131,62 @@ describe("aggregateAndProof SSZ serialized picking", () => { ); const attDataBase64 = ssz.phase0.AttestationData.serialize(signedAggregateAndProof.message.aggregate.data); - expect(getAttDataBase64FromSignedAggregateAndProofSerialized(bytes)).toBe( - Buffer.from(attDataBase64).toString("base64") + expect(getAttDataFromSignedAggregateAndProofPhase0(bytes)).toBe(Buffer.from(attDataBase64).toString("base64")); + }); + } + + it("getSlotFromSignedAggregateAndProofSerialized - invalid data", () => { + const invalidSlotDataSizes = [0, 4, 11]; + for (const size of invalidSlotDataSizes) { + 
expect(getSlotFromSignedAggregateAndProofSerialized(Buffer.alloc(size))).toBeNull(); + } + }); + + it("getBlockRootFromSignedAggregateAndProofSerialized - invalid data", () => { + const invalidBlockRootDataSizes = [0, 4, 20, 227]; + for (const size of invalidBlockRootDataSizes) { + expect(getBlockRootFromSignedAggregateAndProofSerialized(Buffer.alloc(size))).toBeNull(); + } + }); + + it("getAttDataBase64FromSignedAggregateAndProofSerialized - invalid data", () => { + const invalidAttDataBase64DataSizes = [0, 4, 100, 128, 339]; + for (const size of invalidAttDataBase64DataSizes) { + expect(getAttDataFromSignedAggregateAndProofPhase0(Buffer.alloc(size))).toBeNull(); + } + }); +}); + +describe("electra SignedAggregateAndProof SSZ serialized picking", () => { + const testCases: electra.SignedAggregateAndProof[] = [ + ssz.electra.SignedAggregateAndProof.defaultValue(), + electraSignedAggregateAndProofFromValues( + 4_000_000, + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + 200_00, + "eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeffffffffffffffffffffffffffffffff" + ), + ]; + + for (const [i, signedAggregateAndProof] of testCases.entries()) { + it(`signedAggregateAndProof ${i}`, () => { + const bytes = ssz.electra.SignedAggregateAndProof.serialize(signedAggregateAndProof); + + expect(getSlotFromSignedAggregateAndProofSerialized(bytes)).toBe( + signedAggregateAndProof.message.aggregate.data.slot + ); + expect(getBlockRootFromSignedAggregateAndProofSerialized(bytes)).toBe( + toHex(signedAggregateAndProof.message.aggregate.data.beaconBlockRoot) + ); + + const attDataBase64 = ssz.phase0.AttestationData.serialize(signedAggregateAndProof.message.aggregate.data); + const committeeBits = ssz.electra.CommitteeBits.serialize( + signedAggregateAndProof.message.aggregate.committeeBits + ); + + expect(getAttDataFromSignedAggregateAndProofElectra(bytes)).toBe(Buffer.from(attDataBase64).toString("base64")); + 
expect(getCommitteeBitsFromSignedAggregateAndProofElectra(bytes)).toBe( + Buffer.from(committeeBits).toString("base64") ); }); } @@ -123,7 +208,7 @@ describe("aggregateAndProof SSZ serialized picking", () => { it("getAttDataBase64FromSignedAggregateAndProofSerialized - invalid data", () => { const invalidAttDataBase64DataSizes = [0, 4, 100, 128, 339]; for (const size of invalidAttDataBase64DataSizes) { - expect(getAttDataBase64FromSignedAggregateAndProofSerialized(Buffer.alloc(size))).toBeNull(); + expect(getAttDataFromSignedAggregateAndProofPhase0(Buffer.alloc(size))).toBeNull(); } }); it("getSlotFromSignedAggregateAndProofSerialized - invalid data - large slots", () => { @@ -187,7 +272,7 @@ function attestationFromValues( return attestation; } -function signedAggregateAndProofFromValues( +function phase0SignedAggregateAndProofFromValues( slot: Slot, blockRoot: RootHex, targetEpoch: Epoch, @@ -201,6 +286,21 @@ function signedAggregateAndProofFromValues( return signedAggregateAndProof; } +function electraSignedAggregateAndProofFromValues( + slot: Slot, + blockRoot: RootHex, + targetEpoch: Epoch, + targetRoot: RootHex +): electra.SignedAggregateAndProof { + const signedAggregateAndProof = ssz.electra.SignedAggregateAndProof.defaultValue(); + signedAggregateAndProof.message.aggregate.data.slot = slot; + signedAggregateAndProof.message.aggregate.data.beaconBlockRoot = fromHex(blockRoot); + signedAggregateAndProof.message.aggregate.data.target.epoch = targetEpoch; + signedAggregateAndProof.message.aggregate.data.target.root = fromHex(targetRoot); + signedAggregateAndProof.message.aggregate.committeeBits = BitArray.fromSingleBit(MAX_COMMITTEES_PER_SLOT, 1); + return signedAggregateAndProof; +} + function signedBeaconBlockFromValues(slot: Slot): phase0.SignedBeaconBlock { const signedBeaconBlock = ssz.phase0.SignedBeaconBlock.defaultValue(); signedBeaconBlock.message.slot = slot; diff --git a/packages/beacon-node/test/utils/config.ts 
b/packages/beacon-node/test/utils/config.ts index 54c058d30722..2aad1c14c03e 100644 --- a/packages/beacon-node/test/utils/config.ts +++ b/packages/beacon-node/test/utils/config.ts @@ -31,5 +31,13 @@ export function getConfig(fork: ForkName, forkEpoch = 0): ChainForkConfig { CAPELLA_FORK_EPOCH: 0, DENEB_FORK_EPOCH: forkEpoch, }); + case ForkName.electra: + return createChainForkConfig({ + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + CAPELLA_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + ELECTRA_FORK_EPOCH: forkEpoch, + }); } } diff --git a/packages/beacon-node/test/utils/node/validator.ts b/packages/beacon-node/test/utils/node/validator.ts index 4ec60dcc8b4f..285fa13fd01f 100644 --- a/packages/beacon-node/test/utils/node/validator.ts +++ b/packages/beacon-node/test/utils/node/validator.ts @@ -97,7 +97,7 @@ export function getApiFromServerHandlers(api: BeaconApiMethods): ApiClient { return async (args: unknown) => { try { const apiResponse = new ApiResponse({} as any, null, new Response(null, {status: HttpStatusCode.OK})); - const result = await api(args, {}); + const result = await api.call(apiModule, args, {}); apiResponse.value = () => result.data; apiResponse.meta = () => result.meta; return apiResponse; diff --git a/packages/beacon-node/test/utils/state.ts b/packages/beacon-node/test/utils/state.ts index 1e9f614e8093..6ad85f3422f7 100644 --- a/packages/beacon-node/test/utils/state.ts +++ b/packages/beacon-node/test/utils/state.ts @@ -1,14 +1,16 @@ import {SecretKey} from "@chainsafe/blst"; +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; import {config as minimalConfig} from "@lodestar/config/default"; import { BeaconStateAllForks, CachedBeaconStateAllForks, createCachedBeaconState, - PubkeyIndexMap, CachedBeaconStateBellatrix, BeaconStateBellatrix, + CachedBeaconStateElectra, + BeaconStateElectra, } from "@lodestar/state-transition"; -import {BeaconState, altair, bellatrix, ssz} from "@lodestar/types"; +import {BeaconState, altair, bellatrix, 
electra, ssz} from "@lodestar/types"; import {createBeaconConfig, ChainForkConfig} from "@lodestar/config"; import {FAR_FUTURE_EPOCH, ForkName, ForkSeq, MAX_EFFECTIVE_BALANCE, SYNC_COMMITTEE_SIZE} from "@lodestar/params"; @@ -64,6 +66,7 @@ export function generateState( : generateValidators(numValidators, validatorOpts)); state.genesisTime = Math.floor(Date.now() / 1000); + state.slot = stateSlot; state.fork.previousVersion = config.GENESIS_FORK_VERSION; state.fork.currentVersion = config.GENESIS_FORK_VERSION; state.latestBlockHeader.bodyRoot = ssz.phase0.BeaconBlockBody.hashTreeRoot(ssz.phase0.BeaconBlockBody.defaultValue()); @@ -92,11 +95,18 @@ export function generateState( }; } + if (forkSeq >= ForkSeq.electra) { + const stateElectra = state as electra.BeaconState; + stateElectra.depositRequestsStartIndex = 2023n; + stateElectra.latestExecutionPayloadHeader = ssz.electra.ExecutionPayloadHeader.defaultValue(); + } + return config.getForkTypes(stateSlot).BeaconState.toViewDU(state); } /** * This generates state with default pubkey + * TODO: (@matthewkeil) - this is duplicated and exists in state-transition as well */ export function generateCachedState(opts?: TestBeaconState): CachedBeaconStateAllForks { const config = getConfig(ForkName.phase0); @@ -137,6 +147,18 @@ export function generateCachedBellatrixState(opts?: TestBeaconState): CachedBeac }); } +/** + * This generates state with default pubkey + */ +export function generateCachedElectraState(opts?: TestBeaconState): CachedBeaconStateElectra { + const config = getConfig(ForkName.electra); + const state = generateState(opts, config); + return createCachedBeaconState(state as BeaconStateElectra, { + config: createBeaconConfig(config, state.genesisValidatorsRoot), + pubkey2index: new PubkeyIndexMap(), + index2pubkey: [], + }); +} export const zeroProtoBlock: ProtoBlock = { slot: 0, blockRoot: ZERO_HASH_HEX, diff --git a/packages/beacon-node/test/utils/validationData/attestation.ts 
b/packages/beacon-node/test/utils/validationData/attestation.ts index c33d942dabc5..22f551cbb663 100644 --- a/packages/beacon-node/test/utils/validationData/attestation.ts +++ b/packages/beacon-node/test/utils/validationData/attestation.ts @@ -1,10 +1,5 @@ import {BitArray, toHexString} from "@chainsafe/ssz"; -import { - computeEpochAtSlot, - computeSigningRoot, - computeStartSlotAtEpoch, - getShufflingDecisionBlock, -} from "@lodestar/state-transition"; +import {computeEpochAtSlot, computeSigningRoot, computeStartSlotAtEpoch} from "@lodestar/state-transition"; import {ProtoBlock, IForkChoice, ExecutionStatus, DataAvailabilityStatus} from "@lodestar/fork-choice"; import {DOMAIN_BEACON_ATTESTER} from "@lodestar/params"; import {phase0, Slot, ssz} from "@lodestar/types"; @@ -81,10 +76,20 @@ export function getAttestationValidData(opts: AttestationValidDataOpts): { dataAvailabilityStatus: DataAvailabilityStatus.PreData, }; - const shufflingCache = new ShufflingCache(); - shufflingCache.processState(state, state.epochCtx.currentShuffling.epoch); - shufflingCache.processState(state, state.epochCtx.nextShuffling.epoch); - const dependentRoot = getShufflingDecisionBlock(state, state.epochCtx.currentShuffling.epoch); + const shufflingCache = new ShufflingCache(null, null, {}, [ + { + shuffling: state.epochCtx.previousShuffling, + decisionRoot: state.epochCtx.previousDecisionRoot, + }, + { + shuffling: state.epochCtx.currentShuffling, + decisionRoot: state.epochCtx.currentDecisionRoot, + }, + { + shuffling: state.epochCtx.nextShuffling, + decisionRoot: state.epochCtx.nextDecisionRoot, + }, + ]); const forkChoice = { getBlock: (root) => { @@ -95,7 +100,7 @@ export function getAttestationValidData(opts: AttestationValidDataOpts): { if (rootHex !== toHexString(beaconBlockRoot)) return null; return headBlock; }, - getDependentRoot: () => dependentRoot, + getDependentRoot: () => state.epochCtx.currentDecisionRoot, } as Partial as IForkChoice; const committeeIndices = 
state.epochCtx.getBeaconCommittee(attSlot, attIndex); diff --git a/packages/cli/package.json b/packages/cli/package.json index 6b7bf36bacd2..a5527a8eee87 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "@chainsafe/lodestar", - "version": "1.20.2", + "version": "1.22.0", "description": "Command line interface for lodestar", "author": "ChainSafe Systems", "license": "LGPL-3.0", @@ -56,23 +56,23 @@ "@chainsafe/blst": "^2.0.3", "@chainsafe/discv5": "^9.0.0", "@chainsafe/enr": "^3.0.0", - "@chainsafe/persistent-merkle-tree": "^0.7.1", - "@chainsafe/ssz": "^0.15.1", + "@chainsafe/persistent-merkle-tree": "^0.8.0", + "@chainsafe/ssz": "^0.17.1", "@chainsafe/threads": "^1.11.1", "@libp2p/crypto": "^4.1.0", "@libp2p/peer-id": "^4.1.0", "@libp2p/peer-id-factory": "^4.1.0", - "@lodestar/api": "^1.20.2", - "@lodestar/beacon-node": "^1.20.2", - "@lodestar/config": "^1.20.2", - "@lodestar/db": "^1.20.2", - "@lodestar/light-client": "^1.20.2", - "@lodestar/logger": "^1.20.2", - "@lodestar/params": "^1.20.2", - "@lodestar/state-transition": "^1.20.2", - "@lodestar/types": "^1.20.2", - "@lodestar/utils": "^1.20.2", - "@lodestar/validator": "^1.20.2", + "@lodestar/api": "^1.22.0", + "@lodestar/beacon-node": "^1.22.0", + "@lodestar/config": "^1.22.0", + "@lodestar/db": "^1.22.0", + "@lodestar/light-client": "^1.22.0", + "@lodestar/logger": "^1.22.0", + "@lodestar/params": "^1.22.0", + "@lodestar/state-transition": "^1.22.0", + "@lodestar/types": "^1.22.0", + "@lodestar/utils": "^1.22.0", + "@lodestar/validator": "^1.22.0", "@multiformats/multiaddr": "^12.1.3", "deepmerge": "^4.3.1", "ethers": "^6.7.0", @@ -88,12 +88,12 @@ "yargs": "^17.7.1" }, "devDependencies": { - "@lodestar/test-utils": "^1.20.2", + "@lodestar/test-utils": "^1.22.0", "@types/debug": "^4.1.7", "@types/got": "^9.6.12", "@types/inquirer": "^9.0.3", "@types/proper-lockfile": "^4.1.4", "@types/yargs": "^17.0.24", - "fastify": "^4.27.0" + "fastify": "^5.0.0" } } diff 
--git a/packages/cli/src/cmds/beacon/initBeaconState.ts b/packages/cli/src/cmds/beacon/initBeaconState.ts index c8c444778991..67b578e9fdb9 100644 --- a/packages/cli/src/cmds/beacon/initBeaconState.ts +++ b/packages/cli/src/cmds/beacon/initBeaconState.ts @@ -1,15 +1,17 @@ import {ssz} from "@lodestar/types"; import {createBeaconConfig, BeaconConfig, ChainForkConfig} from "@lodestar/config"; -import {Logger} from "@lodestar/utils"; +import {Logger, formatBytes} from "@lodestar/utils"; import { isWithinWeakSubjectivityPeriod, ensureWithinWeakSubjectivityPeriod, BeaconStateAllForks, + loadState, + loadStateAndValidators, } from "@lodestar/state-transition"; import { IBeaconDb, IBeaconNodeOptions, - initStateFromAnchorState, + checkAndPersistAnchorState, initStateFromEth1, getStateTypeFromBytes, } from "@lodestar/beacon-node"; @@ -25,19 +27,23 @@ import { } from "../../networks/index.js"; import {BeaconArgs} from "./options.js"; +type StateWithBytes = {state: BeaconStateAllForks; stateBytes: Uint8Array}; + async function initAndVerifyWeakSubjectivityState( config: BeaconConfig, db: IBeaconDb, logger: Logger, - store: BeaconStateAllForks, - wsState: BeaconStateAllForks, + dbStateBytes: StateWithBytes, + wsStateBytes: StateWithBytes, wsCheckpoint: Checkpoint, opts: {ignoreWeakSubjectivityCheck?: boolean} = {} ): Promise<{anchorState: BeaconStateAllForks; wsCheckpoint: Checkpoint}> { + const dbState = dbStateBytes.state; + const wsState = wsStateBytes.state; // Check if the store's state and wsState are compatible if ( - store.genesisTime !== wsState.genesisTime || - !ssz.Root.equals(store.genesisValidatorsRoot, wsState.genesisValidatorsRoot) + dbState.genesisTime !== wsState.genesisTime || + !ssz.Root.equals(dbState.genesisValidatorsRoot, wsState.genesisValidatorsRoot) ) { throw new Error( "Db state and checkpoint state are not compatible, either clear the db or verify your checkpoint source" @@ -45,12 +51,12 @@ async function initAndVerifyWeakSubjectivityState( } // Pick 
the state which is ahead as an anchor to initialize the beacon chain - let anchorState = wsState; + let anchorState = wsStateBytes; let anchorCheckpoint = wsCheckpoint; let isCheckpointState = true; - if (store.slot > wsState.slot) { - anchorState = store; - anchorCheckpoint = getCheckpointFromState(store); + if (dbState.slot > wsState.slot) { + anchorState = dbStateBytes; + anchorCheckpoint = getCheckpointFromState(dbState); isCheckpointState = false; logger.verbose( "Db state is ahead of the provided checkpoint state, using the db state to initialize the beacon chain" @@ -59,19 +65,19 @@ async function initAndVerifyWeakSubjectivityState( // Throw error unless user explicitly asked not to, in testnets can happen that wss period is too small // that even some epochs of non finalization can cause finalized checkpoint to be out of valid range - const wssCheck = wrapFnError(() => ensureWithinWeakSubjectivityPeriod(config, anchorState, anchorCheckpoint)); + const wssCheck = wrapFnError(() => ensureWithinWeakSubjectivityPeriod(config, anchorState.state, anchorCheckpoint)); const isWithinWeakSubjectivityPeriod = wssCheck.err === null; if (!isWithinWeakSubjectivityPeriod && !opts.ignoreWeakSubjectivityCheck) { throw wssCheck.err; } - anchorState = await initStateFromAnchorState(config, db, logger, anchorState, { + await checkAndPersistAnchorState(config, db, logger, anchorState.state, anchorState.stateBytes, { isWithinWeakSubjectivityPeriod, isCheckpointState, }); // Return the latest anchorState but still return original wsCheckpoint to validate in backfill - return {anchorState, wsCheckpoint}; + return {anchorState: anchorState.state, wsCheckpoint}; } /** @@ -96,8 +102,20 @@ export async function initBeaconState( } // fetch the latest state stored in the db which will be used in all cases, if it exists, either // i) used directly as the anchor state - // ii) used during verification of a weak subjectivity state, - const lastDbState = await db.stateArchive.lastValue(); + 
// ii) used to load and verify a weak subjectivity state, + const lastDbSlot = await db.stateArchive.lastKey(); + const stateBytes = lastDbSlot !== null ? await db.stateArchive.getBinary(lastDbSlot) : null; + let lastDbState: BeaconStateAllForks | null = null; + let lastDbValidatorsBytes: Uint8Array | null = null; + let lastDbStateWithBytes: StateWithBytes | null = null; + if (stateBytes) { + logger.verbose("Found the last archived state", {slot: lastDbSlot, size: formatBytes(stateBytes.length)}); + const {state, validatorsBytes} = loadStateAndValidators(chainForkConfig, stateBytes); + lastDbState = state; + lastDbValidatorsBytes = validatorsBytes; + lastDbStateWithBytes = {state, stateBytes: stateBytes}; + } + if (lastDbState) { const config = createBeaconConfig(chainForkConfig, lastDbState.genesisValidatorsRoot); const wssCheck = isWithinWeakSubjectivityPeriod(config, lastDbState, getCheckpointFromState(lastDbState)); @@ -107,7 +125,9 @@ export async function initBeaconState( // Forcing to sync from checkpoint is only recommended if node is taking too long to sync from last db state. // It is important to remind the user to remove this flag again unless it is absolutely necessary. 
if (wssCheck) { - logger.warn("Forced syncing from checkpoint even though db state is within weak subjectivity period"); + logger.warn( + `Forced syncing from checkpoint even though db state at slot ${lastDbState.slot} is within weak subjectivity period` + ); logger.warn("Please consider removing --forceCheckpointSync flag unless absolutely necessary"); } } else { @@ -115,11 +135,15 @@ export async function initBeaconState( // - if no checkpoint sync args provided, or // - the lastDbState is within weak subjectivity period: if ((!args.checkpointState && !args.checkpointSyncUrl) || wssCheck) { - const anchorState = await initStateFromAnchorState(config, db, logger, lastDbState, { + if (stateBytes === null) { + // this never happens + throw Error(`There is no stateBytes for the lastDbState at slot ${lastDbState.slot}`); + } + await checkAndPersistAnchorState(config, db, logger, lastDbState, stateBytes, { isWithinWeakSubjectivityPeriod: wssCheck, isCheckpointState: false, }); - return {anchorState}; + return {anchorState: lastDbState}; } } } @@ -127,7 +151,8 @@ export async function initBeaconState( // See if we can sync state using checkpoint sync args or else start from genesis if (args.checkpointState) { return readWSState( - lastDbState, + lastDbStateWithBytes, + lastDbValidatorsBytes, { checkpointState: args.checkpointState, wssCheckpoint: args.wssCheckpoint, @@ -139,7 +164,8 @@ export async function initBeaconState( ); } else if (args.checkpointSyncUrl) { return fetchWSStateFromBeaconApi( - lastDbState, + lastDbStateWithBytes, + lastDbValidatorsBytes, { checkpointSyncUrl: args.checkpointSyncUrl, wssCheckpoint: args.wssCheckpoint, @@ -153,10 +179,10 @@ export async function initBeaconState( const genesisStateFile = args.genesisStateFile || getGenesisFileUrl(args.network || defaultNetwork); if (genesisStateFile && !args.forceGenesis) { const stateBytes = await downloadOrLoadFile(genesisStateFile); - let anchorState = getStateTypeFromBytes(chainForkConfig, 
stateBytes).deserializeToViewDU(stateBytes); + const anchorState = getStateTypeFromBytes(chainForkConfig, stateBytes).deserializeToViewDU(stateBytes); const config = createBeaconConfig(chainForkConfig, anchorState.genesisValidatorsRoot); const wssCheck = isWithinWeakSubjectivityPeriod(config, anchorState, getCheckpointFromState(anchorState)); - anchorState = await initStateFromAnchorState(config, db, logger, anchorState, { + await checkAndPersistAnchorState(config, db, logger, anchorState, stateBytes, { isWithinWeakSubjectivityPeriod: wssCheck, isCheckpointState: true, }); @@ -170,7 +196,8 @@ export async function initBeaconState( } async function readWSState( - lastDbState: BeaconStateAllForks | null, + lastDbStateBytes: StateWithBytes | null, + lastDbValidatorsBytes: Uint8Array | null, wssOpts: {checkpointState: string; wssCheckpoint?: string; ignoreWeakSubjectivityCheck?: boolean}, chainForkConfig: ChainForkConfig, db: IBeaconDb, @@ -180,19 +207,28 @@ async function readWSState( // if a weak subjectivity checkpoint has been provided, it is used for additional verification // otherwise, the state itself is used for verification (not bad, because the trusted state has been explicitly provided) const {checkpointState, wssCheckpoint, ignoreWeakSubjectivityCheck} = wssOpts; + const lastDbState = lastDbStateBytes?.state ?? null; const stateBytes = await downloadOrLoadFile(checkpointState); - const wsState = getStateTypeFromBytes(chainForkConfig, stateBytes).deserializeToViewDU(stateBytes); + let wsState: BeaconStateAllForks; + if (lastDbState && lastDbValidatorsBytes) { + // use lastDbState to load wsState if possible to share the same state tree + wsState = loadState(chainForkConfig, lastDbState, stateBytes, lastDbValidatorsBytes).state; + } else { + wsState = getStateTypeFromBytes(chainForkConfig, stateBytes).deserializeToViewDU(stateBytes); + } const config = createBeaconConfig(chainForkConfig, wsState.genesisValidatorsRoot); - const store = lastDbState ?? 
wsState; + const wsStateBytes = {state: wsState, stateBytes}; + const store = lastDbStateBytes ?? wsStateBytes; const checkpoint = wssCheckpoint ? getCheckpointFromArg(wssCheckpoint) : getCheckpointFromState(wsState); - return initAndVerifyWeakSubjectivityState(config, db, logger, store, wsState, checkpoint, { + return initAndVerifyWeakSubjectivityState(config, db, logger, store, wsStateBytes, checkpoint, { ignoreWeakSubjectivityCheck, }); } async function fetchWSStateFromBeaconApi( - lastDbState: BeaconStateAllForks | null, + lastDbStateBytes: StateWithBytes | null, + lastDbValidatorsBytes: Uint8Array | null, wssOpts: {checkpointSyncUrl: string; wssCheckpoint?: string; ignoreWeakSubjectivityCheck?: boolean}, chainForkConfig: ChainForkConfig, db: IBeaconDb, @@ -213,10 +249,15 @@ async function fetchWSStateFromBeaconApi( throw e; } - const {wsState, wsCheckpoint} = await fetchWeakSubjectivityState(chainForkConfig, logger, wssOpts); + const {wsState, wsStateBytes, wsCheckpoint} = await fetchWeakSubjectivityState(chainForkConfig, logger, wssOpts, { + lastDbState: lastDbStateBytes?.state ?? null, + lastDbValidatorsBytes, + }); + const config = createBeaconConfig(chainForkConfig, wsState.genesisValidatorsRoot); - const store = lastDbState ?? wsState; - return initAndVerifyWeakSubjectivityState(config, db, logger, store, wsState, wsCheckpoint, { + const wsStateWithBytes = {state: wsState, stateBytes: wsStateBytes}; + const store = lastDbStateBytes ?? 
wsStateWithBytes; + return initAndVerifyWeakSubjectivityState(config, db, logger, store, wsStateWithBytes, wsCheckpoint, { ignoreWeakSubjectivityCheck: wssOpts.ignoreWeakSubjectivityCheck, }); } diff --git a/packages/cli/src/cmds/dev/options.ts b/packages/cli/src/cmds/dev/options.ts index c484150e58d7..5286b81729c6 100644 --- a/packages/cli/src/cmds/dev/options.ts +++ b/packages/cli/src/cmds/dev/options.ts @@ -90,6 +90,10 @@ const externalOptionsOverrides: Partial fromHexString(pubkeyHex)); + const pubkeysBytes = pubkeys.map((pubkeyHex) => fromHex(pubkeyHex)); const interchangeV5 = await this.validator.exportInterchange(pubkeysBytes, { version: "5", diff --git a/packages/cli/src/cmds/validator/keymanager/keystoreCache.ts b/packages/cli/src/cmds/validator/keymanager/keystoreCache.ts index 85b1702892ee..2997a6b6b113 100644 --- a/packages/cli/src/cmds/validator/keymanager/keystoreCache.ts +++ b/packages/cli/src/cmds/validator/keymanager/keystoreCache.ts @@ -3,7 +3,7 @@ import path from "node:path"; import {Keystore} from "@chainsafe/bls-keystore"; import {SecretKey} from "@chainsafe/blst"; import {SignerLocal, SignerType} from "@lodestar/validator"; -import {fromHex, toHex} from "@lodestar/utils"; +import {fromHex, toHex, toPubkeyHex} from "@lodestar/utils"; import {writeFile600Perm} from "../../../util/file.js"; import {lockFilepath, unlockFilepath} from "../../../util/lockfile.js"; import {LocalKeystoreDefinition} from "./interface.js"; @@ -42,9 +42,9 @@ export async function loadKeystoreCache( const secretKey = SecretKey.fromBytes(secretKeyBytes); const publicKey = secretKey.toPublicKey().toBytes(); - if (toHex(publicKey) !== toHex(fromHex(k.pubkey))) { + if (toPubkeyHex(publicKey) !== toPubkeyHex(fromHex(k.pubkey))) { throw new Error( - `Keystore ${k.uuid} does not match the expected pubkey. expected=${toHex(fromHex(k.pubkey))}, found=${toHex( + `Keystore ${k.uuid} does not match the expected pubkey. 
expected=${toPubkeyHex(fromHex(k.pubkey))}, found=${toHex( publicKey )}` ); diff --git a/packages/cli/src/cmds/validator/keymanager/server.ts b/packages/cli/src/cmds/validator/keymanager/server.ts index 03880c8b8842..c4d3256c6151 100644 --- a/packages/cli/src/cmds/validator/keymanager/server.ts +++ b/packages/cli/src/cmds/validator/keymanager/server.ts @@ -1,10 +1,10 @@ import crypto from "node:crypto"; import fs from "node:fs"; import path from "node:path"; -import {toHexString} from "@chainsafe/ssz"; import {RestApiServer, RestApiServerOpts, RestApiServerModules} from "@lodestar/beacon-node"; import {KeymanagerApiMethods, registerRoutes} from "@lodestar/api/keymanager/server"; import {ChainForkConfig} from "@lodestar/config"; +import {toHex} from "@lodestar/utils"; import {writeFile600Perm} from "../../../util/index.js"; export type KeymanagerRestApiServerOpts = RestApiServerOpts & { @@ -21,6 +21,7 @@ export const keymanagerRestApiServerOptsDefault: KeymanagerRestApiServerOpts = { isAuthEnabled: true, // Slashing protection DB has been reported to be 3MB https://github.com/ChainSafe/lodestar/issues/4530 bodyLimit: 20 * 1024 * 1024, // 20MB + stacktraces: false, }; export type KeymanagerRestApiServerModules = RestApiServerModules & { @@ -50,7 +51,7 @@ export class KeymanagerRestApiServer extends RestApiServer { if (opts.isAuthEnabled) { // Generate a new token if token file does not exist or file do exist, but is empty - bearerToken = readFileIfExists(apiTokenPath) ?? `api-token-${toHexString(crypto.randomBytes(32))}`; + bearerToken = readFileIfExists(apiTokenPath) ?? 
`api-token-${toHex(crypto.randomBytes(32))}`; writeFile600Perm(apiTokenPath, bearerToken, {encoding: "utf8"}); } diff --git a/packages/cli/src/cmds/validator/options.ts b/packages/cli/src/cmds/validator/options.ts index aaa0e96d25a7..87b43543b62e 100644 --- a/packages/cli/src/cmds/validator/options.ts +++ b/packages/cli/src/cmds/validator/options.ts @@ -91,6 +91,7 @@ export type KeymanagerArgs = { "keymanager.cors"?: string; "keymanager.headerLimit"?: number; "keymanager.bodyLimit"?: number; + "keymanager.stacktraces"?: boolean; }; export const keymanagerOptions: CliCommandOptions = { @@ -141,6 +142,11 @@ export const keymanagerOptions: CliCommandOptions = { type: "number", description: "Defines the maximum payload, in bytes, the server is allowed to accept", }, + "keymanager.stacktraces": { + hidden: true, + type: "boolean", + description: "Return stacktraces in HTTP error responses", + }, }; export const validatorOptions: CliCommandOptions = { diff --git a/packages/cli/src/cmds/validator/slashingProtection/export.ts b/packages/cli/src/cmds/validator/slashingProtection/export.ts index 7d1a4f8e6e2f..c18b020f5782 100644 --- a/packages/cli/src/cmds/validator/slashingProtection/export.ts +++ b/packages/cli/src/cmds/validator/slashingProtection/export.ts @@ -1,8 +1,7 @@ import path from "node:path"; -import {toHexString} from "@chainsafe/ssz"; import {InterchangeFormatVersion} from "@lodestar/validator"; import {getNodeLogger} from "@lodestar/logger/node"; -import {CliCommand} from "@lodestar/utils"; +import {CliCommand, toPubkeyHex} from "@lodestar/utils"; import {YargsError, ensure0xPrefix, isValidatePubkeyHex, writeFile600Perm} from "../../../util/index.js"; import {parseLoggerArgs} from "../../../util/logger.js"; import {GlobalArgs} from "../../../options/index.js"; @@ -86,7 +85,7 @@ export const exportCmd: CliCommand toHexString(pubkey) === pubkeyHex); + const existingPubkey = allPubkeys.find((pubkey) => toPubkeyHex(pubkey) === pubkeyHex); if (!existingPubkey) { 
logger.warn("Pubkey not found in slashing protection db", {pubkey: pubkeyHex}); } else { diff --git a/packages/cli/src/cmds/validator/voluntaryExit.ts b/packages/cli/src/cmds/validator/voluntaryExit.ts index 279076b619f2..02f1591b59ed 100644 --- a/packages/cli/src/cmds/validator/voluntaryExit.ts +++ b/packages/cli/src/cmds/validator/voluntaryExit.ts @@ -8,7 +8,7 @@ import { } from "@lodestar/state-transition"; import {createBeaconConfig, BeaconConfig} from "@lodestar/config"; import {phase0, ssz, ValidatorIndex, Epoch} from "@lodestar/types"; -import {CliCommand, fromHex, toHex} from "@lodestar/utils"; +import {CliCommand, fromHex, toPubkeyHex} from "@lodestar/utils"; import {externalSignerPostSignature, SignableMessageType, Signer, SignerType} from "@lodestar/validator"; import {ApiClient, getClient} from "@lodestar/api"; import {ensure0xPrefix, YargsError, wrapError} from "../../util/index.js"; @@ -207,9 +207,9 @@ function selectSignersToExit(args: VoluntaryExitArgs, signers: Signer[]): Signer async function resolveValidatorIndexes(client: ApiClient, signersToExit: SignerPubkey[]) { const pubkeys = signersToExit.map(({pubkey}) => pubkey); - const validators = (await client.beacon.getStateValidators({stateId: "head", validatorIds: pubkeys})).value(); + const validators = (await client.beacon.postStateValidators({stateId: "head", validatorIds: pubkeys})).value(); - const dataByPubkey = new Map(validators.map((item) => [toHex(item.validator.pubkey), item])); + const dataByPubkey = new Map(validators.map((item) => [toPubkeyHex(item.validator.pubkey), item])); return signersToExit.map(({signer, pubkey}) => { const item = dataByPubkey.get(pubkey); diff --git a/packages/cli/src/networks/holesky.ts b/packages/cli/src/networks/holesky.ts index b86f6b543582..63bc6e07f8f2 100644 --- a/packages/cli/src/networks/holesky.ts +++ b/packages/cli/src/networks/holesky.ts @@ -10,7 +10,7 @@ export const bootEnrs = [ 
"enr:-Ku4QPG7F72mbKx3gEQEx07wpYYusGDh-ni6SNkLvOS-hhN-BxIggN7tKlmalb0L5JPoAfqD-akTZ-gX06hFeBEz4WoBh2F0dG5ldHOIAAAAAAAAAACEZXRoMpAhnTT-AQFwAP__________gmlkgnY0gmlwhJK-DYCJc2VjcDI1NmsxoQKLVXFOhp2uX6jeT0DvvDpPcU8FWMjQdR4wMuORMhpX24N1ZHCCIyk", "enr:-LK4QPxe-mDiSOtEB_Y82ozvxn9aQM07Ui8A-vQHNgYGMMthfsfOabaaTHhhJHFCBQQVRjBww_A5bM1rf8MlkJU_l68Eh2F0dG5ldHOIAADAAAAAAACEZXRoMpBpt9l0BAFwAAABAAAAAAAAgmlkgnY0gmlwhLKAiOmJc2VjcDI1NmsxoQJu6T9pclPObAzEVQ53DpVQqjadmVxdTLL-J3h9NFoCeIN0Y3CCIyiDdWRwgiMo", "enr:-Ly4QGbOw4xNel5EhmDsJJ-QhC9XycWtsetnWoZ0uRy381GHdHsNHJiCwDTOkb3S1Ade0SFQkWJX_pgb3g8Jfh93rvMBh2F0dG5ldHOIAAAAAAAAAACEZXRoMpBpt9l0BAFwAAABAAAAAAAAgmlkgnY0gmlwhJK-DYCJc2VjcDI1NmsxoQOxKv9sv3zKF8GDewgFGGHKP5HCZZpPpTrwl9eXKAWGxIhzeW5jbmV0cwCDdGNwgiMog3VkcIIjKA", - "enr:-LS4QG0uV4qvcpJ-HFDJRGBmnlD3TJo7yc4jwK8iP7iKaTlfQ5kZvIDspLMJhk7j9KapuL9yyHaZmwTEZqr10k9XumyCEcmHYXR0bmV0c4gAAAAABgAAAIRldGgykGm32XQEAXAAAAEAAAAAAACCaWSCdjSCaXCErK4j-YlzZWNwMjU2azGhAgfWRBEJlb7gAhXIB5ePmjj2b8io0UpEenq1Kl9cxStJg3RjcIIjKIN1ZHCCIyg", + "enr:-KO4QCi3ZY4TM5KL7bAG6laSYiYelDWu0crvUjCXlyc_cwEfUpMIuARuMJYGxWe-UYYpHEw_aBbZ1u-4tHQ8imyI5uaCAsGEZXRoMpBprg6ZBQFwAP__________gmlkgnY0gmlwhKyuI_mJc2VjcDI1NmsxoQLoFG5-vuNX6N49vnkTBaA3ZsBDF8B30DGqWOGtRGz5w4N0Y3CCIyiDdWRwgiMo", "enr:-Le4QLoE1wFHSlGcm48a9ZESb_MRLqPPu6G0vHqu4MaUcQNDHS69tsy-zkN0K6pglyzX8m24mkb-LtBcbjAYdP1uxm4BhGV0aDKQabfZdAQBcAAAAQAAAAAAAIJpZIJ2NIJpcIQ5gR6Wg2lwNpAgAUHQBwEQAAAAAAAAADR-iXNlY3AyNTZrMaEDPMSNdcL92uNIyCsS177Z6KTXlbZakQqxv3aQcWawNXeDdWRwgiMohHVkcDaCI4I", "enr:-KG4QC9Wm32mtzB5Fbj2ri2TEKglHmIWgvwTQCvNHBopuwpNAi1X6qOsBg_Z1-Bee-kfSrhzUQZSgDUyfH5outUprtoBgmlkgnY0gmlwhHEel3eDaXA2kP6AAAAAAAAAAlBW__4Srr-Jc2VjcDI1NmsxoQO7KE63Z4eSI55S1Yn7q9_xFkJ1Wt-a3LgiXuKGs19s0YN1ZHCCIyiEdWRwNoIjKA", ]; diff --git a/packages/cli/src/networks/index.ts b/packages/cli/src/networks/index.ts index 2d605335b0e8..0831b78cd2f6 100644 --- a/packages/cli/src/networks/index.ts +++ b/packages/cli/src/networks/index.ts @@ -3,12 +3,17 @@ import got from "got"; import {ENR} from "@chainsafe/enr"; 
import {SLOTS_PER_EPOCH} from "@lodestar/params"; import {HttpHeader, MediaType, WireFormat, getClient} from "@lodestar/api"; -import {getStateTypeFromBytes} from "@lodestar/beacon-node"; +import {getStateSlotFromBytes} from "@lodestar/beacon-node"; import {ChainConfig, ChainForkConfig} from "@lodestar/config"; import {Checkpoint} from "@lodestar/types/phase0"; import {Slot} from "@lodestar/types"; -import {fromHex, callFnWhenAwait, Logger} from "@lodestar/utils"; -import {BeaconStateAllForks, getLatestBlockRoot, computeCheckpointEpochAtStateSlot} from "@lodestar/state-transition"; +import {fromHex, callFnWhenAwait, Logger, formatBytes} from "@lodestar/utils"; +import { + BeaconStateAllForks, + getLatestBlockRoot, + computeCheckpointEpochAtStateSlot, + loadState, +} from "@lodestar/state-transition"; import {parseBootnodesFile} from "../util/format.js"; import * as mainnet from "./mainnet.js"; import * as dev from "./dev.js"; @@ -140,8 +145,12 @@ export function readBootnodes(bootnodesFilePath: string): string[] { export async function fetchWeakSubjectivityState( config: ChainForkConfig, logger: Logger, - {checkpointSyncUrl, wssCheckpoint}: {checkpointSyncUrl: string; wssCheckpoint?: string} -): Promise<{wsState: BeaconStateAllForks; wsCheckpoint: Checkpoint}> { + {checkpointSyncUrl, wssCheckpoint}: {checkpointSyncUrl: string; wssCheckpoint?: string}, + { + lastDbState, + lastDbValidatorsBytes, + }: {lastDbState: BeaconStateAllForks | null; lastDbValidatorsBytes: Uint8Array | null} +): Promise<{wsState: BeaconStateAllForks; wsStateBytes: Uint8Array; wsCheckpoint: Checkpoint}> { try { let wsCheckpoint: Checkpoint | null; let stateId: Slot | "finalized"; @@ -169,7 +178,7 @@ export async function fetchWeakSubjectivityState( } ); - const stateBytes = await callFnWhenAwait( + const wsStateBytes = await callFnWhenAwait( getStatePromise, () => logger.info("Download in progress, please wait..."), GET_STATE_LOG_INTERVAL @@ -177,13 +186,23 @@ export async function 
fetchWeakSubjectivityState( return res.ssz(); }); - logger.info("Download completed", {stateId}); + const wsSlot = getStateSlotFromBytes(wsStateBytes); + const logData = {stateId, size: formatBytes(wsStateBytes.length)}; + logger.info("Download completed", typeof stateId === "number" ? logData : {...logData, slot: wsSlot}); // It should not be required to get fork type from bytes but Checkpointz does not return // Eth-Consensus-Version header, see https://github.com/ethpandaops/checkpointz/issues/164 - const wsState = getStateTypeFromBytes(config, stateBytes).deserializeToViewDU(stateBytes); + let wsState: BeaconStateAllForks; + if (lastDbState && lastDbValidatorsBytes) { + // use lastDbState to load wsState if possible to share the same state tree + wsState = loadState(config, lastDbState, wsStateBytes, lastDbValidatorsBytes).state; + } else { + const stateType = config.getForkTypes(wsSlot).BeaconState; + wsState = stateType.deserializeToViewDU(wsStateBytes); + } return { wsState, + wsStateBytes, wsCheckpoint: wsCheckpoint ?? 
getCheckpointFromState(wsState), }; } catch (e) { diff --git a/packages/cli/src/options/beaconNodeOptions/api.ts b/packages/cli/src/options/beaconNodeOptions/api.ts index 996136f262ec..bed0105fd944 100644 --- a/packages/cli/src/options/beaconNodeOptions/api.ts +++ b/packages/cli/src/options/beaconNodeOptions/api.ts @@ -12,6 +12,7 @@ export type ApiArgs = { "rest.port": number; "rest.headerLimit"?: number; "rest.bodyLimit"?: number; + "rest.stacktraces"?: boolean; "rest.swaggerUI"?: boolean; }; @@ -26,6 +27,7 @@ export function parseArgs(args: ApiArgs): IBeaconNodeOptions["api"] { port: args["rest.port"], headerLimit: args["rest.headerLimit"], bodyLimit: args["rest.bodyLimit"], + stacktraces: args["rest.stacktraces"], swaggerUI: args["rest.swaggerUI"], }, }; @@ -92,6 +94,13 @@ export const options: CliCommandOptions = { description: "Defines the maximum payload, in bytes, the server is allowed to accept", }, + "rest.stacktraces": { + hidden: true, + type: "boolean", + description: "Return stacktraces in HTTP error responses", + group: "api", + }, + "rest.swaggerUI": { type: "boolean", description: "Enable Swagger UI for API exploration at http://{address}:{port}/documentation", diff --git a/packages/cli/src/options/beaconNodeOptions/network.ts b/packages/cli/src/options/beaconNodeOptions/network.ts index 25ba036a5dbf..bfe9c7710e86 100644 --- a/packages/cli/src/options/beaconNodeOptions/network.ts +++ b/packages/cli/src/options/beaconNodeOptions/network.ts @@ -26,7 +26,6 @@ export type NetworkArgs = { "network.connectToDiscv5Bootnodes"?: boolean; "network.discv5FirstQueryDelayMs"?: number; "network.dontSendGossipAttestationsToForkchoice"?: boolean; - "network.beaconAttestationBatchValidation"?: boolean; "network.allowPublishToZeroPeers"?: boolean; "network.gossipsubD"?: number; "network.gossipsubDLow"?: number; @@ -144,7 +143,6 @@ export function parseArgs(args: NetworkArgs): IBeaconNodeOptions["network"] { connectToDiscv5Bootnodes: 
args["network.connectToDiscv5Bootnodes"], discv5FirstQueryDelayMs: args["network.discv5FirstQueryDelayMs"], dontSendGossipAttestationsToForkchoice: args["network.dontSendGossipAttestationsToForkchoice"], - beaconAttestationBatchValidation: args["network.beaconAttestationBatchValidation"], allowPublishToZeroPeers: args["network.allowPublishToZeroPeers"], gossipsubD: args["network.gossipsubD"], gossipsubDLow: args["network.gossipsubDLow"], @@ -321,13 +319,6 @@ export const options: CliCommandOptions = { group: "network", }, - "network.beaconAttestationBatchValidation": { - hidden: true, - type: "boolean", - description: "Validate gossip attestations in batches", - group: "network", - }, - "network.allowPublishToZeroPeers": { hidden: true, type: "boolean", diff --git a/packages/cli/src/util/format.ts b/packages/cli/src/util/format.ts index 01c2753193a4..a86ca0662a9f 100644 --- a/packages/cli/src/util/format.ts +++ b/packages/cli/src/util/format.ts @@ -1,5 +1,5 @@ import {PublicKey} from "@chainsafe/blst"; -import {fromHexString} from "@chainsafe/ssz"; +import {fromHex} from "@lodestar/utils"; /** * 0x prefix a string if not prefixed already @@ -50,7 +50,7 @@ export function parseRange(range: string): number[] { export function assertValidPubkeysHex(pubkeysHex: string[]): void { for (const pubkeyHex of pubkeysHex) { - const pubkeyBytes = fromHexString(pubkeyHex); + const pubkeyBytes = fromHex(pubkeyHex); PublicKey.fromBytes(pubkeyBytes, true); } } diff --git a/packages/cli/test/sim/endpoints.test.ts b/packages/cli/test/sim/endpoints.test.ts index a40a18e379eb..6a119fc219d7 100644 --- a/packages/cli/test/sim/endpoints.test.ts +++ b/packages/cli/test/sim/endpoints.test.ts @@ -2,7 +2,8 @@ import path from "node:path"; import assert from "node:assert"; import {toHexString} from "@chainsafe/ssz"; -import {routes} from "@lodestar/api"; +import {routes, fetch} from "@lodestar/api"; +import {ssz} from "@lodestar/types"; import {Simulation} from 
"../utils/crucible/simulation.js"; import {BeaconClient, ExecutionClient} from "../utils/crucible/interfaces.js"; import {defineSimTestConfig, logFilesDir} from "../utils/crucible/utils/index.js"; @@ -40,7 +41,7 @@ await env.start({runTimeoutMs: estimatedTimeoutMs}); const node = env.nodes[0].beacon; await waitForSlot("Wait for 2 slots before checking endpoints", {env, slot: 2}); -const validators = (await node.api.beacon.getStateValidators({stateId: "head"})).value(); +const validators = (await node.api.beacon.postStateValidators({stateId: "head"})).value(); await env.tracker.assert("should have correct validators count called without filters", async () => { assert.equal(validators.length, validatorCount); @@ -55,12 +56,12 @@ await env.tracker.assert("should have correct validator index for second validat }); await env.tracker.assert( - "should return correct number of filtered validators when getStateValidators called with filters", + "should return correct number of filtered validators when postStateValidators called with filters", async () => { const filterPubKey = "0xa99a76ed7796f7be22d5b7e85deeb7c5677e88e511e0b337618f8c4eb61349b4bf2d153f649f7b53359fe8b94a38e44c"; - const res = await node.api.beacon.getStateValidators({stateId: "head", validatorIds: [filterPubKey]}); + const res = await node.api.beacon.postStateValidators({stateId: "head", validatorIds: [filterPubKey]}); assert.equal(res.value().length, 1); @@ -71,12 +72,12 @@ await env.tracker.assert( ); await env.tracker.assert( - "should return correct filtered validators when getStateValidators called with filters", + "should return correct filtered validators when postStateValidators called with filters", async () => { const filterPubKey = "0xa99a76ed7796f7be22d5b7e85deeb7c5677e88e511e0b337618f8c4eb61349b4bf2d153f649f7b53359fe8b94a38e44c"; - const res = await node.api.beacon.getStateValidators({stateId: "head", validatorIds: [filterPubKey]}); + const res = await 
node.api.beacon.postStateValidators({stateId: "head", validatorIds: [filterPubKey]}); assert.equal(toHexString(res.value()[0].validator.pubkey), filterPubKey); } @@ -105,10 +106,39 @@ await env.tracker.assert( } ); +await env.tracker.assert("should return HTTP error responses in a spec compliant format", async () => { + // ApiError with status 400 is thrown by handler + const res1 = await node.api.beacon.getStateValidator({stateId: "current", validatorId: 1}); + assert.deepStrictEqual(JSON.parse(await res1.errorBody()), {code: 400, message: "Invalid block id 'current'"}); + + // JSON schema validation failed + const res2 = await node.api.beacon.getPoolAttestationsV2({slot: "current" as unknown as number, committeeIndex: 123}); + assert.deepStrictEqual(JSON.parse(await res2.errorBody()), {code: 400, message: "slot must be integer"}); + + // Error processing multiple items + const signedAttestations = Array.from({length: 3}, () => ssz.phase0.Attestation.defaultValue()); + const res3 = await node.api.beacon.submitPoolAttestationsV2({signedAttestations}); + const errBody = JSON.parse(await res3.errorBody()) as {code: number; message: string; failures: unknown[]}; + assert.equal(errBody.code, 400); + assert.equal(errBody.message, "Error processing attestations"); + assert.equal(errBody.failures.length, signedAttestations.length); + assert.deepStrictEqual(errBody.failures[0], { + index: 0, + message: "ATTESTATION_ERROR_NOT_EXACTLY_ONE_AGGREGATION_BIT_SET", + }); + + // Route does not exist + const res4 = await fetch(`${node.restPublicUrl}/not/implemented/route`); + assert.deepStrictEqual(JSON.parse(await res4.text()), { + code: 404, + message: "Route GET:/not/implemented/route not found", + }); +}); + await env.tracker.assert("BN Not Synced", async () => { const expectedSyncStatus: routes.node.SyncingStatus = { - headSlot: "2", - syncDistance: "0", + headSlot: 2, + syncDistance: 0, isSyncing: false, isOptimistic: false, elOffline: false, diff --git 
a/packages/cli/test/unit/options/beaconNodeOptions.test.ts b/packages/cli/test/unit/options/beaconNodeOptions.test.ts index d74ae73b966f..879b5bfa2fc9 100644 --- a/packages/cli/test/unit/options/beaconNodeOptions.test.ts +++ b/packages/cli/test/unit/options/beaconNodeOptions.test.ts @@ -17,6 +17,7 @@ describe("options / beaconNodeOptions", () => { "rest.port": 7654, "rest.headerLimit": 16384, "rest.bodyLimit": 30e6, + "rest.stacktraces": true, "chain.blsVerifyAllMultiThread": true, "chain.blsVerifyAllMainThread": true, @@ -94,7 +95,6 @@ describe("options / beaconNodeOptions", () => { "network.blockCountPeerLimit": 500, "network.rateTrackerTimeoutMs": 60000, "network.dontSendGossipAttestationsToForkchoice": true, - "network.beaconAttestationBatchValidation": true, "network.allowPublishToZeroPeers": true, "network.gossipsubD": 4, "network.gossipsubDLow": 2, @@ -122,6 +122,7 @@ describe("options / beaconNodeOptions", () => { port: 7654, headerLimit: 16384, bodyLimit: 30e6, + stacktraces: true, }, }, chain: { @@ -204,7 +205,6 @@ describe("options / beaconNodeOptions", () => { connectToDiscv5Bootnodes: true, discv5FirstQueryDelayMs: 1000, dontSendGossipAttestationsToForkchoice: true, - beaconAttestationBatchValidation: true, allowPublishToZeroPeers: true, gossipsubD: 4, gossipsubDLow: 2, diff --git a/packages/cli/test/utils/crucible/assertions/blobsAssertion.ts b/packages/cli/test/utils/crucible/assertions/blobsAssertion.ts index dece5bc58ce8..50c9ed13f972 100644 --- a/packages/cli/test/utils/crucible/assertions/blobsAssertion.ts +++ b/packages/cli/test/utils/crucible/assertions/blobsAssertion.ts @@ -35,7 +35,7 @@ export function createBlobsAssertion( gasLimit: "0xc350", maxPriorityFeePerGas: "0x3b9aca00", maxFeePerGas: "0x3ba26b20", - maxFeePerBlobGas: "0x3e8", + maxFeePerBlobGas: "0x3e", value: "0x10000", nonce: `0x${(nonce ?? 
0).toString(16)}`, blobVersionedHashes, diff --git a/packages/config/package.json b/packages/config/package.json index 8d5fd3d80c35..f257db65cf6e 100644 --- a/packages/config/package.json +++ b/packages/config/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/config", - "version": "1.20.2", + "version": "1.22.0", "description": "Chain configuration required for lodestar", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -64,8 +64,9 @@ "blockchain" ], "dependencies": { - "@chainsafe/ssz": "^0.15.1", - "@lodestar/params": "^1.20.2", - "@lodestar/types": "^1.20.2" + "@chainsafe/ssz": "^0.17.1", + "@lodestar/params": "^1.22.0", + "@lodestar/utils": "^1.22.0", + "@lodestar/types": "^1.22.0" } } diff --git a/packages/config/src/chainConfig/configs/mainnet.ts b/packages/config/src/chainConfig/configs/mainnet.ts index 883688ca821b..e5792a87ac70 100644 --- a/packages/config/src/chainConfig/configs/mainnet.ts +++ b/packages/config/src/chainConfig/configs/mainnet.ts @@ -1,5 +1,5 @@ /* eslint-disable @typescript-eslint/naming-convention */ -import {fromHexString as b} from "@chainsafe/ssz"; +import {fromHex as b} from "@lodestar/utils"; import {PresetName} from "@lodestar/params"; import {ChainConfig} from "../types.js"; @@ -49,6 +49,10 @@ export const chainConfig: ChainConfig = { DENEB_FORK_VERSION: b("0x04000000"), DENEB_FORK_EPOCH: 269568, // March 13, 2024, 01:55:35pm UTC + // ELECTRA + ELECTRA_FORK_VERSION: b("0x05000000"), + ELECTRA_FORK_EPOCH: Infinity, + // Time parameters // --------------------------------------------------------------- // 12 seconds @@ -98,4 +102,10 @@ export const chainConfig: ChainConfig = { // Deneb // `2**12` (= 4096 epochs, ~18 days) MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS: 4096, + + // Electra + // 2**8 * 10**9 (= 256,000,000,000) + MAX_PER_EPOCH_ACTIVATION_EXIT_CHURN_LIMIT: 256000000000, + // 2*7 * 10**9 (= 128,000,000,000) + MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: 128000000000, }; diff --git 
a/packages/config/src/chainConfig/configs/minimal.ts b/packages/config/src/chainConfig/configs/minimal.ts index 23cd14e763ec..53229d283511 100644 --- a/packages/config/src/chainConfig/configs/minimal.ts +++ b/packages/config/src/chainConfig/configs/minimal.ts @@ -1,5 +1,5 @@ /* eslint-disable @typescript-eslint/naming-convention */ -import {fromHexString as b} from "@chainsafe/ssz"; +import {fromHex as b} from "@lodestar/utils"; import {PresetName} from "@lodestar/params"; import {ChainConfig} from "../types.js"; @@ -45,6 +45,9 @@ export const chainConfig: ChainConfig = { // Deneb DENEB_FORK_VERSION: b("0x04000001"), DENEB_FORK_EPOCH: Infinity, + // ELECTRA + ELECTRA_FORK_VERSION: b("0x05000001"), + ELECTRA_FORK_EPOCH: Infinity, // Time parameters // --------------------------------------------------------------- @@ -96,4 +99,10 @@ export const chainConfig: ChainConfig = { // Deneb // `2**12` (= 4096 epochs, ~18 days) MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS: 4096, + + // Electra + // 2**7 * 10**9 (= 128,000,000,000) + MAX_PER_EPOCH_ACTIVATION_EXIT_CHURN_LIMIT: 128000000000, + // 2**6 * 10**9 (= 64,000,000,000) + MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: 64000000000, }; diff --git a/packages/config/src/chainConfig/json.ts b/packages/config/src/chainConfig/json.ts index 4e61333cbdee..78db9230c836 100644 --- a/packages/config/src/chainConfig/json.ts +++ b/packages/config/src/chainConfig/json.ts @@ -1,4 +1,4 @@ -import {fromHexString, toHexString} from "@chainsafe/ssz"; +import {fromHex, toHex} from "@lodestar/utils"; import {ChainConfig, chainConfigTypes, SpecValue, SpecValueTypeName} from "./types.js"; const MAX_UINT64_JSON = "18446744073709551615"; @@ -69,7 +69,7 @@ export function serializeSpecValue(value: SpecValue, typeName: SpecValueTypeName if (!(value instanceof Uint8Array)) { throw Error(`Invalid value ${value.toString()} expected Uint8Array`); } - return toHexString(value); + return toHex(value); case "string": if (typeof value !== "string") { @@ -95,7 +95,7 @@ 
export function deserializeSpecValue(valueStr: unknown, typeName: SpecValueTypeN return BigInt(valueStr); case "bytes": - return fromHexString(valueStr); + return fromHex(valueStr); case "string": return valueStr; diff --git a/packages/config/src/chainConfig/networks/chiado.ts b/packages/config/src/chainConfig/networks/chiado.ts index 43b13a210dac..d96bd7510c65 100644 --- a/packages/config/src/chainConfig/networks/chiado.ts +++ b/packages/config/src/chainConfig/networks/chiado.ts @@ -1,5 +1,5 @@ /* eslint-disable @typescript-eslint/naming-convention */ -import {fromHexString as b} from "@chainsafe/ssz"; +import {fromHex as b} from "@lodestar/utils"; import {ChainConfig} from "../types.js"; import {gnosisChainConfig as gnosis} from "./gnosis.js"; diff --git a/packages/config/src/chainConfig/networks/ephemery.ts b/packages/config/src/chainConfig/networks/ephemery.ts index 29e3f7b92d01..6fefc1800bfc 100644 --- a/packages/config/src/chainConfig/networks/ephemery.ts +++ b/packages/config/src/chainConfig/networks/ephemery.ts @@ -1,5 +1,5 @@ /* eslint-disable @typescript-eslint/naming-convention */ -import {fromHexString as b} from "@chainsafe/ssz"; +import {fromHex as b} from "@lodestar/utils"; import {ChainConfig} from "../types.js"; import {chainConfig as mainnet} from "../configs/mainnet.js"; diff --git a/packages/config/src/chainConfig/networks/gnosis.ts b/packages/config/src/chainConfig/networks/gnosis.ts index 8034e478fd28..2f58ffd4d045 100644 --- a/packages/config/src/chainConfig/networks/gnosis.ts +++ b/packages/config/src/chainConfig/networks/gnosis.ts @@ -1,5 +1,5 @@ /* eslint-disable @typescript-eslint/naming-convention */ -import {fromHexString as b} from "@chainsafe/ssz"; +import {fromHex as b} from "@lodestar/utils"; import {PresetName} from "@lodestar/params"; import {ChainConfig} from "../types.js"; import {chainConfig as mainnet} from "../configs/mainnet.js"; diff --git a/packages/config/src/chainConfig/networks/holesky.ts 
b/packages/config/src/chainConfig/networks/holesky.ts index 187543b871f2..16c462a9468e 100644 --- a/packages/config/src/chainConfig/networks/holesky.ts +++ b/packages/config/src/chainConfig/networks/holesky.ts @@ -1,5 +1,5 @@ /* eslint-disable @typescript-eslint/naming-convention */ -import {fromHexString as b} from "@chainsafe/ssz"; +import {fromHex as b} from "@lodestar/utils"; import {ChainConfig} from "../types.js"; import {chainConfig as mainnet} from "../configs/mainnet.js"; diff --git a/packages/config/src/chainConfig/networks/mainnet.ts b/packages/config/src/chainConfig/networks/mainnet.ts index 24584ad8442b..c137c578fc0e 100644 --- a/packages/config/src/chainConfig/networks/mainnet.ts +++ b/packages/config/src/chainConfig/networks/mainnet.ts @@ -1,5 +1,5 @@ /* eslint-disable @typescript-eslint/naming-convention */ -import {fromHexString as b} from "@chainsafe/ssz"; +import {fromHex as b} from "@lodestar/utils"; import {ChainConfig} from "../types.js"; import {chainConfig as mainnet} from "../configs/mainnet.js"; diff --git a/packages/config/src/chainConfig/networks/sepolia.ts b/packages/config/src/chainConfig/networks/sepolia.ts index 51102cfafa7d..39e72a24f3f6 100644 --- a/packages/config/src/chainConfig/networks/sepolia.ts +++ b/packages/config/src/chainConfig/networks/sepolia.ts @@ -1,5 +1,5 @@ /* eslint-disable @typescript-eslint/naming-convention */ -import {fromHexString as b} from "@chainsafe/ssz"; +import {fromHex as b} from "@lodestar/utils"; import {ChainConfig} from "../types.js"; import {chainConfig as mainnet} from "../configs/mainnet.js"; diff --git a/packages/config/src/chainConfig/types.ts b/packages/config/src/chainConfig/types.ts index 45f05bfaa724..05fff02f2eaf 100644 --- a/packages/config/src/chainConfig/types.ts +++ b/packages/config/src/chainConfig/types.ts @@ -40,6 +40,9 @@ export type ChainConfig = { // DENEB DENEB_FORK_VERSION: Uint8Array; DENEB_FORK_EPOCH: number; + // ELECTRA + ELECTRA_FORK_VERSION: Uint8Array; + 
ELECTRA_FORK_EPOCH: number; // Time parameters SECONDS_PER_SLOT: number; @@ -55,6 +58,8 @@ export type ChainConfig = { MIN_PER_EPOCH_CHURN_LIMIT: number; MAX_PER_EPOCH_ACTIVATION_CHURN_LIMIT: number; CHURN_LIMIT_QUOTIENT: number; + MAX_PER_EPOCH_ACTIVATION_EXIT_CHURN_LIMIT: number; + MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: number; // Fork choice PROPOSER_SCORE_BOOST: number; @@ -99,6 +104,9 @@ export const chainConfigTypes: SpecTypes = { // DENEB DENEB_FORK_VERSION: "bytes", DENEB_FORK_EPOCH: "number", + // ELECTRA + ELECTRA_FORK_VERSION: "bytes", + ELECTRA_FORK_EPOCH: "number", // Time parameters SECONDS_PER_SLOT: "number", @@ -114,6 +122,8 @@ export const chainConfigTypes: SpecTypes = { MIN_PER_EPOCH_CHURN_LIMIT: "number", MAX_PER_EPOCH_ACTIVATION_CHURN_LIMIT: "number", CHURN_LIMIT_QUOTIENT: "number", + MAX_PER_EPOCH_ACTIVATION_EXIT_CHURN_LIMIT: "number", + MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: "number", // Fork choice PROPOSER_SCORE_BOOST: "number", diff --git a/packages/config/src/forkConfig/index.ts b/packages/config/src/forkConfig/index.ts index 16d8952548b8..513cd7559ee3 100644 --- a/packages/config/src/forkConfig/index.ts +++ b/packages/config/src/forkConfig/index.ts @@ -11,7 +11,7 @@ import { ForkLightClient, ForkBlobs, } from "@lodestar/params"; -import {Slot, Version, SSZTypesFor, sszTypesFor} from "@lodestar/types"; +import {Slot, Version, SSZTypesFor, sszTypesFor, Epoch} from "@lodestar/types"; import {ChainConfig} from "../chainConfig/index.js"; import {ForkConfig, ForkInfo} from "./types.js"; @@ -59,10 +59,18 @@ export function createForkConfig(config: ChainConfig): ForkConfig { prevVersion: config.CAPELLA_FORK_VERSION, prevForkName: ForkName.capella, }; + const electra: ForkInfo = { + name: ForkName.electra, + seq: ForkSeq.electra, + epoch: config.ELECTRA_FORK_EPOCH, + version: config.ELECTRA_FORK_VERSION, + prevVersion: config.DENEB_FORK_VERSION, + prevForkName: ForkName.deneb, + }; /** Forks in order order of occurence, `phase0` first */ // Note: 
Downstream code relies on proper ordering. - const forks = {phase0, altair, bellatrix, capella, deneb}; + const forks = {phase0, altair, bellatrix, capella, deneb, electra}; // Prevents allocating an array on every getForkInfo() call const forksAscendingEpochOrder = Object.values(forks); @@ -76,6 +84,9 @@ export function createForkConfig(config: ChainConfig): ForkConfig { // Fork convenience methods getForkInfo(slot: Slot): ForkInfo { const epoch = Math.floor(Math.max(slot, 0) / SLOTS_PER_EPOCH); + return this.getForkInfoAtEpoch(epoch); + }, + getForkInfoAtEpoch(epoch: Epoch): ForkInfo { // NOTE: forks must be sorted by descending epoch, latest fork first for (const fork of forksDescendingEpochOrder) { if (epoch >= fork.epoch) return fork; @@ -88,6 +99,9 @@ export function createForkConfig(config: ChainConfig): ForkConfig { getForkSeq(slot: Slot): ForkSeq { return this.getForkInfo(slot).seq; }, + getForkSeqAtEpoch(epoch: Epoch): ForkSeq { + return this.getForkInfoAtEpoch(epoch).seq; + }, getForkVersion(slot: Slot): Version { return this.getForkInfo(slot).version; }, diff --git a/packages/config/src/forkConfig/types.ts b/packages/config/src/forkConfig/types.ts index 2905e6f03c34..ebb2899a2a21 100644 --- a/packages/config/src/forkConfig/types.ts +++ b/packages/config/src/forkConfig/types.ts @@ -21,11 +21,14 @@ export type ForkConfig = { /** Get the hard-fork info for the active fork at `slot` */ getForkInfo(slot: Slot): ForkInfo; - + /** Get the hard-fork info for the active fork at `epoch` */ + getForkInfoAtEpoch(epoch: Epoch): ForkInfo; /** Get the hard-fork name at a given slot */ getForkName(slot: Slot): ForkName; /** Get the hard-fork sequence number at a given slot */ getForkSeq(slot: Slot): ForkSeq; + /** Get the hard-fork sequence number at a given epoch */ + getForkSeqAtEpoch(epoch: Epoch): ForkSeq; /** Get the hard-fork version at a given slot */ getForkVersion(slot: Slot): Version; /** Get SSZ types by hard-fork */ diff --git 
a/packages/config/src/genesisConfig/index.ts b/packages/config/src/genesisConfig/index.ts index 52fdd03880a6..d2dfae2a8e08 100644 --- a/packages/config/src/genesisConfig/index.ts +++ b/packages/config/src/genesisConfig/index.ts @@ -1,6 +1,6 @@ -import {toHexString} from "@chainsafe/ssz"; import {ForkName, SLOTS_PER_EPOCH, DOMAIN_VOLUNTARY_EXIT} from "@lodestar/params"; import {DomainType, ForkDigest, phase0, Root, Slot, ssz, Version} from "@lodestar/types"; +import {toHex} from "@lodestar/utils"; import {ChainForkConfig} from "../beaconConfig.js"; import {ForkDigestHex, CachedGenesis} from "./types.js"; export type {ForkDigestContext} from "./types.js"; @@ -139,7 +139,7 @@ function computeForkDataRoot(currentVersion: Version, genesisValidatorsRoot: Roo } function toHexStringNoPrefix(hex: string | Uint8Array): string { - return strip0xPrefix(typeof hex === "string" ? hex : toHexString(hex)); + return strip0xPrefix(typeof hex === "string" ? hex : toHex(hex)); } function strip0xPrefix(hex: string): string { diff --git a/packages/db/package.json b/packages/db/package.json index 6cfaf6ecce70..0e3a2c847d4b 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/db", - "version": "1.20.2", + "version": "1.22.0", "description": "DB modules of Lodestar", "author": "ChainSafe Systems", "homepage": "https://github.com/ChainSafe/lodestar#readme", @@ -35,13 +35,13 @@ "check-readme": "typescript-docs-verifier" }, "dependencies": { - "@chainsafe/ssz": "^0.15.1", - "@lodestar/config": "^1.20.2", - "@lodestar/utils": "^1.20.2", + "@chainsafe/ssz": "^0.17.1", + "@lodestar/config": "^1.22.0", + "@lodestar/utils": "^1.22.0", "classic-level": "^1.4.1", "it-all": "^3.0.4" }, "devDependencies": { - "@lodestar/logger": "^1.20.2" + "@lodestar/logger": "^1.22.0" } } diff --git a/packages/flare/package.json b/packages/flare/package.json index 9e9560d32dd8..3a11e9b49ebf 100644 --- a/packages/flare/package.json +++ b/packages/flare/package.json 
@@ -1,6 +1,6 @@ { "name": "@lodestar/flare", - "version": "1.20.2", + "version": "1.22.0", "description": "Beacon chain debugging tool", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -58,14 +58,14 @@ "blockchain" ], "dependencies": { - "@chainsafe/blst": "^2.0.3", "@chainsafe/bls-keygen": "^0.4.0", - "@lodestar/api": "^1.20.2", - "@lodestar/config": "^1.20.2", - "@lodestar/params": "^1.20.2", - "@lodestar/state-transition": "^1.20.2", - "@lodestar/types": "^1.20.2", - "@lodestar/utils": "^1.20.2", + "@chainsafe/blst": "^2.0.3", + "@lodestar/api": "^1.22.0", + "@lodestar/config": "^1.22.0", + "@lodestar/params": "^1.22.0", + "@lodestar/state-transition": "^1.22.0", + "@lodestar/types": "^1.22.0", + "@lodestar/utils": "^1.22.0", "source-map-support": "^0.5.21", "yargs": "^17.7.1" }, diff --git a/packages/flare/src/cmds/selfSlashAttester.ts b/packages/flare/src/cmds/selfSlashAttester.ts index 8b43e6a92cb0..6ccc9d80a98f 100644 --- a/packages/flare/src/cmds/selfSlashAttester.ts +++ b/packages/flare/src/cmds/selfSlashAttester.ts @@ -1,10 +1,10 @@ import {SecretKey, aggregateSignatures} from "@chainsafe/blst"; import {getClient} from "@lodestar/api"; -import {phase0, ssz} from "@lodestar/types"; +import {AttesterSlashing, phase0, ssz} from "@lodestar/types"; import {config as chainConfig} from "@lodestar/config/default"; import {createBeaconConfig, BeaconConfig} from "@lodestar/config"; import {DOMAIN_BEACON_ATTESTER, MAX_VALIDATORS_PER_COMMITTEE} from "@lodestar/params"; -import {CliCommand, toHexString} from "@lodestar/utils"; +import {CliCommand, toPubkeyHex} from "@lodestar/utils"; import {computeSigningRoot} from "@lodestar/state-transition"; import {deriveSecretKeys, SecretKeysArgs, secretKeysOptions} from "../util/deriveSecretKeys.js"; @@ -79,7 +79,7 @@ export async function selfSlashAttesterHandler(args: SelfSlashArgs): Promise sk.toPublicKey().toHex()); - const validators = (await client.beacon.getStateValidators({stateId: "head", validatorIds: 
pksHex})).value(); + const validators = (await client.beacon.postStateValidators({stateId: "head", validatorIds: pksHex})).value(); // All validators in the batch will be part of the same AttesterSlashing const attestingIndices = validators.map((v) => v.index); @@ -90,9 +90,9 @@ export async function selfSlashAttesterHandler(args: SelfSlashArgs): Promise sk.toPublicKey().toHex()); - const validators = (await client.beacon.getStateValidators({stateId: "head", validatorIds: pksHex})).value(); + const validators = (await client.beacon.postStateValidators({stateId: "head", validatorIds: pksHex})).value(); // Submit all ProposerSlashing for range at once await Promise.all( @@ -86,9 +86,9 @@ export async function selfSlashProposerHandler(args: SelfSlashArgs): Promise epochNow) { @@ -1484,7 +1483,7 @@ export function assertValidTerminalPowBlock( // powBock.blockHash is hex, so we just pick the corresponding root if (!ssz.Root.equals(block.body.executionPayload.parentHash, config.TERMINAL_BLOCK_HASH)) throw new Error( - `Invalid terminal block hash, expected: ${toHexString(config.TERMINAL_BLOCK_HASH)}, actual: ${toHexString( + `Invalid terminal block hash, expected: ${toRootHex(config.TERMINAL_BLOCK_HASH)}, actual: ${toRootHex( block.body.executionPayload.parentHash )}` ); diff --git a/packages/fork-choice/src/forkChoice/interface.ts b/packages/fork-choice/src/forkChoice/interface.ts index d0629c2125cc..0b6d56a88bf2 100644 --- a/packages/fork-choice/src/forkChoice/interface.ts +++ b/packages/fork-choice/src/forkChoice/interface.ts @@ -1,6 +1,6 @@ import {EffectiveBalanceIncrements} from "@lodestar/state-transition"; import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; -import {Epoch, Slot, ValidatorIndex, phase0, Root, RootHex, BeaconBlock} from "@lodestar/types"; +import {Epoch, Slot, ValidatorIndex, phase0, Root, RootHex, BeaconBlock, IndexedAttestation} from "@lodestar/types"; import { ProtoBlock, MaybeValidExecutionStatus, @@ -156,7 +156,7 @@ export 
interface IForkChoice { * The supplied `attestation` **must** pass the `in_valid_indexed_attestation` function as it * will not be run here. */ - onAttestation(attestation: phase0.IndexedAttestation, attDataRoot: string, forceImport?: boolean): void; + onAttestation(attestation: IndexedAttestation, attDataRoot: string, forceImport?: boolean): void; /** * Register attester slashing in order not to consider their votes in `getHead` * diff --git a/packages/fork-choice/src/forkChoice/store.ts b/packages/fork-choice/src/forkChoice/store.ts index faf700241fa8..d16e021529db 100644 --- a/packages/fork-choice/src/forkChoice/store.ts +++ b/packages/fork-choice/src/forkChoice/store.ts @@ -1,4 +1,4 @@ -import {toHexString} from "@chainsafe/ssz"; +import {toRootHex} from "@lodestar/utils"; import {EffectiveBalanceIncrements, CachedBeaconStateAllForks} from "@lodestar/state-transition"; import {phase0, Slot, RootHex, ValidatorIndex} from "@lodestar/types"; import {CheckpointHexWithTotalBalance, CheckpointHexWithBalance} from "./interface.js"; @@ -103,7 +103,7 @@ export function toCheckpointWithHex(checkpoint: phase0.Checkpoint): CheckpointWi return { epoch: checkpoint.epoch, root, - rootHex: toHexString(root), + rootHex: toRootHex(root), }; } diff --git a/packages/fork-choice/src/protoArray/protoArray.ts b/packages/fork-choice/src/protoArray/protoArray.ts index eaa86b2f0ee1..0b793d2be099 100644 --- a/packages/fork-choice/src/protoArray/protoArray.ts +++ b/packages/fork-choice/src/protoArray/protoArray.ts @@ -1,8 +1,8 @@ -import {toHexString} from "@chainsafe/ssz"; import {Epoch, RootHex, Slot} from "@lodestar/types"; import {computeEpochAtSlot, computeStartSlotAtEpoch} from "@lodestar/state-transition"; import {GENESIS_EPOCH} from "@lodestar/params"; +import {toRootHex} from "@lodestar/utils"; import {ForkChoiceError, ForkChoiceErrorCode} from "../forkChoice/errors.js"; import {ProtoBlock, ProtoNode, HEX_ZERO_HASH, ExecutionStatus, LVHExecResponse} from "./interface.js"; import 
{ProtoArrayError, ProtoArrayErrorCode, LVHExecError, LVHExecErrorCode} from "./errors.js"; @@ -10,7 +10,7 @@ import {ProtoArrayError, ProtoArrayErrorCode, LVHExecError, LVHExecErrorCode} fr export const DEFAULT_PRUNE_THRESHOLD = 0; type ProposerBoost = {root: RootHex; score: number}; -const ZERO_HASH_HEX = toHexString(Buffer.alloc(32, 0)); +const ZERO_HASH_HEX = toRootHex(Buffer.alloc(32, 0)); export class ProtoArray { // Do not attempt to prune the tree unless it has at least this many nodes. diff --git a/packages/fork-choice/test/perf/forkChoice/util.ts b/packages/fork-choice/test/perf/forkChoice/util.ts index dbd049e257c1..6c04ac817fb2 100644 --- a/packages/fork-choice/test/perf/forkChoice/util.ts +++ b/packages/fork-choice/test/perf/forkChoice/util.ts @@ -41,7 +41,7 @@ export function initializeForkChoice(opts: Opts): ForkChoice { genesisSlot ); - const balances = new Uint8Array(Array.from({length: opts.initialValidatorCount}, () => 32)); + const balances = new Uint16Array(Array.from({length: opts.initialValidatorCount}, () => 32)); const fcStore: IForkChoiceStore = { currentSlot: genesisSlot, diff --git a/packages/fork-choice/test/unit/forkChoice/forkChoice.test.ts b/packages/fork-choice/test/unit/forkChoice/forkChoice.test.ts index fcb7376cffa8..40988a0e4a71 100644 --- a/packages/fork-choice/test/unit/forkChoice/forkChoice.test.ts +++ b/packages/fork-choice/test/unit/forkChoice/forkChoice.test.ts @@ -52,16 +52,16 @@ describe("Forkchoice", function () { currentSlot: genesisSlot + 1, justified: { checkpoint: {epoch: genesisEpoch, root: fromHexString(finalizedRoot), rootHex: finalizedRoot}, - balances: new Uint8Array([32]), + balances: new Uint16Array([32]), totalBalance: 32, }, unrealizedJustified: { checkpoint: {epoch: genesisEpoch, root: fromHexString(finalizedRoot), rootHex: finalizedRoot}, - balances: new Uint8Array([32]), + balances: new Uint16Array([32]), }, finalizedCheckpoint: {epoch: genesisEpoch, root: fromHexString(finalizedRoot), rootHex: 
finalizedRoot}, unrealizedFinalizedCheckpoint: {epoch: genesisEpoch, root: fromHexString(finalizedRoot), rootHex: finalizedRoot}, - justifiedBalancesGetter: () => new Uint8Array([32]), + justifiedBalancesGetter: () => new Uint16Array([32]), equivocatingIndices: new Set(), }; diff --git a/packages/fork-choice/test/unit/forkChoice/getProposerHead.test.ts b/packages/fork-choice/test/unit/forkChoice/getProposerHead.test.ts index cc14b5b57b92..f603a4069b83 100644 --- a/packages/fork-choice/test/unit/forkChoice/getProposerHead.test.ts +++ b/packages/fork-choice/test/unit/forkChoice/getProposerHead.test.ts @@ -102,12 +102,12 @@ describe("Forkchoice / GetProposerHead", function () { currentSlot: genesisSlot + 1, justified: { checkpoint: {epoch: genesisEpoch, root: fromHexString(genesisBlock.blockRoot), rootHex: genesisBlock.blockRoot}, - balances: new Uint8Array(Array(32).fill(150)), + balances: new Uint16Array(Array(32).fill(150)), totalBalance: 32 * 150, }, unrealizedJustified: { checkpoint: {epoch: genesisEpoch, root: fromHexString(genesisBlock.blockRoot), rootHex: genesisBlock.blockRoot}, - balances: new Uint8Array(Array(32).fill(150)), + balances: new Uint16Array(Array(32).fill(150)), }, finalizedCheckpoint: { epoch: genesisEpoch, @@ -119,7 +119,7 @@ describe("Forkchoice / GetProposerHead", function () { root: fromHexString(genesisBlock.blockRoot), rootHex: genesisBlock.blockRoot, }, - justifiedBalancesGetter: () => new Uint8Array(Array(32).fill(150)), + justifiedBalancesGetter: () => new Uint16Array(Array(32).fill(150)), equivocatingIndices: new Set(), }; diff --git a/packages/fork-choice/test/unit/protoArray/computeDeltas.test.ts b/packages/fork-choice/test/unit/protoArray/computeDeltas.test.ts index fde551d43cda..4428807bd13d 100644 --- a/packages/fork-choice/test/unit/protoArray/computeDeltas.test.ts +++ b/packages/fork-choice/test/unit/protoArray/computeDeltas.test.ts @@ -253,7 +253,7 @@ describe("computeDeltas", () => { nextEpoch: 0, })); - const balances = new 
Uint8Array([firstBalance, secondBalance]); + const balances = new Uint16Array([firstBalance, secondBalance]); // 1st validator is part of an attester slashing const equivocatingIndices = new Set([0]); let deltas = computeDeltas(indices.size, votes, balances, balances, equivocatingIndices); diff --git a/packages/light-client/README.md b/packages/light-client/README.md index cd3a0265008a..0323e8fc4326 100644 --- a/packages/light-client/README.md +++ b/packages/light-client/README.md @@ -62,7 +62,7 @@ import { const config = getChainForkConfigFromNetwork("sepolia"); const logger = getConsoleLogger({logDebug: Boolean(process.env.DEBUG)}); -const api = getApiFromUrl({urls: ["https://lodestar-sepolia.chainsafe.io"]}, {config}); +const api = getApiFromUrl("https://lodestar-sepolia.chainsafe.io", "sepolia"); const lightclient = await Lightclient.initializeFromCheckpointRoot({ config, @@ -82,11 +82,11 @@ await lightclient.start(); logger.info("Lightclient synced"); lightclient.emitter.on(LightclientEvent.lightClientFinalityHeader, async (finalityUpdate) => { - logger.info(finalityUpdate); + logger.info("Received finality update", {slot: finalityUpdate.beacon.slot}); }); lightclient.emitter.on(LightclientEvent.lightClientOptimisticHeader, async (optimisticUpdate) => { - logger.info(optimisticUpdate); + logger.info("Received optimistic update", {slot: optimisticUpdate.beacon.slot}); }); ``` diff --git a/packages/light-client/package.json b/packages/light-client/package.json index 6576bddfee6a..2eda3a45d455 100644 --- a/packages/light-client/package.json +++ b/packages/light-client/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.20.2", + "version": "1.22.0", "type": "module", "exports": { ".": { @@ -75,19 +75,19 @@ "dependencies": { "@chainsafe/bls": "7.1.3", "@chainsafe/blst": "^0.2.0", - "@chainsafe/persistent-merkle-tree": "^0.7.1", - "@chainsafe/ssz": "^0.15.1", - "@lodestar/api": "^1.20.2", - 
"@lodestar/config": "^1.20.2", - "@lodestar/params": "^1.20.2", - "@lodestar/types": "^1.20.2", - "@lodestar/utils": "^1.20.2", + "@chainsafe/persistent-merkle-tree": "^0.8.0", + "@chainsafe/ssz": "^0.17.1", + "@lodestar/api": "^1.22.0", + "@lodestar/config": "^1.22.0", + "@lodestar/params": "^1.22.0", + "@lodestar/types": "^1.22.0", + "@lodestar/utils": "^1.22.0", "mitt": "^3.0.0" }, "devDependencies": { - "@chainsafe/as-sha256": "^0.4.1", + "@chainsafe/as-sha256": "^0.5.0", "@types/qs": "^6.9.7", - "fastify": "^4.27.0", + "fastify": "^5.0.0", "qs": "^6.11.1", "uint8arrays": "^5.0.1" }, diff --git a/packages/light-client/src/index.ts b/packages/light-client/src/index.ts index 16ecf1adf939..5ec0dd73b469 100644 --- a/packages/light-client/src/index.ts +++ b/packages/light-client/src/index.ts @@ -1,5 +1,4 @@ import mitt from "mitt"; -import {fromHexString, toHexString} from "@chainsafe/ssz"; import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD} from "@lodestar/params"; import { LightClientBootstrap, @@ -13,7 +12,7 @@ import { SyncPeriod, } from "@lodestar/types"; import {createBeaconConfig, BeaconConfig, ChainForkConfig} from "@lodestar/config"; -import {isErrorAborted, sleep} from "@lodestar/utils"; +import {fromHex, isErrorAborted, sleep, toRootHex} from "@lodestar/utils"; import {getCurrentSlot, slotWithFutureTolerance, timeUntilNextEpoch} from "./utils/clock.js"; import {chunkifyInclusiveRange} from "./utils/chunkify.js"; import {LightclientEmitter, LightclientEvent} from "./events.js"; @@ -120,7 +119,7 @@ export class Lightclient { this.genesisTime = genesisData.genesisTime; this.genesisValidatorsRoot = typeof genesisData.genesisValidatorsRoot === "string" - ? fromHexString(genesisData.genesisValidatorsRoot) + ? 
fromHex(genesisData.genesisValidatorsRoot) : genesisData.genesisValidatorsRoot; this.config = createBeaconConfig(config, this.genesisValidatorsRoot); @@ -162,7 +161,7 @@ export class Lightclient { const {transport, checkpointRoot} = args; // Fetch bootstrap state with proof at the trusted block root - const {data: bootstrap} = await transport.getBootstrap(toHexString(checkpointRoot)); + const {data: bootstrap} = await transport.getBootstrap(toRootHex(checkpointRoot)); validateLightClientBootstrap(args.config, checkpointRoot, bootstrap); diff --git a/packages/light-client/src/spec/index.ts b/packages/light-client/src/spec/index.ts index fc1a431129e8..0934e15b1c17 100644 --- a/packages/light-client/src/spec/index.ts +++ b/packages/light-client/src/spec/index.ts @@ -10,7 +10,7 @@ import { import {computeSyncPeriodAtSlot} from "../utils/index.js"; import {getSyncCommitteeAtPeriod, processLightClientUpdate, ProcessUpdateOpts} from "./processLightClientUpdate.js"; import {ILightClientStore, LightClientStore, LightClientStoreEvents} from "./store.js"; -import {ZERO_FINALITY_BRANCH, ZERO_HEADER, ZERO_NEXT_SYNC_COMMITTEE_BRANCH, ZERO_SYNC_COMMITTEE} from "./utils.js"; +import {ZERO_FINALITY_BRANCH, ZERO_HEADER, ZERO_SYNC_COMMITTEE, getZeroSyncCommitteeBranch} from "./utils.js"; export {isBetterUpdate, toLightClientUpdateSummary} from "./isBetterUpdate.js"; export type {LightClientUpdateSummary} from "./isBetterUpdate.js"; @@ -37,7 +37,7 @@ export class LightclientSpec { this.onUpdate(currentSlot, { attestedHeader: finalityUpdate.attestedHeader, nextSyncCommittee: ZERO_SYNC_COMMITTEE, - nextSyncCommitteeBranch: ZERO_NEXT_SYNC_COMMITTEE_BRANCH, + nextSyncCommitteeBranch: getZeroSyncCommitteeBranch(this.config.getForkName(finalityUpdate.signatureSlot)), finalizedHeader: finalityUpdate.finalizedHeader, finalityBranch: finalityUpdate.finalityBranch, syncAggregate: finalityUpdate.syncAggregate, @@ -49,7 +49,7 @@ export class LightclientSpec { this.onUpdate(currentSlot, { 
attestedHeader: optimisticUpdate.attestedHeader, nextSyncCommittee: ZERO_SYNC_COMMITTEE, - nextSyncCommitteeBranch: ZERO_NEXT_SYNC_COMMITTEE_BRANCH, + nextSyncCommitteeBranch: getZeroSyncCommitteeBranch(this.config.getForkName(optimisticUpdate.signatureSlot)), finalizedHeader: {beacon: ZERO_HEADER}, finalityBranch: ZERO_FINALITY_BRANCH, syncAggregate: optimisticUpdate.syncAggregate, diff --git a/packages/light-client/src/spec/utils.ts b/packages/light-client/src/spec/utils.ts index 65d6f3e84c59..408412464606 100644 --- a/packages/light-client/src/spec/utils.ts +++ b/packages/light-client/src/spec/utils.ts @@ -7,6 +7,9 @@ import { ForkName, BLOCK_BODY_EXECUTION_PAYLOAD_DEPTH as EXECUTION_PAYLOAD_DEPTH, BLOCK_BODY_EXECUTION_PAYLOAD_INDEX as EXECUTION_PAYLOAD_INDEX, + NEXT_SYNC_COMMITTEE_DEPTH_ELECTRA, + isForkPostElectra, + FINALIZED_ROOT_DEPTH_ELECTRA, } from "@lodestar/params"; import { ssz, @@ -17,17 +20,18 @@ import { LightClientUpdate, BeaconBlockHeader, SyncCommittee, + isElectraLightClientUpdate, } from "@lodestar/types"; import {ChainForkConfig} from "@lodestar/config"; import {isValidMerkleBranch, computeEpochAtSlot, computeSyncPeriodAtSlot} from "../utils/index.js"; +import {normalizeMerkleBranch} from "../utils/normalizeMerkleBranch.js"; import {LightClientStore} from "./store.js"; export const GENESIS_SLOT = 0; export const ZERO_HASH = new Uint8Array(32); export const ZERO_PUBKEY = new Uint8Array(48); export const ZERO_SYNC_COMMITTEE = ssz.altair.SyncCommittee.defaultValue(); -export const ZERO_NEXT_SYNC_COMMITTEE_BRANCH = Array.from({length: NEXT_SYNC_COMMITTEE_DEPTH}, () => ZERO_HASH); export const ZERO_HEADER = ssz.phase0.BeaconBlockHeader.defaultValue(); export const ZERO_FINALITY_BRANCH = Array.from({length: FINALIZED_ROOT_DEPTH}, () => ZERO_HASH); /** From https://notes.ethereum.org/@vbuterin/extended_light_client_protocol#Optimistic-head-determining-function */ @@ -41,10 +45,19 @@ export function getSafetyThreshold(maxActiveParticipants: number): 
number { return Math.floor(maxActiveParticipants / SAFETY_THRESHOLD_FACTOR); } +export function getZeroSyncCommitteeBranch(fork: ForkName): Uint8Array[] { + const nextSyncCommitteeDepth = isForkPostElectra(fork) + ? NEXT_SYNC_COMMITTEE_DEPTH_ELECTRA + : NEXT_SYNC_COMMITTEE_DEPTH; + + return Array.from({length: nextSyncCommitteeDepth}, () => ZERO_HASH); +} + export function isSyncCommitteeUpdate(update: LightClientUpdate): boolean { return ( // Fast return for when constructing full LightClientUpdate from partial updates - update.nextSyncCommitteeBranch !== ZERO_NEXT_SYNC_COMMITTEE_BRANCH && + update.nextSyncCommitteeBranch !== + getZeroSyncCommitteeBranch(isElectraLightClientUpdate(update) ? ForkName.electra : ForkName.altair) && update.nextSyncCommitteeBranch.some((branch) => !byteArrayEquals(branch, ZERO_HASH)) ); } @@ -112,6 +125,13 @@ export function upgradeLightClientHeader( // Break if no further upgradation is required else fall through if (ForkSeq[targetFork] <= ForkSeq.deneb) break; + + // eslint-disable-next-line no-fallthrough + case ForkName.electra: + // No changes to LightClientHeader in Electra + + // Break if no further upgrades is required else fall through + if (ForkSeq[targetFork] <= ForkSeq.electra) break; } return upgradedHeader; } @@ -163,6 +183,14 @@ export function upgradeLightClientUpdate( ): LightClientUpdate { update.attestedHeader = upgradeLightClientHeader(config, targetFork, update.attestedHeader); update.finalizedHeader = upgradeLightClientHeader(config, targetFork, update.finalizedHeader); + update.nextSyncCommitteeBranch = normalizeMerkleBranch( + update.nextSyncCommitteeBranch, + isForkPostElectra(targetFork) ? NEXT_SYNC_COMMITTEE_DEPTH_ELECTRA : NEXT_SYNC_COMMITTEE_DEPTH + ); + update.finalityBranch = normalizeMerkleBranch( + update.finalityBranch, + isForkPostElectra(targetFork) ? 
FINALIZED_ROOT_DEPTH_ELECTRA : FINALIZED_ROOT_DEPTH + ); return update; } @@ -174,6 +202,10 @@ export function upgradeLightClientFinalityUpdate( ): LightClientFinalityUpdate { finalityUpdate.attestedHeader = upgradeLightClientHeader(config, targetFork, finalityUpdate.attestedHeader); finalityUpdate.finalizedHeader = upgradeLightClientHeader(config, targetFork, finalityUpdate.finalizedHeader); + finalityUpdate.finalityBranch = normalizeMerkleBranch( + finalityUpdate.finalityBranch, + isForkPostElectra(targetFork) ? FINALIZED_ROOT_DEPTH_ELECTRA : FINALIZED_ROOT_DEPTH + ); return finalityUpdate; } diff --git a/packages/light-client/src/spec/validateLightClientBootstrap.ts b/packages/light-client/src/spec/validateLightClientBootstrap.ts index 30540da24bd1..2eafea0791f0 100644 --- a/packages/light-client/src/spec/validateLightClientBootstrap.ts +++ b/packages/light-client/src/spec/validateLightClientBootstrap.ts @@ -2,11 +2,14 @@ import {byteArrayEquals} from "@chainsafe/ssz"; import {LightClientBootstrap, Root, ssz} from "@lodestar/types"; import {ChainForkConfig} from "@lodestar/config"; import {toHex} from "@lodestar/utils"; +import {isForkPostElectra} from "@lodestar/params"; import {isValidMerkleBranch} from "../utils/verifyMerkleBranch.js"; import {isValidLightClientHeader} from "./utils.js"; const CURRENT_SYNC_COMMITTEE_INDEX = 22; const CURRENT_SYNC_COMMITTEE_DEPTH = 5; +const CURRENT_SYNC_COMMITTEE_INDEX_ELECTRA = 22; +const CURRENT_SYNC_COMMITTEE_DEPTH_ELECTRA = 6; export function validateLightClientBootstrap( config: ChainForkConfig, @@ -14,6 +17,7 @@ export function validateLightClientBootstrap( bootstrap: LightClientBootstrap ): void { const headerRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(bootstrap.header.beacon); + const fork = config.getForkName(bootstrap.header.beacon.slot); if (!isValidLightClientHeader(config, bootstrap.header)) { throw Error("Bootstrap Header is not Valid Light Client Header"); @@ -27,8 +31,8 @@ export function 
validateLightClientBootstrap( !isValidMerkleBranch( ssz.altair.SyncCommittee.hashTreeRoot(bootstrap.currentSyncCommittee), bootstrap.currentSyncCommitteeBranch, - CURRENT_SYNC_COMMITTEE_DEPTH, - CURRENT_SYNC_COMMITTEE_INDEX, + isForkPostElectra(fork) ? CURRENT_SYNC_COMMITTEE_DEPTH_ELECTRA : CURRENT_SYNC_COMMITTEE_DEPTH, + isForkPostElectra(fork) ? CURRENT_SYNC_COMMITTEE_INDEX_ELECTRA : CURRENT_SYNC_COMMITTEE_INDEX, bootstrap.header.beacon.stateRoot ) ) { diff --git a/packages/light-client/src/spec/validateLightClientUpdate.ts b/packages/light-client/src/spec/validateLightClientUpdate.ts index fde760da3b05..9a5ea1985f16 100644 --- a/packages/light-client/src/spec/validateLightClientUpdate.ts +++ b/packages/light-client/src/spec/validateLightClientUpdate.ts @@ -1,15 +1,19 @@ import bls from "@chainsafe/bls"; import type {PublicKey, Signature} from "@chainsafe/bls/types"; -import {LightClientUpdate, Root, ssz} from "@lodestar/types"; +import {LightClientUpdate, Root, isElectraLightClientUpdate, ssz} from "@lodestar/types"; import {ChainForkConfig} from "@lodestar/config"; import { FINALIZED_ROOT_INDEX, FINALIZED_ROOT_DEPTH, - NEXT_SYNC_COMMITTEE_INDEX, NEXT_SYNC_COMMITTEE_DEPTH, MIN_SYNC_COMMITTEE_PARTICIPANTS, DOMAIN_SYNC_COMMITTEE, GENESIS_SLOT, + NEXT_SYNC_COMMITTEE_DEPTH_ELECTRA, + NEXT_SYNC_COMMITTEE_INDEX_ELECTRA, + NEXT_SYNC_COMMITTEE_INDEX, + FINALIZED_ROOT_DEPTH_ELECTRA, + FINALIZED_ROOT_INDEX_ELECTRA, } from "@lodestar/params"; import {getParticipantPubkeys, sumBits} from "../utils/utils.js"; import {isValidMerkleBranch} from "../utils/index.js"; @@ -78,8 +82,8 @@ export function validateLightClientUpdate( !isValidMerkleBranch( finalizedRoot, update.finalityBranch, - FINALIZED_ROOT_DEPTH, - FINALIZED_ROOT_INDEX, + isElectraLightClientUpdate(update) ? FINALIZED_ROOT_DEPTH_ELECTRA : FINALIZED_ROOT_DEPTH, + isElectraLightClientUpdate(update) ? 
FINALIZED_ROOT_INDEX_ELECTRA : FINALIZED_ROOT_INDEX, update.attestedHeader.beacon.stateRoot ) ) { @@ -98,8 +102,8 @@ export function validateLightClientUpdate( !isValidMerkleBranch( ssz.altair.SyncCommittee.hashTreeRoot(update.nextSyncCommittee), update.nextSyncCommitteeBranch, - NEXT_SYNC_COMMITTEE_DEPTH, - NEXT_SYNC_COMMITTEE_INDEX, + isElectraLightClientUpdate(update) ? NEXT_SYNC_COMMITTEE_DEPTH_ELECTRA : NEXT_SYNC_COMMITTEE_DEPTH, + isElectraLightClientUpdate(update) ? NEXT_SYNC_COMMITTEE_INDEX_ELECTRA : NEXT_SYNC_COMMITTEE_INDEX, update.attestedHeader.beacon.stateRoot ) ) { diff --git a/packages/light-client/src/utils/api.ts b/packages/light-client/src/utils/api.ts index 7947aa96dd3e..6ccb187052e1 100644 --- a/packages/light-client/src/utils/api.ts +++ b/packages/light-client/src/utils/api.ts @@ -1,13 +1,13 @@ -import {getClient, ApiClient} from "@lodestar/api"; +import {getClient, ApiClient, ApiRequestInit} from "@lodestar/api"; import {ChainForkConfig, createChainForkConfig} from "@lodestar/config"; import {NetworkName, networksChainConfig} from "@lodestar/config/networks"; -export function getApiFromUrl(url: string, network: NetworkName): ApiClient { +export function getApiFromUrl(url: string, network: NetworkName, init?: ApiRequestInit): ApiClient { if (!(network in networksChainConfig)) { throw Error(`Invalid network name "${network}". 
Valid options are: ${Object.keys(networksChainConfig).join()}`); } - return getClient({urls: [url]}, {config: createChainForkConfig(networksChainConfig[network])}); + return getClient({urls: [url], globalInit: init}, {config: createChainForkConfig(networksChainConfig[network])}); } export function getChainForkConfigFromNetwork(network: NetworkName): ChainForkConfig { diff --git a/packages/light-client/src/utils/normalizeMerkleBranch.ts b/packages/light-client/src/utils/normalizeMerkleBranch.ts new file mode 100644 index 000000000000..ae3309f8ff2e --- /dev/null +++ b/packages/light-client/src/utils/normalizeMerkleBranch.ts @@ -0,0 +1,15 @@ +import {ZERO_HASH} from "../spec/utils.js"; + +export const SYNC_COMMITTEES_DEPTH = 4; +export const SYNC_COMMITTEES_INDEX = 11; + +/** + * Given merkle branch ``branch``, extend its depth according to ``depth`` + * If given ``depth`` is less than the depth of ``branch``, it will return + * unmodified ``branch`` + */ +export function normalizeMerkleBranch(branch: Uint8Array[], depth: number): Uint8Array[] { + const numExtraDepth = depth - branch.length; + + return [...Array.from({length: numExtraDepth}, () => ZERO_HASH), ...branch]; +} diff --git a/packages/light-client/src/validation.ts b/packages/light-client/src/validation.ts index e7839f115153..c756d612f3e7 100644 --- a/packages/light-client/src/validation.ts +++ b/packages/light-client/src/validation.ts @@ -1,6 +1,14 @@ import bls from "@chainsafe/bls"; import type {PublicKey, Signature} from "@chainsafe/bls/types"; -import {altair, LightClientFinalityUpdate, LightClientUpdate, Root, Slot, ssz} from "@lodestar/types"; +import { + altair, + isElectraLightClientUpdate, + LightClientFinalityUpdate, + LightClientUpdate, + Root, + Slot, + ssz, +} from "@lodestar/types"; import { FINALIZED_ROOT_INDEX, FINALIZED_ROOT_DEPTH, @@ -8,6 +16,9 @@ import { NEXT_SYNC_COMMITTEE_DEPTH, MIN_SYNC_COMMITTEE_PARTICIPANTS, DOMAIN_SYNC_COMMITTEE, + NEXT_SYNC_COMMITTEE_DEPTH_ELECTRA, + 
FINALIZED_ROOT_DEPTH_ELECTRA, + NEXT_SYNC_COMMITTEE_INDEX_ELECTRA, } from "@lodestar/params"; import {BeaconConfig} from "@lodestar/config"; import {isValidMerkleBranch} from "./utils/verifyMerkleBranch.js"; @@ -39,7 +50,11 @@ export function assertValidLightClientUpdate( if (isFinalized) { assertValidFinalityProof(update); } else { - assertZeroHashes(update.finalityBranch, FINALIZED_ROOT_DEPTH, "finalityBranches"); + assertZeroHashes( + update.finalityBranch, + isElectraLightClientUpdate(update) ? FINALIZED_ROOT_DEPTH_ELECTRA : FINALIZED_ROOT_DEPTH, + "finalityBranches" + ); } // DIFF FROM SPEC: @@ -99,8 +114,8 @@ export function assertValidSyncCommitteeProof(update: LightClientUpdate): void { !isValidMerkleBranch( ssz.altair.SyncCommittee.hashTreeRoot(update.nextSyncCommittee), update.nextSyncCommitteeBranch, - NEXT_SYNC_COMMITTEE_DEPTH, - NEXT_SYNC_COMMITTEE_INDEX, + isElectraLightClientUpdate(update) ? NEXT_SYNC_COMMITTEE_DEPTH_ELECTRA : NEXT_SYNC_COMMITTEE_DEPTH, + isElectraLightClientUpdate(update) ? 
NEXT_SYNC_COMMITTEE_INDEX_ELECTRA : NEXT_SYNC_COMMITTEE_INDEX, update.attestedHeader.beacon.stateRoot ) ) { diff --git a/packages/light-client/test/unit/utils.test.ts b/packages/light-client/test/unit/utils.test.ts index 91bfab113431..9913c6c462a4 100644 --- a/packages/light-client/test/unit/utils.test.ts +++ b/packages/light-client/test/unit/utils.test.ts @@ -1,6 +1,8 @@ import {describe, it, expect} from "vitest"; import {isValidMerkleBranch} from "../../src/utils/verifyMerkleBranch.js"; import {computeMerkleBranch} from "../utils/utils.js"; +import {normalizeMerkleBranch} from "../../src/utils/normalizeMerkleBranch.js"; +import {ZERO_HASH} from "../../src/spec/utils.js"; describe("utils", () => { it("constructMerkleBranch", () => { @@ -11,4 +13,18 @@ describe("utils", () => { expect(isValidMerkleBranch(leaf, proof, depth, index, root)).toBe(true); }); + it("normalizeMerkleBranch", () => { + const branch: Uint8Array[] = []; + const branchDepth = 5; + const newDepth = 7; + + for (let i = 0; i < branchDepth; i++) { + branch.push(new Uint8Array(Array.from({length: 32}, () => i))); + } + + const normalizedBranch = normalizeMerkleBranch(branch, newDepth); + const expectedNormalizedBranch = [ZERO_HASH, ZERO_HASH, ...branch]; + + expect(normalizedBranch).toEqual(expectedNormalizedBranch); + }); }); diff --git a/packages/logger/package.json b/packages/logger/package.json index 117040580d60..196fb306a8b0 100644 --- a/packages/logger/package.json +++ b/packages/logger/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.20.2", + "version": "1.22.0", "type": "module", "exports": { ".": { @@ -66,14 +66,14 @@ }, "types": "lib/index.d.ts", "dependencies": { - "@lodestar/utils": "^1.20.2", + "@lodestar/utils": "^1.22.0", "winston": "^3.8.2", "winston-daily-rotate-file": "^4.7.1", "winston-transport": "^4.5.0" }, "devDependencies": { "@chainsafe/threads": "^1.11.1", - "@lodestar/test-utils": "^1.20.2", + 
"@lodestar/test-utils": "^1.22.0", "@types/triple-beam": "^1.3.2", "triple-beam": "^1.3.0" }, diff --git a/packages/logger/src/utils/json.ts b/packages/logger/src/utils/json.ts index f6f2d85487c7..7408de582dd1 100644 --- a/packages/logger/src/utils/json.ts +++ b/packages/logger/src/utils/json.ts @@ -1,4 +1,4 @@ -import {LodestarError, mapValues, toHexString} from "@lodestar/utils"; +import {LodestarError, mapValues, toHex} from "@lodestar/utils"; const MAX_DEPTH = 0; @@ -29,7 +29,7 @@ export function logCtxToJson(arg: unknown, depth = 0, fromError = false): LogDat if (arg === null) return "null"; if (arg instanceof Uint8Array) { - return toHexString(arg); + return toHex(arg); } // For any type that may include recursiveness break early at the first level @@ -90,7 +90,7 @@ export function logCtxToString(arg: unknown, depth = 0, fromError = false): stri if (arg === null) return "null"; if (arg instanceof Uint8Array) { - return toHexString(arg); + return toHex(arg); } // For any type that may include recursiveness break early at the first level diff --git a/packages/params/CHANGELOG.md b/packages/params/CHANGELOG.md deleted file mode 100644 index 20c296a0d5c7..000000000000 --- a/packages/params/CHANGELOG.md +++ /dev/null @@ -1,19 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
- -### [Unreleased] - -#### Breaking change - -- presets now export {params: IBeaconPreset} instead of {...IBeaconPreset} - -## [0.2.0] - -### Added - -- `RANDOM_SUBNETS_PER_VALIDATOR` and `EPOCHS_PER_RANDOM_SUBNET_SUBSCRIPTION` constant params -- TODO: check what else is added diff --git a/packages/params/package.json b/packages/params/package.json index 20f50f625cfd..c281f97a41ec 100644 --- a/packages/params/package.json +++ b/packages/params/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/params", - "version": "1.20.2", + "version": "1.22.0", "description": "Chain parameters required for lodestar", "author": "ChainSafe Systems", "license": "Apache-2.0", diff --git a/packages/params/src/forkName.ts b/packages/params/src/forkName.ts index a5f6d49d1cef..42e8917942d2 100644 --- a/packages/params/src/forkName.ts +++ b/packages/params/src/forkName.ts @@ -7,6 +7,7 @@ export enum ForkName { bellatrix = "bellatrix", capella = "capella", deneb = "deneb", + electra = "electra", } /** @@ -18,6 +19,7 @@ export enum ForkSeq { bellatrix = 2, capella = 3, deneb = 4, + electra = 5, } function exclude(coll: T[], val: U[]): Exclude[] { @@ -78,3 +80,16 @@ export const forkBlobs = exclude(forkAll, [ForkName.phase0, ForkName.altair, For export function isForkBlobs(fork: ForkName): fork is ForkBlobs { return isForkWithdrawals(fork) && fork !== ForkName.capella; } + +export type ForkPreElectra = ForkPreBlobs | ForkName.deneb; +export type ForkPostElectra = Exclude; +export const forkPostElectra = exclude(forkAll, [ + ForkName.phase0, + ForkName.altair, + ForkName.bellatrix, + ForkName.capella, + ForkName.deneb, +]); +export function isForkPostElectra(fork: ForkName): fork is ForkPostElectra { + return isForkBlobs(fork) && fork !== ForkName.deneb; +} diff --git a/packages/params/src/index.ts b/packages/params/src/index.ts index 6a95e3ca632e..aa6e97641526 100644 --- a/packages/params/src/index.ts +++ b/packages/params/src/index.ts @@ -93,6 +93,21 @@ export const { 
MAX_BLOB_COMMITMENTS_PER_BLOCK, MAX_BLOBS_PER_BLOCK, KZG_COMMITMENT_INCLUSION_PROOF_DEPTH, + + MAX_EFFECTIVE_BALANCE_ELECTRA, + MIN_ACTIVATION_BALANCE, + PENDING_BALANCE_DEPOSITS_LIMIT, + PENDING_PARTIAL_WITHDRAWALS_LIMIT, + PENDING_CONSOLIDATIONS_LIMIT, + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA, + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD, + + MAX_DEPOSIT_REQUESTS_PER_PAYLOAD, + MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD, + MAX_ATTESTER_SLASHINGS_ELECTRA, + MAX_ATTESTATIONS_ELECTRA, + MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP, + WHISTLEBLOWER_REWARD_QUOTIENT_ELECTRA, } = activePreset; //////////// @@ -109,11 +124,14 @@ export const FAR_FUTURE_EPOCH = Infinity; export const BASE_REWARDS_PER_EPOCH = 4; export const DEPOSIT_CONTRACT_TREE_DEPTH = 2 ** 5; // 32 export const JUSTIFICATION_BITS_LENGTH = 4; +export const ZERO_HASH = Buffer.alloc(32, 0); +export const ZERO_HASH_HEX = "0x" + "00".repeat(32); // Withdrawal prefixes // Since the prefixes are just 1 byte, we define and use them as number export const BLS_WITHDRAWAL_PREFIX = 0; export const ETH1_ADDRESS_WITHDRAWAL_PREFIX = 1; +export const COMPOUNDING_WITHDRAWAL_PREFIX = 2; // Domain types @@ -128,7 +146,6 @@ export const DOMAIN_SYNC_COMMITTEE = Uint8Array.from([7, 0, 0, 0]); export const DOMAIN_SYNC_COMMITTEE_SELECTION_PROOF = Uint8Array.from([8, 0, 0, 0]); export const DOMAIN_CONTRIBUTION_AND_PROOF = Uint8Array.from([9, 0, 0, 0]); export const DOMAIN_BLS_TO_EXECUTION_CHANGE = Uint8Array.from([10, 0, 0, 0]); -export const DOMAIN_BLOB_SIDECAR = Uint8Array.from([11, 0, 0, 0]); // Application specific domains @@ -244,3 +261,13 @@ export const KZG_COMMITMENT_SUBTREE_INDEX0 = KZG_COMMITMENT_GINDEX0 - 2 ** KZG_C // ssz.deneb.BlobSidecars.elementType.fixedSize export const BLOBSIDECAR_FIXED_SIZE = ACTIVE_PRESET === PresetName.minimal ? 
131672 : 131928; + +// Electra Misc +export const UNSET_DEPOSIT_REQUESTS_START_INDEX = 2n ** 64n - 1n; +export const FULL_EXIT_REQUEST_AMOUNT = 0; +export const FINALIZED_ROOT_GINDEX_ELECTRA = 169; +export const FINALIZED_ROOT_DEPTH_ELECTRA = 7; +export const FINALIZED_ROOT_INDEX_ELECTRA = 41; +export const NEXT_SYNC_COMMITTEE_GINDEX_ELECTRA = 87; +export const NEXT_SYNC_COMMITTEE_DEPTH_ELECTRA = 6; +export const NEXT_SYNC_COMMITTEE_INDEX_ELECTRA = 23; diff --git a/packages/params/src/presets/mainnet.ts b/packages/params/src/presets/mainnet.ts index 42a705a07f03..ca599e990df4 100644 --- a/packages/params/src/presets/mainnet.ts +++ b/packages/params/src/presets/mainnet.ts @@ -118,4 +118,20 @@ export const mainnetPreset: BeaconPreset = { MAX_BLOB_COMMITMENTS_PER_BLOCK: 4096, MAX_BLOBS_PER_BLOCK: 6, KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: 17, + + // ELECTRA + MAX_DEPOSIT_REQUESTS_PER_PAYLOAD: 8192, + MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD: 16, + MAX_ATTESTER_SLASHINGS_ELECTRA: 1, + MAX_ATTESTATIONS_ELECTRA: 8, + MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP: 8, + // 2**11 * 10**9 (= 2,048,000,000,000) Gwei + MAX_EFFECTIVE_BALANCE_ELECTRA: 2048000000000, + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA: 4096, + MIN_ACTIVATION_BALANCE: 32000000000, + PENDING_BALANCE_DEPOSITS_LIMIT: 134217728, + PENDING_PARTIAL_WITHDRAWALS_LIMIT: 134217728, + PENDING_CONSOLIDATIONS_LIMIT: 262144, + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: 1, + WHISTLEBLOWER_REWARD_QUOTIENT_ELECTRA: 4096, }; diff --git a/packages/params/src/presets/minimal.ts b/packages/params/src/presets/minimal.ts index b940841a0429..5dc8fc10d803 100644 --- a/packages/params/src/presets/minimal.ts +++ b/packages/params/src/presets/minimal.ts @@ -119,4 +119,20 @@ export const minimalPreset: BeaconPreset = { MAX_BLOB_COMMITMENTS_PER_BLOCK: 16, MAX_BLOBS_PER_BLOCK: 6, KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: 9, + + // ELECTRA + MAX_DEPOSIT_REQUESTS_PER_PAYLOAD: 4, + MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD: 2, + MAX_ATTESTER_SLASHINGS_ELECTRA: 1, + 
MAX_ATTESTATIONS_ELECTRA: 8, + MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP: 1, + // 2**11 * 10**9 (= 2,048,000,000,000) Gwei + MAX_EFFECTIVE_BALANCE_ELECTRA: 2048000000000, + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA: 4096, + MIN_ACTIVATION_BALANCE: 32000000000, + PENDING_BALANCE_DEPOSITS_LIMIT: 134217728, + PENDING_PARTIAL_WITHDRAWALS_LIMIT: 64, + PENDING_CONSOLIDATIONS_LIMIT: 64, + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: 1, + WHISTLEBLOWER_REWARD_QUOTIENT_ELECTRA: 4096, }; diff --git a/packages/params/src/types.ts b/packages/params/src/types.ts index 3c5ba6381131..e867b4a3cf71 100644 --- a/packages/params/src/types.ts +++ b/packages/params/src/types.ts @@ -82,6 +82,21 @@ export type BeaconPreset = { MAX_BLOB_COMMITMENTS_PER_BLOCK: number; MAX_BLOBS_PER_BLOCK: number; KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: number; + + // ELECTRA + MAX_DEPOSIT_REQUESTS_PER_PAYLOAD: number; + MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD: number; + MAX_ATTESTER_SLASHINGS_ELECTRA: number; + MAX_ATTESTATIONS_ELECTRA: number; + MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP: number; + MAX_EFFECTIVE_BALANCE_ELECTRA: number; + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA: number; + MIN_ACTIVATION_BALANCE: number; + PENDING_BALANCE_DEPOSITS_LIMIT: number; + PENDING_PARTIAL_WITHDRAWALS_LIMIT: number; + PENDING_CONSOLIDATIONS_LIMIT: number; + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: number; + WHISTLEBLOWER_REWARD_QUOTIENT_ELECTRA: number; }; /** @@ -167,6 +182,21 @@ export const beaconPresetTypes: BeaconPresetTypes = { MAX_BLOB_COMMITMENTS_PER_BLOCK: "number", MAX_BLOBS_PER_BLOCK: "number", KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: "number", + + // ELECTRA + MAX_DEPOSIT_REQUESTS_PER_PAYLOAD: "number", + MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD: "number", + MAX_ATTESTER_SLASHINGS_ELECTRA: "number", + MAX_ATTESTATIONS_ELECTRA: "number", + MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP: "number", + MAX_EFFECTIVE_BALANCE_ELECTRA: "number", + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA: "number", + MIN_ACTIVATION_BALANCE: "number", + 
PENDING_BALANCE_DEPOSITS_LIMIT: "number", + PENDING_PARTIAL_WITHDRAWALS_LIMIT: "number", + PENDING_CONSOLIDATIONS_LIMIT: "number", + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: "number", + WHISTLEBLOWER_REWARD_QUOTIENT_ELECTRA: "number", }; type BeaconPresetTypes = { diff --git a/packages/params/test/e2e/ensure-config-is-synced.test.ts b/packages/params/test/e2e/ensure-config-is-synced.test.ts index 06fb4bae000c..38168fa02bae 100644 --- a/packages/params/test/e2e/ensure-config-is-synced.test.ts +++ b/packages/params/test/e2e/ensure-config-is-synced.test.ts @@ -8,7 +8,7 @@ import {loadConfigYaml} from "../yaml.js"; // Not e2e, but slow. Run with e2e tests /** https://github.com/ethereum/consensus-specs/releases */ -const specConfigCommit = "v1.4.0-beta.5"; +const specConfigCommit = "v1.5.0-alpha.3"; describe("Ensure config is synced", function () { vi.setConfig({testTimeout: 60 * 1000}); diff --git a/packages/params/test/unit/__snapshots__/forkName.test.ts.snap b/packages/params/test/unit/__snapshots__/forkName.test.ts.snap index 3d7009a7c2ec..a54f9dd6913b 100644 --- a/packages/params/test/unit/__snapshots__/forkName.test.ts.snap +++ b/packages/params/test/unit/__snapshots__/forkName.test.ts.snap @@ -7,12 +7,14 @@ exports[`forkName > should have valid allForks 1`] = ` "bellatrix", "capella", "deneb", + "electra", ] `; exports[`forkName > should have valid blobs forks 1`] = ` [ "deneb", + "electra", ] `; @@ -21,6 +23,7 @@ exports[`forkName > should have valid execution forks 1`] = ` "bellatrix", "capella", "deneb", + "electra", ] `; @@ -30,6 +33,7 @@ exports[`forkName > should have valid lightclient forks 1`] = ` "bellatrix", "capella", "deneb", + "electra", ] `; @@ -37,5 +41,6 @@ exports[`forkName > should have valid withdrawal forks 1`] = ` [ "capella", "deneb", + "electra", ] `; diff --git a/packages/prover/package.json b/packages/prover/package.json index f5d8c26105d4..6a31b5b1baa0 100644 --- a/packages/prover/package.json +++ b/packages/prover/package.json @@ -11,7 
+11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.20.2", + "version": "1.22.0", "type": "module", "exports": { ".": { @@ -69,13 +69,13 @@ "@ethereumjs/tx": "^4.1.2", "@ethereumjs/util": "^8.0.6", "@ethereumjs/vm": "^6.4.2", - "@lodestar/api": "^1.20.2", - "@lodestar/config": "^1.20.2", - "@lodestar/light-client": "^1.20.2", - "@lodestar/logger": "^1.20.2", - "@lodestar/params": "^1.20.2", - "@lodestar/types": "^1.20.2", - "@lodestar/utils": "^1.20.2", + "@lodestar/api": "^1.22.0", + "@lodestar/config": "^1.22.0", + "@lodestar/light-client": "^1.22.0", + "@lodestar/logger": "^1.22.0", + "@lodestar/params": "^1.22.0", + "@lodestar/types": "^1.22.0", + "@lodestar/utils": "^1.22.0", "ethereum-cryptography": "^2.0.0", "find-up": "^6.3.0", "http-proxy": "^1.18.1", @@ -84,7 +84,7 @@ "yargs": "^17.7.1" }, "devDependencies": { - "@lodestar/test-utils": "^1.20.2", + "@lodestar/test-utils": "^1.22.0", "@types/http-proxy": "^1.17.10", "@types/yargs": "^17.0.24", "axios": "^1.3.4", diff --git a/packages/reqresp/package.json b/packages/reqresp/package.json index 5f72d47e4efb..44dcb8048f99 100644 --- a/packages/reqresp/package.json +++ b/packages/reqresp/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.20.2", + "version": "1.22.0", "type": "module", "exports": { ".": { @@ -54,9 +54,9 @@ "dependencies": { "@chainsafe/fast-crc32c": "^4.1.1", "@libp2p/interface": "^1.3.0", - "@lodestar/config": "^1.20.2", - "@lodestar/params": "^1.20.2", - "@lodestar/utils": "^1.20.2", + "@lodestar/config": "^1.22.0", + "@lodestar/params": "^1.22.0", + "@lodestar/utils": "^1.22.0", "it-all": "^3.0.4", "it-pipe": "^3.0.1", "snappy": "^7.2.2", @@ -65,8 +65,8 @@ "uint8arraylist": "^2.4.7" }, "devDependencies": { - "@lodestar/logger": "^1.20.2", - "@lodestar/types": "^1.20.2", + "@lodestar/logger": "^1.22.0", + "@lodestar/types": "^1.22.0", "libp2p": "1.4.3" }, "peerDependencies": { diff --git 
a/packages/spec-test-util/package.json b/packages/spec-test-util/package.json index e1240193671b..b4aabf0140e8 100644 --- a/packages/spec-test-util/package.json +++ b/packages/spec-test-util/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/spec-test-util", - "version": "1.20.2", + "version": "1.22.0", "description": "Spec test suite generator from yaml test files", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -62,7 +62,7 @@ "blockchain" ], "dependencies": { - "@lodestar/utils": "^1.20.2", + "@lodestar/utils": "^1.22.0", "axios": "^1.3.4", "rimraf": "^4.4.1", "snappyjs": "^0.7.0", diff --git a/packages/state-transition/LICENSE b/packages/state-transition/LICENSE index 153d416dc8d2..261eeb9e9f8b 100644 --- a/packages/state-transition/LICENSE +++ b/packages/state-transition/LICENSE @@ -1,165 +1,201 @@ - GNU LESSER GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - - This version of the GNU Lesser General Public License incorporates -the terms and conditions of version 3 of the GNU General Public -License, supplemented by the additional permissions listed below. - - 0. Additional Definitions. - - As used herein, "this License" refers to version 3 of the GNU Lesser -General Public License, and the "GNU GPL" refers to version 3 of the GNU -General Public License. - - "The Library" refers to a covered work governed by this License, -other than an Application or a Combined Work as defined below. - - An "Application" is any work that makes use of an interface provided -by the Library, but which is not otherwise based on the Library. -Defining a subclass of a class defined by the Library is deemed a mode -of using an interface provided by the Library. - - A "Combined Work" is a work produced by combining or linking an -Application with the Library. 
The particular version of the Library -with which the Combined Work was made is also called the "Linked -Version". - - The "Minimal Corresponding Source" for a Combined Work means the -Corresponding Source for the Combined Work, excluding any source code -for portions of the Combined Work that, considered in isolation, are -based on the Application, and not on the Linked Version. - - The "Corresponding Application Code" for a Combined Work means the -object code and/or source code for the Application, including any data -and utility programs needed for reproducing the Combined Work from the -Application, but excluding the System Libraries of the Combined Work. - - 1. Exception to Section 3 of the GNU GPL. - - You may convey a covered work under sections 3 and 4 of this License -without being bound by section 3 of the GNU GPL. - - 2. Conveying Modified Versions. - - If you modify a copy of the Library, and, in your modifications, a -facility refers to a function or data to be supplied by an Application -that uses the facility (other than as an argument passed when the -facility is invoked), then you may convey a copy of the modified -version: - - a) under this License, provided that you make a good faith effort to - ensure that, in the event an Application does not supply the - function or data, the facility still operates, and performs - whatever part of its purpose remains meaningful, or - - b) under the GNU GPL, with none of the additional permissions of - this License applicable to that copy. - - 3. Object Code Incorporating Material from Library Header Files. - - The object code form of an Application may incorporate material from -a header file that is part of the Library. 
You may convey such object -code under terms of your choice, provided that, if the incorporated -material is not limited to numerical parameters, data structure -layouts and accessors, or small macros, inline functions and templates -(ten or fewer lines in length), you do both of the following: - - a) Give prominent notice with each copy of the object code that the - Library is used in it and that the Library and its use are - covered by this License. - - b) Accompany the object code with a copy of the GNU GPL and this license - document. - - 4. Combined Works. - - You may convey a Combined Work under terms of your choice that, -taken together, effectively do not restrict modification of the -portions of the Library contained in the Combined Work and reverse -engineering for debugging such modifications, if you also do each of -the following: - - a) Give prominent notice with each copy of the Combined Work that - the Library is used in it and that the Library and its use are - covered by this License. - - b) Accompany the Combined Work with a copy of the GNU GPL and this license - document. - - c) For a Combined Work that displays copyright notices during - execution, include the copyright notice for the Library among - these notices, as well as a reference directing the user to the - copies of the GNU GPL and this license document. - - d) Do one of the following: - - 0) Convey the Minimal Corresponding Source under the terms of this - License, and the Corresponding Application Code in a form - suitable for, and under terms that permit, the user to - recombine or relink the Application with a modified version of - the Linked Version to produce a modified Combined Work, in the - manner specified by section 6 of the GNU GPL for conveying - Corresponding Source. - - 1) Use a suitable shared library mechanism for linking with the - Library. 
A suitable mechanism is one that (a) uses at run time - a copy of the Library already present on the user's computer - system, and (b) will operate properly with a modified version - of the Library that is interface-compatible with the Linked - Version. - - e) Provide Installation Information, but only if you would otherwise - be required to provide such information under section 6 of the - GNU GPL, and only to the extent that such information is - necessary to install and execute a modified version of the - Combined Work produced by recombining or relinking the - Application with a modified version of the Linked Version. (If - you use option 4d0, the Installation Information must accompany - the Minimal Corresponding Source and Corresponding Application - Code. If you use option 4d1, you must provide the Installation - Information in the manner specified by section 6 of the GNU GPL - for conveying Corresponding Source.) - - 5. Combined Libraries. - - You may place library facilities that are a work based on the -Library side by side in a single library together with other library -facilities that are not Applications and are not covered by this -License, and convey such a combined library under terms of your -choice, if you do both of the following: - - a) Accompany the combined library with a copy of the same work based - on the Library, uncombined with any other library facilities, - conveyed under the terms of this License. - - b) Give prominent notice with the combined library that part of it - is a work based on the Library, and explaining where to find the - accompanying uncombined form of the same work. - - 6. Revised Versions of the GNU Lesser General Public License. - - The Free Software Foundation may publish revised and/or new versions -of the GNU Lesser General Public License from time to time. Such new -versions will be similar in spirit to the present version, but may -differ in detail to address new problems or concerns. 
- - Each version is given a distinguishing version number. If the -Library as you received it specifies that a certain numbered version -of the GNU Lesser General Public License "or any later version" -applies to it, you have the option of following the terms and -conditions either of that published version or of any later version -published by the Free Software Foundation. If the Library as you -received it does not specify a version number of the GNU Lesser -General Public License, you may choose any version of the GNU Lesser -General Public License ever published by the Free Software Foundation. - - If the Library as you received it specifies that a proxy can decide -whether future versions of the GNU Lesser General Public License shall -apply, that proxy's public statement of acceptance of any version is -permanent authorization for you to choose that version for the -Library. \ No newline at end of file + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/state-transition/package.json b/packages/state-transition/package.json index 0271f64b684f..6816e5679cf5 100644 --- a/packages/state-transition/package.json +++ b/packages/state-transition/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.20.2", + "version": "1.22.0", "type": "module", "exports": { ".": { @@ -58,16 +58,18 @@ }, "types": "lib/index.d.ts", "dependencies": { - "@chainsafe/as-sha256": "^0.4.1", + "@chainsafe/as-sha256": "^0.5.0", "@chainsafe/blst": "^2.0.3", - "@chainsafe/persistent-merkle-tree": "^0.7.1", + "@chainsafe/persistent-merkle-tree": "^0.8.0", "@chainsafe/persistent-ts": "^0.19.1", - "@chainsafe/ssz": "^0.15.1", - "@lodestar/config": "^1.20.2", - "@lodestar/params": "^1.20.2", - "@lodestar/types": "^1.20.2", - "@lodestar/utils": "^1.20.2", - "bigint-buffer": "^1.1.5" + "@chainsafe/ssz": "^0.17.1", + "@lodestar/config": "^1.22.0", + "@lodestar/params": "^1.22.0", + "@lodestar/types": "^1.22.0", + "@chainsafe/pubkey-index-map": "2.0.0", + "@lodestar/utils": "^1.22.0", + "bigint-buffer": "^1.1.5", + "immutable": "^4.3.2" }, "keywords": [ "ethereum", diff --git a/packages/state-transition/src/block/index.ts b/packages/state-transition/src/block/index.ts index fdfc9e903518..3857511292c8 100644 --- a/packages/state-transition/src/block/index.ts +++ 
b/packages/state-transition/src/block/index.ts @@ -47,10 +47,12 @@ export function processBlock( // https://github.com/ethereum/consensus-specs/blob/b62c9e877990242d63aa17a2a59a49bc649a2f2e/specs/eip4844/beacon-chain.md#disabling-withdrawals if (fork >= ForkSeq.capella) { processWithdrawals( + fork, state as CachedBeaconStateCapella, fullOrBlindedPayload as capella.FullOrBlindedExecutionPayload ); } + processExecutionPayload(fork, state as CachedBeaconStateBellatrix, block.body, externalData); } diff --git a/packages/state-transition/src/block/initiateValidatorExit.ts b/packages/state-transition/src/block/initiateValidatorExit.ts index e34d4dda7002..d1420daef84c 100644 --- a/packages/state-transition/src/block/initiateValidatorExit.ts +++ b/packages/state-transition/src/block/initiateValidatorExit.ts @@ -1,7 +1,8 @@ import {CompositeViewDU} from "@chainsafe/ssz"; -import {FAR_FUTURE_EPOCH} from "@lodestar/params"; +import {FAR_FUTURE_EPOCH, ForkSeq} from "@lodestar/params"; import {ssz} from "@lodestar/types"; -import {CachedBeaconStateAllForks} from "../types.js"; +import {CachedBeaconStateAllForks, CachedBeaconStateElectra} from "../types.js"; +import {computeExitEpochAndUpdateChurn} from "../util/epoch.js"; /** * Initiate the exit of the validator with index ``index``. @@ -24,6 +25,7 @@ import {CachedBeaconStateAllForks} from "../types.js"; * Forcing consumers to pass the SubTree of `validator` directly mitigates this issue. */ export function initiateValidatorExit( + fork: ForkSeq, state: CachedBeaconStateAllForks, validator: CompositeViewDU ): void { @@ -34,18 +36,27 @@ export function initiateValidatorExit( return; } - // Limits the number of validators that can exit on each epoch. - // Expects all state.validators to follow this rule, i.e. no validator.exitEpoch is greater than exitQueueEpoch. - // If there the churnLimit is reached at this current exitQueueEpoch, advance epoch and reset churn. 
- if (epochCtx.exitQueueChurn >= epochCtx.churnLimit) { - epochCtx.exitQueueEpoch += 1; - epochCtx.exitQueueChurn = 1; // = 1 to account for this validator with exitQueueEpoch + if (fork < ForkSeq.electra) { + // Limits the number of validators that can exit on each epoch. + // Expects all state.validators to follow this rule, i.e. no validator.exitEpoch is greater than exitQueueEpoch. + // If there the churnLimit is reached at this current exitQueueEpoch, advance epoch and reset churn. + if (epochCtx.exitQueueChurn >= epochCtx.churnLimit) { + epochCtx.exitQueueEpoch += 1; + epochCtx.exitQueueChurn = 1; // = 1 to account for this validator with exitQueueEpoch + } else { + // Add this validator to the current exitQueueEpoch churn + epochCtx.exitQueueChurn += 1; + } + + // set validator exit epoch + validator.exitEpoch = epochCtx.exitQueueEpoch; } else { - // Add this validator to the current exitQueueEpoch churn - epochCtx.exitQueueChurn += 1; + // set validator exit epoch + // Note we don't use epochCtx.exitQueueChurn and exitQueueEpoch anymore + validator.exitEpoch = computeExitEpochAndUpdateChurn( + state as CachedBeaconStateElectra, + BigInt(validator.effectiveBalance) + ); } - - // set validator exit epoch and withdrawable epoch - validator.exitEpoch = epochCtx.exitQueueEpoch; - validator.withdrawableEpoch = epochCtx.exitQueueEpoch + config.MIN_VALIDATOR_WITHDRAWABILITY_DELAY; + validator.withdrawableEpoch = validator.exitEpoch + config.MIN_VALIDATOR_WITHDRAWABILITY_DELAY; } diff --git a/packages/state-transition/src/block/isValidIndexedAttestation.ts b/packages/state-transition/src/block/isValidIndexedAttestation.ts index e3965b97ee73..33d92a208260 100644 --- a/packages/state-transition/src/block/isValidIndexedAttestation.ts +++ b/packages/state-transition/src/block/isValidIndexedAttestation.ts @@ -1,4 +1,4 @@ -import {MAX_VALIDATORS_PER_COMMITTEE} from "@lodestar/params"; +import {ForkSeq, MAX_COMMITTEES_PER_SLOT, MAX_VALIDATORS_PER_COMMITTEE} from 
"@lodestar/params"; import {phase0} from "@lodestar/types"; import {CachedBeaconStateAllForks} from "../types.js"; import {verifySignatureSet} from "../util/index.js"; @@ -44,7 +44,11 @@ export function isValidIndexedAttestationBigint( */ export function isValidIndexedAttestationIndices(state: CachedBeaconStateAllForks, indices: number[]): boolean { // verify max number of indices - if (!(indices.length > 0 && indices.length <= MAX_VALIDATORS_PER_COMMITTEE)) { + const maxIndices = + state.config.getForkSeq(state.slot) >= ForkSeq.electra + ? MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT + : MAX_VALIDATORS_PER_COMMITTEE; + if (!(indices.length > 0 && indices.length <= maxIndices)) { return false; } diff --git a/packages/state-transition/src/block/processAttestationPhase0.ts b/packages/state-transition/src/block/processAttestationPhase0.ts index 248ba83b4ed2..ba6bc9089693 100644 --- a/packages/state-transition/src/block/processAttestationPhase0.ts +++ b/packages/state-transition/src/block/processAttestationPhase0.ts @@ -1,7 +1,7 @@ -import {toHexString} from "@chainsafe/ssz"; -import {Slot, phase0, ssz} from "@lodestar/types"; - +import {toRootHex} from "@lodestar/utils"; +import {Slot, Attestation, electra, phase0, ssz} from "@lodestar/types"; import {MIN_ATTESTATION_INCLUSION_DELAY, SLOTS_PER_EPOCH, ForkSeq} from "@lodestar/params"; +import {assert} from "@lodestar/utils"; import {computeEpochAtSlot} from "../util/index.js"; import {CachedBeaconStatePhase0, CachedBeaconStateAllForks} from "../types.js"; import {isValidIndexedAttestation} from "./index.js"; @@ -51,27 +51,18 @@ export function processAttestationPhase0( state.previousEpochAttestations.push(pendingAttestation); } - if (!isValidIndexedAttestation(state, epochCtx.getIndexedAttestation(attestation), verifySignature)) { + if (!isValidIndexedAttestation(state, epochCtx.getIndexedAttestation(ForkSeq.phase0, attestation), verifySignature)) { throw new Error("Attestation is not valid"); } } -export 
function validateAttestation( - fork: ForkSeq, - state: CachedBeaconStateAllForks, - attestation: phase0.Attestation -): void { +export function validateAttestation(fork: ForkSeq, state: CachedBeaconStateAllForks, attestation: Attestation): void { const {epochCtx} = state; const slot = state.slot; const data = attestation.data; const computedEpoch = computeEpochAtSlot(data.slot); const committeeCount = epochCtx.getCommitteeCountPerSlot(computedEpoch); - if (!(data.index < committeeCount)) { - throw new Error( - "Attestation committee index not within current committee count: " + - `committeeIndex=${data.index} committeeCount=${committeeCount}` - ); - } + if (!(data.target.epoch === epochCtx.previousShuffling.epoch || data.target.epoch === epochCtx.epoch)) { throw new Error( "Attestation target epoch not in previous or current epoch: " + @@ -93,12 +84,47 @@ export function validateAttestation( ); } - const committee = epochCtx.getBeaconCommittee(data.slot, data.index); - if (attestation.aggregationBits.bitLen !== committee.length) { - throw new Error( - "Attestation aggregation bits length does not match committee length: " + - `aggregationBitsLength=${attestation.aggregationBits.bitLen} committeeLength=${committee.length}` + if (fork >= ForkSeq.electra) { + assert.equal(data.index, 0, `AttestationData.index must be zero: index=${data.index}`); + const attestationElectra = attestation as electra.Attestation; + const committeeIndices = attestationElectra.committeeBits.getTrueBitIndexes(); + + if (committeeIndices.length === 0) { + throw Error("Attestation should have at least one committee bit set"); + } else { + const lastCommitteeIndex = committeeIndices[committeeIndices.length - 1]; + if (lastCommitteeIndex >= committeeCount) { + throw new Error( + `Attestation committee index exceeds committee count: lastCommitteeIndex=${lastCommitteeIndex} numCommittees=${committeeCount}` + ); + } + } + + // Get total number of attestation participant of every committee 
specified + const participantCount = committeeIndices + .map((committeeIndex) => epochCtx.getBeaconCommittee(data.slot, committeeIndex).length) + .reduce((acc, committeeSize) => acc + committeeSize, 0); + + assert.equal( + attestationElectra.aggregationBits.bitLen, + participantCount, + `Attestation aggregation bits length does not match total number of committee participant aggregationBitsLength=${attestation.aggregationBits.bitLen} participantCount=${participantCount}` ); + } else { + if (!(data.index < committeeCount)) { + throw new Error( + "Attestation committee index not within current committee count: " + + `committeeIndex=${data.index} committeeCount=${committeeCount}` + ); + } + + const committee = epochCtx.getBeaconCommittee(data.slot, data.index); + if (attestation.aggregationBits.bitLen !== committee.length) { + throw new Error( + "Attestation aggregation bits length does not match committee length: " + + `aggregationBitsLength=${attestation.aggregationBits.bitLen} committeeLength=${committee.length}` + ); + } } } @@ -113,5 +139,5 @@ export function isTimelyTarget(fork: ForkSeq, inclusionDistance: Slot): boolean } export function checkpointToStr(checkpoint: phase0.Checkpoint): string { - return `${toHexString(checkpoint.root)}:${checkpoint.epoch}`; + return `${toRootHex(checkpoint.root)}:${checkpoint.epoch}`; } diff --git a/packages/state-transition/src/block/processAttestations.ts b/packages/state-transition/src/block/processAttestations.ts index 2b132fa22e0b..844bda768570 100644 --- a/packages/state-transition/src/block/processAttestations.ts +++ b/packages/state-transition/src/block/processAttestations.ts @@ -1,4 +1,4 @@ -import {phase0} from "@lodestar/types"; +import {Attestation} from "@lodestar/types"; import {ForkSeq} from "@lodestar/params"; import {CachedBeaconStateAllForks, CachedBeaconStateAltair, CachedBeaconStatePhase0} from "../types.js"; import {processAttestationPhase0} from "./processAttestationPhase0.js"; @@ -10,7 +10,7 @@ import 
{processAttestationsAltair} from "./processAttestationsAltair.js"; export function processAttestations( fork: ForkSeq, state: CachedBeaconStateAllForks, - attestations: phase0.Attestation[], + attestations: Attestation[], verifySignatures = true ): void { if (fork === ForkSeq.phase0) { diff --git a/packages/state-transition/src/block/processAttestationsAltair.ts b/packages/state-transition/src/block/processAttestationsAltair.ts index e37629712194..046a23d7dc27 100644 --- a/packages/state-transition/src/block/processAttestationsAltair.ts +++ b/packages/state-transition/src/block/processAttestationsAltair.ts @@ -1,5 +1,5 @@ import {byteArrayEquals} from "@chainsafe/ssz"; -import {Epoch, phase0} from "@lodestar/types"; +import {Epoch, Attestation, phase0} from "@lodestar/types"; import {intSqrt} from "@lodestar/utils"; import { @@ -32,7 +32,7 @@ const SLOTS_PER_EPOCH_SQRT = intSqrt(SLOTS_PER_EPOCH); export function processAttestationsAltair( fork: ForkSeq, state: CachedBeaconStateAltair, - attestations: phase0.Attestation[], + attestations: Attestation[], verifySignature = true ): void { const {epochCtx} = state; @@ -49,8 +49,7 @@ export function processAttestationsAltair( validateAttestation(fork, state, attestation); // Retrieve the validator indices from the attestation participation bitfield - const committeeIndices = epochCtx.getBeaconCommittee(data.slot, data.index); - const attestingIndices = attestation.aggregationBits.intersectValues(committeeIndices); + const attestingIndices = epochCtx.getAttestingIndices(fork, attestation); // this check is done last because its the most expensive (if signature verification is toggled on) // TODO: Why should we verify an indexed attestation that we just created? 
If it's just for the signature @@ -76,6 +75,7 @@ export function processAttestationsAltair( // For each participant, update their participation // In epoch processing, this participation info is used to calculate balance updates let totalBalanceIncrementsWithWeight = 0; + const validators = state.validators; for (const index of attestingIndices) { const flags = epochParticipation.get(index); @@ -105,7 +105,7 @@ export function processAttestationsAltair( // TODO: describe issue. Compute progressive target balances // When processing each attestation, increase the cummulative target balance. Only applies post-altair if ((flagsNewSet & TIMELY_TARGET) === TIMELY_TARGET) { - const validator = state.validators.getReadonly(index); + const validator = validators.getReadonly(index); if (!validator.slashed) { if (inCurrentEpoch) { epochCtx.currentTargetUnslashedBalanceIncrements += effectiveBalanceIncrements[index]; diff --git a/packages/state-transition/src/block/processBlockHeader.ts b/packages/state-transition/src/block/processBlockHeader.ts index 755850969b4f..da3e389fb507 100644 --- a/packages/state-transition/src/block/processBlockHeader.ts +++ b/packages/state-transition/src/block/processBlockHeader.ts @@ -1,5 +1,6 @@ -import {toHexString, byteArrayEquals} from "@chainsafe/ssz"; +import {byteArrayEquals} from "@chainsafe/ssz"; import {BeaconBlock, BlindedBeaconBlock, ssz} from "@lodestar/types"; +import {toRootHex} from "@lodestar/utils"; import {CachedBeaconStateAllForks} from "../types.js"; import {ZERO_HASH} from "../constants/index.js"; import {blindedOrFullBlockToHeader} from "../util/index.js"; @@ -32,7 +33,7 @@ export function processBlockHeader(state: CachedBeaconStateAllForks, block: Beac // verify that the parent matches if (!byteArrayEquals(block.parentRoot, ssz.phase0.BeaconBlockHeader.hashTreeRoot(state.latestBlockHeader))) { throw new Error( - `Block parent root ${toHexString(block.parentRoot)} does not match state latest block, block slot=${slot}` + 
`Block parent root ${toRootHex(block.parentRoot)} does not match state latest block, block slot=${slot}` ); } diff --git a/packages/state-transition/src/block/processBlsToExecutionChange.ts b/packages/state-transition/src/block/processBlsToExecutionChange.ts index 1cc3706a756f..be79f06f3f21 100644 --- a/packages/state-transition/src/block/processBlsToExecutionChange.ts +++ b/packages/state-transition/src/block/processBlsToExecutionChange.ts @@ -1,7 +1,8 @@ -import {toHexString, byteArrayEquals} from "@chainsafe/ssz"; +import {byteArrayEquals} from "@chainsafe/ssz"; import {digest} from "@chainsafe/as-sha256"; import {capella} from "@lodestar/types"; import {BLS_WITHDRAWAL_PREFIX, ETH1_ADDRESS_WITHDRAWAL_PREFIX} from "@lodestar/params"; +import {toHex} from "@lodestar/utils"; import {verifyBlsToExecutionChangeSignature} from "../signatureSets/index.js"; import {CachedBeaconStateCapella} from "../types.js"; @@ -60,9 +61,7 @@ export function isValidBlsToExecutionChange( return { valid: false, error: Error( - `Invalid withdrawalCredentials expected=${toHexString(withdrawalCredentials)} actual=${toHexString( - digestCredentials - )}` + `Invalid withdrawalCredentials expected=${toHex(withdrawalCredentials)} actual=${toHex(digestCredentials)}` ), }; } diff --git a/packages/state-transition/src/block/processConsolidationRequest.ts b/packages/state-transition/src/block/processConsolidationRequest.ts new file mode 100644 index 000000000000..691ecd5eca0b --- /dev/null +++ b/packages/state-transition/src/block/processConsolidationRequest.ts @@ -0,0 +1,73 @@ +import {electra, ssz} from "@lodestar/types"; +import {FAR_FUTURE_EPOCH, MIN_ACTIVATION_BALANCE, PENDING_CONSOLIDATIONS_LIMIT} from "@lodestar/params"; + +import {CachedBeaconStateElectra} from "../types.js"; +import {getConsolidationChurnLimit, isActiveValidator} from "../util/validator.js"; +import {hasExecutionWithdrawalCredential} from "../util/electra.js"; +import {computeConsolidationEpochAndUpdateChurn} from 
"../util/epoch.js"; + +export function processConsolidationRequest( + state: CachedBeaconStateElectra, + consolidationRequest: electra.ConsolidationRequest +): void { + // If the pending consolidations queue is full, consolidation requests are ignored + if (state.pendingConsolidations.length >= PENDING_CONSOLIDATIONS_LIMIT) { + return; + } + + // If there is too little available consolidation churn limit, consolidation requests are ignored + if (getConsolidationChurnLimit(state.epochCtx) <= MIN_ACTIVATION_BALANCE) { + return; + } + + const {sourcePubkey, targetPubkey} = consolidationRequest; + const sourceIndex = state.epochCtx.getValidatorIndex(sourcePubkey); + const targetIndex = state.epochCtx.getValidatorIndex(targetPubkey); + + if (sourceIndex === null || targetIndex === null) { + return; + } + + // Verify that source != target, so a consolidation cannot be used as an exit. + if (sourceIndex === targetIndex) { + return; + } + + const sourceValidator = state.validators.get(sourceIndex); + const targetValidator = state.validators.getReadonly(targetIndex); + const sourceWithdrawalAddress = sourceValidator.withdrawalCredentials.subarray(12); + const currentEpoch = state.epochCtx.epoch; + + // Verify withdrawal credentials + if ( + !hasExecutionWithdrawalCredential(sourceValidator.withdrawalCredentials) || + !hasExecutionWithdrawalCredential(targetValidator.withdrawalCredentials) + ) { + return; + } + + if (Buffer.compare(sourceWithdrawalAddress, consolidationRequest.sourceAddress) !== 0) { + return; + } + + // Verify the source and the target are active + if (!isActiveValidator(sourceValidator, currentEpoch) || !isActiveValidator(targetValidator, currentEpoch)) { + return; + } + + // Verify exits for source and target have not been initiated + if (sourceValidator.exitEpoch !== FAR_FUTURE_EPOCH || targetValidator.exitEpoch !== FAR_FUTURE_EPOCH) { + return; + } + + // TODO Electra: See if we can get rid of big int + const exitEpoch = 
computeConsolidationEpochAndUpdateChurn(state, BigInt(sourceValidator.effectiveBalance)); + sourceValidator.exitEpoch = exitEpoch; + sourceValidator.withdrawableEpoch = exitEpoch + state.config.MIN_VALIDATOR_WITHDRAWABILITY_DELAY; + + const pendingConsolidation = ssz.electra.PendingConsolidation.toViewDU({ + sourceIndex, + targetIndex, + }); + state.pendingConsolidations.push(pendingConsolidation); +} diff --git a/packages/state-transition/src/block/processDeposit.ts b/packages/state-transition/src/block/processDeposit.ts index 7ade79fd7739..ee75dff0dfd1 100644 --- a/packages/state-transition/src/block/processDeposit.ts +++ b/packages/state-transition/src/block/processDeposit.ts @@ -1,5 +1,5 @@ import {PublicKey, Signature, verify} from "@chainsafe/blst"; -import {phase0, ssz} from "@lodestar/types"; +import {BLSPubkey, Bytes32, UintNum64, phase0, ssz} from "@lodestar/types"; import {verifyMerkleBranch} from "@lodestar/utils"; import { @@ -11,9 +11,19 @@ import { MAX_EFFECTIVE_BALANCE, } from "@lodestar/params"; +import {DepositData} from "@lodestar/types/lib/phase0/types.js"; +import {DepositRequest} from "@lodestar/types/lib/electra/types.js"; +import {BeaconConfig} from "@lodestar/config"; import {ZERO_HASH} from "../constants/index.js"; -import {computeDomain, computeSigningRoot, increaseBalance} from "../util/index.js"; -import {CachedBeaconStateAllForks, CachedBeaconStateAltair} from "../types.js"; +import { + computeDomain, + computeSigningRoot, + hasCompoundingWithdrawalCredential, + hasEth1WithdrawalCredential, + increaseBalance, + switchToCompoundingValidator, +} from "../util/index.js"; +import {CachedBeaconStateAllForks, CachedBeaconStateAltair, CachedBeaconStateElectra} from "../types.js"; /** * Process a Deposit operation. Potentially adds a new validator to the registry. 
Mutates the validators and balances @@ -22,8 +32,6 @@ import {CachedBeaconStateAllForks, CachedBeaconStateAltair} from "../types.js"; * PERF: Work depends on number of Deposit per block. On regular networks the average is 0 / block. */ export function processDeposit(fork: ForkSeq, state: CachedBeaconStateAllForks, deposit: phase0.Deposit): void { - const {config, validators, epochCtx} = state; - // verify the merkle branch if ( !verifyMerkleBranch( @@ -40,68 +48,132 @@ export function processDeposit(fork: ForkSeq, state: CachedBeaconStateAllForks, // deposits must be processed in order state.eth1DepositIndex += 1; - const pubkey = deposit.data.pubkey; // Drop tree - const amount = deposit.data.amount; - const cachedIndex = epochCtx.pubkey2index.get(pubkey); - if (cachedIndex === undefined || !Number.isSafeInteger(cachedIndex) || cachedIndex >= validators.length) { - // verify the deposit signature (proof of posession) which is not checked by the deposit contract - const depositMessage = { - pubkey: deposit.data.pubkey, // Retain tree for hashing - withdrawalCredentials: deposit.data.withdrawalCredentials, // Retain tree for hashing - amount: deposit.data.amount, - }; - // fork-agnostic domain since deposits are valid across forks - const domain = computeDomain(DOMAIN_DEPOSIT, config.GENESIS_FORK_VERSION, ZERO_HASH); - const signingRoot = computeSigningRoot(ssz.phase0.DepositMessage, depositMessage, domain); - try { - // Pubkeys must be checked for group + inf. This must be done only once when the validator deposit is processed - const publicKey = PublicKey.fromBytes(pubkey, true); - const signature = Signature.fromBytes(deposit.data.signature, true); - if (!verify(signingRoot, publicKey, signature)) { - return; + applyDeposit(fork, state, deposit.data); +} + +/** + * Adds a new validator into the registry. Or increase balance if already exist. + * Follows applyDeposit() in consensus spec. 
Will be used by processDeposit() and processDepositRequest() + * + */ +export function applyDeposit( + fork: ForkSeq, + state: CachedBeaconStateAllForks, + deposit: DepositData | DepositRequest +): void { + const {config, validators, epochCtx} = state; + const {pubkey, withdrawalCredentials, amount} = deposit; + + const cachedIndex = epochCtx.getValidatorIndex(pubkey); + if (cachedIndex === null || !Number.isSafeInteger(cachedIndex) || cachedIndex >= validators.length) { + if (isValidDepositSignature(config, pubkey, withdrawalCredentials, amount, deposit.signature)) { + addValidatorToRegistry(fork, state, pubkey, withdrawalCredentials, amount); + } + } else { + if (fork < ForkSeq.electra) { + // increase balance by deposit amount right away pre-electra + increaseBalance(state, cachedIndex, amount); + } else if (fork >= ForkSeq.electra) { + const stateElectra = state as CachedBeaconStateElectra; + const pendingBalanceDeposit = ssz.electra.PendingBalanceDeposit.toViewDU({ + index: cachedIndex, + amount: BigInt(amount), + }); + stateElectra.pendingBalanceDeposits.push(pendingBalanceDeposit); + + if ( + hasCompoundingWithdrawalCredential(withdrawalCredentials) && + hasEth1WithdrawalCredential(validators.getReadonly(cachedIndex).withdrawalCredentials) && + isValidDepositSignature(config, pubkey, withdrawalCredentials, amount, deposit.signature) + ) { + switchToCompoundingValidator(stateElectra, cachedIndex); } - } catch (e) { - return; // Catch all BLS errors: failed key validation, failed signature validation, invalid signature } + } +} - // add validator and balance entries - const effectiveBalance = Math.min(amount - (amount % EFFECTIVE_BALANCE_INCREMENT), MAX_EFFECTIVE_BALANCE); - validators.push( - ssz.phase0.Validator.toViewDU({ - pubkey, - withdrawalCredentials: deposit.data.withdrawalCredentials, - activationEligibilityEpoch: FAR_FUTURE_EPOCH, - activationEpoch: FAR_FUTURE_EPOCH, - exitEpoch: FAR_FUTURE_EPOCH, - withdrawableEpoch: FAR_FUTURE_EPOCH, - 
effectiveBalance, - slashed: false, - }) - ); - state.balances.push(amount); +function addValidatorToRegistry( + fork: ForkSeq, + state: CachedBeaconStateAllForks, + pubkey: BLSPubkey, + withdrawalCredentials: Bytes32, + amount: UintNum64 +): void { + const {validators, epochCtx} = state; + // add validator and balance entries + const effectiveBalance = + fork < ForkSeq.electra ? Math.min(amount - (amount % EFFECTIVE_BALANCE_INCREMENT), MAX_EFFECTIVE_BALANCE) : 0; + validators.push( + ssz.phase0.Validator.toViewDU({ + pubkey, + withdrawalCredentials, + activationEligibilityEpoch: FAR_FUTURE_EPOCH, + activationEpoch: FAR_FUTURE_EPOCH, + exitEpoch: FAR_FUTURE_EPOCH, + withdrawableEpoch: FAR_FUTURE_EPOCH, + effectiveBalance, + slashed: false, + }) + ); - const validatorIndex = validators.length - 1; - // Updating here is better than updating at once on epoch transition - // - Simplify genesis fn applyDeposits(): effectiveBalanceIncrements is populated immediately - // - Keep related code together to reduce risk of breaking this cache - // - Should have equal performance since it sets a value in a flat array - epochCtx.effectiveBalanceIncrementsSet(validatorIndex, effectiveBalance); + const validatorIndex = validators.length - 1; + // TODO Electra: Review this + // Updating here is better than updating at once on epoch transition + // - Simplify genesis fn applyDeposits(): effectiveBalanceIncrements is populated immediately + // - Keep related code together to reduce risk of breaking this cache + // - Should have equal performance since it sets a value in a flat array + epochCtx.effectiveBalanceIncrementsSet(validatorIndex, effectiveBalance); - // now that there is a new validator, update the epoch context with the new pubkey - epochCtx.addPubkey(validatorIndex, pubkey); + // now that there is a new validator, update the epoch context with the new pubkey + epochCtx.addPubkey(validatorIndex, pubkey); - // Only after altair: - if (fork >= ForkSeq.altair) { - const 
stateAltair = state as CachedBeaconStateAltair; + // Only after altair: + if (fork >= ForkSeq.altair) { + const stateAltair = state as CachedBeaconStateAltair; - stateAltair.inactivityScores.push(0); + stateAltair.inactivityScores.push(0); - // add participation caches - stateAltair.previousEpochParticipation.push(0); - stateAltair.currentEpochParticipation.push(0); - } - } else { - // increase balance by deposit amount - increaseBalance(state, cachedIndex, amount); + // add participation caches + stateAltair.previousEpochParticipation.push(0); + stateAltair.currentEpochParticipation.push(0); + } + + if (fork < ForkSeq.electra) { + state.balances.push(amount); + } else if (fork >= ForkSeq.electra) { + state.balances.push(0); + const stateElectra = state as CachedBeaconStateElectra; + const pendingBalanceDeposit = ssz.electra.PendingBalanceDeposit.toViewDU({ + index: validatorIndex, + amount: BigInt(amount), + }); + stateElectra.pendingBalanceDeposits.push(pendingBalanceDeposit); + } +} + +function isValidDepositSignature( + config: BeaconConfig, + pubkey: Uint8Array, + withdrawalCredentials: Uint8Array, + amount: number, + depositSignature: Uint8Array +): boolean { + // verify the deposit signature (proof of posession) which is not checked by the deposit contract + const depositMessage = { + pubkey, + withdrawalCredentials, + amount, + }; + // fork-agnostic domain since deposits are valid across forks + const domain = computeDomain(DOMAIN_DEPOSIT, config.GENESIS_FORK_VERSION, ZERO_HASH); + const signingRoot = computeSigningRoot(ssz.phase0.DepositMessage, depositMessage, domain); + try { + // Pubkeys must be checked for group + inf. 
This must be done only once when the validator deposit is processed + const publicKey = PublicKey.fromBytes(pubkey, true); + const signature = Signature.fromBytes(depositSignature, true); + + return verify(signingRoot, publicKey, signature); + } catch (e) { + return false; // Catch all BLS errors: failed key validation, failed signature validation, invalid signature } } diff --git a/packages/state-transition/src/block/processDepositRequest.ts b/packages/state-transition/src/block/processDepositRequest.ts new file mode 100644 index 000000000000..e5dd99a40c4e --- /dev/null +++ b/packages/state-transition/src/block/processDepositRequest.ts @@ -0,0 +1,17 @@ +import {electra} from "@lodestar/types"; +import {ForkSeq, UNSET_DEPOSIT_REQUESTS_START_INDEX} from "@lodestar/params"; + +import {CachedBeaconStateElectra} from "../types.js"; +import {applyDeposit} from "./processDeposit.js"; + +export function processDepositRequest( + fork: ForkSeq, + state: CachedBeaconStateElectra, + depositRequest: electra.DepositRequest +): void { + if (state.depositRequestsStartIndex === UNSET_DEPOSIT_REQUESTS_START_INDEX) { + state.depositRequestsStartIndex = BigInt(depositRequest.index); + } + + applyDeposit(fork, state, depositRequest); +} diff --git a/packages/state-transition/src/block/processExecutionPayload.ts b/packages/state-transition/src/block/processExecutionPayload.ts index 3c28a400d3bf..3d70e46d40fd 100644 --- a/packages/state-transition/src/block/processExecutionPayload.ts +++ b/packages/state-transition/src/block/processExecutionPayload.ts @@ -1,6 +1,7 @@ -import {toHexString, byteArrayEquals} from "@chainsafe/ssz"; +import {byteArrayEquals} from "@chainsafe/ssz"; import {BeaconBlockBody, BlindedBeaconBlockBody, deneb, isExecutionPayload} from "@lodestar/types"; import {ForkSeq, MAX_BLOBS_PER_BLOCK} from "@lodestar/params"; +import {toHex, toRootHex} from "@lodestar/utils"; import {CachedBeaconStateBellatrix, CachedBeaconStateCapella} from "../types.js"; import 
{getRandaoMix} from "../util/index.js"; import { @@ -23,7 +24,7 @@ export function processExecutionPayload( const {latestExecutionPayloadHeader} = state; if (!byteArrayEquals(payload.parentHash, latestExecutionPayloadHeader.blockHash)) { throw Error( - `Invalid execution payload parentHash ${toHexString(payload.parentHash)} latest blockHash ${toHexString( + `Invalid execution payload parentHash ${toRootHex(payload.parentHash)} latest blockHash ${toRootHex( latestExecutionPayloadHeader.blockHash )}` ); @@ -33,9 +34,7 @@ export function processExecutionPayload( // Verify random const expectedRandom = getRandaoMix(state, state.epochCtx.epoch); if (!byteArrayEquals(payload.prevRandao, expectedRandom)) { - throw Error( - `Invalid execution payload random ${toHexString(payload.prevRandao)} expected=${toHexString(expectedRandom)}` - ); + throw Error(`Invalid execution payload random ${toHex(payload.prevRandao)} expected=${toHex(expectedRandom)}`); } // Verify timestamp diff --git a/packages/state-transition/src/block/processOperations.ts b/packages/state-transition/src/block/processOperations.ts index 38716bb42a40..bb52af14ba32 100644 --- a/packages/state-transition/src/block/processOperations.ts +++ b/packages/state-transition/src/block/processOperations.ts @@ -1,14 +1,18 @@ -import {BeaconBlockBody, capella} from "@lodestar/types"; -import {ForkSeq, MAX_DEPOSITS} from "@lodestar/params"; +import {BeaconBlockBody, capella, electra} from "@lodestar/types"; +import {ForkSeq} from "@lodestar/params"; -import {CachedBeaconStateAllForks, CachedBeaconStateCapella} from "../types.js"; +import {CachedBeaconStateAllForks, CachedBeaconStateCapella, CachedBeaconStateElectra} from "../types.js"; +import {getEth1DepositCount} from "../util/deposit.js"; import {processAttestations} from "./processAttestations.js"; import {processProposerSlashing} from "./processProposerSlashing.js"; import {processAttesterSlashing} from "./processAttesterSlashing.js"; import {processDeposit} from 
"./processDeposit.js"; import {processVoluntaryExit} from "./processVoluntaryExit.js"; import {processBlsToExecutionChange} from "./processBlsToExecutionChange.js"; +import {processWithdrawalRequest} from "./processWithdrawalRequest.js"; +import {processDepositRequest} from "./processDepositRequest.js"; import {ProcessBlockOpts} from "./types.js"; +import {processConsolidationRequest} from "./processConsolidationRequest.js"; export { processProposerSlashing, @@ -16,7 +20,10 @@ export { processAttestations, processDeposit, processVoluntaryExit, + processWithdrawalRequest, processBlsToExecutionChange, + processDepositRequest, + processConsolidationRequest, }; export function processOperations( @@ -26,7 +33,7 @@ export function processOperations( opts: ProcessBlockOpts = {verifySignatures: true} ): void { // verify that outstanding deposits are processed up to the maximum number of deposits - const maxDeposits = Math.min(MAX_DEPOSITS, state.eth1Data.depositCount - state.eth1DepositIndex); + const maxDeposits = getEth1DepositCount(state); if (body.deposits.length !== maxDeposits) { throw new Error( `Block contains incorrect number of deposits: depositCount=${body.deposits.length} expected=${maxDeposits}` @@ -45,8 +52,9 @@ export function processOperations( for (const deposit of body.deposits) { processDeposit(fork, state, deposit); } + for (const voluntaryExit of body.voluntaryExits) { - processVoluntaryExit(state, voluntaryExit, opts.verifySignatures); + processVoluntaryExit(fork, state, voluntaryExit, opts.verifySignatures); } if (fork >= ForkSeq.capella) { @@ -54,4 +62,21 @@ export function processOperations( processBlsToExecutionChange(state as CachedBeaconStateCapella, blsToExecutionChange); } } + + if (fork >= ForkSeq.electra) { + const stateElectra = state as CachedBeaconStateElectra; + const bodyElectra = body as electra.BeaconBlockBody; + + for (const depositRequest of bodyElectra.executionRequests.deposits) { + processDepositRequest(fork, stateElectra, 
depositRequest); + } + + for (const elWithdrawalRequest of bodyElectra.executionRequests.withdrawals) { + processWithdrawalRequest(fork, stateElectra, elWithdrawalRequest); + } + + for (const elConsolidationRequest of bodyElectra.executionRequests.consolidations) { + processConsolidationRequest(stateElectra, elConsolidationRequest); + } + } } diff --git a/packages/state-transition/src/block/processVoluntaryExit.ts b/packages/state-transition/src/block/processVoluntaryExit.ts index 80982623a447..b08aa7800884 100644 --- a/packages/state-transition/src/block/processVoluntaryExit.ts +++ b/packages/state-transition/src/block/processVoluntaryExit.ts @@ -1,7 +1,7 @@ -import {FAR_FUTURE_EPOCH} from "@lodestar/params"; +import {FAR_FUTURE_EPOCH, ForkSeq} from "@lodestar/params"; import {phase0} from "@lodestar/types"; -import {isActiveValidator} from "../util/index.js"; -import {CachedBeaconStateAllForks} from "../types.js"; +import {getPendingBalanceToWithdraw, isActiveValidator} from "../util/index.js"; +import {CachedBeaconStateAllForks, CachedBeaconStateElectra} from "../types.js"; import {verifyVoluntaryExitSignature} from "../signatureSets/index.js"; import {initiateValidatorExit} from "./index.js"; @@ -11,16 +11,21 @@ import {initiateValidatorExit} from "./index.js"; * PERF: Work depends on number of VoluntaryExit per block. On regular networks the average is 0 / block. */ export function processVoluntaryExit( + fork: ForkSeq, state: CachedBeaconStateAllForks, signedVoluntaryExit: phase0.SignedVoluntaryExit, verifySignature = true ): void { - if (!isValidVoluntaryExit(state, signedVoluntaryExit, verifySignature)) { - throw Error("Invalid voluntary exit"); + const isValidExit = + fork >= ForkSeq.electra + ? 
isValidVoluntaryExitElectra(state as CachedBeaconStateElectra, signedVoluntaryExit, verifySignature) + : isValidVoluntaryExit(state, signedVoluntaryExit, verifySignature); + if (!isValidExit) { + throw Error(`Invalid voluntary exit at forkSeq=${fork}`); } const validator = state.validators.get(signedVoluntaryExit.message.validatorIndex); - initiateValidatorExit(state, validator); + initiateValidatorExit(fork, state, validator); } export function isValidVoluntaryExit( @@ -46,3 +51,16 @@ export function isValidVoluntaryExit( (!verifySignature || verifyVoluntaryExitSignature(state, signedVoluntaryExit)) ); } + +function isValidVoluntaryExitElectra( + state: CachedBeaconStateElectra, + signedVoluntaryExit: phase0.SignedVoluntaryExit, + verifySignature = true +): boolean { + // only exit validator if it has no pending withdrawals in the queue (post-Electra only) + if (getPendingBalanceToWithdraw(state, signedVoluntaryExit.message.validatorIndex) === 0) { + return isValidVoluntaryExit(state, signedVoluntaryExit, verifySignature); + } + + return false; +} diff --git a/packages/state-transition/src/block/processWithdrawalRequest.ts b/packages/state-transition/src/block/processWithdrawalRequest.ts new file mode 100644 index 000000000000..e8a64ec63e41 --- /dev/null +++ b/packages/state-transition/src/block/processWithdrawalRequest.ts @@ -0,0 +1,99 @@ +import {electra, phase0, ssz} from "@lodestar/types"; +import { + FAR_FUTURE_EPOCH, + MIN_ACTIVATION_BALANCE, + PENDING_PARTIAL_WITHDRAWALS_LIMIT, + FULL_EXIT_REQUEST_AMOUNT, + ForkSeq, +} from "@lodestar/params"; + +import {toHex} from "@lodestar/utils"; +import {CachedBeaconStateElectra} from "../types.js"; +import {hasCompoundingWithdrawalCredential, hasExecutionWithdrawalCredential} from "../util/electra.js"; +import {getPendingBalanceToWithdraw, isActiveValidator} from "../util/validator.js"; +import {computeExitEpochAndUpdateChurn} from "../util/epoch.js"; +import {initiateValidatorExit} from "./initiateValidatorExit.js"; 
+ +export function processWithdrawalRequest( + fork: ForkSeq, + state: CachedBeaconStateElectra, + withdrawalRequest: electra.WithdrawalRequest +): void { + const amount = Number(withdrawalRequest.amount); + const {pendingPartialWithdrawals, validators, epochCtx} = state; + // no need to use unfinalized pubkey cache from 6110 as validator won't be active anyway + const {pubkey2index, config} = epochCtx; + const isFullExitRequest = amount === FULL_EXIT_REQUEST_AMOUNT; + + // If partial withdrawal queue is full, only full exits are processed + if (pendingPartialWithdrawals.length >= PENDING_PARTIAL_WITHDRAWALS_LIMIT && !isFullExitRequest) { + return; + } + + // bail out if validator is not in beacon state + // note that we don't need to check for 6110 unfinalized vals as they won't be eligible for withdraw/exit anyway + const validatorIndex = pubkey2index.get(withdrawalRequest.validatorPubkey); + if (validatorIndex === null) { + return; + } + + const validator = validators.get(validatorIndex); + if (!isValidatorEligibleForWithdrawOrExit(validator, withdrawalRequest.sourceAddress, state)) { + return; + } + + // TODO Electra: Consider caching pendingPartialWithdrawals + const pendingBalanceToWithdraw = getPendingBalanceToWithdraw(state, validatorIndex); + const validatorBalance = state.balances.get(validatorIndex); + + if (isFullExitRequest) { + // only exit validator if it has no pending withdrawals in the queue + if (pendingBalanceToWithdraw === 0) { + initiateValidatorExit(fork, state, validator); + } + return; + } + + // partial withdrawal request + const hasSufficientEffectiveBalance = validator.effectiveBalance >= MIN_ACTIVATION_BALANCE; + const hasExcessBalance = validatorBalance > MIN_ACTIVATION_BALANCE + pendingBalanceToWithdraw; + + // Only allow partial withdrawals with compounding withdrawal credentials + if ( + hasCompoundingWithdrawalCredential(validator.withdrawalCredentials) && + hasSufficientEffectiveBalance && + hasExcessBalance + ) { + const 
amountToWithdraw = BigInt( + Math.min(validatorBalance - MIN_ACTIVATION_BALANCE - pendingBalanceToWithdraw, amount) + ); + const exitQueueEpoch = computeExitEpochAndUpdateChurn(state, amountToWithdraw); + const withdrawableEpoch = exitQueueEpoch + config.MIN_VALIDATOR_WITHDRAWABILITY_DELAY; + + const pendingPartialWithdrawal = ssz.electra.PendingPartialWithdrawal.toViewDU({ + index: validatorIndex, + amount: amountToWithdraw, + withdrawableEpoch, + }); + state.pendingPartialWithdrawals.push(pendingPartialWithdrawal); + } +} + +function isValidatorEligibleForWithdrawOrExit( + validator: phase0.Validator, + sourceAddress: Uint8Array, + state: CachedBeaconStateElectra +): boolean { + const {withdrawalCredentials} = validator; + const addressStr = toHex(withdrawalCredentials.subarray(12)); + const sourceAddressStr = toHex(sourceAddress); + const {epoch: currentEpoch, config} = state.epochCtx; + + return ( + hasExecutionWithdrawalCredential(withdrawalCredentials) && + addressStr === sourceAddressStr && + isActiveValidator(validator, currentEpoch) && + validator.exitEpoch === FAR_FUTURE_EPOCH && + currentEpoch >= validator.activationEpoch + config.SHARD_COMMITTEE_PERIOD + ); +} diff --git a/packages/state-transition/src/block/processWithdrawals.ts b/packages/state-transition/src/block/processWithdrawals.ts index ddea73c27a26..d4dfd47b4d94 100644 --- a/packages/state-transition/src/block/processWithdrawals.ts +++ b/packages/state-transition/src/block/processWithdrawals.ts @@ -1,19 +1,33 @@ -import {byteArrayEquals, toHexString} from "@chainsafe/ssz"; +import {byteArrayEquals} from "@chainsafe/ssz"; import {ssz, capella} from "@lodestar/types"; import { - MAX_EFFECTIVE_BALANCE, MAX_WITHDRAWALS_PER_PAYLOAD, MAX_VALIDATORS_PER_WITHDRAWALS_SWEEP, + ForkSeq, + MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP, + FAR_FUTURE_EPOCH, + MIN_ACTIVATION_BALANCE, + MAX_EFFECTIVE_BALANCE, } from "@lodestar/params"; -import {CachedBeaconStateCapella} from "../types.js"; -import 
{decreaseBalance, hasEth1WithdrawalCredential, isCapellaPayloadHeader} from "../util/index.js"; +import {toRootHex} from "@lodestar/utils"; +import {CachedBeaconStateCapella, CachedBeaconStateElectra} from "../types.js"; +import { + decreaseBalance, + getMaxEffectiveBalance, + hasEth1WithdrawalCredential, + hasExecutionWithdrawalCredential, + isCapellaPayloadHeader, +} from "../util/index.js"; export function processWithdrawals( - state: CachedBeaconStateCapella, + fork: ForkSeq, + state: CachedBeaconStateCapella | CachedBeaconStateElectra, payload: capella.FullOrBlindedExecutionPayload ): void { - const {withdrawals: expectedWithdrawals} = getExpectedWithdrawals(state); + // partialWithdrawalsCount is withdrawals coming from EL since electra (EIP-7002) + // TODO - electra: may switch to executionWithdrawalsCount + const {withdrawals: expectedWithdrawals, partialWithdrawalsCount} = getExpectedWithdrawals(fork, state); const numWithdrawals = expectedWithdrawals.length; if (isCapellaPayloadHeader(payload)) { @@ -21,9 +35,9 @@ export function processWithdrawals( const actualWithdrawalsRoot = payload.withdrawalsRoot; if (!byteArrayEquals(expectedWithdrawalsRoot, actualWithdrawalsRoot)) { throw Error( - `Invalid withdrawalsRoot of executionPayloadHeader, expected=${toHexString( + `Invalid withdrawalsRoot of executionPayloadHeader, expected=${toRootHex( expectedWithdrawalsRoot - )}, actual=${toHexString(actualWithdrawalsRoot)}` + )}, actual=${toRootHex(actualWithdrawalsRoot)}` ); } } else { @@ -43,6 +57,11 @@ export function processWithdrawals( decreaseBalance(state, withdrawal.validatorIndex, Number(withdrawal.amount)); } + if (fork >= ForkSeq.electra) { + const stateElectra = state as CachedBeaconStateElectra; + stateElectra.pendingPartialWithdrawals = stateElectra.pendingPartialWithdrawals.sliceFrom(partialWithdrawalsCount); + } + // Update the nextWithdrawalIndex if (expectedWithdrawals.length > 0) { const latestWithdrawal = 
expectedWithdrawals[expectedWithdrawals.length - 1]; @@ -62,46 +81,107 @@ export function processWithdrawals( } } -export function getExpectedWithdrawals(state: CachedBeaconStateCapella): { +export function getExpectedWithdrawals( + fork: ForkSeq, + state: CachedBeaconStateCapella | CachedBeaconStateElectra +): { withdrawals: capella.Withdrawal[]; sampledValidators: number; + partialWithdrawalsCount: number; } { + if (fork < ForkSeq.capella) { + throw new Error(`getExpectedWithdrawals not supported at forkSeq=${fork} < ForkSeq.capella`); + } + const epoch = state.epochCtx.epoch; let withdrawalIndex = state.nextWithdrawalIndex; const {validators, balances, nextWithdrawalValidatorIndex} = state; - const bound = Math.min(validators.length, MAX_VALIDATORS_PER_WITHDRAWALS_SWEEP); - - let n = 0; const withdrawals: capella.Withdrawal[] = []; + const isPostElectra = fork >= ForkSeq.electra; + + if (isPostElectra) { + const stateElectra = state as CachedBeaconStateElectra; + + // MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP = 8, PENDING_PARTIAL_WITHDRAWALS_LIMIT: 134217728 so we should only call getAllReadonly() if it makes sense + // pendingPartialWithdrawals comes from EIP-7002 smart contract where it takes fee so it's more likely than not validator is in correct condition to withdraw + // also we may break early if withdrawableEpoch > epoch + const allPendingPartialWithdrawals = + stateElectra.pendingPartialWithdrawals.length <= MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP + ? stateElectra.pendingPartialWithdrawals.getAllReadonly() + : null; + + // EIP-7002: Execution layer triggerable withdrawals + for (let i = 0; i < stateElectra.pendingPartialWithdrawals.length; i++) { + const withdrawal = allPendingPartialWithdrawals + ? 
allPendingPartialWithdrawals[i] + : stateElectra.pendingPartialWithdrawals.getReadonly(i); + if (withdrawal.withdrawableEpoch > epoch || withdrawals.length === MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP) { + break; + } + + const validator = validators.getReadonly(withdrawal.index); + + if ( + validator.exitEpoch === FAR_FUTURE_EPOCH && + validator.effectiveBalance >= MIN_ACTIVATION_BALANCE && + balances.get(withdrawal.index) > MIN_ACTIVATION_BALANCE + ) { + const balanceOverMinActivationBalance = BigInt(balances.get(withdrawal.index) - MIN_ACTIVATION_BALANCE); + const withdrawableBalance = + balanceOverMinActivationBalance < withdrawal.amount ? balanceOverMinActivationBalance : withdrawal.amount; + withdrawals.push({ + index: withdrawalIndex, + validatorIndex: withdrawal.index, + address: validator.withdrawalCredentials.subarray(12), + amount: withdrawableBalance, + }); + withdrawalIndex++; + } + } + } + + // partialWithdrawalsCount is withdrawals coming from EL since electra (EIP-7002) + const partialWithdrawalsCount = withdrawals.length; + const bound = Math.min(validators.length, MAX_VALIDATORS_PER_WITHDRAWALS_SWEEP); + let n = 0; // Just run a bounded loop max iterating over all withdrawals // however breaks out once we have MAX_WITHDRAWALS_PER_PAYLOAD for (n = 0; n < bound; n++) { // Get next validator in turn const validatorIndex = (nextWithdrawalValidatorIndex + n) % validators.length; - // It's most likely for validators to not have set eth1 credentials, than having 0 balance const validator = validators.getReadonly(validatorIndex); - if (!hasEth1WithdrawalCredential(validator.withdrawalCredentials)) { + const balance = balances.get(validatorIndex); + const {withdrawableEpoch, withdrawalCredentials, effectiveBalance} = validator; + const hasWithdrawableCredentials = isPostElectra + ? 
hasExecutionWithdrawalCredential(withdrawalCredentials) + : hasEth1WithdrawalCredential(withdrawalCredentials); + // early skip for balance = 0 as its now more likely that validator has exited/slahed with + // balance zero than not have withdrawal credentials set + if (balance === 0 || !hasWithdrawableCredentials) { continue; } - const balance = balances.get(validatorIndex); - - if (balance > 0 && validator.withdrawableEpoch <= epoch) { + // capella full withdrawal + if (withdrawableEpoch <= epoch) { withdrawals.push({ index: withdrawalIndex, validatorIndex, - address: validator.withdrawalCredentials.slice(12), + address: validator.withdrawalCredentials.subarray(12), amount: BigInt(balance), }); withdrawalIndex++; - } else if (validator.effectiveBalance === MAX_EFFECTIVE_BALANCE && balance > MAX_EFFECTIVE_BALANCE) { + } else if ( + effectiveBalance === (isPostElectra ? getMaxEffectiveBalance(withdrawalCredentials) : MAX_EFFECTIVE_BALANCE) && + balance > effectiveBalance + ) { + // capella partial withdrawal withdrawals.push({ index: withdrawalIndex, validatorIndex, - address: validator.withdrawalCredentials.slice(12), - amount: BigInt(balance - MAX_EFFECTIVE_BALANCE), + address: validator.withdrawalCredentials.subarray(12), + amount: BigInt(balance - effectiveBalance), }); withdrawalIndex++; } @@ -112,5 +192,5 @@ export function getExpectedWithdrawals(state: CachedBeaconStateCapella): { } } - return {withdrawals, sampledValidators: n}; + return {withdrawals, sampledValidators: n, partialWithdrawalsCount}; } diff --git a/packages/state-transition/src/block/slashValidator.ts b/packages/state-transition/src/block/slashValidator.ts index 9f3eb2947644..c4b7d5f848ea 100644 --- a/packages/state-transition/src/block/slashValidator.ts +++ b/packages/state-transition/src/block/slashValidator.ts @@ -6,11 +6,13 @@ import { MIN_SLASHING_PENALTY_QUOTIENT, MIN_SLASHING_PENALTY_QUOTIENT_ALTAIR, MIN_SLASHING_PENALTY_QUOTIENT_BELLATRIX, + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA, 
PROPOSER_REWARD_QUOTIENT, PROPOSER_WEIGHT, TIMELY_TARGET_FLAG_INDEX, WEIGHT_DENOMINATOR, WHISTLEBLOWER_REWARD_QUOTIENT, + WHISTLEBLOWER_REWARD_QUOTIENT_ELECTRA, } from "@lodestar/params"; import {decreaseBalance, increaseBalance} from "../util/index.js"; @@ -31,7 +33,7 @@ export function slashValidator( const validator = state.validators.get(slashedIndex); // TODO: Bellatrix initiateValidatorExit validators.update() with the one below - initiateValidatorExit(state, validator); + initiateValidatorExit(fork, state, validator); validator.slashed = true; validator.withdrawableEpoch = Math.max(validator.withdrawableEpoch, epoch + EPOCHS_PER_SLASHINGS_VECTOR); @@ -41,7 +43,7 @@ export function slashValidator( // state.slashings is initially a Gwei (BigInt) vector, however since Nov 2023 it's converted to UintNum64 (number) vector in the state transition because: // - state.slashings[nextEpoch % EPOCHS_PER_SLASHINGS_VECTOR] is reset per epoch in processSlashingsReset() // - max slashed validators per epoch is SLOTS_PER_EPOCH * MAX_ATTESTER_SLASHINGS * MAX_VALIDATORS_PER_COMMITTEE which is 32 * 2 * 2048 = 131072 on mainnet - // - with that and 32_000_000_000 MAX_EFFECTIVE_BALANCE, it still fits in a number given that Math.floor(Number.MAX_SAFE_INTEGER / 32_000_000_000) = 281474 + // - with that and 32_000_000_000 MAX_EFFECTIVE_BALANCE or 2048_000_000_000 MAX_EFFECTIVE_BALANCE_ELECTRA, it still fits in a number given that Math.floor(Number.MAX_SAFE_INTEGER / 32_000_000_000) = 281474 // - we don't need to compute the total slashings from state.slashings, it's handled by totalSlashingsByIncrement in EpochCache const slashingIndex = epoch % EPOCHS_PER_SLASHINGS_VECTOR; state.slashings.set(slashingIndex, (state.slashings.get(slashingIndex) ?? 0) + effectiveBalance); @@ -52,11 +54,16 @@ export function slashValidator( ? MIN_SLASHING_PENALTY_QUOTIENT : fork === ForkSeq.altair ? 
MIN_SLASHING_PENALTY_QUOTIENT_ALTAIR - : MIN_SLASHING_PENALTY_QUOTIENT_BELLATRIX; + : fork < ForkSeq.electra // no change from bellatrix to deneb + ? MIN_SLASHING_PENALTY_QUOTIENT_BELLATRIX + : MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA; decreaseBalance(state, slashedIndex, Math.floor(effectiveBalance / minSlashingPenaltyQuotient)); // apply proposer and whistleblower rewards - const whistleblowerReward = Math.floor(effectiveBalance / WHISTLEBLOWER_REWARD_QUOTIENT); + const whistleblowerReward = + fork < ForkSeq.electra + ? Math.floor(effectiveBalance / WHISTLEBLOWER_REWARD_QUOTIENT) + : Math.floor(effectiveBalance / WHISTLEBLOWER_REWARD_QUOTIENT_ELECTRA); const proposerReward = fork === ForkSeq.phase0 ? Math.floor(whistleblowerReward / PROPOSER_REWARD_QUOTIENT) diff --git a/packages/state-transition/src/cache/effectiveBalanceIncrements.ts b/packages/state-transition/src/cache/effectiveBalanceIncrements.ts index a82eb0300432..bd72b333a03c 100644 --- a/packages/state-transition/src/cache/effectiveBalanceIncrements.ts +++ b/packages/state-transition/src/cache/effectiveBalanceIncrements.ts @@ -3,18 +3,17 @@ import {BeaconStateAllForks} from "../types.js"; /** * Alias to allow easier refactoring. - * TODO: Estimate the risk of future proof of MAX_EFFECTIVE_BALANCE_INCREMENT < 255 */ -export type EffectiveBalanceIncrements = Uint8Array; +export type EffectiveBalanceIncrements = Uint16Array; -/** Helper to prevent re-writting tests downstream if we change Uint8Array to number[] */ +/** Helper to prevent re-writting tests downstream if we change Uint16Array to number[] */ export function getEffectiveBalanceIncrementsZeroed(len: number): EffectiveBalanceIncrements { - return new Uint8Array(len); + return new Uint16Array(len); } /** * effectiveBalanceIncrements length will always be equal or greater than validatorCount. The - * getEffectiveBalanceIncrementsByteLen() modulo is used to reduce the frequency at which its Uint8Array is recreated. 
+ * getEffectiveBalanceIncrementsByteLen() modulo is used to reduce the frequency at which its Uint16Array is recreated. * if effectiveBalanceIncrements has length greater than validatorCount it's not a problem since those values would * never be accessed. */ @@ -22,7 +21,7 @@ export function getEffectiveBalanceIncrementsWithLen(validatorCount: number): Ef // TODO: Research what's the best number to minimize both memory cost and copy costs const byteLen = 1024 * Math.ceil(validatorCount / 1024); - return new Uint8Array(byteLen); + return new Uint16Array(byteLen); } /** @@ -32,7 +31,7 @@ export function getEffectiveBalanceIncrementsWithLen(validatorCount: number): Ef */ export function getEffectiveBalanceIncrements(state: BeaconStateAllForks): EffectiveBalanceIncrements { const validatorsArr = state.validators.getAllReadonlyValues(); - const effectiveBalanceIncrements = new Uint8Array(validatorsArr.length); + const effectiveBalanceIncrements = new Uint16Array(validatorsArr.length); for (let i = 0; i < validatorsArr.length; i++) { effectiveBalanceIncrements[i] = Math.floor(validatorsArr[i].effectiveBalance / EFFECTIVE_BALANCE_INCREMENT); } diff --git a/packages/state-transition/src/cache/epochCache.ts b/packages/state-transition/src/cache/epochCache.ts index 8f15dab90535..5e901e33d992 100644 --- a/packages/state-transition/src/cache/epochCache.ts +++ b/packages/state-transition/src/cache/epochCache.ts @@ -1,5 +1,19 @@ import {PublicKey} from "@chainsafe/blst"; -import {BLSSignature, CommitteeIndex, Epoch, Slot, ValidatorIndex, phase0, SyncPeriod} from "@lodestar/types"; +import * as immutable from "immutable"; +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; +import { + BLSSignature, + CommitteeIndex, + Epoch, + Slot, + ValidatorIndex, + phase0, + RootHex, + SyncPeriod, + Attestation, + IndexedAttestation, + electra, +} from "@lodestar/types"; import {createBeaconConfig, BeaconConfig, ChainConfig} from "@lodestar/config"; import { 
ATTESTATION_SUBNET_COUNT, @@ -12,7 +26,7 @@ import { SLOTS_PER_EPOCH, WEIGHT_DENOMINATOR, } from "@lodestar/params"; -import {LodestarError} from "@lodestar/utils"; +import {LodestarError, fromHex} from "@lodestar/utils"; import { computeActivationExitEpoch, computeEpochAtSlot, @@ -25,19 +39,34 @@ import { computeProposers, getActivationChurnLimit, } from "../util/index.js"; -import {computeEpochShuffling, EpochShuffling, getShufflingDecisionBlock} from "../util/epochShuffling.js"; +import { + computeEpochShuffling, + EpochShuffling, + calculateShufflingDecisionRoot, + IShufflingCache, +} from "../util/epochShuffling.js"; import {computeBaseRewardPerIncrement, computeSyncParticipantReward} from "../util/syncCommittee.js"; import {sumTargetUnslashedBalanceIncrements} from "../util/targetUnslashedBalance.js"; import {getTotalSlashingsByIncrement} from "../epoch/processSlashings.js"; +import {AttesterDuty, calculateCommitteeAssignments} from "../util/calculateCommitteeAssignments.js"; +import {EpochCacheMetrics} from "../metrics.js"; import {EffectiveBalanceIncrements, getEffectiveBalanceIncrementsWithLen} from "./effectiveBalanceIncrements.js"; -import {Index2PubkeyCache, PubkeyIndexMap, syncPubkeys} from "./pubkeyCache.js"; -import {BeaconStateAllForks, BeaconStateAltair, ShufflingGetter} from "./types.js"; +import {BeaconStateAllForks, BeaconStateAltair} from "./types.js"; +import { + Index2PubkeyCache, + UnfinalizedPubkeyIndexMap, + syncPubkeys, + toMemoryEfficientHexStr, + PubkeyHex, + newUnfinalizedPubkeyIndexMap, +} from "./pubkeyCache.js"; import { computeSyncCommitteeCache, getSyncCommitteeCache, SyncCommitteeCache, SyncCommitteeCacheEmpty, } from "./syncCommitteeCache.js"; +import {CachedBeaconStateAllForks} from "./stateCache.js"; /** `= PROPOSER_WEIGHT / (WEIGHT_DENOMINATOR - PROPOSER_WEIGHT)` */ export const PROPOSER_WEIGHT_FACTOR = PROPOSER_WEIGHT / (WEIGHT_DENOMINATOR - PROPOSER_WEIGHT); @@ -46,12 +75,12 @@ export type EpochCacheImmutableData = { 
config: BeaconConfig; pubkey2index: PubkeyIndexMap; index2pubkey: Index2PubkeyCache; + shufflingCache?: IShufflingCache; }; export type EpochCacheOpts = { skipSyncCommitteeCache?: boolean; skipSyncPubkeys?: boolean; - shufflingGetter?: ShufflingGetter; }; /** Defers computing proposers by persisting only the seed, and dropping it once indexes are computed */ @@ -82,23 +111,36 @@ type ProposersDeferred = {computed: false; seed: Uint8Array} | {computed: true; export class EpochCache { config: BeaconConfig; /** - * Unique globally shared pubkey registry. There should only exist one for the entire application. + * Unique globally shared finalized pubkey registry. There should only exist one for the entire application. * * TODO: this is a hack, we need a safety mechanism in case a bad eth1 majority vote is in, * or handle non finalized data differently, or use an immutable.js structure for cheap copies - * Warning: may contain pubkeys that do not yet exist in the current state, but do in a later processed state. + * + * New: This would include only validators whose activation_eligibility_epoch != FAR_FUTURE_EPOCH and hence it is + * insert only. Validators could be 1) Active 2) In the activation queue 3) Initialized but pending queued * * $VALIDATOR_COUNT x 192 char String -> Number Map */ pubkey2index: PubkeyIndexMap; /** - * Unique globally shared pubkey registry. There should only exist one for the entire application. + * Unique globally shared finalized pubkey registry. There should only exist one for the entire application. * - * Warning: may contain indices that do not yet exist in the current state, but do in a later processed state. + * New: This would include only validators whose activation_eligibility_epoch != FAR_FUTURE_EPOCH and hence it is + * insert only. 
Validators could be 1) Active 2) In the activation queue 3) Initialized but pending queued * * $VALIDATOR_COUNT x BLST deserialized pubkey (Jacobian coordinates) */ index2pubkey: Index2PubkeyCache; + /** + * Unique pubkey registry shared in the same fork. There should only exist one for the fork. + */ + unfinalizedPubkey2index: UnfinalizedPubkeyIndexMap; + /** + * ShufflingCache is passed in from `beacon-node` so should be available at runtime but may not be + * present during testing. + */ + shufflingCache?: IShufflingCache; + /** * Indexes of the block proposers for the current epoch. * @@ -116,6 +158,12 @@ export class EpochCache { */ proposersNextEpoch: ProposersDeferred; + /** + * Epoch decision roots to look up correct shuffling from the Shuffling Cache + */ + previousDecisionRoot: RootHex; + currentDecisionRoot: RootHex; + nextDecisionRoot: RootHex; /** * Shuffling of validator indexes. Immutable through the epoch, then it's replaced entirely. * Note: Per spec definition, shuffling will always be defined. They are never called before loadState() @@ -126,7 +174,12 @@ export class EpochCache { /** Same as previousShuffling */ currentShuffling: EpochShuffling; /** Same as previousShuffling */ - nextShuffling: EpochShuffling; + nextShuffling: EpochShuffling | null; + /** + * Cache nextActiveIndices so that in afterProcessEpoch the next shuffling can be build synchronously + * in case it is not built or the ShufflingCache is not available + */ + nextActiveIndices: Uint32Array; /** * Effective balances, for altair processAttestations() */ @@ -161,6 +214,7 @@ export class EpochCache { * initiateValidatorExit(). This value may vary on each fork of the state. * * NOTE: Changes block to block + * NOTE: No longer used by initiateValidatorExit post-electra */ exitQueueEpoch: Epoch; /** @@ -168,6 +222,7 @@ export class EpochCache { * initiateValidatorExit(). This value may vary on each fork of the state. 
* * NOTE: Changes block to block + * NOTE: No longer used by initiateValidatorExit post-electra */ exitQueueChurn: number; @@ -196,19 +251,39 @@ export class EpochCache { nextSyncCommitteeIndexed: SyncCommitteeCache; // TODO: Helper stats - epoch: Epoch; syncPeriod: SyncPeriod; + /** + * state.validators.length of every state at epoch boundary + * They are saved in increasing order of epoch. + * The first validator length in the list corresponds to the state AFTER the latest finalized checkpoint state. ie. state.finalizedCheckpoint.epoch - 1 + * The last validator length corresponds to the latest epoch state ie. this.epoch + * eg. latest epoch = 105, latest finalized cp state epoch = 102 + * then the list will be (in terms of epoch) [103, 104, 105] + */ + historicalValidatorLengths: immutable.List; + + epoch: Epoch; + + get nextEpoch(): Epoch { + return this.epoch + 1; + } constructor(data: { config: BeaconConfig; pubkey2index: PubkeyIndexMap; index2pubkey: Index2PubkeyCache; + unfinalizedPubkey2index: UnfinalizedPubkeyIndexMap; + shufflingCache?: IShufflingCache; proposers: number[]; proposersPrevEpoch: number[] | null; proposersNextEpoch: ProposersDeferred; + previousDecisionRoot: RootHex; + currentDecisionRoot: RootHex; + nextDecisionRoot: RootHex; previousShuffling: EpochShuffling; currentShuffling: EpochShuffling; - nextShuffling: EpochShuffling; + nextShuffling: EpochShuffling | null; + nextActiveIndices: Uint32Array; effectiveBalanceIncrements: EffectiveBalanceIncrements; totalSlashingsByIncrement: number; syncParticipantReward: number; @@ -225,16 +300,23 @@ export class EpochCache { nextSyncCommitteeIndexed: SyncCommitteeCache; epoch: Epoch; syncPeriod: SyncPeriod; + historialValidatorLengths: immutable.List; }) { this.config = data.config; this.pubkey2index = data.pubkey2index; this.index2pubkey = data.index2pubkey; + this.unfinalizedPubkey2index = data.unfinalizedPubkey2index; + this.shufflingCache = data.shufflingCache; this.proposers = data.proposers; 
this.proposersPrevEpoch = data.proposersPrevEpoch; this.proposersNextEpoch = data.proposersNextEpoch; + this.previousDecisionRoot = data.previousDecisionRoot; + this.currentDecisionRoot = data.currentDecisionRoot; + this.nextDecisionRoot = data.nextDecisionRoot; this.previousShuffling = data.previousShuffling; this.currentShuffling = data.currentShuffling; this.nextShuffling = data.nextShuffling; + this.nextActiveIndices = data.nextActiveIndices; this.effectiveBalanceIncrements = data.effectiveBalanceIncrements; this.totalSlashingsByIncrement = data.totalSlashingsByIncrement; this.syncParticipantReward = data.syncParticipantReward; @@ -251,25 +333,20 @@ export class EpochCache { this.nextSyncCommitteeIndexed = data.nextSyncCommitteeIndexed; this.epoch = data.epoch; this.syncPeriod = data.syncPeriod; + this.historicalValidatorLengths = data.historialValidatorLengths; } /** * Create an epoch cache - * @param validators cached validators that matches `state.validators` + * @param state a finalized beacon state. Passing in unfinalized state may cause unexpected behaviour eg. empty unfinalized cache * * SLOW CODE - 🐢 */ static createFromState( state: BeaconStateAllForks, - {config, pubkey2index, index2pubkey}: EpochCacheImmutableData, + {config, pubkey2index, index2pubkey, shufflingCache}: EpochCacheImmutableData, opts?: EpochCacheOpts ): EpochCache { - // syncPubkeys here to ensure EpochCacheImmutableData is popualted before computing the rest of caches - // - computeSyncCommitteeCache() needs a fully populated pubkey2index cache - if (!opts?.skipSyncPubkeys) { - syncPubkeys(state, pubkey2index, index2pubkey); - } - const currentEpoch = computeEpochAtSlot(state.slot); const isGenesis = currentEpoch === GENESIS_EPOCH; const previousEpoch = isGenesis ? 
GENESIS_EPOCH : currentEpoch - 1; @@ -282,20 +359,26 @@ export class EpochCache { const validators = state.validators.getAllReadonlyValues(); const validatorCount = validators.length; + // syncPubkeys here to ensure EpochCacheImmutableData is popualted before computing the rest of caches + // - computeSyncCommitteeCache() needs a fully populated pubkey2index cache + if (!opts?.skipSyncPubkeys) { + syncPubkeys(validators, pubkey2index, index2pubkey); + } + const effectiveBalanceIncrements = getEffectiveBalanceIncrementsWithLen(validatorCount); const totalSlashingsByIncrement = getTotalSlashingsByIncrement(state); - const previousActiveIndices: ValidatorIndex[] = []; - const currentActiveIndices: ValidatorIndex[] = []; - const nextActiveIndices: ValidatorIndex[] = []; + const previousActiveIndicesAsNumberArray: ValidatorIndex[] = []; + const currentActiveIndicesAsNumberArray: ValidatorIndex[] = []; + const nextActiveIndicesAsNumberArray: ValidatorIndex[] = []; // BeaconChain could provide a shuffling cache to avoid re-computing shuffling every epoch // in that case, we don't need to compute shufflings again - const previousShufflingDecisionBlock = getShufflingDecisionBlock(state, previousEpoch); - const cachedPreviousShuffling = opts?.shufflingGetter?.(previousEpoch, previousShufflingDecisionBlock); - const currentShufflingDecisionBlock = getShufflingDecisionBlock(state, currentEpoch); - const cachedCurrentShuffling = opts?.shufflingGetter?.(currentEpoch, currentShufflingDecisionBlock); - const nextShufflingDecisionBlock = getShufflingDecisionBlock(state, nextEpoch); - const cachedNextShuffling = opts?.shufflingGetter?.(nextEpoch, nextShufflingDecisionBlock); + const previousDecisionRoot = calculateShufflingDecisionRoot(config, state, previousEpoch); + const cachedPreviousShuffling = shufflingCache?.getSync(previousEpoch, previousDecisionRoot); + const currentDecisionRoot = calculateShufflingDecisionRoot(config, state, currentEpoch); + const cachedCurrentShuffling = 
shufflingCache?.getSync(currentEpoch, currentDecisionRoot); + const nextDecisionRoot = calculateShufflingDecisionRoot(config, state, nextEpoch); + const cachedNextShuffling = shufflingCache?.getSync(nextEpoch, nextDecisionRoot); for (let i = 0; i < validatorCount; i++) { const validator = validators[i]; @@ -306,17 +389,17 @@ export class EpochCache { // we only need to track active indices for previous, current and next epoch if we have to compute shufflings // skip doing that if we already have cached shufflings if (cachedPreviousShuffling == null && isActiveValidator(validator, previousEpoch)) { - previousActiveIndices.push(i); + previousActiveIndicesAsNumberArray.push(i); } if (isActiveValidator(validator, currentEpoch)) { if (cachedCurrentShuffling == null) { - currentActiveIndices.push(i); + currentActiveIndicesAsNumberArray.push(i); } // We track totalActiveBalanceIncrements as ETH to fit total network balance in a JS number (53 bits) totalActiveBalanceIncrements += effectiveBalanceIncrements[i]; } if (cachedNextShuffling == null && isActiveValidator(validator, nextEpoch)) { - nextActiveIndices.push(i); + nextActiveIndicesAsNumberArray.push(i); } const {exitEpoch} = validator; @@ -338,23 +421,60 @@ export class EpochCache { throw Error("totalActiveBalanceIncrements >= Number.MAX_SAFE_INTEGER. MAX_EFFECTIVE_BALANCE is too low."); } - const currentShuffling = - cachedCurrentShuffling ?? - computeEpochShuffling(state, currentActiveIndices, currentActiveIndices.length, currentEpoch); - const previousShuffling = - cachedPreviousShuffling ?? - (isGenesis - ? currentShuffling - : computeEpochShuffling(state, previousActiveIndices, previousActiveIndices.length, previousEpoch)); - const nextShuffling = - cachedNextShuffling ?? 
computeEpochShuffling(state, nextActiveIndices, nextActiveIndices.length, nextEpoch); + const nextActiveIndices = new Uint32Array(nextActiveIndicesAsNumberArray); + let previousShuffling: EpochShuffling; + let currentShuffling: EpochShuffling; + let nextShuffling: EpochShuffling; + + if (!shufflingCache) { + // Only for testing. shufflingCache should always be available in prod + previousShuffling = computeEpochShuffling( + state, + new Uint32Array(previousActiveIndicesAsNumberArray), + previousEpoch + ); + + currentShuffling = isGenesis + ? previousShuffling + : computeEpochShuffling(state, new Uint32Array(currentActiveIndicesAsNumberArray), currentEpoch); + + nextShuffling = computeEpochShuffling(state, nextActiveIndices, nextEpoch); + } else { + currentShuffling = cachedCurrentShuffling + ? cachedCurrentShuffling + : shufflingCache.getSync(currentEpoch, currentDecisionRoot, { + state, + activeIndices: new Uint32Array(currentActiveIndicesAsNumberArray), + }); + + previousShuffling = cachedPreviousShuffling + ? cachedPreviousShuffling + : isGenesis + ? currentShuffling + : shufflingCache.getSync(previousEpoch, previousDecisionRoot, { + state, + activeIndices: new Uint32Array(previousActiveIndicesAsNumberArray), + }); + + nextShuffling = cachedNextShuffling + ? cachedNextShuffling + : shufflingCache.getSync(nextEpoch, nextDecisionRoot, { + state, + activeIndices: nextActiveIndices, + }); + } const currentProposerSeed = getSeed(state, currentEpoch, DOMAIN_BEACON_PROPOSER); // Allow to create CachedBeaconState for empty states, or no active validators const proposers = currentShuffling.activeIndices.length > 0 - ? computeProposers(currentProposerSeed, currentShuffling, effectiveBalanceIncrements) + ? 
computeProposers( + config.getForkSeqAtEpoch(currentEpoch), + currentProposerSeed, + currentShuffling, + effectiveBalanceIncrements + ) : []; const proposersNextEpoch: ProposersDeferred = { @@ -431,13 +551,20 @@ export class EpochCache { config, pubkey2index, index2pubkey, + // `createFromFinalizedState()` creates cache with empty unfinalizedPubkey2index. Be cautious to only pass in finalized state + unfinalizedPubkey2index: newUnfinalizedPubkeyIndexMap(), + shufflingCache, proposers, // On first epoch, set to null to prevent unnecessary work since this is only used for metrics proposersPrevEpoch: null, proposersNextEpoch, + previousDecisionRoot, + currentDecisionRoot, + nextDecisionRoot, previousShuffling, currentShuffling, nextShuffling, + nextActiveIndices, effectiveBalanceIncrements, totalSlashingsByIncrement, syncParticipantReward, @@ -454,6 +581,7 @@ export class EpochCache { nextSyncCommitteeIndexed, epoch: currentEpoch, syncPeriod: computeSyncPeriodAtEpoch(currentEpoch), + historialValidatorLengths: immutable.List(), }); } @@ -469,13 +597,20 @@ export class EpochCache { // Common append-only structures shared with all states, no need to clone pubkey2index: this.pubkey2index, index2pubkey: this.index2pubkey, + // No need to clone this reference. 
On each mutation the `unfinalizedPubkey2index` reference is replaced, @see `addPubkey` + unfinalizedPubkey2index: this.unfinalizedPubkey2index, + shufflingCache: this.shufflingCache, // Immutable data proposers: this.proposers, proposersPrevEpoch: this.proposersPrevEpoch, proposersNextEpoch: this.proposersNextEpoch, + previousDecisionRoot: this.previousDecisionRoot, + currentDecisionRoot: this.currentDecisionRoot, + nextDecisionRoot: this.nextDecisionRoot, previousShuffling: this.previousShuffling, currentShuffling: this.currentShuffling, nextShuffling: this.nextShuffling, + nextActiveIndices: this.nextActiveIndices, // Uint8Array, requires cloning, but it is cloned only when necessary before an epoch transition // See EpochCache.beforeEpochTransition() effectiveBalanceIncrements: this.effectiveBalanceIncrements, @@ -495,45 +630,104 @@ export class EpochCache { nextSyncCommitteeIndexed: this.nextSyncCommitteeIndexed, epoch: this.epoch, syncPeriod: this.syncPeriod, + historialValidatorLengths: this.historicalValidatorLengths, }); } /** * Called to re-use information, such as the shuffling of the next epoch, after transitioning into a - * new epoch. + * new epoch. Also handles pre-computation of values that may change during the upcoming epoch and + * that get used in the following epoch transition. Often those pre-computations are not used by the + * chain but are courtesy values that are served via the API for epoch look ahead of duties. 
+ * + * Steps for afterProcessEpoch + * 1) update previous/current/next values of cached items */ afterProcessEpoch( - state: BeaconStateAllForks, + state: CachedBeaconStateAllForks, epochTransitionCache: { - nextEpochShufflingActiveValidatorIndices: ValidatorIndex[]; - nextEpochShufflingActiveIndicesLength: number; + nextShufflingDecisionRoot: RootHex; + nextShufflingActiveIndices: Uint32Array; nextEpochTotalActiveBalanceByIncrement: number; } ): void { + // Because the slot was incremented before entering this function the "next epoch" is actually the "current epoch" + // in this context but that is not actually true because the state transition happens in the last 4 seconds of the + // epoch. For the context of this function "upcoming epoch" is used to denote the epoch that will begin after this + // function returns. The epoch that is "next" once the state transition is complete is referred to as the + // epochAfterUpcoming for the same reason to help minimize confusion. + const upcomingEpoch = this.nextEpoch; + const epochAfterUpcoming = upcomingEpoch + 1; + + // move current to previous this.previousShuffling = this.currentShuffling; - this.currentShuffling = this.nextShuffling; - const currEpoch = this.currentShuffling.epoch; - const nextEpoch = currEpoch + 1; - - this.nextShuffling = computeEpochShuffling( - state, - epochTransitionCache.nextEpochShufflingActiveValidatorIndices, - epochTransitionCache.nextEpochShufflingActiveIndicesLength, - nextEpoch - ); - - // Roll current proposers into previous proposers for metrics + this.previousDecisionRoot = this.currentDecisionRoot; this.proposersPrevEpoch = this.proposers; - const currentProposerSeed = getSeed(state, this.currentShuffling.epoch, DOMAIN_BEACON_PROPOSER); - this.proposers = computeProposers(currentProposerSeed, this.currentShuffling, this.effectiveBalanceIncrements); + // move next to current or calculate upcoming + this.currentDecisionRoot = this.nextDecisionRoot; + if (this.nextShuffling) { + // 
was already pulled from the ShufflingCache to the EpochCache (should be in most cases) + this.currentShuffling = this.nextShuffling; + } else { + this.shufflingCache?.metrics?.shufflingCache.nextShufflingNotOnEpochCache.inc(); + this.currentShuffling = + this.shufflingCache?.getSync(upcomingEpoch, this.currentDecisionRoot, { + state, + // have to use the "nextActiveIndices" that were saved in the last transition here to calculate + // the upcoming shuffling if it is not already built (similar condition to the below computation) + activeIndices: this.nextActiveIndices, + }) ?? + // allow for this case during testing where the ShufflingCache is not present, may affect perf testing + // so should be taken into account when structuring tests. Should not affect unit or other tests though + computeEpochShuffling(state, this.nextActiveIndices, upcomingEpoch); + } + const upcomingProposerSeed = getSeed(state, upcomingEpoch, DOMAIN_BEACON_PROPOSER); + // next epoch was moved to current epoch so use current here + this.proposers = computeProposers( + this.config.getForkSeqAtEpoch(upcomingEpoch), + upcomingProposerSeed, + this.currentShuffling, + this.effectiveBalanceIncrements + ); + + // handle next values + this.nextDecisionRoot = epochTransitionCache.nextShufflingDecisionRoot; + this.nextActiveIndices = epochTransitionCache.nextShufflingActiveIndices; + if (this.shufflingCache) { + this.nextShuffling = null; + // This promise will resolve immediately after the synchronous code of the state-transition runs. Until + // the build is done on a worker thread it will be calculated immediately after the epoch transition + // completes. Once the work is done concurrently it should be ready by time this get runs so the promise + // will resolve directly on the next spin of the event loop because the epoch transition and shuffling take + // about the same time to calculate so theoretically its ready now. 
Do not await here though in case it + // is not ready yet as the transition must not be asynchronous. + this.shufflingCache + .get(epochAfterUpcoming, this.nextDecisionRoot) + .then((shuffling) => { + if (!shuffling) { + throw new Error("EpochShuffling not returned from get in afterProcessEpoch"); + } + this.nextShuffling = shuffling; + }) + .catch((err) => { + this.shufflingCache?.logger?.error( + "EPOCH_CONTEXT_SHUFFLING_BUILD_ERROR", + {epoch: epochAfterUpcoming, decisionRoot: epochTransitionCache.nextShufflingDecisionRoot}, + err + ); + }); + } else { + // Only for testing. shufflingCache should always be available in prod + this.nextShuffling = computeEpochShuffling(state, this.nextActiveIndices, epochAfterUpcoming); + } // Only pre-compute the seed since it's very cheap. Do the expensive computeProposers() call only on demand. - this.proposersNextEpoch = {computed: false, seed: getSeed(state, this.nextShuffling.epoch, DOMAIN_BEACON_PROPOSER)}; + this.proposersNextEpoch = {computed: false, seed: getSeed(state, epochAfterUpcoming, DOMAIN_BEACON_PROPOSER)}; // TODO: DEDUPLICATE from createEpochCache // - // Precompute churnLimit for efficient initiateValidatorExit() during block proposing MUST be recompute everytime the + // Precompute churnLimit for efficient initiateValidatorExit() during block proposing MUST be recompute every time the // active validator indices set changes in size. Validators change active status only when: // - validator.activation_epoch is set. Only changes in process_registry_updates() if validator can be activated. If // the value changes it will be set to `epoch + 1 + MAX_SEED_LOOKAHEAD`. 
@@ -555,14 +749,14 @@ export class EpochCache { ); // Maybe advance exitQueueEpoch at the end of the epoch if there haven't been any exists for a while - const exitQueueEpoch = computeActivationExitEpoch(currEpoch); + const exitQueueEpoch = computeActivationExitEpoch(upcomingEpoch); if (exitQueueEpoch > this.exitQueueEpoch) { this.exitQueueEpoch = exitQueueEpoch; this.exitQueueChurn = 0; } this.totalActiveBalanceIncrements = epochTransitionCache.nextEpochTotalActiveBalanceByIncrement; - if (currEpoch >= this.config.ALTAIR_FORK_EPOCH) { + if (upcomingEpoch >= this.config.ALTAIR_FORK_EPOCH) { this.syncParticipantReward = computeSyncParticipantReward(this.totalActiveBalanceIncrements); this.syncProposerReward = Math.floor(this.syncParticipantReward * PROPOSER_WEIGHT_FACTOR); this.baseRewardPerIncrement = computeBaseRewardPerIncrement(this.totalActiveBalanceIncrements); @@ -579,27 +773,78 @@ export class EpochCache { // ``` this.epoch = computeEpochAtSlot(state.slot); this.syncPeriod = computeSyncPeriodAtEpoch(this.epoch); + // ELECTRA Only: Add current cpState.validators.length + // Only keep validatorLength for epochs after finalized cpState.epoch + // eg. 
[100(epoch 1), 102(epoch 2)].push(104(epoch 3)), this.epoch = 3, finalized cp epoch = 1 + // We keep the last (3 - 1) items = [102, 104] + if (upcomingEpoch >= this.config.ELECTRA_FORK_EPOCH) { + this.historicalValidatorLengths = this.historicalValidatorLengths.push(state.validators.length); + + // If number of validatorLengths we want to keep exceeds the current list size, it implies + // finalized checkpoint hasn't advanced, and no need to slice + const hasFinalizedCpAdvanced = + this.epoch - state.finalizedCheckpoint.epoch < this.historicalValidatorLengths.size; + + if (hasFinalizedCpAdvanced) { + // We use finalized cp epoch - this.epoch which is a negative number to keep the last n entries and discard the rest + this.historicalValidatorLengths = this.historicalValidatorLengths.slice( + state.finalizedCheckpoint.epoch - this.epoch + ); + } + } } beforeEpochTransition(): void { // Clone (copy) before being mutated in processEffectiveBalanceUpdates - // NOTE: Force to use Uint8Array.slice (copy) instead of Buffer.call (not copy) - this.effectiveBalanceIncrements = Uint8Array.prototype.slice.call(this.effectiveBalanceIncrements, 0); + // NOTE: Force to use Uint16Array.slice (copy) instead of Buffer.call (not copy) + this.effectiveBalanceIncrements = Uint16Array.prototype.slice.call(this.effectiveBalanceIncrements, 0); } /** * Return the beacon committee at slot for index. 
*/ getBeaconCommittee(slot: Slot, index: CommitteeIndex): Uint32Array { + return this.getBeaconCommittees(slot, [index]); + } + + /** + * Return a single Uint32Array representing concatted committees of indices + */ + getBeaconCommittees(slot: Slot, indices: CommitteeIndex[]): Uint32Array { + if (indices.length === 0) { + throw new Error("Attempt to get committees without providing CommitteeIndex"); + } + const slotCommittees = this.getShufflingAtSlot(slot).committees[slot % SLOTS_PER_EPOCH]; - if (index >= slotCommittees.length) { - throw new EpochCacheError({ - code: EpochCacheErrorCode.COMMITTEE_INDEX_OUT_OF_RANGE, - index, - maxIndex: slotCommittees.length, - }); + const committees = []; + + for (const index of indices) { + if (index >= slotCommittees.length) { + throw new EpochCacheError({ + code: EpochCacheErrorCode.COMMITTEE_INDEX_OUT_OF_RANGE, + index, + maxIndex: slotCommittees.length, + }); + } + committees.push(slotCommittees[index]); + } + + // Early return if only one index + if (committees.length === 1) { + return committees[0]; } - return slotCommittees[index]; + + // Create a new Uint32Array to flatten `committees` + const totalLength = committees.reduce((acc, curr) => acc + curr.length, 0); + const result = new Uint32Array(totalLength); + + let offset = 0; + for (const committee of committees) { + result.set(committee, offset); + offset += committee.length; + } + + return result; } getCommitteeCountPerSlot(epoch: Epoch): number { @@ -672,8 +917,9 @@ export class EpochCache { getBeaconProposersNextEpoch(): ValidatorIndex[] { if (!this.proposersNextEpoch.computed) { const indexes = computeProposers( + this.config.getForkSeqAtEpoch(this.epoch + 1), this.proposersNextEpoch.seed, - this.nextShuffling, + this.getShufflingAtEpoch(this.nextEpoch), this.effectiveBalanceIncrements ); this.proposersNextEpoch = {computed: true, indexes}; @@ -685,10 +931,9 @@ export class EpochCache { /** * Return the indexed attestation corresponding to ``attestation``. 
*/ - getIndexedAttestation(attestation: phase0.Attestation): phase0.IndexedAttestation { - const {aggregationBits, data} = attestation; - const committeeIndices = this.getBeaconCommittee(data.slot, data.index); - const attestingIndices = aggregationBits.intersectValues(committeeIndices); + getIndexedAttestation(fork: ForkSeq, attestation: Attestation): IndexedAttestation { + const {data} = attestation; + const attestingIndices = this.getAttestingIndices(fork, attestation); // sort in-place attestingIndices.sort((a, b) => a - b); @@ -699,34 +944,37 @@ export class EpochCache { }; } + /** + * Return indices of validators who attestested in `attestation` + */ + getAttestingIndices(fork: ForkSeq, attestation: Attestation): number[] { + if (fork < ForkSeq.electra) { + const {aggregationBits, data} = attestation; + const validatorIndices = this.getBeaconCommittee(data.slot, data.index); + + return aggregationBits.intersectValues(validatorIndices); + } else { + const {aggregationBits, committeeBits, data} = attestation as electra.Attestation; + + // There is a naming conflict on the term `committeeIndices` + // In Lodestar it usually means a list of validator indices of participants in a committee + // In the spec it means a list of committee indices according to committeeBits + // This `committeeIndices` refers to the latter + // TODO Electra: resolve the naming conflicts + const committeeIndices = committeeBits.getTrueBitIndexes(); + + const validatorIndices = this.getBeaconCommittees(data.slot, committeeIndices); + + return aggregationBits.intersectValues(validatorIndices); + } + } + getCommitteeAssignments( epoch: Epoch, requestedValidatorIndices: ValidatorIndex[] ): Map { - const requestedValidatorIndicesSet = new Set(requestedValidatorIndices); - const duties = new Map(); - - const epochCommittees = this.getShufflingAtEpoch(epoch).committees; - for (let epochSlot = 0; epochSlot < SLOTS_PER_EPOCH; epochSlot++) { - const slotCommittees = epochCommittees[epochSlot]; - 
for (let i = 0, committeesAtSlot = slotCommittees.length; i < committeesAtSlot; i++) { - for (let j = 0, committeeLength = slotCommittees[i].length; j < committeeLength; j++) { - const validatorIndex = slotCommittees[i][j]; - if (requestedValidatorIndicesSet.has(validatorIndex)) { - duties.set(validatorIndex, { - validatorIndex, - committeeLength, - committeesAtSlot, - validatorCommitteeIndex: j, - committeeIndex: i, - slot: epoch * SLOTS_PER_EPOCH + epochSlot, - }); - } - } - } - } - - return duties; + const shuffling = this.getShufflingAtEpoch(epoch); + return calculateCommitteeAssignments(shuffling, requestedValidatorIndices); } /** @@ -766,9 +1014,78 @@ export class EpochCache { return isAggregatorFromCommitteeLength(committee.length, slotSignature); } + /** + * Return finalized pubkey given the validator index. + * Only finalized pubkey as we do not store unfinalized pubkey because no where in the spec has a + * need to make such enquiry + */ + getPubkey(index: ValidatorIndex): PublicKey | undefined { + return this.index2pubkey[index]; + } + + getValidatorIndex(pubkey: Uint8Array): ValidatorIndex | null { + if (this.isPostElectra()) { + return this.pubkey2index.get(pubkey) ?? this.unfinalizedPubkey2index.get(toMemoryEfficientHexStr(pubkey)) ?? 
null; + } else { + return this.pubkey2index.get(pubkey); + } + } + + /** + * + * Add unfinalized pubkeys + * + */ addPubkey(index: ValidatorIndex, pubkey: Uint8Array): void { + if (this.isPostElectra()) { + this.addUnFinalizedPubkey(index, pubkey); + } else { + // deposit mechanism pre ELECTRA follows a safe distance with assumption + // that they are already canonical + this.addFinalizedPubkey(index, pubkey); + } + } + + addUnFinalizedPubkey(index: ValidatorIndex, pubkey: PubkeyHex | Uint8Array, metrics?: EpochCacheMetrics): void { + this.unfinalizedPubkey2index = this.unfinalizedPubkey2index.set(toMemoryEfficientHexStr(pubkey), index); + metrics?.newUnFinalizedPubkey.inc(); + } + + addFinalizedPubkeys(pubkeyMap: UnfinalizedPubkeyIndexMap, metrics?: EpochCacheMetrics): void { + pubkeyMap.forEach((index, pubkey) => this.addFinalizedPubkey(index, pubkey, metrics)); + } + + /** + * Add finalized validator index and pubkey into finalized cache. + * Since addFinalizedPubkey() primarily takes pubkeys from unfinalized cache, it can take pubkey hex string directly + */ + addFinalizedPubkey(index: ValidatorIndex, pubkeyOrHex: PubkeyHex | Uint8Array, metrics?: EpochCacheMetrics): void { + const pubkey = typeof pubkeyOrHex === "string" ? fromHex(pubkeyOrHex) : pubkeyOrHex; + const existingIndex = this.pubkey2index.get(pubkey); + + if (existingIndex !== null) { + if (existingIndex === index) { + // Repeated insert. + metrics?.finalizedPubkeyDuplicateInsert.inc(); + return; + } else { + // attempt to insert the same pubkey with different index, should never happen. + throw Error( + `inserted existing pubkey into finalizedPubkey2index cache with a different index, index=${index} priorIndex=${existingIndex}` + ); + } + } + this.pubkey2index.set(pubkey, index); - this.index2pubkey[index] = PublicKey.fromBytes(pubkey); // Optimize for aggregation + const pubkeyBytes = pubkey instanceof Uint8Array ? 
pubkey : fromHex(pubkey); + this.index2pubkey[index] = PublicKey.fromBytes(pubkeyBytes); // Optimize for aggregation + } + + /** + * Delete pubkeys from unfinalized cache + */ + deleteUnfinalizedPubkeys(pubkeys: Iterable): void { + this.unfinalizedPubkey2index = this.unfinalizedPubkey2index.deleteAll(pubkeys); } getShufflingAtSlot(slot: Slot): EpochShuffling { @@ -784,6 +1101,13 @@ export class EpochCache { getShufflingAtEpoch(epoch: Epoch): EpochShuffling { const shuffling = this.getShufflingAtEpochOrNull(epoch); if (shuffling === null) { + if (epoch === this.nextEpoch) { + throw new EpochCacheError({ + code: EpochCacheErrorCode.NEXT_SHUFFLING_NOT_AVAILABLE, + epoch: epoch, + decisionRoot: this.getShufflingDecisionRoot(this.nextEpoch), + }); + } throw new EpochCacheError({ code: EpochCacheErrorCode.COMMITTEE_EPOCH_OUT_OF_RANGE, currentEpoch: this.currentShuffling.epoch, @@ -794,15 +1118,37 @@ export class EpochCache { return shuffling; } + getShufflingDecisionRoot(epoch: Epoch): RootHex { + switch (epoch) { + case this.epoch - 1: + return this.previousDecisionRoot; + case this.epoch: + return this.currentDecisionRoot; + case this.nextEpoch: + return this.nextDecisionRoot; + default: + throw new EpochCacheError({ + code: EpochCacheErrorCode.DECISION_ROOT_EPOCH_OUT_OF_RANGE, + currentEpoch: this.epoch, + requestedEpoch: epoch, + }); + } + } + getShufflingAtEpochOrNull(epoch: Epoch): EpochShuffling | null { - if (epoch === this.previousShuffling.epoch) { - return this.previousShuffling; - } else if (epoch === this.currentShuffling.epoch) { - return this.currentShuffling; - } else if (epoch === this.nextShuffling.epoch) { - return this.nextShuffling; - } else { - return null; + switch (epoch) { + case this.epoch - 1: + return this.previousShuffling; + case this.epoch: + return this.currentShuffling; + case this.nextEpoch: + if (!this.nextShuffling) { + this.nextShuffling = + this.shufflingCache?.getSync(this.nextEpoch, this.getShufflingDecisionRoot(this.nextEpoch)) ?? 
null; + } + return this.nextShuffling; + default: + return null; } } @@ -845,15 +1191,53 @@ export class EpochCache { } effectiveBalanceIncrementsSet(index: number, effectiveBalance: number): void { - if (index >= this.effectiveBalanceIncrements.length) { - // Clone and extend effectiveBalanceIncrements + if (this.isPostElectra()) { + // TODO: electra + // getting length and setting getEffectiveBalanceIncrementsByteLen is not fork safe + // so each time we add an index, we should new the Uint8Array to keep it forksafe + // one simple optimization could be to increment the length once per block rather + // on each add/set + // + // there could still be some unused length remaining from the prev ELECTRA padding + const newLength = + index >= this.effectiveBalanceIncrements.length ? index + 1 : this.effectiveBalanceIncrements.length; const effectiveBalanceIncrements = this.effectiveBalanceIncrements; - this.effectiveBalanceIncrements = new Uint8Array(getEffectiveBalanceIncrementsByteLen(index + 1)); + this.effectiveBalanceIncrements = new Uint16Array(newLength); this.effectiveBalanceIncrements.set(effectiveBalanceIncrements, 0); + } else { + if (index >= this.effectiveBalanceIncrements.length) { + // Clone and extend effectiveBalanceIncrements + const effectiveBalanceIncrements = this.effectiveBalanceIncrements; + this.effectiveBalanceIncrements = new Uint16Array(getEffectiveBalanceIncrementsByteLen(index + 1)); + this.effectiveBalanceIncrements.set(effectiveBalanceIncrements, 0); + } } this.effectiveBalanceIncrements[index] = Math.floor(effectiveBalance / EFFECTIVE_BALANCE_INCREMENT); } + + isPostElectra(): boolean { + return this.epoch >= this.config.ELECTRA_FORK_EPOCH; + } + + getValidatorCountAtEpoch(targetEpoch: Epoch): number | undefined { + const currentEpoch = this.epoch; + + if (targetEpoch === currentEpoch) { + return this.historicalValidatorLengths.get(-1); + } + + // Attempt to get validator count from future epoch + if (targetEpoch > currentEpoch) { + 
return undefined; + } + + // targetEpoch is so far back that historicalValidatorLengths doesnt contain such info + if (targetEpoch < currentEpoch - this.historicalValidatorLengths.size + 1) { + return undefined; + } + return this.historicalValidatorLengths.get(targetEpoch - currentEpoch - 1); + } } function getEffectiveBalanceIncrementsByteLen(validatorCount: number): number { @@ -861,19 +1245,11 @@ function getEffectiveBalanceIncrementsByteLen(validatorCount: number): number { return 1024 * Math.ceil(validatorCount / 1024); } -// Copied from lodestar-api package to avoid depending on the package -type AttesterDuty = { - validatorIndex: ValidatorIndex; - committeeIndex: CommitteeIndex; - committeeLength: number; - committeesAtSlot: number; - validatorCommitteeIndex: number; - slot: Slot; -}; - export enum EpochCacheErrorCode { COMMITTEE_INDEX_OUT_OF_RANGE = "EPOCH_CONTEXT_ERROR_COMMITTEE_INDEX_OUT_OF_RANGE", COMMITTEE_EPOCH_OUT_OF_RANGE = "EPOCH_CONTEXT_ERROR_COMMITTEE_EPOCH_OUT_OF_RANGE", + DECISION_ROOT_EPOCH_OUT_OF_RANGE = "EPOCH_CONTEXT_ERROR_DECISION_ROOT_EPOCH_OUT_OF_RANGE", + NEXT_SHUFFLING_NOT_AVAILABLE = "EPOCH_CONTEXT_ERROR_NEXT_SHUFFLING_NOT_AVAILABLE", NO_SYNC_COMMITTEE = "EPOCH_CONTEXT_ERROR_NO_SYNC_COMMITTEE", PROPOSER_EPOCH_MISMATCH = "EPOCH_CONTEXT_ERROR_PROPOSER_EPOCH_MISMATCH", } @@ -889,6 +1265,16 @@ type EpochCacheErrorType = requestedEpoch: Epoch; currentEpoch: Epoch; } + | { + code: EpochCacheErrorCode.DECISION_ROOT_EPOCH_OUT_OF_RANGE; + requestedEpoch: Epoch; + currentEpoch: Epoch; + } + | { + code: EpochCacheErrorCode.NEXT_SHUFFLING_NOT_AVAILABLE; + epoch: Epoch; + decisionRoot: RootHex; + } | { code: EpochCacheErrorCode.NO_SYNC_COMMITTEE; epoch: Epoch; diff --git a/packages/state-transition/src/cache/epochTransitionCache.ts b/packages/state-transition/src/cache/epochTransitionCache.ts index e6f84de6c62e..27b781e8a6a1 100644 --- a/packages/state-transition/src/cache/epochTransitionCache.ts +++ 
b/packages/state-transition/src/cache/epochTransitionCache.ts @@ -1,6 +1,12 @@ -import {Epoch, ValidatorIndex} from "@lodestar/types"; -import {intDiv} from "@lodestar/utils"; -import {EPOCHS_PER_SLASHINGS_VECTOR, FAR_FUTURE_EPOCH, ForkSeq, MAX_EFFECTIVE_BALANCE} from "@lodestar/params"; +import {phase0, Epoch, RootHex, ValidatorIndex} from "@lodestar/types"; +import {intDiv, toRootHex} from "@lodestar/utils"; +import { + EPOCHS_PER_SLASHINGS_VECTOR, + FAR_FUTURE_EPOCH, + ForkSeq, + SLOTS_PER_HISTORICAL_ROOT, + MIN_ACTIVATION_BALANCE, +} from "@lodestar/params"; import { hasMarkers, @@ -78,7 +84,7 @@ export interface EpochTransitionCache { /** * Indices of validators that just joined and will be eligible for the active queue. * ``` - * v.activationEligibilityEpoch === FAR_FUTURE_EPOCH && v.effectiveBalance === MAX_EFFECTIVE_BALANCE + * v.activationEligibilityEpoch === FAR_FUTURE_EPOCH && v.effectiveBalance >= MAX_EFFECTIVE_BALANCE * ``` * All validators in indicesEligibleForActivationQueue get activationEligibilityEpoch set. So it can only include * validators that have just joined the registry through a valid full deposit(s). @@ -127,6 +133,18 @@ export interface EpochTransitionCache { flags: number[]; + /** + * Validators in the current epoch, should use it for read-only value instead of accessing state.validators directly. + * Note that during epoch processing, validators could be updated so need to use it with care. + */ + validators: phase0.Validator[]; + + /** + * This is for electra only + * Validators that're switched to compounding during processPendingConsolidations(), not available in beforeProcessEpoch() + */ + newCompoundingValidators?: Set; + /** * balances array will be populated by processRewardsAndPenalties() and consumed by processEffectiveBalanceUpdates(). * processRewardsAndPenalties() already has a regular Javascript array of balances. 
@@ -143,12 +161,12 @@ export interface EpochTransitionCache { * | beforeProcessEpoch | calculate during the validator loop| * | afterEpochTransitionCache | read it | */ - nextEpochShufflingActiveValidatorIndices: ValidatorIndex[]; + nextShufflingActiveIndices: Uint32Array; /** - * We do not use up to `nextEpochShufflingActiveValidatorIndices.length`, use this to control that + * Shuffling decision root that gets set on the EpochCache in afterProcessEpoch */ - nextEpochShufflingActiveIndicesLength: number; + nextShufflingDecisionRoot: RootHex; /** * Altair specific, this is total active balances for the next epoch. @@ -297,12 +315,12 @@ export function beforeProcessEpoch( // def is_eligible_for_activation_queue(validator: Validator) -> bool: // return ( // validator.activation_eligibility_epoch == FAR_FUTURE_EPOCH - // and validator.effective_balance == MAX_EFFECTIVE_BALANCE + // and validator.effective_balance >= MAX_EFFECTIVE_BALANCE # [Modified in Electra] // ) // ``` if ( validator.activationEligibilityEpoch === FAR_FUTURE_EPOCH && - validator.effectiveBalance === MAX_EFFECTIVE_BALANCE + validator.effectiveBalance >= MIN_ACTIVATION_BALANCE ) { indicesEligibleForActivationQueue.push(i); } @@ -348,6 +366,24 @@ export function beforeProcessEpoch( } } + // Trigger async build of shuffling for epoch after next (nextShuffling post epoch transition) + const epochAfterNext = state.epochCtx.nextEpoch + 1; + // cannot call calculateShufflingDecisionRoot here because spec prevent getting current slot + // as a decision block. 
we are part way through the transition though and this was added in + // process slot beforeProcessEpoch happens so it available and valid + const nextShufflingDecisionRoot = toRootHex(state.blockRoots.get(state.slot % SLOTS_PER_HISTORICAL_ROOT)); + const nextShufflingActiveIndices = new Uint32Array(nextEpochShufflingActiveIndicesLength); + if (nextEpochShufflingActiveIndicesLength > nextEpochShufflingActiveValidatorIndices.length) { + throw new Error( + `Invalid activeValidatorCount: ${nextEpochShufflingActiveIndicesLength} > ${nextEpochShufflingActiveValidatorIndices.length}` + ); + } + // only the first `activeValidatorCount` elements are copied to `activeIndices` + for (let i = 0; i < nextEpochShufflingActiveIndicesLength; i++) { + nextShufflingActiveIndices[i] = nextEpochShufflingActiveValidatorIndices[i]; + } + state.epochCtx.shufflingCache?.build(epochAfterNext, nextShufflingDecisionRoot, state, nextShufflingActiveIndices); + if (totalActiveStakeByIncrement < 1) { totalActiveStakeByIncrement = 1; } else if (totalActiveStakeByIncrement >= Number.MAX_SAFE_INTEGER) { @@ -471,8 +507,8 @@ export function beforeProcessEpoch( indicesEligibleForActivationQueue, indicesEligibleForActivation, indicesToEject, - nextEpochShufflingActiveValidatorIndices, - nextEpochShufflingActiveIndicesLength, + nextShufflingDecisionRoot, + nextShufflingActiveIndices, // to be updated in processEffectiveBalanceUpdates nextEpochTotalActiveBalanceByIncrement: 0, isActivePrevEpoch, @@ -481,7 +517,9 @@ export function beforeProcessEpoch( proposerIndices, inclusionDelays, flags, - + validators, + // will be assigned in processPendingConsolidations() + newCompoundingValidators: undefined, // Will be assigned in processRewardsAndPenalties() balances: undefined, }; diff --git a/packages/state-transition/src/cache/pubkeyCache.ts b/packages/state-transition/src/cache/pubkeyCache.ts index 0fd7a80fe990..f96436ec14f4 100644 --- a/packages/state-transition/src/cache/pubkeyCache.ts +++ 
b/packages/state-transition/src/cache/pubkeyCache.ts @@ -1,10 +1,18 @@ import {PublicKey} from "@chainsafe/blst"; -import {ValidatorIndex} from "@lodestar/types"; -import {BeaconStateAllForks} from "./types.js"; +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; +import * as immutable from "immutable"; +import {ValidatorIndex, phase0} from "@lodestar/types"; export type Index2PubkeyCache = PublicKey[]; +/** + * OrderedMap preserves the order of entries in which they are `set()`. + * We assume `values()` yields validator indices in strictly increasing order + * as new validator indices are assigned in increasing order. + * EIP-6914 will break this assumption. + */ +export type UnfinalizedPubkeyIndexMap = immutable.Map; -type PubkeyHex = string; +export type PubkeyHex = string; /** * toHexString() creates hex strings via string concatenation, which are very memory inefficient. @@ -14,7 +22,7 @@ type PubkeyHex = string; * * See https://github.com/ChainSafe/lodestar/issues/3446 */ -function toMemoryEfficientHexStr(hex: Uint8Array | string): string { +export function toMemoryEfficientHexStr(hex: Uint8Array | string): string { if (typeof hex === "string") { if (hex.startsWith("0x")) { hex = hex.slice(2); @@ -25,24 +33,11 @@ function toMemoryEfficientHexStr(hex: Uint8Array | string): string { return Buffer.from(hex.buffer, hex.byteOffset, hex.byteLength).toString("hex"); } -export class PubkeyIndexMap { - // We don't really need the full pubkey. We could just use the first 20 bytes like an Ethereum address - readonly map = new Map(); - - get size(): number { - return this.map.size; - } - - /** - * Must support reading with string for API support where pubkeys are already strings - */ - get(key: Uint8Array | PubkeyHex): ValidatorIndex | undefined { - return this.map.get(toMemoryEfficientHexStr(key)); - } - - set(key: Uint8Array, value: ValidatorIndex): void { - this.map.set(toMemoryEfficientHexStr(key), value); - } +/** + * A wrapper for calling immutable.js. 
To abstract the initialization of UnfinalizedPubkeyIndexMap + */ +export function newUnfinalizedPubkeyIndexMap(): UnfinalizedPubkeyIndexMap { + return immutable.Map(); } /** @@ -53,7 +48,7 @@ export class PubkeyIndexMap { * If pubkey caches are empty: SLOW CODE - 🐢 */ export function syncPubkeys( - state: BeaconStateAllForks, + validators: phase0.Validator[], pubkey2index: PubkeyIndexMap, index2pubkey: Index2PubkeyCache ): void { @@ -61,16 +56,14 @@ export function syncPubkeys( throw new Error(`Pubkey indices have fallen out of sync: ${pubkey2index.size} != ${index2pubkey.length}`); } - // Get the validators sub tree once for all the loop - const validators = state.validators; - - const newCount = state.validators.length; + const newCount = validators.length; + index2pubkey.length = newCount; for (let i = pubkey2index.size; i < newCount; i++) { - const pubkey = validators.getReadonly(i).pubkey; + const pubkey = validators[i].pubkey; pubkey2index.set(pubkey, i); // Pubkeys must be checked for group + inf. This must be done only once when the validator deposit is processed. // Afterwards any public key is the state consider validated. 
// > Do not do any validation here - index2pubkey.push(PublicKey.fromBytes(pubkey)); // Optimize for aggregation + index2pubkey[i] = PublicKey.fromBytes(pubkey); // Optimize for aggregation } } diff --git a/packages/state-transition/src/cache/stateCache.ts b/packages/state-transition/src/cache/stateCache.ts index f4e637e5d665..5412675352e9 100644 --- a/packages/state-transition/src/cache/stateCache.ts +++ b/packages/state-transition/src/cache/stateCache.ts @@ -10,6 +10,7 @@ import { BeaconStateBellatrix, BeaconStateCapella, BeaconStateDeneb, + BeaconStateElectra, } from "./types.js"; import {RewardCache, createEmptyRewardCache} from "./rewardCache.js"; @@ -130,11 +131,13 @@ export type CachedBeaconStateAltair = CachedBeaconState; export type CachedBeaconStateBellatrix = CachedBeaconState; export type CachedBeaconStateCapella = CachedBeaconState; export type CachedBeaconStateDeneb = CachedBeaconState; +export type CachedBeaconStateElectra = CachedBeaconState; export type CachedBeaconStateAllForks = CachedBeaconState; export type CachedBeaconStateExecutions = CachedBeaconState; /** * Create CachedBeaconState computing a new EpochCache instance + * TODO ELECTRA: rename this to createFinalizedCachedBeaconState() as it's intended for finalized state only */ export function createCachedBeaconState( state: T, @@ -158,7 +161,7 @@ export function createCachedBeaconState( * Create a CachedBeaconState given a cached seed state and state bytes * This guarantees that the returned state shares the same tree with the seed state * Check loadState() api for more details - * // TODO: rename to loadUnfinalizedCachedBeaconState() due to EIP-6110 + * // TODO: rename to loadUnfinalizedCachedBeaconState() due to ELECTRA */ export function loadCachedBeaconState( cachedSeedState: T, @@ -172,7 +175,7 @@ export function loadCachedBeaconState; @@ -81,8 +82,8 @@ function computeSyncCommitteeIndices( const pubkeys = syncCommittee.pubkeys.getAllReadonly(); for (const pubkey of pubkeys) { const 
validatorIndex = pubkey2index.get(pubkey); - if (validatorIndex === undefined) { - throw Error(`SyncCommittee pubkey is unknown ${toHexString(pubkey)}`); + if (validatorIndex === null) { + throw Error(`SyncCommittee pubkey is unknown ${toPubkeyHex(pubkey)}`); } validatorIndices.push(validatorIndex); diff --git a/packages/state-transition/src/cache/types.ts b/packages/state-transition/src/cache/types.ts index d6d8a3c37904..b3fe6fc8ed5b 100644 --- a/packages/state-transition/src/cache/types.ts +++ b/packages/state-transition/src/cache/types.ts @@ -8,6 +8,7 @@ export type BeaconStateAltair = CompositeViewDU>; export type BeaconStateCapella = CompositeViewDU>; export type BeaconStateDeneb = CompositeViewDU>; +export type BeaconStateElectra = CompositeViewDU>; export type BeaconStateAllForks = CompositeViewDU>; export type BeaconStateExecutions = CompositeViewDU>; diff --git a/packages/state-transition/src/epoch/index.ts b/packages/state-transition/src/epoch/index.ts index b55ebe291fb9..bfb415b9ed6a 100644 --- a/packages/state-transition/src/epoch/index.ts +++ b/packages/state-transition/src/epoch/index.ts @@ -11,6 +11,7 @@ import { CachedBeaconStateAltair, CachedBeaconStatePhase0, EpochTransitionCache, + CachedBeaconStateElectra, } from "../types.js"; import {BeaconStateTransitionMetrics} from "../metrics.js"; import {processEffectiveBalanceUpdates} from "./processEffectiveBalanceUpdates.js"; @@ -27,6 +28,8 @@ import {processRewardsAndPenalties} from "./processRewardsAndPenalties.js"; import {processSlashings} from "./processSlashings.js"; import {processSlashingsReset} from "./processSlashingsReset.js"; import {processSyncCommitteeUpdates} from "./processSyncCommitteeUpdates.js"; +import {processPendingBalanceDeposits} from "./processPendingBalanceDeposits.js"; +import {processPendingConsolidations} from "./processPendingConsolidations.js"; // For spec tests export {getRewardsAndPenalties} from "./processRewardsAndPenalties.js"; @@ -45,6 +48,8 @@ export { 
processParticipationFlagUpdates, processSyncCommitteeUpdates, processHistoricalSummariesUpdate, + processPendingBalanceDeposits, + processPendingConsolidations, }; export {computeUnrealizedCheckpoints} from "./computeUnrealizedCheckpoints.js"; @@ -65,6 +70,8 @@ export enum EpochTransitionStep { processEffectiveBalanceUpdates = "processEffectiveBalanceUpdates", processParticipationFlagUpdates = "processParticipationFlagUpdates", processSyncCommitteeUpdates = "processSyncCommitteeUpdates", + processPendingBalanceDeposits = "processPendingBalanceDeposits", + processPendingConsolidations = "processPendingConsolidations", } export function processEpoch( @@ -76,7 +83,7 @@ export function processEpoch( // state.slashings is initially a Gwei (BigInt) vector, however since Nov 2023 it's converted to UintNum64 (number) vector in the state transition because: // - state.slashings[nextEpoch % EPOCHS_PER_SLASHINGS_VECTOR] is reset per epoch in processSlashingsReset() // - max slashed validators per epoch is SLOTS_PER_EPOCH * MAX_ATTESTER_SLASHINGS * MAX_VALIDATORS_PER_COMMITTEE which is 32 * 2 * 2048 = 131072 on mainnet - // - with that and 32_000_000_000 MAX_EFFECTIVE_BALANCE, it still fits in a number given that Math.floor(Number.MAX_SAFE_INTEGER / 32_000_000_000) = 281474 + // - with that and 32_000_000_000 MAX_EFFECTIVE_BALANCE or 2048_000_000_000 MAX_EFFECTIVE_BALANCE_ELECTRA, it still fits in a number given that Math.floor(Number.MAX_SAFE_INTEGER / 32_000_000_000) = 281474 if (maxValidatorsPerStateSlashing > maxSafeValidators) { throw new Error("Lodestar does not support this network, parameters don't fit number value inside state.slashings"); } @@ -100,7 +107,7 @@ export function processEpoch( // processRewardsAndPenalties(state, cache); { const timer = metrics?.epochTransitionStepTime.startTimer({step: EpochTransitionStep.processRegistryUpdates}); - processRegistryUpdates(state, cache); + processRegistryUpdates(fork, state, cache); timer?.(); } @@ -120,12 +127,32 @@ 
export function processEpoch( processEth1DataReset(state, cache); + if (fork >= ForkSeq.electra) { + const stateElectra = state as CachedBeaconStateElectra; + { + const timer = metrics?.epochTransitionStepTime.startTimer({ + step: EpochTransitionStep.processPendingBalanceDeposits, + }); + processPendingBalanceDeposits(stateElectra, cache); + timer?.(); + } + + { + const timer = metrics?.epochTransitionStepTime.startTimer({ + step: EpochTransitionStep.processPendingConsolidations, + }); + processPendingConsolidations(stateElectra, cache); + timer?.(); + } + } + { const timer = metrics?.epochTransitionStepTime.startTimer({ step: EpochTransitionStep.processEffectiveBalanceUpdates, }); - processEffectiveBalanceUpdates(state, cache); + const numUpdate = processEffectiveBalanceUpdates(fork, state, cache); timer?.(); + metrics?.numEffectiveBalanceUpdates.set(numUpdate); } processSlashingsReset(state, cache); @@ -152,7 +179,7 @@ export function processEpoch( const timer = metrics?.epochTransitionStepTime.startTimer({ step: EpochTransitionStep.processSyncCommitteeUpdates, }); - processSyncCommitteeUpdates(state as CachedBeaconStateAltair); + processSyncCommitteeUpdates(fork, state as CachedBeaconStateAltair); timer?.(); } } diff --git a/packages/state-transition/src/epoch/processEffectiveBalanceUpdates.ts b/packages/state-transition/src/epoch/processEffectiveBalanceUpdates.ts index 5f1df35b7215..0ea4b49dddf4 100644 --- a/packages/state-transition/src/epoch/processEffectiveBalanceUpdates.ts +++ b/packages/state-transition/src/epoch/processEffectiveBalanceUpdates.ts @@ -5,9 +5,12 @@ import { HYSTERESIS_QUOTIENT, HYSTERESIS_UPWARD_MULTIPLIER, MAX_EFFECTIVE_BALANCE, + MAX_EFFECTIVE_BALANCE_ELECTRA, + MIN_ACTIVATION_BALANCE, TIMELY_TARGET_FLAG_INDEX, } from "@lodestar/params"; import {EpochTransitionCache, CachedBeaconStateAllForks, BeaconStateAltair} from "../types.js"; +import {hasCompoundingWithdrawalCredential} from "../util/electra.js"; /** Same to 
https://github.com/ethereum/eth2.0-specs/blob/v1.1.0-alpha.5/specs/altair/beacon-chain.md#has_flag */ const TIMELY_TARGET = 1 << TIMELY_TARGET_FLAG_INDEX; @@ -20,8 +23,14 @@ const TIMELY_TARGET = 1 << TIMELY_TARGET_FLAG_INDEX; * * - On normal mainnet conditions 0 validators change their effective balance * - In case of big innactivity event a medium portion of validators may have their effectiveBalance updated + * + * Return number of validators updated */ -export function processEffectiveBalanceUpdates(state: CachedBeaconStateAllForks, cache: EpochTransitionCache): void { +export function processEffectiveBalanceUpdates( + fork: ForkSeq, + state: CachedBeaconStateAllForks, + cache: EpochTransitionCache +): number { const HYSTERESIS_INCREMENT = EFFECTIVE_BALANCE_INCREMENT / HYSTERESIS_QUOTIENT; const DOWNWARD_THRESHOLD = HYSTERESIS_INCREMENT * HYSTERESIS_DOWNWARD_MULTIPLIER; const UPWARD_THRESHOLD = HYSTERESIS_INCREMENT * HYSTERESIS_UPWARD_MULTIPLIER; @@ -32,10 +41,14 @@ export function processEffectiveBalanceUpdates(state: CachedBeaconStateAllForks, // update effective balances with hysteresis - // epochTransitionCache.balances is set in processRewardsAndPenalties(), so it's recycled here for performance. - // It defaults to `state.balances.getAll()` to make Typescript happy and for spec tests + // epochTransitionCache.balances is initialized in processRewardsAndPenalties() + // and updated in processPendingBalanceDeposits() and processPendingConsolidations() + // so it's recycled here for performance. const balances = cache.balances ?? state.balances.getAll(); + const currentEpochValidators = cache.validators; + const newCompoundingValidators = cache.newCompoundingValidators ?? 
new Set(); + let numUpdate = 0; for (let i = 0, len = balances.length; i < len; i++) { const balance = balances[i]; @@ -43,16 +56,28 @@ export function processEffectiveBalanceUpdates(state: CachedBeaconStateAllForks, let effectiveBalanceIncrement = effectiveBalanceIncrements[i]; let effectiveBalance = effectiveBalanceIncrement * EFFECTIVE_BALANCE_INCREMENT; + let effectiveBalanceLimit: number; + if (fork < ForkSeq.electra) { + effectiveBalanceLimit = MAX_EFFECTIVE_BALANCE; + } else { + // from electra, effectiveBalanceLimit is per validator + const isCompoundingValidator = + hasCompoundingWithdrawalCredential(currentEpochValidators[i].withdrawalCredentials) || + newCompoundingValidators.has(i); + effectiveBalanceLimit = isCompoundingValidator ? MAX_EFFECTIVE_BALANCE_ELECTRA : MIN_ACTIVATION_BALANCE; + } + if ( // Too big effectiveBalance > balance + DOWNWARD_THRESHOLD || // Too small. Check effectiveBalance < MAX_EFFECTIVE_BALANCE to prevent unnecessary updates - (effectiveBalance < MAX_EFFECTIVE_BALANCE && effectiveBalance < balance - UPWARD_THRESHOLD) + (effectiveBalance < effectiveBalanceLimit && effectiveBalance + UPWARD_THRESHOLD < balance) ) { - effectiveBalance = Math.min(balance - (balance % EFFECTIVE_BALANCE_INCREMENT), MAX_EFFECTIVE_BALANCE); // Update the state tree // Should happen rarely, so it's fine to update the tree const validator = validators.get(i); + + effectiveBalance = Math.min(balance - (balance % EFFECTIVE_BALANCE_INCREMENT), effectiveBalanceLimit); validator.effectiveBalance = effectiveBalance; // Also update the fast cached version const newEffectiveBalanceIncrement = Math.floor(effectiveBalance / EFFECTIVE_BALANCE_INCREMENT); @@ -76,6 +101,7 @@ export function processEffectiveBalanceUpdates(state: CachedBeaconStateAllForks, effectiveBalanceIncrement = newEffectiveBalanceIncrement; effectiveBalanceIncrements[i] = effectiveBalanceIncrement; + numUpdate++; } // TODO: Do this in afterEpochTransitionCache, looping a Uint8Array should be very 
cheap @@ -86,4 +112,5 @@ export function processEffectiveBalanceUpdates(state: CachedBeaconStateAllForks, } cache.nextEpochTotalActiveBalanceByIncrement = nextEpochTotalActiveBalanceByIncrement; + return numUpdate; } diff --git a/packages/state-transition/src/epoch/processPendingBalanceDeposits.ts b/packages/state-transition/src/epoch/processPendingBalanceDeposits.ts new file mode 100644 index 000000000000..bef3ec0b2724 --- /dev/null +++ b/packages/state-transition/src/epoch/processPendingBalanceDeposits.ts @@ -0,0 +1,70 @@ +import {FAR_FUTURE_EPOCH} from "@lodestar/params"; +import {CachedBeaconStateElectra, EpochTransitionCache} from "../types.js"; +import {increaseBalance} from "../util/balance.js"; +import {getActivationExitChurnLimit} from "../util/validator.js"; + +/** + * Starting from Electra: + * Process pending balance deposits from state subject to churn limit and depsoitBalanceToConsume. + * For each eligible `deposit`, call `increaseBalance()`. + * Remove the processed deposits from `state.pendingBalanceDeposits`. 
+ * Update `state.depositBalanceToConsume` for the next epoch + * + * TODO Electra: Update ssz library to support batch push to `pendingBalanceDeposits` + */ +export function processPendingBalanceDeposits(state: CachedBeaconStateElectra, cache: EpochTransitionCache): void { + const nextEpoch = state.epochCtx.epoch + 1; + const availableForProcessing = state.depositBalanceToConsume + BigInt(getActivationExitChurnLimit(state.epochCtx)); + let processedAmount = 0n; + let nextDepositIndex = 0; + const depositsToPostpone = []; + const validators = state.validators; + const cachedBalances = cache.balances; + + for (const deposit of state.pendingBalanceDeposits.getAllReadonly()) { + const {amount, index: depositIndex} = deposit; + const validator = validators.getReadonly(depositIndex); + + // Validator is exiting, postpone the deposit until after withdrawable epoch + if (validator.exitEpoch < FAR_FUTURE_EPOCH) { + if (nextEpoch <= validator.withdrawableEpoch) { + depositsToPostpone.push(deposit); + } else { + // Deposited balance will never become active. Increase balance but do not consume churn + increaseBalance(state, depositIndex, Number(amount)); + if (cachedBalances) { + cachedBalances[depositIndex] += Number(amount); + } + } + } else { + // Validator is not exiting, attempt to process deposit + if (processedAmount + amount > availableForProcessing) { + // Deposit does not fit in the churn, no more deposit processing in this epoch. + break; + } else { + // Deposit fits in the churn, process it. Increase balance and consume churn. + increaseBalance(state, depositIndex, Number(amount)); + if (cachedBalances) { + cachedBalances[depositIndex] += Number(amount); + } + processedAmount = processedAmount + amount; + } + } + // Regardless of how the deposit was handled, we move on in the queue. 
+ nextDepositIndex++; + } + + const remainingPendingBalanceDeposits = state.pendingBalanceDeposits.sliceFrom(nextDepositIndex); + state.pendingBalanceDeposits = remainingPendingBalanceDeposits; + + if (remainingPendingBalanceDeposits.length === 0) { + state.depositBalanceToConsume = 0n; + } else { + state.depositBalanceToConsume = availableForProcessing - processedAmount; + } + + // TODO Electra: add a function in ListCompositeTreeView to support batch push operation + for (const deposit of depositsToPostpone) { + state.pendingBalanceDeposits.push(deposit); + } +} diff --git a/packages/state-transition/src/epoch/processPendingConsolidations.ts b/packages/state-transition/src/epoch/processPendingConsolidations.ts new file mode 100644 index 000000000000..28178a509bba --- /dev/null +++ b/packages/state-transition/src/epoch/processPendingConsolidations.ts @@ -0,0 +1,56 @@ +import {ValidatorIndex} from "@lodestar/types"; +import {CachedBeaconStateElectra, EpochTransitionCache} from "../types.js"; +import {decreaseBalance, increaseBalance} from "../util/balance.js"; +import {getActiveBalance} from "../util/validator.js"; +import {switchToCompoundingValidator} from "../util/electra.js"; + +/** + * Starting from Electra: + * Process every `pendingConsolidation` in `state.pendingConsolidations`. + * Churn limit was applied when enqueueing so we don't care about the limit here + * However we only process consolidations up to current epoch + * + * For each valid `pendingConsolidation`, update withdrawal credential of target + * validator to compounding, decrease balance of source validator and increase balance + * of target validator. 
+ * + * Dequeue all processed consolidations from `state.pendingConsolidation` + * + */ +export function processPendingConsolidations(state: CachedBeaconStateElectra, cache: EpochTransitionCache): void { + const nextEpoch = state.epochCtx.epoch + 1; + let nextPendingConsolidation = 0; + const validators = state.validators; + const cachedBalances = cache.balances; + const newCompoundingValidators = new Set(); + + for (const pendingConsolidation of state.pendingConsolidations.getAllReadonly()) { + const {sourceIndex, targetIndex} = pendingConsolidation; + const sourceValidator = validators.getReadonly(sourceIndex); + + if (sourceValidator.slashed) { + nextPendingConsolidation++; + continue; + } + + if (sourceValidator.withdrawableEpoch > nextEpoch) { + break; + } + // Churn any target excess active balance of target and raise its max + switchToCompoundingValidator(state, targetIndex); + newCompoundingValidators.add(targetIndex); + // Move active balance to target. Excess balance is withdrawable. 
+ const activeBalance = getActiveBalance(state, sourceIndex); + decreaseBalance(state, sourceIndex, activeBalance); + increaseBalance(state, targetIndex, activeBalance); + if (cachedBalances) { + cachedBalances[sourceIndex] -= activeBalance; + cachedBalances[targetIndex] += activeBalance; + } + + nextPendingConsolidation++; + } + + cache.newCompoundingValidators = newCompoundingValidators; + state.pendingConsolidations = state.pendingConsolidations.sliceFrom(nextPendingConsolidation); +} diff --git a/packages/state-transition/src/epoch/processRegistryUpdates.ts b/packages/state-transition/src/epoch/processRegistryUpdates.ts index 905e7b567c01..d2e93632dabe 100644 --- a/packages/state-transition/src/epoch/processRegistryUpdates.ts +++ b/packages/state-transition/src/epoch/processRegistryUpdates.ts @@ -1,3 +1,4 @@ +import {ForkSeq} from "@lodestar/params"; import {computeActivationExitEpoch} from "../util/index.js"; import {initiateValidatorExit} from "../block/index.js"; import {EpochTransitionCache, CachedBeaconStateAllForks} from "../types.js"; @@ -16,7 +17,11 @@ import {EpochTransitionCache, CachedBeaconStateAllForks} from "../types.js"; * - indicesEligibleForActivationQueue: 0 * - indicesToEject: 0 */ -export function processRegistryUpdates(state: CachedBeaconStateAllForks, cache: EpochTransitionCache): void { +export function processRegistryUpdates( + fork: ForkSeq, + state: CachedBeaconStateAllForks, + cache: EpochTransitionCache +): void { const {epochCtx} = state; // Get the validators sub tree once for all the loop @@ -28,7 +33,7 @@ export function processRegistryUpdates(state: CachedBeaconStateAllForks, cache: for (const index of cache.indicesToEject) { // set validator exit epoch and withdrawable epoch // TODO: Figure out a way to quickly set properties on the validators tree - initiateValidatorExit(state, validators.get(index)); + initiateValidatorExit(fork, state, validators.get(index)); } // set new activation eligibilities @@ -38,7 +43,10 @@ export 
function processRegistryUpdates(state: CachedBeaconStateAllForks, cache: const finalityEpoch = state.finalizedCheckpoint.epoch; // this avoids an array allocation compared to `slice(0, epochCtx.activationChurnLimit)` - const len = Math.min(cache.indicesEligibleForActivation.length, epochCtx.activationChurnLimit); + const len = + fork < ForkSeq.electra + ? Math.min(cache.indicesEligibleForActivation.length, epochCtx.activationChurnLimit) + : cache.indicesEligibleForActivation.length; const activationEpoch = computeActivationExitEpoch(cache.currentEpoch); // dequeue validators for activation up to churn limit for (let i = 0; i < len; i++) { diff --git a/packages/state-transition/src/epoch/processRewardsAndPenalties.ts b/packages/state-transition/src/epoch/processRewardsAndPenalties.ts index 61680b81002a..6c5d5aa3cb5a 100644 --- a/packages/state-transition/src/epoch/processRewardsAndPenalties.ts +++ b/packages/state-transition/src/epoch/processRewardsAndPenalties.ts @@ -28,7 +28,8 @@ export function processRewardsAndPenalties( const balances = state.balances.getAll(); for (let i = 0, len = rewards.length; i < len; i++) { - balances[i] += rewards[i] - penalties[i] - (slashingPenalties[i] ?? 0); + const result = balances[i] + rewards[i] - penalties[i] - (slashingPenalties[i] ?? 0); + balances[i] = Math.max(result, 0); } // important: do not change state one balance at a time. 
Set them all at once, constructing the tree in one go diff --git a/packages/state-transition/src/epoch/processSlashings.ts b/packages/state-transition/src/epoch/processSlashings.ts index ba4b483dffc2..23ee815cabb3 100644 --- a/packages/state-transition/src/epoch/processSlashings.ts +++ b/packages/state-transition/src/epoch/processSlashings.ts @@ -50,6 +50,10 @@ export function processSlashings( totalBalanceByIncrement ); const increment = EFFECTIVE_BALANCE_INCREMENT; + + const penaltyPerEffectiveBalanceIncrement = Math.floor( + (adjustedTotalSlashingBalanceByIncrement * increment) / totalBalanceByIncrement + ); const penalties: number[] = []; const penaltiesByEffectiveBalanceIncrement = new Map(); @@ -57,8 +61,12 @@ export function processSlashings( const effectiveBalanceIncrement = effectiveBalanceIncrements[index]; let penalty = penaltiesByEffectiveBalanceIncrement.get(effectiveBalanceIncrement); if (penalty === undefined) { - const penaltyNumeratorByIncrement = effectiveBalanceIncrement * adjustedTotalSlashingBalanceByIncrement; - penalty = Math.floor(penaltyNumeratorByIncrement / totalBalanceByIncrement) * increment; + if (fork < ForkSeq.electra) { + const penaltyNumeratorByIncrement = effectiveBalanceIncrement * adjustedTotalSlashingBalanceByIncrement; + penalty = Math.floor(penaltyNumeratorByIncrement / totalBalanceByIncrement) * increment; + } else { + penalty = penaltyPerEffectiveBalanceIncrement * effectiveBalanceIncrement; + } penaltiesByEffectiveBalanceIncrement.set(effectiveBalanceIncrement, penalty); } diff --git a/packages/state-transition/src/epoch/processSyncCommitteeUpdates.ts b/packages/state-transition/src/epoch/processSyncCommitteeUpdates.ts index b3fd9b45053c..f01f2055420a 100644 --- a/packages/state-transition/src/epoch/processSyncCommitteeUpdates.ts +++ b/packages/state-transition/src/epoch/processSyncCommitteeUpdates.ts @@ -1,5 +1,5 @@ import {aggregateSerializedPublicKeys} from "@chainsafe/blst"; -import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD} 
from "@lodestar/params"; +import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD, ForkSeq} from "@lodestar/params"; import {ssz} from "@lodestar/types"; import {getNextSyncCommitteeIndices} from "../util/seed.js"; import {CachedBeaconStateAltair} from "../types.js"; @@ -10,23 +10,23 @@ import {CachedBeaconStateAltair} from "../types.js"; * PERF: Once every `EPOCHS_PER_SYNC_COMMITTEE_PERIOD`, do an expensive operation to compute the next committee. * Calculating the next sync committee has a proportional cost to $VALIDATOR_COUNT */ -export function processSyncCommitteeUpdates(state: CachedBeaconStateAltair): void { +export function processSyncCommitteeUpdates(fork: ForkSeq, state: CachedBeaconStateAltair): void { const nextEpoch = state.epochCtx.epoch + 1; if (nextEpoch % EPOCHS_PER_SYNC_COMMITTEE_PERIOD === 0) { - const activeValidatorIndices = state.epochCtx.nextShuffling.activeIndices; + const activeValidatorIndices = state.epochCtx.nextActiveIndices; const {effectiveBalanceIncrements} = state.epochCtx; const nextSyncCommitteeIndices = getNextSyncCommitteeIndices( + fork, state, activeValidatorIndices, effectiveBalanceIncrements ); + const validators = state.validators; // Using the index2pubkey cache is slower because it needs the serialized pubkey. 
- const nextSyncCommitteePubkeys = nextSyncCommitteeIndices.map( - (index) => state.validators.getReadonly(index).pubkey - ); + const nextSyncCommitteePubkeys = nextSyncCommitteeIndices.map((index) => validators.getReadonly(index).pubkey); // Rotate syncCommittee in state state.currentSyncCommittee = state.nextSyncCommittee; diff --git a/packages/state-transition/src/index.ts b/packages/state-transition/src/index.ts index 0ef460e784af..600bbf173462 100644 --- a/packages/state-transition/src/index.ts +++ b/packages/state-transition/src/index.ts @@ -11,6 +11,7 @@ export type { CachedBeaconStateBellatrix, CachedBeaconStateCapella, CachedBeaconStateDeneb, + CachedBeaconStateElectra, CachedBeaconStateAllForks, CachedBeaconStateExecutions, // Non-cached states @@ -19,6 +20,7 @@ export type { BeaconStateBellatrix, BeaconStateCapella, BeaconStateDeneb, + BeaconStateElectra, BeaconStateAllForks, BeaconStateExecutions, } from "./types.js"; @@ -39,10 +41,15 @@ export { EpochCacheError, EpochCacheErrorCode, } from "./cache/epochCache.js"; +export {toMemoryEfficientHexStr} from "./cache/pubkeyCache.js"; export {type EpochTransitionCache, beforeProcessEpoch} from "./cache/epochTransitionCache.js"; // Aux data-structures -export {PubkeyIndexMap, type Index2PubkeyCache} from "./cache/pubkeyCache.js"; +export { + type Index2PubkeyCache, + type UnfinalizedPubkeyIndexMap, + newUnfinalizedPubkeyIndexMap, +} from "./cache/pubkeyCache.js"; export { type EffectiveBalanceIncrements, diff --git a/packages/state-transition/src/metrics.ts b/packages/state-transition/src/metrics.ts index 12cec46d9a49..a5e5463231fa 100644 --- a/packages/state-transition/src/metrics.ts +++ b/packages/state-transition/src/metrics.ts @@ -11,6 +11,7 @@ export type BeaconStateTransitionMetrics = { processBlockTime: Histogram; processBlockCommitTime: Histogram; stateHashTreeRootTime: Histogram<{source: StateHashTreeRootSource}>; + numEffectiveBalanceUpdates: Gauge; preStateBalancesNodesPopulatedMiss: Gauge<{source: 
StateCloneSource}>; preStateBalancesNodesPopulatedHit: Gauge<{source: StateCloneSource}>; preStateValidatorsNodesPopulatedMiss: Gauge<{source: StateCloneSource}>; @@ -30,6 +31,11 @@ export type BeaconStateTransitionMetrics = { ) => void; }; +export type EpochCacheMetrics = { + finalizedPubkeyDuplicateInsert: Gauge; + newUnFinalizedPubkey: Gauge; +}; + export function onStateCloneMetrics( state: CachedBeaconStateAllForks, metrics: BeaconStateTransitionMetrics, diff --git a/packages/state-transition/src/signatureSets/attesterSlashings.ts b/packages/state-transition/src/signatureSets/attesterSlashings.ts index f0de50e5d0b2..8088c2522282 100644 --- a/packages/state-transition/src/signatureSets/attesterSlashings.ts +++ b/packages/state-transition/src/signatureSets/attesterSlashings.ts @@ -1,4 +1,4 @@ -import {SignedBeaconBlock, phase0, ssz} from "@lodestar/types"; +import {SignedBeaconBlock, ssz, AttesterSlashing, IndexedAttestationBigint} from "@lodestar/types"; import {DOMAIN_BEACON_ATTESTER} from "@lodestar/params"; import {computeSigningRoot, computeStartSlotAtEpoch, ISignatureSet, SignatureSetType} from "../util/index.js"; import {CachedBeaconStateAllForks} from "../types.js"; @@ -16,7 +16,7 @@ export function getAttesterSlashingsSignatureSets( /** Get signature sets from a single AttesterSlashing object */ export function getAttesterSlashingSignatureSets( state: CachedBeaconStateAllForks, - attesterSlashing: phase0.AttesterSlashing + attesterSlashing: AttesterSlashing ): ISignatureSet[] { return [attesterSlashing.attestation1, attesterSlashing.attestation2].map((attestation) => getIndexedAttestationBigintSignatureSet(state, attestation) @@ -25,15 +25,14 @@ export function getAttesterSlashingSignatureSets( export function getIndexedAttestationBigintSignatureSet( state: CachedBeaconStateAllForks, - indexedAttestation: phase0.IndexedAttestationBigint + indexedAttestation: IndexedAttestationBigint ): ISignatureSet { - const {index2pubkey} = state.epochCtx; const slot 
= computeStartSlotAtEpoch(Number(indexedAttestation.data.target.epoch as bigint)); const domain = state.config.getDomain(state.slot, DOMAIN_BEACON_ATTESTER, slot); return { type: SignatureSetType.aggregate, - pubkeys: indexedAttestation.attestingIndices.map((i) => index2pubkey[i]), + pubkeys: indexedAttestation.attestingIndices.map((i) => state.epochCtx.index2pubkey[i]), signingRoot: computeSigningRoot(ssz.phase0.AttestationDataBigint, indexedAttestation.data, domain), signature: indexedAttestation.signature, }; diff --git a/packages/state-transition/src/signatureSets/index.ts b/packages/state-transition/src/signatureSets/index.ts index 983e131e00e6..c883bb0587f8 100644 --- a/packages/state-transition/src/signatureSets/index.ts +++ b/packages/state-transition/src/signatureSets/index.ts @@ -31,6 +31,9 @@ export function getBlockSignatureSets( skipProposerSignature?: boolean; } ): ISignatureSet[] { + // fork based validations + const fork = state.config.getForkSeq(signedBlock.message.slot); + const signatureSets = [ getRandaoRevealSignatureSet(state, signedBlock.message), ...getProposerSlashingsSignatureSets(state, signedBlock), @@ -43,9 +46,6 @@ export function getBlockSignatureSets( signatureSets.push(getBlockProposerSignatureSet(state, signedBlock)); } - // fork based validations - const fork = state.config.getForkSeq(signedBlock.message.slot); - // Only after altair fork, validate tSyncCommitteeSignature if (fork >= ForkSeq.altair) { const syncCommitteeSignatureSet = getSyncCommitteeSignatureSet( diff --git a/packages/state-transition/src/signatureSets/indexedAttestation.ts b/packages/state-transition/src/signatureSets/indexedAttestation.ts index 9ae6627d0b56..86535fece8b8 100644 --- a/packages/state-transition/src/signatureSets/indexedAttestation.ts +++ b/packages/state-transition/src/signatureSets/indexedAttestation.ts @@ -41,7 +41,11 @@ export function getAttestationsSignatureSets( state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock ): 
ISignatureSet[] { + // TODO: figure how to get attesting indices of an attestation once per block processing return signedBlock.message.body.attestations.map((attestation) => - getIndexedAttestationSignatureSet(state, state.epochCtx.getIndexedAttestation(attestation)) + getIndexedAttestationSignatureSet( + state, + state.epochCtx.getIndexedAttestation(state.config.getForkSeq(signedBlock.message.slot), attestation) + ) ); } diff --git a/packages/state-transition/src/slot/index.ts b/packages/state-transition/src/slot/index.ts index 6c4add1d1230..b05bd7ac93f2 100644 --- a/packages/state-transition/src/slot/index.ts +++ b/packages/state-transition/src/slot/index.ts @@ -7,6 +7,7 @@ export {upgradeStateToAltair} from "./upgradeStateToAltair.js"; export {upgradeStateToBellatrix} from "./upgradeStateToBellatrix.js"; export {upgradeStateToCapella} from "./upgradeStateToCapella.js"; export {upgradeStateToDeneb} from "./upgradeStateToDeneb.js"; +export {upgradeStateToElectra} from "./upgradeStateToElectra.js"; /** * Dial state to next slot. 
Common for all forks diff --git a/packages/state-transition/src/slot/upgradeStateToAltair.ts b/packages/state-transition/src/slot/upgradeStateToAltair.ts index 0afa43930ef0..cfe43e097939 100644 --- a/packages/state-transition/src/slot/upgradeStateToAltair.ts +++ b/packages/state-transition/src/slot/upgradeStateToAltair.ts @@ -70,8 +70,9 @@ export function upgradeStateToAltair(statePhase0: CachedBeaconStatePhase0): Cach stateAltair.inactivityScores = ssz.altair.InactivityScores.toViewDU(newZeroedArray(validatorCount)); const {syncCommittee, indices} = getNextSyncCommittee( + ForkSeq.altair, stateAltair, - stateAltair.epochCtx.nextShuffling.activeIndices, + stateAltair.epochCtx.nextActiveIndices, stateAltair.epochCtx.effectiveBalanceIncrements ); const syncCommitteeView = ssz.altair.SyncCommittee.toViewDU(syncCommittee); diff --git a/packages/state-transition/src/slot/upgradeStateToElectra.ts b/packages/state-transition/src/slot/upgradeStateToElectra.ts new file mode 100644 index 000000000000..0bd36a909b46 --- /dev/null +++ b/packages/state-transition/src/slot/upgradeStateToElectra.ts @@ -0,0 +1,160 @@ +import {Epoch, ValidatorIndex, ssz} from "@lodestar/types"; +import {FAR_FUTURE_EPOCH, UNSET_DEPOSIT_REQUESTS_START_INDEX} from "@lodestar/params"; +import {CachedBeaconStateDeneb} from "../types.js"; +import {CachedBeaconStateElectra, getCachedBeaconState} from "../cache/stateCache.js"; +import { + hasCompoundingWithdrawalCredential, + queueEntireBalanceAndResetValidator, + queueExcessActiveBalance, +} from "../util/electra.js"; +import {computeActivationExitEpoch} from "../util/epoch.js"; +import {getActivationExitChurnLimit, getConsolidationChurnLimit} from "../util/validator.js"; + +/** + * Upgrade a state from Deneb to Electra. 
+ */ +export function upgradeStateToElectra(stateDeneb: CachedBeaconStateDeneb): CachedBeaconStateElectra { + const {config} = stateDeneb; + + ssz.deneb.BeaconState.commitViewDU(stateDeneb); + const stateElectraCloned = stateDeneb; + + const stateElectraView = ssz.electra.BeaconState.defaultViewDU(); + stateElectraView.genesisTime = stateElectraCloned.genesisTime; + stateElectraView.genesisValidatorsRoot = stateElectraCloned.genesisValidatorsRoot; + stateElectraView.slot = stateElectraCloned.slot; + stateElectraView.fork = ssz.phase0.Fork.toViewDU({ + previousVersion: stateDeneb.fork.currentVersion, + currentVersion: config.ELECTRA_FORK_VERSION, + epoch: stateDeneb.epochCtx.epoch, + }); + stateElectraView.latestBlockHeader = stateElectraCloned.latestBlockHeader; + stateElectraView.blockRoots = stateElectraCloned.blockRoots; + stateElectraView.stateRoots = stateElectraCloned.stateRoots; + stateElectraView.historicalRoots = stateElectraCloned.historicalRoots; + stateElectraView.eth1Data = stateElectraCloned.eth1Data; + stateElectraView.eth1DataVotes = stateElectraCloned.eth1DataVotes; + stateElectraView.eth1DepositIndex = stateElectraCloned.eth1DepositIndex; + stateElectraView.validators = stateElectraCloned.validators; + stateElectraView.balances = stateElectraCloned.balances; + stateElectraView.randaoMixes = stateElectraCloned.randaoMixes; + stateElectraView.slashings = stateElectraCloned.slashings; + stateElectraView.previousEpochParticipation = stateElectraCloned.previousEpochParticipation; + stateElectraView.currentEpochParticipation = stateElectraCloned.currentEpochParticipation; + stateElectraView.justificationBits = stateElectraCloned.justificationBits; + stateElectraView.previousJustifiedCheckpoint = stateElectraCloned.previousJustifiedCheckpoint; + stateElectraView.currentJustifiedCheckpoint = stateElectraCloned.currentJustifiedCheckpoint; + stateElectraView.finalizedCheckpoint = stateElectraCloned.finalizedCheckpoint; + stateElectraView.inactivityScores = 
stateElectraCloned.inactivityScores; + stateElectraView.currentSyncCommittee = stateElectraCloned.currentSyncCommittee; + stateElectraView.nextSyncCommittee = stateElectraCloned.nextSyncCommittee; + stateElectraView.latestExecutionPayloadHeader = stateElectraCloned.latestExecutionPayloadHeader; + stateElectraView.nextWithdrawalIndex = stateDeneb.nextWithdrawalIndex; + stateElectraView.nextWithdrawalValidatorIndex = stateDeneb.nextWithdrawalValidatorIndex; + stateElectraView.historicalSummaries = stateElectraCloned.historicalSummaries; + + // default value of depositRequestsStartIndex is UNSET_DEPOSIT_REQUESTS_START_INDEX + stateElectraView.depositRequestsStartIndex = UNSET_DEPOSIT_REQUESTS_START_INDEX; + stateElectraView.depositBalanceToConsume = BigInt(0); + stateElectraView.exitBalanceToConsume = BigInt(0); + + const validatorsArr = stateElectraView.validators.getAllReadonly(); + const exitEpochs: Epoch[] = []; + + // [EIP-7251]: add validators that are not yet active to pending balance deposits + const preActivation: ValidatorIndex[] = []; + for (let validatorIndex = 0; validatorIndex < validatorsArr.length; validatorIndex++) { + const {activationEpoch, exitEpoch} = validatorsArr[validatorIndex]; + if (activationEpoch === FAR_FUTURE_EPOCH) { + preActivation.push(validatorIndex); + } + if (exitEpoch !== FAR_FUTURE_EPOCH) { + exitEpochs.push(exitEpoch); + } + } + + const currentEpochPre = stateDeneb.epochCtx.epoch; + + if (exitEpochs.length === 0) { + exitEpochs.push(currentEpochPre); + } + stateElectraView.earliestExitEpoch = Math.max(...exitEpochs) + 1; + stateElectraView.consolidationBalanceToConsume = BigInt(0); + stateElectraView.earliestConsolidationEpoch = computeActivationExitEpoch(currentEpochPre); + // stateElectraView.pendingBalanceDeposits = ssz.electra.PendingBalanceDeposits.defaultViewDU(); + // pendingBalanceDeposits, pendingPartialWithdrawals, pendingConsolidations are default values + // TODO-electra: can we improve this? 
+ stateElectraView.commit(); + const tmpElectraState = getCachedBeaconState(stateElectraView, stateDeneb); + stateElectraView.exitBalanceToConsume = BigInt(getActivationExitChurnLimit(tmpElectraState.epochCtx)); + stateElectraView.consolidationBalanceToConsume = BigInt(getConsolidationChurnLimit(tmpElectraState.epochCtx)); + + preActivation.sort((i0, i1) => { + const res = validatorsArr[i0].activationEligibilityEpoch - validatorsArr[i1].activationEligibilityEpoch; + return res !== 0 ? res : i0 - i1; + }); + + for (const validatorIndex of preActivation) { + queueEntireBalanceAndResetValidator(stateElectraView as CachedBeaconStateElectra, validatorIndex); + } + + for (let i = 0; i < validatorsArr.length; i++) { + const validator = validatorsArr[i]; + + // [EIP-7251]: Ensure early adopters of compounding credentials go through the activation churn + const withdrawalCredential = validator.withdrawalCredentials; + if (hasCompoundingWithdrawalCredential(withdrawalCredential)) { + queueExcessActiveBalance(stateElectraView as CachedBeaconStateElectra, i); + } + } + + const stateElectra = getCachedBeaconState(stateElectraView, stateDeneb); + // Commit new added fields ViewDU to the root node + stateElectra.commit(); + // Clear cache to ensure the cache of deneb fields is not used by new ELECTRA fields + stateElectra["clearCache"](); + + return stateElectra; +} + +export function upgradeStateToElectraOriginal(stateDeneb: CachedBeaconStateDeneb): CachedBeaconStateElectra { + const {config} = stateDeneb; + + const stateElectraNode = ssz.deneb.BeaconState.commitViewDU(stateDeneb); + const stateElectraView = ssz.electra.BeaconState.getViewDU(stateElectraNode); + + const stateElectra = getCachedBeaconState(stateElectraView, stateDeneb); + + stateElectra.fork = ssz.phase0.Fork.toViewDU({ + previousVersion: stateDeneb.fork.currentVersion, + currentVersion: config.ELECTRA_FORK_VERSION, + epoch: stateDeneb.epochCtx.epoch, + }); + + // default value of depositRequestsStartIndex is 
UNSET_DEPOSIT_REQUESTS_START_INDEX + stateElectra.depositRequestsStartIndex = UNSET_DEPOSIT_REQUESTS_START_INDEX; + + const validatorsArr = stateElectra.validators.getAllReadonly(); + + for (let i = 0; i < validatorsArr.length; i++) { + const validator = validatorsArr[i]; + + // [EIP-7251]: add validators that are not yet active to pending balance deposits + if (validator.activationEligibilityEpoch === FAR_FUTURE_EPOCH) { + queueEntireBalanceAndResetValidator(stateElectra, i); + } + + // [EIP-7251]: Ensure early adopters of compounding credentials go through the activation churn + const withdrawalCredential = validator.withdrawalCredentials; + if (hasCompoundingWithdrawalCredential(withdrawalCredential)) { + queueExcessActiveBalance(stateElectra, i); + } + } + + // Commit new added fields ViewDU to the root node + stateElectra.commit(); + // Clear cache to ensure the cache of deneb fields is not used by new ELECTRA fields + stateElectra["clearCache"](); + + return stateElectra; +} diff --git a/packages/state-transition/src/stateTransition.ts b/packages/state-transition/src/stateTransition.ts index 78bcaa140c62..f025c685b1a6 100644 --- a/packages/state-transition/src/stateTransition.ts +++ b/packages/state-transition/src/stateTransition.ts @@ -1,6 +1,6 @@ -import {toHexString} from "@chainsafe/ssz"; import {SignedBeaconBlock, SignedBlindedBeaconBlock, Slot, ssz} from "@lodestar/types"; import {SLOTS_PER_EPOCH} from "@lodestar/params"; +import {toRootHex} from "@lodestar/utils"; import {BeaconStateTransitionMetrics, onPostStateMetrics, onStateCloneMetrics} from "./metrics.js"; import {beforeProcessEpoch, EpochTransitionCache, EpochTransitionCacheOpts} from "./cache/epochTransitionCache.js"; import { @@ -9,6 +9,7 @@ import { CachedBeaconStateAltair, CachedBeaconStateBellatrix, CachedBeaconStateCapella, + CachedBeaconStateDeneb, } from "./types.js"; import {computeEpochAtSlot} from "./util/index.js"; import {verifyProposerSignature} from "./signatureSets/index.js"; @@ 
-18,6 +19,7 @@ import { upgradeStateToBellatrix, upgradeStateToCapella, upgradeStateToDeneb, + upgradeStateToElectra, } from "./slot/index.js"; import {processBlock} from "./block/index.js"; import {EpochTransitionStep, processEpoch} from "./epoch/index.js"; @@ -52,6 +54,7 @@ export enum StateHashTreeRootSource { blockTransition = "block_transition", prepareNextSlot = "prepare_next_slot", prepareNextEpoch = "prepare_next_epoch", + regenState = "regen_state", computeNewStateRoot = "compute_new_state_root", } @@ -123,7 +126,7 @@ export function stateTransition( if (!ssz.Root.equals(block.stateRoot, stateRoot)) { throw new Error( - `Invalid state root at slot ${block.slot}, expected=${toHexString(block.stateRoot)}, actual=${toHexString( + `Invalid state root at slot ${block.slot}, expected=${toRootHex(block.stateRoot)}, actual=${toRootHex( stateRoot )}` ); @@ -164,6 +167,25 @@ export function processSlots( /** * All processSlot() logic but separate so stateTransition() can recycle the caches + * + * Epoch transition will be processed at the last slot of an epoch. 
Note that compute_shuffling() is going + * to be executed in parallel (either by napi-rs or worker thread) with processEpoch() like below: + * + * state-transition + * ╔══════════════════════════════════════════════════════════════════════════════════╗ + * ║ beforeProcessEpoch processEpoch afterPRocessEpoch ║ + * ║ |-------------------------|--------------------|-------------------------------|║ + * ║ | | | ║ + * ╚═══════════════════════|═══════════════════════════════|══════════════════════════╝ + * | | + * build() get() + * | | + * ╔═══════════════════════V═══════════════════════════════V═══════════════════════════╗ + * ║ | | ║ + * ║ |-------------------------------| ║ + * ║ compute_shuffling() ║ + * ╚═══════════════════════════════════════════════════════════════════════════════════╝ + * beacon-node ShufflingCache */ function processSlotsWithTransientCache( postState: CachedBeaconStateAllForks, @@ -226,19 +248,22 @@ function processSlotsWithTransientCache( epochTransitionTimer?.(); // Upgrade state if exactly at epoch boundary - const stateSlot = computeEpochAtSlot(postState.slot); - if (stateSlot === config.ALTAIR_FORK_EPOCH) { + const stateEpoch = computeEpochAtSlot(postState.slot); + if (stateEpoch === config.ALTAIR_FORK_EPOCH) { postState = upgradeStateToAltair(postState as CachedBeaconStatePhase0) as CachedBeaconStateAllForks; } - if (stateSlot === config.BELLATRIX_FORK_EPOCH) { + if (stateEpoch === config.BELLATRIX_FORK_EPOCH) { postState = upgradeStateToBellatrix(postState as CachedBeaconStateAltair) as CachedBeaconStateAllForks; } - if (stateSlot === config.CAPELLA_FORK_EPOCH) { + if (stateEpoch === config.CAPELLA_FORK_EPOCH) { postState = upgradeStateToCapella(postState as CachedBeaconStateBellatrix) as CachedBeaconStateAllForks; } - if (stateSlot === config.DENEB_FORK_EPOCH) { + if (stateEpoch === config.DENEB_FORK_EPOCH) { postState = upgradeStateToDeneb(postState as CachedBeaconStateCapella) as CachedBeaconStateAllForks; } + if (stateEpoch === 
config.ELECTRA_FORK_EPOCH) { + postState = upgradeStateToElectra(postState as CachedBeaconStateDeneb) as CachedBeaconStateAllForks; + } } else { postState.slot++; } diff --git a/packages/state-transition/src/types.ts b/packages/state-transition/src/types.ts index 6b6b1f6260b2..d3a1ed69a7a9 100644 --- a/packages/state-transition/src/types.ts +++ b/packages/state-transition/src/types.ts @@ -9,6 +9,7 @@ export type { CachedBeaconStateBellatrix, CachedBeaconStateCapella, CachedBeaconStateDeneb, + CachedBeaconStateElectra, } from "./cache/stateCache.js"; export type { @@ -19,4 +20,5 @@ export type { BeaconStateBellatrix, BeaconStateCapella, BeaconStateDeneb, + BeaconStateElectra, } from "./cache/types.js"; diff --git a/packages/state-transition/src/util/balance.ts b/packages/state-transition/src/util/balance.ts index e305c745ab72..e9b7a06e4130 100644 --- a/packages/state-transition/src/util/balance.ts +++ b/packages/state-transition/src/util/balance.ts @@ -56,8 +56,8 @@ export function getEffectiveBalanceIncrementsZeroInactive( const validatorCount = justifiedState.validators.length; const {effectiveBalanceIncrements} = justifiedState.epochCtx; // Slice up to `validatorCount` since it won't be mutated, nor accessed beyond `validatorCount` - // NOTE: Force to use Uint8Array.slice (copy) instead of Buffer.call (not copy) - const effectiveBalanceIncrementsZeroInactive = Uint8Array.prototype.slice.call( + // NOTE: Force to use Uint16Array.slice (copy) instead of Buffer.call (not copy) + const effectiveBalanceIncrementsZeroInactive = Uint16Array.prototype.slice.call( effectiveBalanceIncrements, 0, validatorCount diff --git a/packages/state-transition/src/util/calculateCommitteeAssignments.ts b/packages/state-transition/src/util/calculateCommitteeAssignments.ts new file mode 100644 index 000000000000..992c5efbdaaa --- /dev/null +++ b/packages/state-transition/src/util/calculateCommitteeAssignments.ts @@ -0,0 +1,43 @@ +import {CommitteeIndex, Slot, ValidatorIndex} from 
"@lodestar/types"; +import {SLOTS_PER_EPOCH} from "@lodestar/params"; +import {EpochShuffling} from "./epochShuffling.js"; + +// Copied from lodestar-api package to avoid depending on the package +export interface AttesterDuty { + validatorIndex: ValidatorIndex; + committeeIndex: CommitteeIndex; + committeeLength: number; + committeesAtSlot: number; + validatorCommitteeIndex: number; + slot: Slot; +} + +export function calculateCommitteeAssignments( + epochShuffling: EpochShuffling, + requestedValidatorIndices: ValidatorIndex[] +): Map { + const requestedValidatorIndicesSet = new Set(requestedValidatorIndices); + const duties = new Map(); + + const epochCommittees = epochShuffling.committees; + for (let epochSlot = 0; epochSlot < SLOTS_PER_EPOCH; epochSlot++) { + const slotCommittees = epochCommittees[epochSlot]; + for (let i = 0, committeesAtSlot = slotCommittees.length; i < committeesAtSlot; i++) { + for (let j = 0, committeeLength = slotCommittees[i].length; j < committeeLength; j++) { + const validatorIndex = slotCommittees[i][j]; + if (requestedValidatorIndicesSet.has(validatorIndex)) { + duties.set(validatorIndex, { + validatorIndex, + committeeLength, + committeesAtSlot, + validatorCommitteeIndex: j, + committeeIndex: i, + slot: epochShuffling.epoch * SLOTS_PER_EPOCH + epochSlot, + }); + } + } + } + } + + return duties; +} diff --git a/packages/state-transition/src/util/computeAnchorCheckpoint.ts b/packages/state-transition/src/util/computeAnchorCheckpoint.ts new file mode 100644 index 000000000000..e2efc18952c2 --- /dev/null +++ b/packages/state-transition/src/util/computeAnchorCheckpoint.ts @@ -0,0 +1,38 @@ +import {ChainForkConfig} from "@lodestar/config"; +import {ssz, phase0} from "@lodestar/types"; +import {GENESIS_SLOT, ZERO_HASH} from "@lodestar/params"; +import {BeaconStateAllForks} from "../types.js"; +import {blockToHeader} from "./blockRoot.js"; +import {computeCheckpointEpochAtStateSlot} from "./epoch.js"; + +export function 
computeAnchorCheckpoint( + config: ChainForkConfig, + anchorState: BeaconStateAllForks +): {checkpoint: phase0.Checkpoint; blockHeader: phase0.BeaconBlockHeader} { + let blockHeader; + let root; + const blockTypes = config.getForkTypes(anchorState.latestBlockHeader.slot); + + if (anchorState.latestBlockHeader.slot === GENESIS_SLOT) { + const block = blockTypes.BeaconBlock.defaultValue(); + block.stateRoot = anchorState.hashTreeRoot(); + blockHeader = blockToHeader(config, block); + root = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blockHeader); + } else { + blockHeader = ssz.phase0.BeaconBlockHeader.clone(anchorState.latestBlockHeader); + if (ssz.Root.equals(blockHeader.stateRoot, ZERO_HASH)) { + blockHeader.stateRoot = anchorState.hashTreeRoot(); + } + root = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blockHeader); + } + + return { + checkpoint: { + root, + // the checkpoint epoch = computeEpochAtSlot(anchorState.slot) + 1 if slot is not at epoch boundary + // this is similar to a process_slots() call + epoch: computeCheckpointEpochAtStateSlot(anchorState.slot), + }, + blockHeader, + }; +} diff --git a/packages/state-transition/src/util/deposit.ts b/packages/state-transition/src/util/deposit.ts new file mode 100644 index 000000000000..e8ef93c515d2 --- /dev/null +++ b/packages/state-transition/src/util/deposit.ts @@ -0,0 +1,24 @@ +import {ForkSeq, MAX_DEPOSITS} from "@lodestar/params"; +import {UintNum64, phase0} from "@lodestar/types"; +import {CachedBeaconStateAllForks, CachedBeaconStateElectra} from "../types.js"; + +export function getEth1DepositCount(state: CachedBeaconStateAllForks, eth1Data?: phase0.Eth1Data): UintNum64 { + const eth1DataToUse = eth1Data ?? 
state.eth1Data; + if (state.config.getForkSeq(state.slot) >= ForkSeq.electra) { + const electraState = state as CachedBeaconStateElectra; + // eth1DataIndexLimit = min(UintNum64, UintBn64) can be safely casted as UintNum64 + // since the result lies within upper and lower bound of UintNum64 + const eth1DataIndexLimit: UintNum64 = + eth1DataToUse.depositCount < electraState.depositRequestsStartIndex + ? eth1DataToUse.depositCount + : Number(electraState.depositRequestsStartIndex); + + if (state.eth1DepositIndex < eth1DataIndexLimit) { + return Math.min(MAX_DEPOSITS, eth1DataIndexLimit - state.eth1DepositIndex); + } else { + return 0; + } + } else { + return Math.min(MAX_DEPOSITS, eth1DataToUse.depositCount - state.eth1DepositIndex); + } +} diff --git a/packages/state-transition/src/util/electra.ts b/packages/state-transition/src/util/electra.ts new file mode 100644 index 000000000000..ac34da6407de --- /dev/null +++ b/packages/state-transition/src/util/electra.ts @@ -0,0 +1,58 @@ +import {COMPOUNDING_WITHDRAWAL_PREFIX, FAR_FUTURE_EPOCH, MIN_ACTIVATION_BALANCE} from "@lodestar/params"; +import {ValidatorIndex, ssz} from "@lodestar/types"; +import {CachedBeaconStateElectra} from "../types.js"; +import {hasEth1WithdrawalCredential} from "./capella.js"; + +export function hasCompoundingWithdrawalCredential(withdrawalCredentials: Uint8Array): boolean { + return withdrawalCredentials[0] === COMPOUNDING_WITHDRAWAL_PREFIX; +} + +export function hasExecutionWithdrawalCredential(withdrawalCredentials: Uint8Array): boolean { + return ( + hasCompoundingWithdrawalCredential(withdrawalCredentials) || hasEth1WithdrawalCredential(withdrawalCredentials) + ); +} + +export function switchToCompoundingValidator(state: CachedBeaconStateElectra, index: ValidatorIndex): void { + const validator = state.validators.get(index); + + if (hasEth1WithdrawalCredential(validator.withdrawalCredentials)) { + // directly modifying the byte leads to ssz missing the modification resulting into + // 
wrong root compute, although slicing can be avoided but anyway this is not going + // to be a hot path so its better to clean slice and avoid side effects + const newWithdrawalCredentials = validator.withdrawalCredentials.slice(); + newWithdrawalCredentials[0] = COMPOUNDING_WITHDRAWAL_PREFIX; + validator.withdrawalCredentials = newWithdrawalCredentials; + queueExcessActiveBalance(state, index); + } +} + +export function queueExcessActiveBalance(state: CachedBeaconStateElectra, index: ValidatorIndex): void { + const balance = state.balances.get(index); + if (balance > MIN_ACTIVATION_BALANCE) { + const excessBalance = balance - MIN_ACTIVATION_BALANCE; + state.balances.set(index, MIN_ACTIVATION_BALANCE); + + const pendingBalanceDeposit = ssz.electra.PendingBalanceDeposit.toViewDU({ + index, + amount: BigInt(excessBalance), + }); + state.pendingBalanceDeposits.push(pendingBalanceDeposit); + } +} + +export function queueEntireBalanceAndResetValidator(state: CachedBeaconStateElectra, index: ValidatorIndex): void { + const balance = state.balances.get(index); + state.balances.set(index, 0); + + const validator = state.validators.get(index); + validator.effectiveBalance = 0; + state.epochCtx.effectiveBalanceIncrementsSet(index, 0); + validator.activationEligibilityEpoch = FAR_FUTURE_EPOCH; + + const pendingBalanceDeposit = ssz.electra.PendingBalanceDeposit.toViewDU({ + index, + amount: BigInt(balance), + }); + state.pendingBalanceDeposits.push(pendingBalanceDeposit); +} diff --git a/packages/state-transition/src/util/epoch.ts b/packages/state-transition/src/util/epoch.ts index bb66fb04eb94..7fed5e53f1f3 100644 --- a/packages/state-transition/src/util/epoch.ts +++ b/packages/state-transition/src/util/epoch.ts @@ -1,5 +1,7 @@ import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD, GENESIS_EPOCH, MAX_SEED_LOOKAHEAD, SLOTS_PER_EPOCH} from "@lodestar/params"; -import {BeaconState, Epoch, Slot, SyncPeriod} from "@lodestar/types"; +import {BeaconState, Epoch, Slot, SyncPeriod, Gwei} from 
"@lodestar/types"; +import {CachedBeaconStateElectra} from "../types.js"; +import {getActivationExitChurnLimit, getConsolidationChurnLimit} from "./validator.js"; /** * Return the epoch number at the given slot. @@ -39,6 +41,60 @@ export function computeActivationExitEpoch(epoch: Epoch): Epoch { return epoch + 1 + MAX_SEED_LOOKAHEAD; } +export function computeExitEpochAndUpdateChurn(state: CachedBeaconStateElectra, exitBalance: Gwei): number { + let earliestExitEpoch = Math.max(state.earliestExitEpoch, computeActivationExitEpoch(state.epochCtx.epoch)); + const perEpochChurn = getActivationExitChurnLimit(state.epochCtx); + + // New epoch for exits. + let exitBalanceToConsume = + state.earliestExitEpoch < earliestExitEpoch ? perEpochChurn : Number(state.exitBalanceToConsume); + + // Exit doesn't fit in the current earliest epoch. + if (exitBalance > exitBalanceToConsume) { + const balanceToProcess = Number(exitBalance) - exitBalanceToConsume; + const additionalEpochs = Math.floor((balanceToProcess - 1) / perEpochChurn) + 1; + earliestExitEpoch += additionalEpochs; + exitBalanceToConsume += additionalEpochs * perEpochChurn; + } + + // Consume the balance and update state variables. + state.exitBalanceToConsume = BigInt(exitBalanceToConsume) - exitBalance; + state.earliestExitEpoch = earliestExitEpoch; + + return state.earliestExitEpoch; +} + +export function computeConsolidationEpochAndUpdateChurn( + state: CachedBeaconStateElectra, + consolidationBalance: Gwei +): number { + let earliestConsolidationEpoch = Math.max( + state.earliestConsolidationEpoch, + computeActivationExitEpoch(state.epochCtx.epoch) + ); + const perEpochConsolidationChurn = getConsolidationChurnLimit(state.epochCtx); + + // New epoch for consolidations + let consolidationBalanceToConsume = + state.earliestConsolidationEpoch < earliestConsolidationEpoch + ? perEpochConsolidationChurn + : Number(state.consolidationBalanceToConsume); + + // Consolidation doesn't fit in the current earliest epoch. 
+ if (consolidationBalance > consolidationBalanceToConsume) { + const balanceToProcess = Number(consolidationBalance) - consolidationBalanceToConsume; + const additionalEpochs = Math.floor((balanceToProcess - 1) / perEpochConsolidationChurn) + 1; + earliestConsolidationEpoch += additionalEpochs; + consolidationBalanceToConsume += additionalEpochs * perEpochConsolidationChurn; + } + + // Consume the balance and update state variables. + state.consolidationBalanceToConsume = BigInt(consolidationBalanceToConsume) - consolidationBalance; + state.earliestConsolidationEpoch = earliestConsolidationEpoch; + + return state.earliestConsolidationEpoch; +} + /** * Return the current epoch of the given state. */ @@ -70,3 +126,10 @@ export function computeSyncPeriodAtSlot(slot: Slot): SyncPeriod { export function computeSyncPeriodAtEpoch(epoch: Epoch): SyncPeriod { return Math.floor(epoch / EPOCHS_PER_SYNC_COMMITTEE_PERIOD); } + +/** + * Determine if the given slot is start slot of an epoch + */ +export function isStartSlotOfEpoch(slot: Slot): boolean { + return slot % SLOTS_PER_EPOCH === 0; +} diff --git a/packages/state-transition/src/util/epochShuffling.ts b/packages/state-transition/src/util/epochShuffling.ts index e101da38f297..c26c62fd2079 100644 --- a/packages/state-transition/src/util/epochShuffling.ts +++ b/packages/state-transition/src/util/epochShuffling.ts @@ -1,17 +1,60 @@ -import {toHexString} from "@chainsafe/ssz"; -import {Epoch, RootHex, ValidatorIndex} from "@lodestar/types"; -import {intDiv} from "@lodestar/utils"; +import {Epoch, RootHex, ssz, ValidatorIndex} from "@lodestar/types"; +import {GaugeExtra, intDiv, Logger, NoLabels, toRootHex} from "@lodestar/utils"; import { DOMAIN_BEACON_ATTESTER, + GENESIS_SLOT, MAX_COMMITTEES_PER_SLOT, SLOTS_PER_EPOCH, TARGET_COMMITTEE_SIZE, } from "@lodestar/params"; +import {BeaconConfig} from "@lodestar/config"; import {BeaconStateAllForks} from "../types.js"; import {getSeed} from "./seed.js"; import {unshuffleList} from 
"./shuffle.js"; import {computeStartSlotAtEpoch} from "./epoch.js"; import {getBlockRootAtSlot} from "./blockRoot.js"; +import {computeAnchorCheckpoint} from "./computeAnchorCheckpoint.js"; + +export interface ShufflingBuildProps { + state: BeaconStateAllForks; + activeIndices: Uint32Array; +} + +export interface PublicShufflingCacheMetrics { + shufflingCache: { + nextShufflingNotOnEpochCache: GaugeExtra; + }; +} +export interface IShufflingCache { + metrics: PublicShufflingCacheMetrics | null; + logger: Logger | null; + /** + * Gets a cached shuffling via the epoch and decision root. If the state and + * activeIndices are passed and a shuffling is not available it will be built + * synchronously. If the state is not passed and the shuffling is not available + * nothing will be returned. + * + * NOTE: If a shuffling is already queued and not calculated it will build and resolve + * the promise but the already queued build will happen at some later time + */ + getSync( + epoch: Epoch, + decisionRoot: RootHex, + buildProps?: T + ): T extends ShufflingBuildProps ? EpochShuffling : EpochShuffling | null; + + /** + * Gets a cached shuffling via the epoch and decision root. Returns a promise + * for the shuffling if it hs not calculated yet. Returns null if a build has + * not been queued nor a shuffling was calculated. + */ + get(epoch: Epoch, decisionRoot: RootHex): Promise; + + /** + * Queue asynchronous build for an EpochShuffling + */ + build(epoch: Epoch, decisionRoot: RootHex, state: BeaconStateAllForks, activeIndices: Uint32Array): void; +} /** * Readonly interface for EpochShuffling. 
@@ -61,21 +104,13 @@ export function computeCommitteeCount(activeValidatorCount: number): number { export function computeEpochShuffling( state: BeaconStateAllForks, - activeIndices: ArrayLike, - activeValidatorCount: number, + activeIndices: Uint32Array, epoch: Epoch ): EpochShuffling { - const seed = getSeed(state, epoch, DOMAIN_BEACON_ATTESTER); + const activeValidatorCount = activeIndices.length; - if (activeValidatorCount > activeIndices.length) { - throw new Error(`Invalid activeValidatorCount: ${activeValidatorCount} > ${activeIndices.length}`); - } - // only the first `activeValidatorCount` elements are copied to `activeIndices` - const _activeIndices = new Uint32Array(activeValidatorCount); - for (let i = 0; i < activeValidatorCount; i++) { - _activeIndices[i] = activeIndices[i]; - } - const shuffling = _activeIndices.slice(); + const shuffling = activeIndices.slice(); + const seed = getSeed(state, epoch, DOMAIN_BEACON_ATTESTER); unshuffleList(shuffling, seed); const committeesPerSlot = computeCommitteeCount(activeValidatorCount); @@ -99,14 +134,29 @@ export function computeEpochShuffling( return { epoch, - activeIndices: _activeIndices, + activeIndices, shuffling, committees, committeesPerSlot, }; } -export function getShufflingDecisionBlock(state: BeaconStateAllForks, epoch: Epoch): RootHex { +function calculateDecisionRoot(state: BeaconStateAllForks, epoch: Epoch): RootHex { const pivotSlot = computeStartSlotAtEpoch(epoch - 1) - 1; - return toHexString(getBlockRootAtSlot(state, pivotSlot)); + return toRootHex(getBlockRootAtSlot(state, pivotSlot)); +} + +/** + * Get the shuffling decision block root for the given epoch of given state + * - Special case close to genesis block, return the genesis block root + * - This is similar to forkchoice.getDependentRoot() function, otherwise we cannot get cached shuffing in attestation verification when syncing from genesis. 
+ */ +export function calculateShufflingDecisionRoot( + config: BeaconConfig, + state: BeaconStateAllForks, + epoch: Epoch +): RootHex { + return state.slot > GENESIS_SLOT + ? calculateDecisionRoot(state, epoch) + : toRootHex(ssz.phase0.BeaconBlockHeader.hashTreeRoot(computeAnchorCheckpoint(config, state).blockHeader)); } diff --git a/packages/state-transition/src/util/execution.ts b/packages/state-transition/src/util/execution.ts index 1c5046354fcb..b9243ebe7874 100644 --- a/packages/state-transition/src/util/execution.ts +++ b/packages/state-transition/src/util/execution.ts @@ -170,5 +170,7 @@ export function executionPayloadToPayloadHeader(fork: ForkSeq, payload: Executio ).excessBlobGas; } + // No change in Electra + return bellatrixPayloadFields; } diff --git a/packages/state-transition/src/util/genesis.ts b/packages/state-transition/src/util/genesis.ts index 1041c33d0eb3..54507d0ef235 100644 --- a/packages/state-transition/src/util/genesis.ts +++ b/packages/state-transition/src/util/genesis.ts @@ -4,18 +4,21 @@ import { EFFECTIVE_BALANCE_INCREMENT, EPOCHS_PER_HISTORICAL_VECTOR, ForkName, + ForkSeq, GENESIS_EPOCH, GENESIS_SLOT, MAX_EFFECTIVE_BALANCE, + UNSET_DEPOSIT_REQUESTS_START_INDEX, } from "@lodestar/params"; import {Bytes32, phase0, Root, ssz, TimeSeconds} from "@lodestar/types"; -import {CachedBeaconStateAllForks, BeaconStateAllForks} from "../types.js"; +import {CachedBeaconStateAllForks, BeaconStateAllForks, CachedBeaconStateElectra} from "../types.js"; import {createCachedBeaconState} from "../cache/stateCache.js"; import {EpochCacheImmutableData} from "../cache/epochCache.js"; import {processDeposit} from "../block/processDeposit.js"; +import {increaseBalance} from "../index.js"; import {computeEpochAtSlot} from "./epoch.js"; -import {getActiveValidatorIndices} from "./validator.js"; +import {getActiveValidatorIndices, getMaxEffectiveBalance} from "./validator.js"; import {getTemporaryBlockHeader} from "./blockRoot.js"; import {newFilledArray} from 
"./array.js"; import {getNextSyncCommittee} from "./syncCommittee.js"; @@ -131,6 +134,7 @@ export function applyDeposits( newDeposits: phase0.Deposit[], fullDepositDataRootList?: DepositDataRootViewDU ): {activatedValidatorCount: number} { + const fork = config.getForkSeq(state.slot); const depositDataRootList: Root[] = []; const fullDepositDataRootArr = fullDepositDataRootList ? fullDepositDataRootList.getAllReadonlyValues() : null; @@ -163,6 +167,16 @@ export function applyDeposits( processDeposit(fork, state, deposit); } + // Process deposit balance updates + if (fork >= ForkSeq.electra) { + const stateElectra = state as CachedBeaconStateElectra; + stateElectra.commit(); + for (const {index: validatorIndex, amount} of stateElectra.pendingBalanceDeposits.getAllReadonly()) { + increaseBalance(state, validatorIndex, Number(amount)); + } + stateElectra.pendingBalanceDeposits = ssz.electra.PendingBalanceDeposits.defaultViewDU(); + } + // Process activations const {epochCtx} = state; const balancesArr = state.balances.getAll(); @@ -179,12 +193,15 @@ export function applyDeposits( } const balance = balancesArr[i]; - const effectiveBalance = Math.min(balance - (balance % EFFECTIVE_BALANCE_INCREMENT), MAX_EFFECTIVE_BALANCE); + const effectiveBalance = Math.min( + balance - (balance % EFFECTIVE_BALANCE_INCREMENT), + getMaxEffectiveBalance(validator.withdrawalCredentials) + ); validator.effectiveBalance = effectiveBalance; epochCtx.effectiveBalanceIncrementsSet(i, effectiveBalance); - if (validator.effectiveBalance === MAX_EFFECTIVE_BALANCE) { + if (validator.effectiveBalance >= MAX_EFFECTIVE_BALANCE) { validator.activationEligibilityEpoch = GENESIS_EPOCH; validator.activationEpoch = GENESIS_EPOCH; activatedValidatorCount++; @@ -214,6 +231,7 @@ export function initializeBeaconStateFromEth1( | typeof ssz.bellatrix.ExecutionPayloadHeader | typeof ssz.capella.ExecutionPayloadHeader | typeof ssz.deneb.ExecutionPayloadHeader + | typeof ssz.electra.ExecutionPayloadHeader > ): 
CachedBeaconStateAllForks { const stateView = getGenesisBeaconState( @@ -224,6 +242,8 @@ export function initializeBeaconStateFromEth1( getTemporaryBlockHeader(config, config.getForkTypes(GENESIS_SLOT).BeaconBlock.defaultValue()) ); + const fork = config.getForkSeq(GENESIS_SLOT); + // We need a CachedBeaconState to run processDeposit() which uses various caches. // However at this point the state's syncCommittees are not known. // This function can be called by: @@ -244,8 +264,9 @@ export function initializeBeaconStateFromEth1( state.commit(); const activeValidatorIndices = getActiveValidatorIndices(state, computeEpochAtSlot(GENESIS_SLOT)); - if (GENESIS_SLOT >= config.ALTAIR_FORK_EPOCH) { + if (fork >= ForkSeq.altair) { const {syncCommittee} = getNextSyncCommittee( + fork, state, activeValidatorIndices, state.epochCtx.effectiveBalanceIncrements @@ -257,7 +278,7 @@ export function initializeBeaconStateFromEth1( stateAltair.nextSyncCommittee = ssz.altair.SyncCommittee.toViewDU(syncCommittee); } - if (GENESIS_SLOT >= config.BELLATRIX_FORK_EPOCH) { + if (fork >= ForkSeq.bellatrix) { const stateBellatrix = state as CompositeViewDU; stateBellatrix.fork.previousVersion = config.BELLATRIX_FORK_VERSION; stateBellatrix.fork.currentVersion = config.BELLATRIX_FORK_VERSION; @@ -266,7 +287,7 @@ export function initializeBeaconStateFromEth1( ssz.bellatrix.ExecutionPayloadHeader.defaultViewDU(); } - if (GENESIS_SLOT >= config.CAPELLA_FORK_EPOCH) { + if (fork >= ForkSeq.capella) { const stateCapella = state as CompositeViewDU; stateCapella.fork.previousVersion = config.CAPELLA_FORK_VERSION; stateCapella.fork.currentVersion = config.CAPELLA_FORK_VERSION; @@ -275,7 +296,7 @@ export function initializeBeaconStateFromEth1( ssz.capella.ExecutionPayloadHeader.defaultViewDU(); } - if (GENESIS_SLOT >= config.DENEB_FORK_EPOCH) { + if (fork >= ForkSeq.deneb) { const stateDeneb = state as CompositeViewDU; stateDeneb.fork.previousVersion = config.DENEB_FORK_VERSION; 
stateDeneb.fork.currentVersion = config.DENEB_FORK_VERSION; @@ -284,6 +305,16 @@ export function initializeBeaconStateFromEth1( ssz.deneb.ExecutionPayloadHeader.defaultViewDU(); } + if (fork >= ForkSeq.electra) { + const stateElectra = state as CompositeViewDU; + stateElectra.fork.previousVersion = config.ELECTRA_FORK_VERSION; + stateElectra.fork.currentVersion = config.ELECTRA_FORK_VERSION; + stateElectra.latestExecutionPayloadHeader = + (executionPayloadHeader as CompositeViewDU) ?? + ssz.electra.ExecutionPayloadHeader.defaultViewDU(); + stateElectra.depositRequestsStartIndex = UNSET_DEPOSIT_REQUESTS_START_INDEX; + } + state.commit(); return state; diff --git a/packages/state-transition/src/util/index.ts b/packages/state-transition/src/util/index.ts index 3f2e91da9a77..5f8d9e5cdcfc 100644 --- a/packages/state-transition/src/util/index.ts +++ b/packages/state-transition/src/util/index.ts @@ -4,7 +4,9 @@ export * from "./attestation.js"; export * from "./attesterStatus.js"; export * from "./balance.js"; export * from "./blindedBlock.js"; +export * from "./calculateCommitteeAssignments.js"; export * from "./capella.js"; +export * from "./computeAnchorCheckpoint.js"; export * from "./execution.js"; export * from "./blockRoot.js"; export * from "./domain.js"; @@ -23,3 +25,6 @@ export * from "./slot.js"; export * from "./syncCommittee.js"; export * from "./validator.js"; export * from "./weakSubjectivity.js"; +export * from "./deposit.js"; +export * from "./electra.js"; +export * from "./loadState/index.js"; diff --git a/packages/state-transition/src/util/loadState/index.ts b/packages/state-transition/src/util/loadState/index.ts index 706de3c11540..78ffe7877c09 100644 --- a/packages/state-transition/src/util/loadState/index.ts +++ b/packages/state-transition/src/util/loadState/index.ts @@ -1 +1 @@ -export {loadState} from "./loadState.js"; +export {loadState, loadStateAndValidators} from "./loadState.js"; diff --git 
a/packages/state-transition/src/util/loadState/loadState.ts b/packages/state-transition/src/util/loadState/loadState.ts index dc9f8fe4fcab..6e3e9c6719fa 100644 --- a/packages/state-transition/src/util/loadState/loadState.ts +++ b/packages/state-transition/src/util/loadState/loadState.ts @@ -66,6 +66,25 @@ export function loadState( return {state: migratedState, modifiedValidators}; } +/** + * Load state and validators Uint8Array from state bytes. + */ +export function loadStateAndValidators( + chainForkConfig: ChainForkConfig, + stateBytes: Uint8Array +): {state: BeaconStateAllForks; validatorsBytes: Uint8Array} { + // stateType could be any types, casting just to make typescript happy + const stateType = getStateTypeFromBytes(chainForkConfig, stateBytes) as typeof ssz.phase0.BeaconState; + const state = stateType.deserializeToViewDU(stateBytes); + const dataView = new DataView(stateBytes.buffer, stateBytes.byteOffset, stateBytes.byteLength); + const fieldRanges = stateType.getFieldRanges(dataView, 0, stateBytes.length); + const allFields = Object.keys(stateType.fields); + const validatorFieldIndex = allFields.indexOf("validators"); + const validatorRange = fieldRanges[validatorFieldIndex]; + const validatorsBytes = stateBytes.subarray(validatorRange.start, validatorRange.end); + return {state, validatorsBytes}; +} + /** * This value is rarely changed as monitored 3 month state diffs on mainnet as of Sep 2023. 
* Reusing this data helps save hashTreeRoot time of state ~500ms diff --git a/packages/state-transition/src/util/seed.ts b/packages/state-transition/src/util/seed.ts index cf48fda8bec4..a5a0028d6c17 100644 --- a/packages/state-transition/src/util/seed.ts +++ b/packages/state-transition/src/util/seed.ts @@ -5,7 +5,9 @@ import { DOMAIN_SYNC_COMMITTEE, EFFECTIVE_BALANCE_INCREMENT, EPOCHS_PER_HISTORICAL_VECTOR, + ForkSeq, MAX_EFFECTIVE_BALANCE, + MAX_EFFECTIVE_BALANCE_ELECTRA, MIN_SEED_LOOKAHEAD, SHUFFLE_ROUND_COUNT, SLOTS_PER_EPOCH, @@ -20,6 +22,7 @@ import {computeEpochAtSlot} from "./epoch.js"; * Compute proposer indices for an epoch */ export function computeProposers( + fork: ForkSeq, epochSeed: Uint8Array, shuffling: {epoch: Epoch; activeIndices: ArrayLike}, effectiveBalanceIncrements: EffectiveBalanceIncrements @@ -29,6 +32,7 @@ export function computeProposers( for (let slot = startSlot; slot < startSlot + SLOTS_PER_EPOCH; slot++) { proposers.push( computeProposerIndex( + fork, effectiveBalanceIncrements, shuffling.activeIndices, digest(Buffer.concat([epochSeed, intToBytes(slot, 8)])) @@ -44,6 +48,7 @@ export function computeProposers( * SLOW CODE - 🐢 */ export function computeProposerIndex( + fork: ForkSeq, effectiveBalanceIncrements: EffectiveBalanceIncrements, indices: ArrayLike, seed: Uint8Array @@ -54,7 +59,10 @@ export function computeProposerIndex( // TODO: Inline outside this function const MAX_RANDOM_BYTE = 2 ** 8 - 1; - const MAX_EFFECTIVE_BALANCE_INCREMENT = MAX_EFFECTIVE_BALANCE / EFFECTIVE_BALANCE_INCREMENT; + const MAX_EFFECTIVE_BALANCE_INCREMENT = + fork >= ForkSeq.electra + ? 
MAX_EFFECTIVE_BALANCE_ELECTRA / EFFECTIVE_BALANCE_INCREMENT + : MAX_EFFECTIVE_BALANCE / EFFECTIVE_BALANCE_INCREMENT; let i = 0; /* eslint-disable-next-line no-constant-condition */ @@ -73,9 +81,6 @@ export function computeProposerIndex( return candidateIndex; } i += 1; - if (i === indices.length) { - return -1; - } } } @@ -90,13 +95,17 @@ export function computeProposerIndex( * SLOW CODE - 🐢 */ export function getNextSyncCommitteeIndices( + fork: ForkSeq, state: BeaconStateAllForks, activeValidatorIndices: ArrayLike, effectiveBalanceIncrements: EffectiveBalanceIncrements ): ValidatorIndex[] { // TODO: Bechmark if it's necessary to inline outside of this function const MAX_RANDOM_BYTE = 2 ** 8 - 1; - const MAX_EFFECTIVE_BALANCE_INCREMENT = MAX_EFFECTIVE_BALANCE / EFFECTIVE_BALANCE_INCREMENT; + const MAX_EFFECTIVE_BALANCE_INCREMENT = + fork >= ForkSeq.electra + ? MAX_EFFECTIVE_BALANCE_ELECTRA / EFFECTIVE_BALANCE_INCREMENT + : MAX_EFFECTIVE_BALANCE / EFFECTIVE_BALANCE_INCREMENT; const epoch = computeEpochAtSlot(state.slot) + 1; diff --git a/packages/state-transition/src/util/syncCommittee.ts b/packages/state-transition/src/util/syncCommittee.ts index b6de821d5406..c1f53632e521 100644 --- a/packages/state-transition/src/util/syncCommittee.ts +++ b/packages/state-transition/src/util/syncCommittee.ts @@ -2,6 +2,7 @@ import {aggregateSerializedPublicKeys} from "@chainsafe/blst"; import { BASE_REWARD_FACTOR, EFFECTIVE_BALANCE_INCREMENT, + ForkSeq, SLOTS_PER_EPOCH, SYNC_COMMITTEE_SIZE, SYNC_REWARD_WEIGHT, @@ -19,11 +20,12 @@ import {getNextSyncCommitteeIndices} from "./seed.js"; * SLOW CODE - 🐢 */ export function getNextSyncCommittee( + fork: ForkSeq, state: BeaconStateAllForks, activeValidatorIndices: ArrayLike, effectiveBalanceIncrements: EffectiveBalanceIncrements ): {indices: ValidatorIndex[]; syncCommittee: altair.SyncCommittee} { - const indices = getNextSyncCommitteeIndices(state, activeValidatorIndices, effectiveBalanceIncrements); + const indices = 
getNextSyncCommitteeIndices(fork, state, activeValidatorIndices, effectiveBalanceIncrements); // Using the index2pubkey cache is slower because it needs the serialized pubkey. const pubkeys = indices.map((index) => state.validators.getReadonly(index).pubkey); diff --git a/packages/state-transition/src/util/validator.ts b/packages/state-transition/src/util/validator.ts index 99f1e6fa0b19..ebad21d9d25c 100644 --- a/packages/state-transition/src/util/validator.ts +++ b/packages/state-transition/src/util/validator.ts @@ -1,8 +1,14 @@ import {Epoch, phase0, ValidatorIndex} from "@lodestar/types"; import {intDiv} from "@lodestar/utils"; import {ChainForkConfig} from "@lodestar/config"; -import {ForkSeq} from "@lodestar/params"; -import {BeaconStateAllForks} from "../types.js"; +import { + EFFECTIVE_BALANCE_INCREMENT, + ForkSeq, + MAX_EFFECTIVE_BALANCE_ELECTRA, + MIN_ACTIVATION_BALANCE, +} from "@lodestar/params"; +import {BeaconStateAllForks, CachedBeaconStateElectra, EpochCache} from "../types.js"; +import {hasCompoundingWithdrawalCredential} from "./electra.js"; /** * Check if [[validator]] is active @@ -47,3 +53,48 @@ export function getActivationChurnLimit(config: ChainForkConfig, fork: ForkSeq, export function getChurnLimit(config: ChainForkConfig, activeValidatorCount: number): number { return Math.max(config.MIN_PER_EPOCH_CHURN_LIMIT, intDiv(activeValidatorCount, config.CHURN_LIMIT_QUOTIENT)); } + +/** + * Get combined churn limit of activation-exit and consolidation + */ +export function getBalanceChurnLimit(epochCtx: EpochCache): number { + const churnLimitByTotalActiveBalance = Math.floor( + (epochCtx.totalActiveBalanceIncrements / epochCtx.config.CHURN_LIMIT_QUOTIENT) * EFFECTIVE_BALANCE_INCREMENT + ); // TODO Electra: verify calculation + + const churn = Math.max(churnLimitByTotalActiveBalance, epochCtx.config.MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA); + + return churn - (churn % EFFECTIVE_BALANCE_INCREMENT); +} + +export function 
getActivationExitChurnLimit(epochCtx: EpochCache): number { + return Math.min(epochCtx.config.MAX_PER_EPOCH_ACTIVATION_EXIT_CHURN_LIMIT, getBalanceChurnLimit(epochCtx)); +} + +export function getConsolidationChurnLimit(epochCtx: EpochCache): number { + return getBalanceChurnLimit(epochCtx) - getActivationExitChurnLimit(epochCtx); +} + +export function getMaxEffectiveBalance(withdrawalCredentials: Uint8Array): number { + // Compounding withdrawal credential only available since Electra + if (hasCompoundingWithdrawalCredential(withdrawalCredentials)) { + return MAX_EFFECTIVE_BALANCE_ELECTRA; + } else { + return MIN_ACTIVATION_BALANCE; + } +} + +export function getActiveBalance(state: CachedBeaconStateElectra, validatorIndex: ValidatorIndex): number { + const validatorMaxEffectiveBalance = getMaxEffectiveBalance( + state.validators.getReadonly(validatorIndex).withdrawalCredentials + ); + + return Math.min(state.balances.get(validatorIndex), validatorMaxEffectiveBalance); +} + +export function getPendingBalanceToWithdraw(state: CachedBeaconStateElectra, validatorIndex: ValidatorIndex): number { + return state.pendingPartialWithdrawals + .getAllReadonly() + .filter((item) => item.index === validatorIndex) + .reduce((total, item) => total + Number(item.amount), 0); +} diff --git a/packages/state-transition/src/util/weakSubjectivity.ts b/packages/state-transition/src/util/weakSubjectivity.ts index 6bd6636c3e70..0e606e66340f 100644 --- a/packages/state-transition/src/util/weakSubjectivity.ts +++ b/packages/state-transition/src/util/weakSubjectivity.ts @@ -1,9 +1,9 @@ -import {toHexString} from "@chainsafe/ssz"; import {BeaconConfig, ChainForkConfig} from "@lodestar/config"; import {EFFECTIVE_BALANCE_INCREMENT, MAX_DEPOSITS, MAX_EFFECTIVE_BALANCE, SLOTS_PER_EPOCH} from "@lodestar/params"; import {Epoch, Root} from "@lodestar/types"; import {ssz} from "@lodestar/types"; import {Checkpoint} from "@lodestar/types/phase0"; +import {toRootHex} from "@lodestar/utils"; import 
{ZERO_HASH} from "../constants/constants.js"; import {BeaconStateAllForks, CachedBeaconStateAllForks} from "../types.js"; import {computeEpochAtSlot, getCurrentEpoch, computeCheckpointEpochAtStateSlot} from "./epoch.js"; @@ -127,9 +127,7 @@ export function ensureWithinWeakSubjectivityPeriod( const wsStateEpoch = computeCheckpointEpochAtStateSlot(wsState.slot); const blockRoot = getLatestBlockRoot(wsState); if (!ssz.Root.equals(blockRoot, wsCheckpoint.root)) { - throw new Error( - `Roots do not match. expected=${toHexString(wsCheckpoint.root)}, actual=${toHexString(blockRoot)}` - ); + throw new Error(`Roots do not match. expected=${toRootHex(wsCheckpoint.root)}, actual=${toRootHex(blockRoot)}`); } if (!ssz.Epoch.equals(wsStateEpoch, wsCheckpoint.epoch)) { throw new Error(`Epochs do not match. expected=${wsCheckpoint.epoch}, actual=${wsStateEpoch}`); diff --git a/packages/state-transition/test/memory/effectiveBalanceIncrements.ts b/packages/state-transition/test/memory/effectiveBalanceIncrements.ts deleted file mode 100644 index f1c603b85657..000000000000 --- a/packages/state-transition/test/memory/effectiveBalanceIncrements.ts +++ /dev/null @@ -1,62 +0,0 @@ -import {MutableVector} from "@chainsafe/persistent-ts"; -import {testRunnerMemory} from "@lodestar/beacon-node/test/memory/testRunnerMemory"; -import {newZeroedArray} from "../../src/index.js"; - -// Results in Linux Feb 2022 -// -// EffectiveBalanceIncrements Uint8Array 300000 - 299873.5 bytes / instance -// EffectiveBalanceIncrements array 300000 - 2400093.1 bytes / instance -// EffectiveBalanceIncrements MutableVector 300000 - 4380557.0 bytes / instance -// EffectiveBalanceIncrements MutableVector 300000 cloned 10 - 4399575.0 bytes / instance -// -// With MutableVector, break even at 14 instances of Uint8Array -// 4380557 / 299873 = 14 - -const vc = 300_000; -const cloneTimes = 10; - -testRunnerMemoryBpi([ - { - id: `EffectiveBalanceIncrements Uint8Array ${vc}`, - getInstance: () => new Uint8Array(vc), - }, - 
{ - id: `EffectiveBalanceIncrements array ${vc}`, - getInstance: () => newZeroedArray(vc), - }, - { - id: `EffectiveBalanceIncrements MutableVector ${vc}`, - getInstance: () => MutableVector.from(newZeroedArray(vc)), - }, - { - id: `EffectiveBalanceIncrements MutableVector ${vc} cloned ${cloneTimes}`, - getInstance: () => { - const mv = MutableVector.from(newZeroedArray(vc)); - const mvs = [mv]; - for (let i = 0; i < cloneTimes; i++) { - const mvc = mv.clone(); - mvc.push(0); - mvs.push(mvc); - } - return mvs; - }, - }, -]); - -/** - * Test bytes per instance in different representations of raw binary data - */ -function testRunnerMemoryBpi(testCases: {getInstance: (bytes: number) => unknown; id: string}[]): void { - const longestId = Math.max(...testCases.map(({id}) => id.length)); - - for (const {id, getInstance} of testCases) { - const bpi = testRunnerMemory({ - getInstance, - convergeFactor: 1 / 100, - sampleEvery: 5, - }); - - // eslint-disable-next-line no-console - console.log(`${id.padEnd(longestId)} - ${bpi.toFixed(1)} bytes / instance`); - } -} diff --git a/packages/state-transition/test/perf/analyzeEpochs.ts b/packages/state-transition/test/perf/analyzeEpochs.ts index 6f61bc81abbc..deb0861427bf 100644 --- a/packages/state-transition/test/perf/analyzeEpochs.ts +++ b/packages/state-transition/test/perf/analyzeEpochs.ts @@ -152,6 +152,9 @@ async function analyzeEpochs(network: NetworkName, fromEpoch?: number): Promise< // processRegistryUpdates: function of registry updates // processSlashingsAllForks: function of process.indicesToSlash // processSlashingsReset: free + // -- electra + // processPendingBalanceDeposits: - + // processPendingConsolidations: - // -- altair // processInactivityUpdates: - // processParticipationFlagUpdates: - diff --git a/packages/state-transition/test/perf/block/processWithdrawals.test.ts b/packages/state-transition/test/perf/block/processWithdrawals.test.ts index 997f401d32ce..66d624b39bfd 100644 --- 
a/packages/state-transition/test/perf/block/processWithdrawals.test.ts +++ b/packages/state-transition/test/perf/block/processWithdrawals.test.ts @@ -1,4 +1,5 @@ import {itBench} from "@dapplion/benchmark"; +import {ForkSeq} from "@lodestar/params"; import {CachedBeaconStateCapella} from "../../../src/index.js"; import {getExpectedWithdrawals} from "../../../src/block/processWithdrawals.js"; import {numValidators} from "../util.js"; @@ -9,7 +10,7 @@ import {getExpectedWithdrawalsTestData, WithdrawalOpts} from "../../utils/capell // having BLS withdrawal credential prefix as that validator probe is wasted. // // Best case: -// All Validator have balances > MAX_EFFECTIVE_BALANCE and ETH1 withdrawal credential prefix set +// All Validator have balances > MAX_EFFECTIVE_BALANCE and ETH1 withdrawal credential prefix set // TODO Electra: Not true anymore // // Worst case: // All balances are low enough or withdrawal credential not set @@ -69,7 +70,7 @@ describe("getExpectedWithdrawals", () => { return opts.cache ? 
state : state.clone(true); }, fn: (state) => { - const {sampledValidators} = getExpectedWithdrawals(state); + const {sampledValidators} = getExpectedWithdrawals(ForkSeq.capella, state); // TODO Electra: Do test for electra if (sampledValidators !== opts.sampled) { throw Error(`Wrong sampledValidators ${sampledValidators} != ${opts.sampled}`); } diff --git a/packages/state-transition/test/perf/dataStructures/arrayish.memory.ts b/packages/state-transition/test/perf/dataStructures/arrayish.memory.ts index 1f4141912627..7e10f447181f 100644 --- a/packages/state-transition/test/perf/dataStructures/arrayish.memory.ts +++ b/packages/state-transition/test/perf/dataStructures/arrayish.memory.ts @@ -1,5 +1,3 @@ -import {MutableVector} from "@chainsafe/persistent-ts"; - const refs: any[] = []; const xs: number[] = []; const arrayBuffersArr: number[] = []; @@ -23,7 +21,6 @@ const size = 100; const testType = TestType.Set; let arrayNumGlobal: number[] | null = null; -let mutableVectorGlobal: MutableVector | null = null; for (let i = 0; i < 1e8; i++) { switch (testType as TestType) { @@ -65,49 +62,6 @@ for (let i = 0; i < 1e8; i++) { break; } - // size | 100 | 1000 | 10000 | - // ---- | ------ | ------ | ------ | - // rssM | 1817.4 | 15518. | 154335 | - case TestType.MutableVector: { - const items = createArray(size); - const mutableVector = MutableVector.from(items); - refs.push(mutableVector); - break; - } - - // size | 100 | 1000 | - // ---- | ------ | ------ | - // rssM | 58.68 | 55.89 | - case TestType.MutableVectorClone: { - if (!mutableVectorGlobal) { - const items = createArray(size); - mutableVectorGlobal = MutableVector.from(items); - } - refs.push(mutableVectorGlobal.clone()); - break; - } - - // Grid of size / changes, all values = rssM in bytes - // | 100 | 1000 | 10000 | - // ----- | ------ | ------ | ------ | - // 1 | 793.45 | 801.53 | 1137.9 | - // 10 | 803.98 | 802.36 | 1144.9 | - // 100 | 1573.2 | 1826.4 | 2172.0 | - // 1000 | - | 11250. | 11886. 
| - // 10000 | - | - | 111365 | - case TestType.MutableVectorCloneAndMutate: { - if (!mutableVectorGlobal) { - const items = createArray(size); - mutableVectorGlobal = MutableVector.from(items); - } - const newArr = mutableVectorGlobal.clone(); - for (let j = 0; j < 10000; j++) { - newArr.set(j, i); - } - refs.push(newArr); - break; - } - // size | 100 | 1000 | // ---- | ------ | ------ | // rssM | 2646.8 | 20855. | @@ -161,14 +115,6 @@ for (let i = 0; i < 1e8; i++) { } } -function createArray(n: number): number[] { - const items: number[] = []; - for (let i = 0; i < n; i++) { - items.push(i); - } - return items; -} - /** * From https://github.com/simple-statistics/simple-statistics/blob/d0d177baf74976a2421638bce98ab028c5afb537/src/linear_regression.js * diff --git a/packages/state-transition/test/perf/dataStructures/arrayish.test.ts b/packages/state-transition/test/perf/dataStructures/arrayish.test.ts index 59162b6eecca..5b6af0d989b6 100644 --- a/packages/state-transition/test/perf/dataStructures/arrayish.test.ts +++ b/packages/state-transition/test/perf/dataStructures/arrayish.test.ts @@ -1,6 +1,5 @@ import {itBench, setBenchOpts} from "@dapplion/benchmark"; import {LeafNode, toGindex, Tree, zeroNode} from "@chainsafe/persistent-merkle-tree"; -import {MutableVector} from "@chainsafe/persistent-ts"; // Understand the cost of each array-ish data structure to: // - Get one element @@ -99,48 +98,6 @@ describe("Tree (persistent-merkle-tree)", () => { } }); -describe("MutableVector", () => { - // Don't track regressions in CI - setBenchOpts({noThreshold: true}); - - let items: number[]; - let mutableVector: MutableVector; - - before(function () { - items = createArray(n); - mutableVector = MutableVector.from(items); - }); - - itBench(`MutableVector ${n} create`, () => { - MutableVector.from(items); - }); - - itBench({id: `MutableVector ${n} get(${ih})`, runsFactor}, () => { - for (let i = 0; i < runsFactor; i++) mutableVector.get(ih - i); - }); - - itBench({id: 
`MutableVector ${n} set(${ih})`, runsFactor}, () => { - for (let i = 0; i < runsFactor; i++) mutableVector.set(ih - i, 10000000); - }); - - itBench(`MutableVector ${n} toArray()`, () => { - mutableVector.toArray(); - }); - - itBench(`MutableVector ${n} iterate all - toArray() + loop`, () => { - const mvArr = mutableVector.toArray(); - for (let i = 0; i < n; i++) { - mvArr[i]; - } - }); - - itBench(`MutableVector ${n} iterate all - get(i)`, () => { - for (let i = 0; i < n; i++) { - mutableVector.get(i); - } - }); -}); - describe("Array", () => { // Don't track regressions in CI setBenchOpts({noThreshold: true}); diff --git a/packages/state-transition/test/perf/dataStructures/effectiveBalanceIncrements.test.ts b/packages/state-transition/test/perf/dataStructures/effectiveBalanceIncrements.test.ts deleted file mode 100644 index 13c2d982e86b..000000000000 --- a/packages/state-transition/test/perf/dataStructures/effectiveBalanceIncrements.test.ts +++ /dev/null @@ -1,32 +0,0 @@ -import {itBench, setBenchOpts} from "@dapplion/benchmark"; -import {MutableVector} from "@chainsafe/persistent-ts"; -import {newZeroedArray} from "../../../src/index.js"; - -describe("effectiveBalanceIncrements", () => { - setBenchOpts({noThreshold: true}); - - const vc = 300_000; - const uint8Array = new Uint8Array(vc); - const mv = MutableVector.from(newZeroedArray(vc)); - - itBench(`effectiveBalanceIncrements clone Uint8Array ${vc}`, () => { - uint8Array.slice(0); - }); - - itBench(`effectiveBalanceIncrements clone MutableVector ${vc}`, () => { - mv.clone(); - }); - - itBench(`effectiveBalanceIncrements rw all Uint8Array ${vc}`, () => { - for (let i = 0; i < vc; i++) { - uint8Array[i]++; - } - }); - - itBench(`effectiveBalanceIncrements rw all MutableVector ${vc}`, () => { - for (let i = 0; i < vc; i++) { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - mv.set(i, mv.get(i)! 
+ 1); - } - }); -}); diff --git a/packages/state-transition/test/perf/epoch/epochAltair.test.ts b/packages/state-transition/test/perf/epoch/epochAltair.test.ts index 273353d8632b..5a10fd4d8bbd 100644 --- a/packages/state-transition/test/perf/epoch/epochAltair.test.ts +++ b/packages/state-transition/test/perf/epoch/epochAltair.test.ts @@ -46,6 +46,7 @@ describe(`altair processEpoch - ${stateId}`, () => { fn: (state) => { const cache = beforeProcessEpoch(state); processEpoch(fork, state as CachedBeaconStateAltair, cache); + state.slot++; state.epochCtx.afterProcessEpoch(state, cache); // Simulate root computation through the next block to account for changes // 74184 hash64 ops - 92.730 ms @@ -120,7 +121,7 @@ function benchmarkAltairEpochSteps(stateOg: LazyValue itBench({ id: `${stateId} - altair processRegistryUpdates`, beforeEach: () => stateOg.value.clone(), - fn: (state) => processRegistryUpdates(state, cache.value), + fn: (state) => processRegistryUpdates(ForkSeq.altair, state, cache.value), }); // TODO: Needs a better state to test with, current does not include enough actions: 39.985 us/op @@ -141,7 +142,9 @@ function benchmarkAltairEpochSteps(stateOg: LazyValue itBench({ id: `${stateId} - altair processEffectiveBalanceUpdates`, beforeEach: () => stateOg.value.clone(), - fn: (state) => processEffectiveBalanceUpdates(state, cache.value), + fn: (state) => { + processEffectiveBalanceUpdates(ForkSeq.altair, state, cache.value); + }, }); itBench({ @@ -172,7 +175,7 @@ function benchmarkAltairEpochSteps(stateOg: LazyValue id: `${stateId} - altair processSyncCommitteeUpdates`, convergeFactor: 1 / 100, // Very unstable make it converge faster beforeEach: () => stateOg.value.clone() as CachedBeaconStateAltair, - fn: (state) => processSyncCommitteeUpdates(state), + fn: (state) => processSyncCommitteeUpdates(ForkSeq.altair, state), }); itBench({ @@ -185,6 +188,9 @@ function benchmarkAltairEpochSteps(stateOg: LazyValue return {state, cache: cacheAfter}; }, beforeEach: 
({state, cache}) => ({state: state.clone(), cache}), - fn: ({state, cache}) => state.epochCtx.afterProcessEpoch(state, cache), + fn: ({state, cache}) => { + state.slot++; + state.epochCtx.afterProcessEpoch(state, cache); + }, }); } diff --git a/packages/state-transition/test/perf/epoch/epochCapella.test.ts b/packages/state-transition/test/perf/epoch/epochCapella.test.ts index eeaf8bfc5400..a4daf308aaa0 100644 --- a/packages/state-transition/test/perf/epoch/epochCapella.test.ts +++ b/packages/state-transition/test/perf/epoch/epochCapella.test.ts @@ -46,6 +46,7 @@ describe(`capella processEpoch - ${stateId}`, () => { fn: (state) => { const cache = beforeProcessEpoch(state); processEpoch(fork, state as CachedBeaconStateCapella, cache); + state.slot++; state.epochCtx.afterProcessEpoch(state, cache); // Simulate root computation through the next block to account for changes // 74184 hash64 ops - 92.730 ms @@ -99,7 +100,7 @@ function benchmarkAltairEpochSteps(stateOg: LazyValue itBench({ id: `${stateId} - capella processRegistryUpdates`, beforeEach: () => stateOg.value.clone(), - fn: (state) => processRegistryUpdates(state, cache.value), + fn: (state) => processRegistryUpdates(ForkSeq.capella, state, cache.value), }); // TODO: Needs a better state to test with, current does not include enough actions: 39.985 us/op @@ -120,7 +121,9 @@ function benchmarkAltairEpochSteps(stateOg: LazyValue itBench({ id: `${stateId} - capella processEffectiveBalanceUpdates`, beforeEach: () => stateOg.value.clone(), - fn: (state) => processEffectiveBalanceUpdates(state, cache.value), + fn: (state) => { + processEffectiveBalanceUpdates(ForkSeq.capella, state, cache.value); + }, }); itBench({ @@ -157,6 +160,9 @@ function benchmarkAltairEpochSteps(stateOg: LazyValue return {state, cache: cacheAfter}; }, beforeEach: ({state, cache}) => ({state: state.clone(), cache}), - fn: ({state, cache}) => state.epochCtx.afterProcessEpoch(state, cache), + fn: ({state, cache}) => { + state.slot++; + 
state.epochCtx.afterProcessEpoch(state, cache); + }, }); } diff --git a/packages/state-transition/test/perf/epoch/epochPhase0.test.ts b/packages/state-transition/test/perf/epoch/epochPhase0.test.ts index 4e43634b1669..5c19b347af62 100644 --- a/packages/state-transition/test/perf/epoch/epochPhase0.test.ts +++ b/packages/state-transition/test/perf/epoch/epochPhase0.test.ts @@ -43,6 +43,7 @@ describe(`phase0 processEpoch - ${stateId}`, () => { fn: (state) => { const cache = beforeProcessEpoch(state); processEpoch(fork, state as CachedBeaconStatePhase0, cache); + state.slot++; state.epochCtx.afterProcessEpoch(state, cache); // Simulate root computation through the next block to account for changes state.hashTreeRoot(); @@ -102,7 +103,7 @@ function benchmarkPhase0EpochSteps(stateOg: LazyValue itBench({ id: `${stateId} - phase0 processRegistryUpdates`, beforeEach: () => stateOg.value.clone(), - fn: (state) => processRegistryUpdates(state, cache.value), + fn: (state) => processRegistryUpdates(ForkSeq.phase0, state, cache.value), }); // TODO: Needs a better state to test with, current does not include enough actions: 39.985 us/op @@ -123,7 +124,9 @@ function benchmarkPhase0EpochSteps(stateOg: LazyValue itBench({ id: `${stateId} - phase0 processEffectiveBalanceUpdates`, beforeEach: () => stateOg.value.clone(), - fn: (state) => processEffectiveBalanceUpdates(state, cache.value), + fn: (state) => { + processEffectiveBalanceUpdates(ForkSeq.phase0, state, cache.value); + }, }); itBench({ @@ -160,6 +163,9 @@ function benchmarkPhase0EpochSteps(stateOg: LazyValue return {state, cache: cacheAfter}; }, beforeEach: ({state, cache}) => ({state: state.clone(), cache}), - fn: ({state, cache}) => state.epochCtx.afterProcessEpoch(state, cache), + fn: ({state, cache}) => { + state.slot++; + state.epochCtx.afterProcessEpoch(state, cache); + }, }); } diff --git a/packages/state-transition/test/perf/epoch/processEffectiveBalanceUpdates.test.ts 
b/packages/state-transition/test/perf/epoch/processEffectiveBalanceUpdates.test.ts index 0fb1d448142f..19f18df86c2e 100644 --- a/packages/state-transition/test/perf/epoch/processEffectiveBalanceUpdates.test.ts +++ b/packages/state-transition/test/perf/epoch/processEffectiveBalanceUpdates.test.ts @@ -1,6 +1,7 @@ import {itBench} from "@dapplion/benchmark"; import {ssz} from "@lodestar/types"; import {config} from "@lodestar/config/default"; +import {ForkSeq} from "@lodestar/params"; import {beforeProcessEpoch, CachedBeaconStateAllForks, EpochTransitionCache} from "../../../src/index.js"; import {processEffectiveBalanceUpdates} from "../../../src/epoch/processEffectiveBalanceUpdates.js"; import {numValidators} from "../util.js"; @@ -35,7 +36,9 @@ describe("phase0 processEffectiveBalanceUpdates", () => { minRuns: 5, // Worst case is very slow before: () => getEffectiveBalanceTestData(vc, changeRatio), beforeEach: ({state, cache}) => ({state: state.clone(), cache}), - fn: ({state, cache}) => processEffectiveBalanceUpdates(state, cache), + fn: ({state, cache}) => { + processEffectiveBalanceUpdates(ForkSeq.phase0, state, cache); + }, }); } }); diff --git a/packages/state-transition/test/perf/epoch/processRegistryUpdates.test.ts b/packages/state-transition/test/perf/epoch/processRegistryUpdates.test.ts index ccfd2405a665..2d57de44f8ee 100644 --- a/packages/state-transition/test/perf/epoch/processRegistryUpdates.test.ts +++ b/packages/state-transition/test/perf/epoch/processRegistryUpdates.test.ts @@ -1,4 +1,5 @@ import {itBench} from "@dapplion/benchmark"; +import {ForkSeq} from "@lodestar/params"; import {beforeProcessEpoch, CachedBeaconStateAllForks, EpochTransitionCache} from "../../../src/index.js"; import {processRegistryUpdates} from "../../../src/epoch/processRegistryUpdates.js"; import {generatePerfTestCachedStatePhase0, numValidators} from "../util.js"; @@ -62,7 +63,7 @@ describe("phase0 processRegistryUpdates", () => { noThreshold: notTrack, before: () => 
getRegistryUpdatesTestData(vc, lengths), beforeEach: async ({state, cache}) => ({state: state.clone(), cache}), - fn: ({state, cache}) => processRegistryUpdates(state, cache), + fn: ({state, cache}) => processRegistryUpdates(ForkSeq.phase0, state, cache), }); } }); diff --git a/packages/state-transition/test/perf/epoch/processSyncCommitteeUpdates.test.ts b/packages/state-transition/test/perf/epoch/processSyncCommitteeUpdates.test.ts index ffde30e1302c..4497dc16be0c 100644 --- a/packages/state-transition/test/perf/epoch/processSyncCommitteeUpdates.test.ts +++ b/packages/state-transition/test/perf/epoch/processSyncCommitteeUpdates.test.ts @@ -1,5 +1,5 @@ import {itBench} from "@dapplion/benchmark"; -import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD} from "@lodestar/params"; +import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD, ForkSeq} from "@lodestar/params"; import {processSyncCommitteeUpdates} from "../../../src/epoch/processSyncCommitteeUpdates.js"; import {StateAltair} from "../types.js"; import {generatePerfTestCachedStateAltair, numValidators} from "../util.js"; @@ -21,7 +21,7 @@ describe("altair processSyncCommitteeUpdates", () => { }, fn: (state) => { const nextSyncCommitteeBefore = state.nextSyncCommittee; - processSyncCommitteeUpdates(state); + processSyncCommitteeUpdates(ForkSeq.altair, state); if (state.nextSyncCommittee === nextSyncCommitteeBefore) { throw Error("nextSyncCommittee instance has not changed"); } diff --git a/packages/state-transition/test/perf/util.ts b/packages/state-transition/test/perf/util.ts index 4b2a7da4a50e..c764e2d039f9 100644 --- a/packages/state-transition/test/perf/util.ts +++ b/packages/state-transition/test/perf/util.ts @@ -1,5 +1,6 @@ import {BitArray, fromHexString} from "@chainsafe/ssz"; import {PublicKey, SecretKey} from "@chainsafe/blst"; +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; import {phase0, ssz, Slot, BeaconState} from "@lodestar/types"; import {config} from "@lodestar/config/default"; import {createBeaconConfig, 
createChainForkConfig} from "@lodestar/config"; @@ -7,6 +8,7 @@ import { EPOCHS_PER_ETH1_VOTING_PERIOD, EPOCHS_PER_HISTORICAL_VECTOR, ForkName, + ForkSeq, MAX_ATTESTATIONS, MAX_EFFECTIVE_BALANCE, SLOTS_PER_EPOCH, @@ -16,7 +18,6 @@ import { interopSecretKey, computeEpochAtSlot, getActiveValidatorIndices, - PubkeyIndexMap, newFilledArray, createCachedBeaconState, computeCommitteeCount, @@ -273,7 +274,12 @@ export function generatePerformanceStateAltair(pubkeysArg?: Uint8Array[]): Beaco const activeValidatorIndices = getActiveValidatorIndices(altairState, epoch); const effectiveBalanceIncrements = getEffectiveBalanceIncrements(altairState); - const {syncCommittee} = getNextSyncCommittee(altairState, activeValidatorIndices, effectiveBalanceIncrements); + const {syncCommittee} = getNextSyncCommittee( + ForkSeq.altair, + altairState, + activeValidatorIndices, + effectiveBalanceIncrements + ); state.currentSyncCommittee = syncCommittee; state.nextSyncCommittee = syncCommittee; diff --git a/packages/state-transition/test/perf/util/loadState/loadState.test.ts b/packages/state-transition/test/perf/util/loadState/loadState.test.ts index a8a1b1399dc5..9f6175e95684 100644 --- a/packages/state-transition/test/perf/util/loadState/loadState.test.ts +++ b/packages/state-transition/test/perf/util/loadState/loadState.test.ts @@ -1,8 +1,9 @@ import {itBench, setBenchOpts} from "@dapplion/benchmark"; import {PublicKey} from "@chainsafe/blst"; +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; import {loadState} from "../../../../src/util/loadState/loadState.js"; import {createCachedBeaconState} from "../../../../src/cache/stateCache.js"; -import {Index2PubkeyCache, PubkeyIndexMap} from "../../../../src/cache/pubkeyCache.js"; +import {Index2PubkeyCache} from "../../../../src/cache/pubkeyCache.js"; import {generatePerfTestCachedStateAltair} from "../../util.js"; /** @@ -79,17 +80,15 @@ describe("loadState", function () { pubkey2index.set(pubkey, validatorIndex); 
index2pubkey[validatorIndex] = PublicKey.fromBytes(pubkey); } - // skip computimg shuffling in performance test because in reality we have a ShufflingCache - // eslint-disable-next-line @typescript-eslint/explicit-function-return-type - const shufflingGetter = () => seedState.epochCtx.currentShuffling; createCachedBeaconState( migratedState, { config: seedState.config, pubkey2index, index2pubkey, + shufflingCache: seedState.epochCtx.shufflingCache, }, - {skipSyncPubkeys: true, skipSyncCommitteeCache: true, shufflingGetter} + {skipSyncPubkeys: true, skipSyncCommitteeCache: true} ); }, }); diff --git a/packages/state-transition/test/perf/util/shufflings.test.ts b/packages/state-transition/test/perf/util/shufflings.test.ts index 96c7878a46ac..41767c184349 100644 --- a/packages/state-transition/test/perf/util/shufflings.test.ts +++ b/packages/state-transition/test/perf/util/shufflings.test.ts @@ -27,27 +27,25 @@ describe("epoch shufflings", () => { itBench({ id: `computeProposers - vc ${numValidators}`, fn: () => { - const epochSeed = getSeed(state, state.epochCtx.nextShuffling.epoch, DOMAIN_BEACON_PROPOSER); - computeProposers(epochSeed, state.epochCtx.nextShuffling, state.epochCtx.effectiveBalanceIncrements); + const epochSeed = getSeed(state, state.epochCtx.epoch, DOMAIN_BEACON_PROPOSER); + const fork = state.config.getForkSeq(state.slot); + computeProposers(fork, epochSeed, state.epochCtx.currentShuffling, state.epochCtx.effectiveBalanceIncrements); }, }); itBench({ id: `computeEpochShuffling - vc ${numValidators}`, fn: () => { - const {activeIndices} = state.epochCtx.nextShuffling; - computeEpochShuffling(state, activeIndices, activeIndices.length, nextEpoch); + const {nextActiveIndices} = state.epochCtx; + computeEpochShuffling(state, nextActiveIndices, nextEpoch); }, }); itBench({ id: `getNextSyncCommittee - vc ${numValidators}`, fn: () => { - getNextSyncCommittee( - state, - state.epochCtx.nextShuffling.activeIndices, - state.epochCtx.effectiveBalanceIncrements 
- ); + const fork = state.config.getForkSeq(state.slot); + getNextSyncCommittee(fork, state, state.epochCtx.nextActiveIndices, state.epochCtx.effectiveBalanceIncrements); }, }); }); diff --git a/packages/state-transition/test/unit/block/processWithdrawals.test.ts b/packages/state-transition/test/unit/block/processWithdrawals.test.ts index 2841da635472..7b708d108a7b 100644 --- a/packages/state-transition/test/unit/block/processWithdrawals.test.ts +++ b/packages/state-transition/test/unit/block/processWithdrawals.test.ts @@ -1,4 +1,5 @@ import {describe, it, expect} from "vitest"; +import {ForkSeq} from "@lodestar/params"; import {getExpectedWithdrawals} from "../../../src/block/processWithdrawals.js"; import {numValidators} from "../../perf/util.js"; import {getExpectedWithdrawalsTestData, WithdrawalOpts} from "../../utils/capella.js"; @@ -36,8 +37,9 @@ describe("getExpectedWithdrawals", () => { // Clone true to drop cache const state = beforeValue(() => getExpectedWithdrawalsTestData(vc, opts).clone(true)); + // TODO Electra: Add test for electra it(`getExpectedWithdrawals ${vc} ${caseID}`, () => { - const {sampledValidators, withdrawals} = getExpectedWithdrawals(state.value); + const {sampledValidators, withdrawals} = getExpectedWithdrawals(ForkSeq.capella, state.value); expect(sampledValidators).toBe(opts.sampled); expect(withdrawals.length).toBe(opts.withdrawals); }); diff --git a/packages/state-transition/test/unit/cachedBeaconState.test.ts b/packages/state-transition/test/unit/cachedBeaconState.test.ts index 2891cd3e6216..96c026340143 100644 --- a/packages/state-transition/test/unit/cachedBeaconState.test.ts +++ b/packages/state-transition/test/unit/cachedBeaconState.test.ts @@ -1,14 +1,14 @@ +import {fromHexString} from "@chainsafe/ssz"; import {describe, it, expect} from "vitest"; -import {Epoch, ssz, RootHex} from "@lodestar/types"; +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; +import {ssz} from "@lodestar/types"; import {toHexString} from 
"@lodestar/utils"; import {config as defaultConfig} from "@lodestar/config/default"; -import {createBeaconConfig} from "@lodestar/config"; +import {createBeaconConfig, createChainForkConfig} from "@lodestar/config"; import {createCachedBeaconStateTest} from "../utils/state.js"; -import {PubkeyIndexMap} from "../../src/cache/pubkeyCache.js"; import {createCachedBeaconState, loadCachedBeaconState} from "../../src/cache/stateCache.js"; import {interopPubkeysCached} from "../utils/interop.js"; import {modifyStateSameValidator, newStateWithValidators} from "../utils/capella.js"; -import {EpochShuffling, getShufflingDecisionBlock} from "../../src/util/epochShuffling.js"; describe("CachedBeaconState", () => { it("Clone and mutate", () => { @@ -28,6 +28,65 @@ describe("CachedBeaconState", () => { expect(state2.epochCtx.epoch).toBe(0); }); + it("Clone and mutate cache pre-Electra", () => { + const stateView = ssz.altair.BeaconState.defaultViewDU(); + const state1 = createCachedBeaconStateTest(stateView); + + const pubkey1 = fromHexString( + "0x84105a985058fc8740a48bf1ede9d223ef09e8c6b1735ba0a55cf4a9ff2ff92376b778798365e488dab07a652eb04576" + ); + const index1 = 123; + const pubkey2 = fromHexString( + "0xa41726266b1d83ef609d759ba7796d54cfe549154e01e4730a3378309bc81a7638140d7e184b33593c072595f23f032d" + ); + const index2 = 456; + + state1.epochCtx.addPubkey(index1, pubkey1); + + const state2 = state1.clone(); + state2.epochCtx.addPubkey(index2, pubkey2); + + expect(state1.epochCtx.getValidatorIndex(pubkey1)).toBe(index1); + expect(state2.epochCtx.getValidatorIndex(pubkey1)).toBe(index1); + expect(state1.epochCtx.getValidatorIndex(pubkey2)).toBe(index2); + expect(state2.epochCtx.getValidatorIndex(pubkey2)).toBe(index2); + }); + + /* eslint-disable @typescript-eslint/naming-convention */ + it("Clone and mutate cache post-Electra", () => { + const stateView = ssz.electra.BeaconState.defaultViewDU(); + const state1 = createCachedBeaconStateTest( + stateView, + 
createChainForkConfig({ + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + CAPELLA_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + ELECTRA_FORK_EPOCH: 0, + }), + {skipSyncCommitteeCache: true, skipSyncPubkeys: true} + ); + + const pubkey1 = fromHexString( + "0x84105a985058fc8740a48bf1ede9d223ef09e8c6b1735ba0a55cf4a9ff2ff92376b778798365e488dab07a652eb04576" + ); + const index1 = 123; + const pubkey2 = fromHexString( + "0xa41726266b1d83ef609d759ba7796d54cfe549154e01e4730a3378309bc81a7638140d7e184b33593c072595f23f032d" + ); + const index2 = 456; + + state1.epochCtx.addPubkey(index1, pubkey1); + + const state2 = state1.clone(); + state2.epochCtx.addPubkey(index2, pubkey2); + + expect(state1.epochCtx.getValidatorIndex(pubkey1)).toBe(index1); + expect(state2.epochCtx.getValidatorIndex(pubkey1)).toBe(index1); + expect(state1.epochCtx.getValidatorIndex(pubkey2)).toBe(null); + expect(state2.epochCtx.getValidatorIndex(pubkey2)).toBe(index2); + }); + it("Auto-commit on hashTreeRoot", () => { // Use Checkpoint instead of BeaconState to speed up the test const cp1 = ssz.phase0.Checkpoint.defaultViewDU(); @@ -71,7 +130,7 @@ describe("CachedBeaconState", () => { const capellaStateType = ssz.capella.BeaconState; - for (let validatorCountDelta = -numValidator; validatorCountDelta <= numValidator; validatorCountDelta++) { + for (let validatorCountDelta = -numValidator + 1; validatorCountDelta <= numValidator; validatorCountDelta++) { const testName = `loadCachedBeaconState - ${validatorCountDelta > 0 ? 
"more" : "less"} ${Math.abs( validatorCountDelta )} validators`; @@ -129,42 +188,21 @@ describe("CachedBeaconState", () => { // confirm loadState() result const stateBytes = state.serialize(); - const newCachedState = loadCachedBeaconState(seedState, stateBytes, {skipSyncCommitteeCache: true}); + const newCachedState = loadCachedBeaconState(seedState, stateBytes, { + skipSyncCommitteeCache: true, + }); const newStateBytes = newCachedState.serialize(); expect(newStateBytes).toEqual(stateBytes); expect(newCachedState.hashTreeRoot()).toEqual(state.hashTreeRoot()); - const shufflingGetter = (shufflingEpoch: Epoch, dependentRoot: RootHex): EpochShuffling | null => { - if ( - shufflingEpoch === seedState.epochCtx.epoch - 1 && - dependentRoot === getShufflingDecisionBlock(seedState, shufflingEpoch) - ) { - return seedState.epochCtx.previousShuffling; - } - - if ( - shufflingEpoch === seedState.epochCtx.epoch && - dependentRoot === getShufflingDecisionBlock(seedState, shufflingEpoch) - ) { - return seedState.epochCtx.currentShuffling; - } - - if ( - shufflingEpoch === seedState.epochCtx.epoch + 1 && - dependentRoot === getShufflingDecisionBlock(seedState, shufflingEpoch) - ) { - return seedState.epochCtx.nextShuffling; - } - - return null; - }; const cachedState = createCachedBeaconState( state, { config, pubkey2index: new PubkeyIndexMap(), index2pubkey: [], + shufflingCache: seedState.epochCtx.shufflingCache, }, - {skipSyncCommitteeCache: true, shufflingGetter} + {skipSyncCommitteeCache: true} ); // validatorCountDelta < 0 is unrealistic and shuffling computation results in a different result if (validatorCountDelta >= 0) { diff --git a/packages/state-transition/test/unit/upgradeState.test.ts b/packages/state-transition/test/unit/upgradeState.test.ts index 2ea8eef182ac..301cb105dc98 100644 --- a/packages/state-transition/test/unit/upgradeState.test.ts +++ b/packages/state-transition/test/unit/upgradeState.test.ts @@ -1,12 +1,13 @@ import {expect, describe, it} from 
"vitest"; +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; import {ssz} from "@lodestar/types"; import {ForkName} from "@lodestar/params"; import {createBeaconConfig, ChainForkConfig, createChainForkConfig} from "@lodestar/config"; import {config as chainConfig} from "@lodestar/config/default"; import {upgradeStateToDeneb} from "../../src/slot/upgradeStateToDeneb.js"; +import {upgradeStateToElectra} from "../../src/slot/upgradeStateToElectra.js"; import {createCachedBeaconState} from "../../src/cache/stateCache.js"; -import {PubkeyIndexMap} from "../../src/cache/pubkeyCache.js"; describe("upgradeState", () => { it("upgradeStateToDeneb", () => { @@ -24,6 +25,21 @@ describe("upgradeState", () => { const newState = upgradeStateToDeneb(stateView); expect(() => newState.toValue()).not.toThrow(); }); + it("upgradeStateToElectra", () => { + const denebState = ssz.deneb.BeaconState.defaultViewDU(); + const config = getConfig(ForkName.deneb); + const stateView = createCachedBeaconState( + denebState, + { + config: createBeaconConfig(config, denebState.genesisValidatorsRoot), + pubkey2index: new PubkeyIndexMap(), + index2pubkey: [], + }, + {skipSyncCommitteeCache: true} + ); + const newState = upgradeStateToElectra(stateView); + expect(() => newState.toValue()).not.toThrow(); + }); }); const ZERO_HASH = Buffer.alloc(32, 0); @@ -55,5 +71,13 @@ function getConfig(fork: ForkName, forkEpoch = 0): ChainForkConfig { CAPELLA_FORK_EPOCH: 0, DENEB_FORK_EPOCH: forkEpoch, }); + case ForkName.electra: + return createChainForkConfig({ + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + CAPELLA_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + ELECTRA_FORK_EPOCH: forkEpoch, + }); } } diff --git a/packages/state-transition/test/unit/util/cachedBeaconState.test.ts b/packages/state-transition/test/unit/util/cachedBeaconState.test.ts index 654e0752adb8..c85a8c7a2ffd 100644 --- a/packages/state-transition/test/unit/util/cachedBeaconState.test.ts +++ 
b/packages/state-transition/test/unit/util/cachedBeaconState.test.ts @@ -1,8 +1,9 @@ import {describe, it} from "vitest"; +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; import {createBeaconConfig} from "@lodestar/config"; import {config} from "@lodestar/config/default"; import {ssz} from "@lodestar/types"; -import {createCachedBeaconState, PubkeyIndexMap} from "../../../src/index.js"; +import {createCachedBeaconState} from "../../../src/index.js"; describe("CachedBeaconState", () => { it("Create empty CachedBeaconState", () => { diff --git a/packages/state-transition/test/unit/util/deposit.test.ts b/packages/state-transition/test/unit/util/deposit.test.ts new file mode 100644 index 000000000000..3cfa4abb3409 --- /dev/null +++ b/packages/state-transition/test/unit/util/deposit.test.ts @@ -0,0 +1,99 @@ +import {describe, it, expect} from "vitest"; +import {ssz} from "@lodestar/types"; +import {createChainForkConfig} from "@lodestar/config"; +import {MAX_DEPOSITS} from "@lodestar/params"; +import {getEth1DepositCount} from "../../../src/index.js"; +import {createCachedBeaconStateTest} from "../../utils/state.js"; + +describe("getEth1DepositCount", () => { + it("Pre Electra", () => { + const stateView = ssz.altair.BeaconState.defaultViewDU(); + const preElectraState = createCachedBeaconStateTest(stateView); + + if (preElectraState.epochCtx.isPostElectra()) { + throw Error("Not a pre-Electra state"); + } + + preElectraState.eth1Data.depositCount = 123; + + // 1. Should get less than MAX_DEPOSIT + preElectraState.eth1DepositIndex = 120; + expect(getEth1DepositCount(preElectraState)).toBe(3); + + // 2. 
Should get MAX_DEPOSIT + preElectraState.eth1DepositIndex = 100; + expect(getEth1DepositCount(preElectraState)).toBe(MAX_DEPOSITS); + }); + it("Post Electra with eth1 deposit", () => { + const stateView = ssz.electra.BeaconState.defaultViewDU(); + const postElectraState = createCachedBeaconStateTest( + stateView, + createChainForkConfig({ + /* eslint-disable @typescript-eslint/naming-convention */ + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + CAPELLA_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + ELECTRA_FORK_EPOCH: 0, + }), + {skipSyncCommitteeCache: true, skipSyncPubkeys: true} + ); + + if (!postElectraState.epochCtx.isPostElectra()) { + throw Error("Not a post-Electra state"); + } + + postElectraState.depositRequestsStartIndex = 1000n; + postElectraState.eth1Data.depositCount = 995; + + // 1. Should get less than MAX_DEPOSIT + postElectraState.eth1DepositIndex = 990; + expect(getEth1DepositCount(postElectraState)).toBe(5); + + // 2. Should get MAX_DEPOSIT + postElectraState.eth1DepositIndex = 100; + expect(getEth1DepositCount(postElectraState)).toBe(MAX_DEPOSITS); + + // 3. Should be 0 + postElectraState.eth1DepositIndex = 1000; + expect(getEth1DepositCount(postElectraState)).toBe(0); + }); + it("Post Electra without eth1 deposit", () => { + const stateView = ssz.electra.BeaconState.defaultViewDU(); + const postElectraState = createCachedBeaconStateTest( + stateView, + createChainForkConfig({ + /* eslint-disable @typescript-eslint/naming-convention */ + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + CAPELLA_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + ELECTRA_FORK_EPOCH: 0, + }), + {skipSyncCommitteeCache: true, skipSyncPubkeys: true} + ); + + if (!postElectraState.epochCtx.isPostElectra()) { + throw Error("Not a post-Electra state"); + } + + postElectraState.depositRequestsStartIndex = 1000n; + postElectraState.eth1Data.depositCount = 1005; + + // Before eth1DepositIndex reaching the start index + // 1. 
Should get less than MAX_DEPOSIT + postElectraState.eth1DepositIndex = 990; + expect(getEth1DepositCount(postElectraState)).toBe(10); + + // 2. Should get MAX_DEPOSIT + postElectraState.eth1DepositIndex = 983; + expect(getEth1DepositCount(postElectraState)).toBe(MAX_DEPOSITS); + + // After eth1DepositIndex reaching the start index + // 1. Should be 0 + postElectraState.eth1DepositIndex = 1000; + expect(getEth1DepositCount(postElectraState)).toBe(0); + postElectraState.eth1DepositIndex = 1003; + expect(getEth1DepositCount(postElectraState)).toBe(0); + }); +}); diff --git a/packages/state-transition/test/unit/util/loadState.test.ts b/packages/state-transition/test/unit/util/loadState.test.ts new file mode 100644 index 000000000000..97a792a28adb --- /dev/null +++ b/packages/state-transition/test/unit/util/loadState.test.ts @@ -0,0 +1,40 @@ +import {describe, it, expect} from "vitest"; +import {ssz} from "@lodestar/types"; +import {mainnetChainConfig} from "@lodestar/config/networks"; +import {createChainForkConfig} from "@lodestar/config"; +import {ForkName, SLOTS_PER_EPOCH} from "@lodestar/params"; +import {loadStateAndValidators} from "../../../src/util/loadState/loadState.js"; + +describe("loadStateAndValidators", () => { + const numValidator = 10; + const config = createChainForkConfig(mainnetChainConfig); + + const testCases: {name: ForkName; slot: number}[] = [ + {name: ForkName.phase0, slot: 100}, + {name: ForkName.altair, slot: mainnetChainConfig.ALTAIR_FORK_EPOCH * SLOTS_PER_EPOCH + 100}, + {name: ForkName.capella, slot: mainnetChainConfig.CAPELLA_FORK_EPOCH * SLOTS_PER_EPOCH + 100}, + {name: ForkName.deneb, slot: mainnetChainConfig.DENEB_FORK_EPOCH * SLOTS_PER_EPOCH + 100}, + ]; + + for (const {name, slot} of testCases) { + it(`fork: ${name}, slot: ${slot}`, () => { + const state = config.getForkTypes(slot).BeaconState.defaultViewDU(); + state.slot = slot; + for (let i = 0; i < numValidator; i++) { + const validator = ssz.phase0.Validator.defaultViewDU(); + 
validator.pubkey = Buffer.alloc(48, i); + state.validators.push(validator); + state.balances.push(32 * 1e9); + } + state.commit(); + + const stateBytes = state.serialize(); + const stateRoot = state.hashTreeRoot(); + const {state: loadedState, validatorsBytes} = loadStateAndValidators(config, stateBytes); + expect(loadedState.hashTreeRoot()).toEqual(stateRoot); + // serialize() somehow takes time, however comparing state root would be enough + // expect(loadedState.serialize()).toEqual(stateBytes); + expect(validatorsBytes).toEqual(state.validators.serialize()); + }); + } +}); diff --git a/packages/state-transition/test/utils/state.ts b/packages/state-transition/test/utils/state.ts index 29a1f98b5562..9a79faf74480 100644 --- a/packages/state-transition/test/utils/state.ts +++ b/packages/state-transition/test/utils/state.ts @@ -1,3 +1,4 @@ +import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map"; import {config as minimalConfig} from "@lodestar/config/default"; import { EPOCHS_PER_HISTORICAL_VECTOR, @@ -18,7 +19,6 @@ import { CachedBeaconStateAllForks, BeaconStateAllForks, createCachedBeaconState, - PubkeyIndexMap, } from "../../src/index.js"; import {BeaconStateCache} from "../../src/cache/stateCache.js"; import {EpochCacheOpts} from "../../src/cache/epochCache.js"; diff --git a/packages/test-utils/package.json b/packages/test-utils/package.json index f3b0a2da223c..5bf61891a9d5 100644 --- a/packages/test-utils/package.json +++ b/packages/test-utils/package.json @@ -1,7 +1,7 @@ { "name": "@lodestar/test-utils", "private": true, - "version": "1.20.2", + "version": "1.22.0", "description": "Test utilities reused across other packages", "author": "ChainSafe Systems", "license": "Apache-2.0", @@ -57,10 +57,10 @@ "blockchain" ], "dependencies": { - "@chainsafe/blst": "^2.0.3", "@chainsafe/bls-keystore": "^3.1.0", - "@lodestar/params": "^1.20.2", - "@lodestar/utils": "^1.20.2", + "@chainsafe/blst": "^2.0.3", + "@lodestar/params": "^1.22.0", + "@lodestar/utils": 
"^1.22.0", "axios": "^1.3.4", "testcontainers": "^10.2.1", "tmp": "^0.2.1", diff --git a/packages/types/package.json b/packages/types/package.json index d5512763b197..f3e034ec1b35 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.20.2", + "version": "1.22.0", "type": "module", "exports": { ".": { @@ -29,6 +29,9 @@ "./deneb": { "import": "./lib/deneb/index.js" }, + "./electra": { + "import": "./lib/electra/index.js" + }, "./phase0": { "import": "./lib/phase0/index.js" } @@ -70,8 +73,8 @@ }, "types": "lib/index.d.ts", "dependencies": { - "@chainsafe/ssz": "^0.15.1", - "@lodestar/params": "^1.20.2", + "@chainsafe/ssz": "^0.17.1", + "@lodestar/params": "^1.22.0", "ethereum-cryptography": "^2.0.0" }, "keywords": [ diff --git a/packages/types/src/deneb/sszTypes.ts b/packages/types/src/deneb/sszTypes.ts index c93c3f145beb..076973bba579 100644 --- a/packages/types/src/deneb/sszTypes.ts +++ b/packages/types/src/deneb/sszTypes.ts @@ -312,3 +312,21 @@ export const SSEPayloadAttributes = new ContainerType( }, {typeName: "SSEPayloadAttributes", jsonCase: "eth2"} ); + +export const BlockContents = new ContainerType( + { + block: BeaconBlock, + kzgProofs: KZGProofs, + blobs: Blobs, + }, + {typeName: "BlockContents", jsonCase: "eth2"} +); + +export const SignedBlockContents = new ContainerType( + { + signedBlock: SignedBeaconBlock, + kzgProofs: KZGProofs, + blobs: Blobs, + }, + {typeName: "SignedBlockContents", jsonCase: "eth2"} +); diff --git a/packages/types/src/deneb/types.ts b/packages/types/src/deneb/types.ts index 9a901c9a1a81..7ee6648aeaf2 100644 --- a/packages/types/src/deneb/types.ts +++ b/packages/types/src/deneb/types.ts @@ -1,6 +1,4 @@ import {ValueOf} from "@chainsafe/ssz"; -import {ForkName} from "@lodestar/params"; -import type {BlockContents} from "../types.js"; import * as ssz from "./sszTypes.js"; export type KZGProof = ValueOf; @@ -49,4 
+47,7 @@ export type LightClientOptimisticUpdate = ValueOf; export type ProducedBlobSidecars = Omit; -export type Contents = Omit, "block">; + +export type BlockContents = ValueOf; +export type SignedBlockContents = ValueOf; +export type Contents = Omit; diff --git a/packages/types/src/electra/index.ts b/packages/types/src/electra/index.ts new file mode 100644 index 000000000000..981b2015e02a --- /dev/null +++ b/packages/types/src/electra/index.ts @@ -0,0 +1,4 @@ +export * from "./types.js"; +import * as ts from "./types.js"; +import * as ssz from "./sszTypes.js"; +export {ts, ssz}; diff --git a/packages/types/src/electra/sszTypes.ts b/packages/types/src/electra/sszTypes.ts new file mode 100644 index 000000000000..9d995c38efd5 --- /dev/null +++ b/packages/types/src/electra/sszTypes.ts @@ -0,0 +1,418 @@ +import { + BitListType, + BitVectorType, + ContainerType, + ListBasicType, + ListCompositeType, + VectorCompositeType, +} from "@chainsafe/ssz"; +import { + HISTORICAL_ROOTS_LIMIT, + EPOCHS_PER_SYNC_COMMITTEE_PERIOD, + SLOTS_PER_EPOCH, + MAX_DEPOSIT_REQUESTS_PER_PAYLOAD, + MAX_VALIDATORS_PER_COMMITTEE, + MAX_COMMITTEES_PER_SLOT, + MAX_ATTESTATIONS_ELECTRA, + MAX_ATTESTER_SLASHINGS_ELECTRA, + MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD, + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD, + PENDING_BALANCE_DEPOSITS_LIMIT, + PENDING_PARTIAL_WITHDRAWALS_LIMIT, + PENDING_CONSOLIDATIONS_LIMIT, + FINALIZED_ROOT_DEPTH_ELECTRA, + NEXT_SYNC_COMMITTEE_DEPTH_ELECTRA, +} from "@lodestar/params"; +import {ssz as primitiveSsz} from "../primitive/index.js"; +import {ssz as phase0Ssz} from "../phase0/index.js"; +import {ssz as altairSsz} from "../altair/index.js"; +import {ssz as bellatrixSsz} from "../bellatrix/index.js"; +import {ssz as capellaSsz} from "../capella/index.js"; +import {ssz as denebSsz} from "../deneb/index.js"; + +const { + Epoch, + Gwei, + UintNum64, + Slot, + Root, + BLSSignature, + UintBn256, + Bytes32, + BLSPubkey, + DepositIndex, + UintBn64, + ExecutionAddress, + 
ValidatorIndex, +} = primitiveSsz; + +export const AggregationBits = new BitListType(MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT); + +// This CommitteeBits serves a different purpose than CommitteeBits in phase0 +// TODO Electra: Rename phase0.CommitteeBits to ParticipationBits to avoid confusion +export const CommitteeBits = new BitVectorType(MAX_COMMITTEES_PER_SLOT); + +export const AttestingIndices = new ListBasicType( + ValidatorIndex, + MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT +); + +export const Attestation = new ContainerType( + { + aggregationBits: AggregationBits, // Modified in ELECTRA + data: phase0Ssz.AttestationData, + signature: BLSSignature, + committeeBits: CommitteeBits, // New in ELECTRA + }, + {typeName: "Attestation", jsonCase: "eth2"} +); + +export const IndexedAttestation = new ContainerType( + { + attestingIndices: AttestingIndices, // Modified in ELECTRA + data: phase0Ssz.AttestationData, + signature: BLSSignature, + }, + {typeName: "IndexedAttestation", jsonCase: "eth2"} +); + +/** Same as `IndexedAttestation` but epoch, slot and index are not bounded and must be a bigint */ +export const IndexedAttestationBigint = new ContainerType( + { + attestingIndices: AttestingIndices, // Modified in ELECTRA + data: phase0Ssz.AttestationDataBigint, + signature: BLSSignature, + }, + {typeName: "IndexedAttestation", jsonCase: "eth2"} +); + +export const AttesterSlashing = new ContainerType( + { + attestation1: IndexedAttestationBigint, // Modified in ELECTRA + attestation2: IndexedAttestationBigint, // Modified in ELECTRA + }, + {typeName: "AttesterSlashing", jsonCase: "eth2"} +); + +export const AggregateAndProof = new ContainerType( + { + aggregatorIndex: ValidatorIndex, + aggregate: Attestation, // Modified in ELECTRA + selectionProof: BLSSignature, + }, + {typeName: "AggregateAndProof", jsonCase: "eth2", cachePermanentRootStruct: true} +); + +export const SignedAggregateAndProof = new ContainerType( + { + message: 
AggregateAndProof, // Modified in ELECTRA + signature: BLSSignature, + }, + {typeName: "SignedAggregateAndProof", jsonCase: "eth2"} +); + +export const DepositRequest = new ContainerType( + { + pubkey: BLSPubkey, + withdrawalCredentials: Bytes32, + // this is actually gwei uintbn64 type, but super unlikely to get a high amount here + // to warrant a bn type + amount: UintNum64, + signature: BLSSignature, + index: DepositIndex, + }, + {typeName: "DepositRequest", jsonCase: "eth2"} +); + +export const DepositRequests = new ListCompositeType(DepositRequest, MAX_DEPOSIT_REQUESTS_PER_PAYLOAD); + +export const WithdrawalRequest = new ContainerType( + { + sourceAddress: ExecutionAddress, + validatorPubkey: BLSPubkey, + amount: Gwei, + }, + {typeName: "WithdrawalRequest", jsonCase: "eth2"} +); +export const WithdrawalRequests = new ListCompositeType(WithdrawalRequest, MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD); +export const ConsolidationRequest = new ContainerType( + { + sourceAddress: ExecutionAddress, + sourcePubkey: BLSPubkey, + targetPubkey: BLSPubkey, + }, + {typeName: "ConsolidationRequest", jsonCase: "eth2"} +); +export const ConsolidationRequests = new ListCompositeType( + ConsolidationRequest, + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD +); + +export const ExecutionRequests = new ContainerType( + { + deposits: DepositRequests, + withdrawals: WithdrawalRequests, + consolidations: ConsolidationRequests, + }, + {typeName: "ExecutionRequests", jsonCase: "eth2"} +); + +// Explicitly defining electra containers for consistency's sake +export const ExecutionPayloadHeader = denebSsz.ExecutionPayloadHeader; +export const ExecutionPayload = denebSsz.ExecutionPayload; + +// We have to preserve Fields ordering while changing the type of ExecutionPayload +export const BeaconBlockBody = new ContainerType( + { + randaoReveal: phase0Ssz.BeaconBlockBody.fields.randaoReveal, + eth1Data: phase0Ssz.BeaconBlockBody.fields.eth1Data, + graffiti: phase0Ssz.BeaconBlockBody.fields.graffiti, + 
proposerSlashings: phase0Ssz.BeaconBlockBody.fields.proposerSlashings, + attesterSlashings: new ListCompositeType(AttesterSlashing, MAX_ATTESTER_SLASHINGS_ELECTRA), // Modified in ELECTRA + attestations: new ListCompositeType(Attestation, MAX_ATTESTATIONS_ELECTRA), // Modified in ELECTRA + deposits: phase0Ssz.BeaconBlockBody.fields.deposits, + voluntaryExits: phase0Ssz.BeaconBlockBody.fields.voluntaryExits, + syncAggregate: altairSsz.BeaconBlockBody.fields.syncAggregate, + executionPayload: ExecutionPayload, + blsToExecutionChanges: capellaSsz.BeaconBlockBody.fields.blsToExecutionChanges, + blobKzgCommitments: denebSsz.BeaconBlockBody.fields.blobKzgCommitments, + executionRequests: ExecutionRequests, // New in ELECTRA:EIP7251 + }, + {typeName: "BeaconBlockBody", jsonCase: "eth2", cachePermanentRootStruct: true} +); + +export const BeaconBlock = new ContainerType( + { + ...denebSsz.BeaconBlock.fields, + body: BeaconBlockBody, // Modified in ELECTRA + }, + {typeName: "BeaconBlock", jsonCase: "eth2", cachePermanentRootStruct: true} +); + +export const SignedBeaconBlock = new ContainerType( + { + message: BeaconBlock, // Modified in ELECTRA + signature: BLSSignature, + }, + {typeName: "SignedBeaconBlock", jsonCase: "eth2"} +); + +export const BlindedBeaconBlockBody = new ContainerType( + { + randaoReveal: phase0Ssz.BeaconBlockBody.fields.randaoReveal, + eth1Data: phase0Ssz.BeaconBlockBody.fields.eth1Data, + graffiti: phase0Ssz.BeaconBlockBody.fields.graffiti, + proposerSlashings: phase0Ssz.BeaconBlockBody.fields.proposerSlashings, + attesterSlashings: new ListCompositeType(AttesterSlashing, MAX_ATTESTER_SLASHINGS_ELECTRA), // Modified in ELECTRA + attestations: new ListCompositeType(Attestation, MAX_ATTESTATIONS_ELECTRA), // Modified in ELECTRA + deposits: phase0Ssz.BeaconBlockBody.fields.deposits, + voluntaryExits: phase0Ssz.BeaconBlockBody.fields.voluntaryExits, + syncAggregate: altairSsz.SyncAggregate, + executionPayloadHeader: ExecutionPayloadHeader, + 
blsToExecutionChanges: capellaSsz.BeaconBlockBody.fields.blsToExecutionChanges, + blobKzgCommitments: denebSsz.BeaconBlockBody.fields.blobKzgCommitments, + }, + {typeName: "BlindedBeaconBlockBody", jsonCase: "eth2", cachePermanentRootStruct: true} +); + +export const BlindedBeaconBlock = new ContainerType( + { + ...denebSsz.BlindedBeaconBlock.fields, + body: BlindedBeaconBlockBody, // Modified in ELECTRA + }, + {typeName: "BlindedBeaconBlock", jsonCase: "eth2", cachePermanentRootStruct: true} +); + +export const SignedBlindedBeaconBlock = new ContainerType( + { + message: BlindedBeaconBlock, // Modified in ELECTRA + signature: BLSSignature, + }, + {typeName: "SignedBlindedBeaconBlock", jsonCase: "eth2"} +); + +export const BuilderBid = new ContainerType( + { + header: ExecutionPayloadHeader, // Modified in ELECTRA + blindedBlobsBundle: denebSsz.BlobKzgCommitments, + value: UintBn256, + pubkey: BLSPubkey, + }, + {typeName: "BuilderBid", jsonCase: "eth2"} +); + +export const SignedBuilderBid = new ContainerType( + { + message: BuilderBid, + signature: BLSSignature, + }, + {typeName: "SignedBuilderBid", jsonCase: "eth2"} +); + +export const PendingBalanceDeposit = new ContainerType( + { + index: ValidatorIndex, + amount: Gwei, + }, + {typeName: "PendingBalanceDeposit", jsonCase: "eth2"} +); + +export const PendingBalanceDeposits = new ListCompositeType(PendingBalanceDeposit, PENDING_BALANCE_DEPOSITS_LIMIT); + +export const PendingPartialWithdrawal = new ContainerType( + { + index: ValidatorIndex, + amount: Gwei, + withdrawableEpoch: Epoch, + }, + {typeName: "PendingPartialWithdrawal", jsonCase: "eth2"} +); + +export const PendingConsolidation = new ContainerType( + { + sourceIndex: ValidatorIndex, + targetIndex: ValidatorIndex, + }, + {typeName: "PendingConsolidation", jsonCase: "eth2"} +); + +// In EIP-7251, we spread deneb fields as new fields are appended at the end +export const BeaconState = new ContainerType( + { + genesisTime: UintNum64, + 
genesisValidatorsRoot: Root, + slot: primitiveSsz.Slot, + fork: phase0Ssz.Fork, + // History + latestBlockHeader: phase0Ssz.BeaconBlockHeader, + blockRoots: phase0Ssz.HistoricalBlockRoots, + stateRoots: phase0Ssz.HistoricalStateRoots, + // historical_roots Frozen in Capella, replaced by historical_summaries + historicalRoots: new ListCompositeType(Root, HISTORICAL_ROOTS_LIMIT), + // Eth1 + eth1Data: phase0Ssz.Eth1Data, + eth1DataVotes: phase0Ssz.Eth1DataVotes, + eth1DepositIndex: UintNum64, + // Registry + validators: phase0Ssz.Validators, + balances: phase0Ssz.Balances, + randaoMixes: phase0Ssz.RandaoMixes, + // Slashings + slashings: phase0Ssz.Slashings, + // Participation + previousEpochParticipation: altairSsz.EpochParticipation, + currentEpochParticipation: altairSsz.EpochParticipation, + // Finality + justificationBits: phase0Ssz.JustificationBits, + previousJustifiedCheckpoint: phase0Ssz.Checkpoint, + currentJustifiedCheckpoint: phase0Ssz.Checkpoint, + finalizedCheckpoint: phase0Ssz.Checkpoint, + // Inactivity + inactivityScores: altairSsz.InactivityScores, + // Sync + currentSyncCommittee: altairSsz.SyncCommittee, + nextSyncCommittee: altairSsz.SyncCommittee, + // Execution + latestExecutionPayloadHeader: ExecutionPayloadHeader, + // Withdrawals + nextWithdrawalIndex: capellaSsz.BeaconState.fields.nextWithdrawalIndex, + nextWithdrawalValidatorIndex: capellaSsz.BeaconState.fields.nextWithdrawalValidatorIndex, + // Deep history valid from Capella onwards + historicalSummaries: capellaSsz.BeaconState.fields.historicalSummaries, + depositRequestsStartIndex: UintBn64, // New in ELECTRA:EIP6110 + depositBalanceToConsume: Gwei, // New in ELECTRA:EIP7251 + exitBalanceToConsume: Gwei, // New in ELECTRA:EIP7251 + earliestExitEpoch: Epoch, // New in ELECTRA:EIP7251 + consolidationBalanceToConsume: Gwei, // New in ELECTRA:EIP7251 + earliestConsolidationEpoch: Epoch, // New in ELECTRA:EIP7251 + pendingBalanceDeposits: PendingBalanceDeposits, // New in ELECTRA:EIP7251 + 
pendingPartialWithdrawals: new ListCompositeType(PendingPartialWithdrawal, PENDING_PARTIAL_WITHDRAWALS_LIMIT), // New in ELECTRA:EIP7251 + pendingConsolidations: new ListCompositeType(PendingConsolidation, PENDING_CONSOLIDATIONS_LIMIT), // New in ELECTRA:EIP7251 + }, + {typeName: "BeaconState", jsonCase: "eth2"} +); + +export const LightClientBootstrap = new ContainerType( + { + header: denebSsz.LightClientHeader, + currentSyncCommittee: altairSsz.SyncCommittee, + currentSyncCommitteeBranch: new VectorCompositeType(Bytes32, NEXT_SYNC_COMMITTEE_DEPTH_ELECTRA), + }, + {typeName: "LightClientBootstrap", jsonCase: "eth2"} +); + +export const LightClientUpdate = new ContainerType( + { + attestedHeader: denebSsz.LightClientHeader, + nextSyncCommittee: altairSsz.SyncCommittee, + nextSyncCommitteeBranch: new VectorCompositeType(Bytes32, NEXT_SYNC_COMMITTEE_DEPTH_ELECTRA), // Modified in ELECTRA + finalizedHeader: denebSsz.LightClientHeader, + finalityBranch: new VectorCompositeType(Bytes32, FINALIZED_ROOT_DEPTH_ELECTRA), // Modified in ELECTRA + syncAggregate: altairSsz.SyncAggregate, + signatureSlot: Slot, + }, + {typeName: "LightClientUpdate", jsonCase: "eth2"} +); + +export const LightClientFinalityUpdate = new ContainerType( + { + attestedHeader: denebSsz.LightClientHeader, + finalizedHeader: denebSsz.LightClientHeader, + finalityBranch: new VectorCompositeType(Bytes32, FINALIZED_ROOT_DEPTH_ELECTRA), // Modified in ELECTRA + syncAggregate: altairSsz.SyncAggregate, + signatureSlot: Slot, + }, + {typeName: "LightClientFinalityUpdate", jsonCase: "eth2"} +); + +export const LightClientOptimisticUpdate = new ContainerType( + { + attestedHeader: denebSsz.LightClientHeader, + syncAggregate: altairSsz.SyncAggregate, + signatureSlot: Slot, + }, + {typeName: "LightClientOptimisticUpdate", jsonCase: "eth2"} +); + +export const LightClientStore = new ContainerType( + { + snapshot: LightClientBootstrap, + validUpdates: new ListCompositeType(LightClientUpdate, 
EPOCHS_PER_SYNC_COMMITTEE_PERIOD * SLOTS_PER_EPOCH), + }, + {typeName: "LightClientStore", jsonCase: "eth2"} +); + +// PayloadAttributes primarily for SSE event +export const PayloadAttributes = new ContainerType( + { + ...capellaSsz.PayloadAttributes.fields, + parentBeaconBlockRoot: Root, + }, + {typeName: "PayloadAttributes", jsonCase: "eth2"} +); + +export const SSEPayloadAttributes = new ContainerType( + { + ...bellatrixSsz.SSEPayloadAttributesCommon.fields, + payloadAttributes: PayloadAttributes, + }, + {typeName: "SSEPayloadAttributes", jsonCase: "eth2"} +); + +export const BlockContents = new ContainerType( + { + block: BeaconBlock, + kzgProofs: denebSsz.KZGProofs, + blobs: denebSsz.Blobs, + }, + {typeName: "BlockContents", jsonCase: "eth2"} +); + +export const SignedBlockContents = new ContainerType( + { + signedBlock: SignedBeaconBlock, + kzgProofs: denebSsz.KZGProofs, + blobs: denebSsz.Blobs, + }, + {typeName: "SignedBlockContents", jsonCase: "eth2"} +); diff --git a/packages/types/src/electra/types.ts b/packages/types/src/electra/types.ts new file mode 100644 index 000000000000..f7996cf336f9 --- /dev/null +++ b/packages/types/src/electra/types.ts @@ -0,0 +1,50 @@ +import {ValueOf} from "@chainsafe/ssz"; +import * as ssz from "./sszTypes.js"; + +export type Attestation = ValueOf; +export type IndexedAttestation = ValueOf; +export type IndexedAttestationBigint = ValueOf; +export type AttesterSlashing = ValueOf; + +export type AggregateAndProof = ValueOf; +export type SignedAggregateAndProof = ValueOf; + +export type DepositRequest = ValueOf; +export type DepositRequests = ValueOf; + +export type WithdrawalRequest = ValueOf; +export type WithdrawalRequests = ValueOf; + +export type ConsolidationRequest = ValueOf; +export type ConsolidationRequests = ValueOf; + +export type ExecutionPayload = ValueOf; +export type ExecutionPayloadHeader = ValueOf; +export type ExecutionRequests = ValueOf; + +export type BeaconBlockBody = ValueOf; +export type BeaconBlock = 
ValueOf; +export type SignedBeaconBlock = ValueOf; + +export type BeaconState = ValueOf; + +export type BlindedBeaconBlockBody = ValueOf; +export type BlindedBeaconBlock = ValueOf; +export type SignedBlindedBeaconBlock = ValueOf; + +export type BuilderBid = ValueOf; +export type SignedBuilderBid = ValueOf; +export type SSEPayloadAttributes = ValueOf; + +export type LightClientBootstrap = ValueOf; +export type LightClientUpdate = ValueOf; +export type LightClientFinalityUpdate = ValueOf; +export type LightClientOptimisticUpdate = ValueOf; +export type LightClientStore = ValueOf; + +export type PendingBalanceDeposit = ValueOf; +export type PendingPartialWithdrawal = ValueOf; +export type PendingConsolidation = ValueOf; + +export type BlockContents = ValueOf; +export type SignedBlockContents = ValueOf; diff --git a/packages/types/src/index.ts b/packages/types/src/index.ts index e0745834c7d1..dc9139dc967e 100644 --- a/packages/types/src/index.ts +++ b/packages/types/src/index.ts @@ -9,3 +9,4 @@ export * from "./utils/typeguards.js"; export {StringType, stringType} from "./utils/stringType.js"; // Container utils export * from "./utils/container.js"; +export * from "./utils/validatorStatus.js"; diff --git a/packages/types/src/phase0/sszTypes.ts b/packages/types/src/phase0/sszTypes.ts index 9eb2a13e5fae..4a04701b789d 100644 --- a/packages/types/src/phase0/sszTypes.ts +++ b/packages/types/src/phase0/sszTypes.ts @@ -236,7 +236,7 @@ export const RandaoMixes = new VectorCompositeType(Bytes32, EPOCHS_PER_HISTORICA * This is initially a Gwei (BigInt) vector, however since Nov 2023 it's converted to UintNum64 (number) vector in the state transition because: * - state.slashings[nextEpoch % EPOCHS_PER_SLASHINGS_VECTOR] is reset per epoch in processSlashingsReset() * - max slashed validators per epoch is SLOTS_PER_EPOCH * MAX_ATTESTER_SLASHINGS * MAX_VALIDATORS_PER_COMMITTEE which is 32 * 2 * 2048 = 131072 on mainnet - * - with that and 32_000_000_000 MAX_EFFECTIVE_BALANCE, it 
still fits in a number given that Math.floor(Number.MAX_SAFE_INTEGER / 32_000_000_000) = 281474 + * - with that and 32_000_000_000 MAX_EFFECTIVE_BALANCE or 2048_000_000_000 MAX_EFFECTIVE_BALANCE_ELECTRA, it still fits in a number given that Math.floor(Number.MAX_SAFE_INTEGER / 32_000_000_000) = 281474 * - we don't need to compute the total slashings from state.slashings, it's handled by totalSlashingsByIncrement in EpochCache */ export const Slashings = new VectorBasicType(UintNum64, EPOCHS_PER_SLASHINGS_VECTOR); diff --git a/packages/types/src/primitive/sszTypes.ts b/packages/types/src/primitive/sszTypes.ts index 068a32e2cc17..376e17c3f1b6 100644 --- a/packages/types/src/primitive/sszTypes.ts +++ b/packages/types/src/primitive/sszTypes.ts @@ -50,6 +50,7 @@ export const SubcommitteeIndex = UintNum64; */ export const ValidatorIndex = UintNum64; export const WithdrawalIndex = UintNum64; +export const DepositIndex = UintNum64; export const Gwei = UintBn64; export const Wei = UintBn256; export const Root = new ByteVectorType(32); diff --git a/packages/types/src/sszTypes.ts b/packages/types/src/sszTypes.ts index 60980fa0822a..4399904a94bc 100644 --- a/packages/types/src/sszTypes.ts +++ b/packages/types/src/sszTypes.ts @@ -5,9 +5,10 @@ import {ssz as altair} from "./altair/index.js"; import {ssz as bellatrix} from "./bellatrix/index.js"; import {ssz as capella} from "./capella/index.js"; import {ssz as deneb} from "./deneb/index.js"; +import {ssz as electra} from "./electra/index.js"; export * from "./primitive/sszTypes.js"; -export {phase0, altair, bellatrix, capella, deneb}; +export {phase0, altair, bellatrix, capella, deneb, electra}; /** * Index the ssz types that differ by fork @@ -19,6 +20,7 @@ const typesByFork = { [ForkName.bellatrix]: {...phase0, ...altair, ...bellatrix}, [ForkName.capella]: {...phase0, ...altair, ...bellatrix, ...capella}, [ForkName.deneb]: {...phase0, ...altair, ...bellatrix, ...capella, ...deneb}, + [ForkName.electra]: {...phase0, ...altair, 
...bellatrix, ...capella, ...deneb, ...electra}, }; /** diff --git a/packages/types/src/types.ts b/packages/types/src/types.ts index 46641d55667e..08fc06ac6cb9 100644 --- a/packages/types/src/types.ts +++ b/packages/types/src/types.ts @@ -1,9 +1,18 @@ -import {ForkAll, ForkBlobs, ForkExecution, ForkLightClient, ForkName, ForkPreBlobs} from "@lodestar/params"; +import { + ForkAll, + ForkBlobs, + ForkExecution, + ForkLightClient, + ForkName, + ForkPostElectra, + ForkPreBlobs, +} from "@lodestar/params"; import {ts as phase0} from "./phase0/index.js"; import {ts as altair} from "./altair/index.js"; import {ts as bellatrix} from "./bellatrix/index.js"; import {ts as capella} from "./capella/index.js"; import {ts as deneb} from "./deneb/index.js"; +import {ts as electra} from "./electra/index.js"; import {Slot} from "./primitive/types.js"; export * from "./primitive/types.js"; @@ -12,6 +21,7 @@ export {ts as altair} from "./altair/index.js"; export {ts as bellatrix} from "./bellatrix/index.js"; export {ts as capella} from "./capella/index.js"; export {ts as deneb} from "./deneb/index.js"; +export {ts as electra} from "./electra/index.js"; /** Common non-spec type to represent roots as strings */ export type RootHex = string; @@ -34,6 +44,12 @@ type TypesByFork = { BeaconState: phase0.BeaconState; SignedBeaconBlock: phase0.SignedBeaconBlock; Metadata: phase0.Metadata; + Attestation: phase0.Attestation; + IndexedAttestation: phase0.IndexedAttestation; + IndexedAttestationBigint: phase0.IndexedAttestationBigint; + AttesterSlashing: phase0.AttesterSlashing; + AggregateAndProof: phase0.AggregateAndProof; + SignedAggregateAndProof: phase0.SignedAggregateAndProof; }; [ForkName.altair]: { BeaconBlockHeader: phase0.BeaconBlockHeader; @@ -51,6 +67,12 @@ type TypesByFork = { LightClientStore: altair.LightClientStore; SyncCommittee: altair.SyncCommittee; SyncAggregate: altair.SyncAggregate; + Attestation: phase0.Attestation; + IndexedAttestation: phase0.IndexedAttestation; + 
IndexedAttestationBigint: phase0.IndexedAttestationBigint; + AttesterSlashing: phase0.AttesterSlashing; + AggregateAndProof: phase0.AggregateAndProof; + SignedAggregateAndProof: phase0.SignedAggregateAndProof; }; [ForkName.bellatrix]: { BeaconBlockHeader: phase0.BeaconBlockHeader; @@ -76,6 +98,12 @@ type TypesByFork = { SSEPayloadAttributes: bellatrix.SSEPayloadAttributes; SyncCommittee: altair.SyncCommittee; SyncAggregate: altair.SyncAggregate; + Attestation: phase0.Attestation; + IndexedAttestation: phase0.IndexedAttestation; + IndexedAttestationBigint: phase0.IndexedAttestationBigint; + AttesterSlashing: phase0.AttesterSlashing; + AggregateAndProof: phase0.AggregateAndProof; + SignedAggregateAndProof: phase0.SignedAggregateAndProof; }; [ForkName.capella]: { BeaconBlockHeader: phase0.BeaconBlockHeader; @@ -101,6 +129,12 @@ type TypesByFork = { SSEPayloadAttributes: capella.SSEPayloadAttributes; SyncCommittee: altair.SyncCommittee; SyncAggregate: altair.SyncAggregate; + Attestation: phase0.Attestation; + IndexedAttestation: phase0.IndexedAttestation; + IndexedAttestationBigint: phase0.IndexedAttestationBigint; + AttesterSlashing: phase0.AttesterSlashing; + AggregateAndProof: phase0.AggregateAndProof; + SignedAggregateAndProof: phase0.SignedAggregateAndProof; }; [ForkName.deneb]: { BeaconBlockHeader: phase0.BeaconBlockHeader; @@ -124,17 +158,56 @@ type TypesByFork = { BuilderBid: deneb.BuilderBid; SignedBuilderBid: deneb.SignedBuilderBid; SSEPayloadAttributes: deneb.SSEPayloadAttributes; - BlockContents: {block: BeaconBlock; kzgProofs: deneb.KZGProofs; blobs: deneb.Blobs}; - SignedBlockContents: { - signedBlock: SignedBeaconBlock; - kzgProofs: deneb.KZGProofs; - blobs: deneb.Blobs; - }; + BlockContents: deneb.BlockContents; + SignedBlockContents: deneb.SignedBlockContents; ExecutionPayloadAndBlobsBundle: deneb.ExecutionPayloadAndBlobsBundle; BlobsBundle: deneb.BlobsBundle; Contents: deneb.Contents; SyncCommittee: altair.SyncCommittee; SyncAggregate: 
altair.SyncAggregate; + Attestation: phase0.Attestation; + IndexedAttestation: phase0.IndexedAttestation; + IndexedAttestationBigint: phase0.IndexedAttestationBigint; + AttesterSlashing: phase0.AttesterSlashing; + AggregateAndProof: phase0.AggregateAndProof; + SignedAggregateAndProof: phase0.SignedAggregateAndProof; + }; + [ForkName.electra]: { + BeaconBlockHeader: phase0.BeaconBlockHeader; + SignedBeaconBlockHeader: phase0.SignedBeaconBlockHeader; + BeaconBlock: electra.BeaconBlock; + BeaconBlockBody: electra.BeaconBlockBody; + BeaconState: electra.BeaconState; + SignedBeaconBlock: electra.SignedBeaconBlock; + Metadata: altair.Metadata; + LightClientHeader: deneb.LightClientHeader; + LightClientBootstrap: electra.LightClientBootstrap; + LightClientUpdate: electra.LightClientUpdate; + LightClientFinalityUpdate: electra.LightClientFinalityUpdate; + LightClientOptimisticUpdate: electra.LightClientOptimisticUpdate; + LightClientStore: electra.LightClientStore; + BlindedBeaconBlock: electra.BlindedBeaconBlock; + BlindedBeaconBlockBody: electra.BlindedBeaconBlockBody; + SignedBlindedBeaconBlock: electra.SignedBlindedBeaconBlock; + ExecutionPayload: deneb.ExecutionPayload; + ExecutionPayloadHeader: deneb.ExecutionPayloadHeader; + BuilderBid: electra.BuilderBid; + SignedBuilderBid: electra.SignedBuilderBid; + SSEPayloadAttributes: electra.SSEPayloadAttributes; + BlockContents: electra.BlockContents; + SignedBlockContents: electra.SignedBlockContents; + ExecutionPayloadAndBlobsBundle: deneb.ExecutionPayloadAndBlobsBundle; + BlobsBundle: deneb.BlobsBundle; + Contents: deneb.Contents; + SyncCommittee: altair.SyncCommittee; + SyncAggregate: altair.SyncAggregate; + Attestation: electra.Attestation; + IndexedAttestation: electra.IndexedAttestation; + IndexedAttestationBigint: electra.IndexedAttestationBigint; + AttesterSlashing: electra.AttesterSlashing; + AggregateAndProof: electra.AggregateAndProof; + SignedAggregateAndProof: electra.SignedAggregateAndProof; + 
ExecutionRequests: electra.ExecutionRequests; }; }; @@ -169,6 +242,7 @@ export type SignedBeaconBlockOrContents = TypesByFork[F]["ExecutionPayload"]; export type ExecutionPayloadHeader = TypesByFork[F]["ExecutionPayloadHeader"]; +export type ExecutionRequests = TypesByFork[F]["ExecutionRequests"]; export type BlobsBundle = TypesByFork[F]["BlobsBundle"]; export type Contents = TypesByFork[F]["Contents"]; @@ -193,3 +267,10 @@ export type Metadata = TypesByFork[F]["Metadata"]; export type BuilderBid = TypesByFork[F]["BuilderBid"]; export type SignedBuilderBid = TypesByFork[F]["SignedBuilderBid"]; export type SSEPayloadAttributes = TypesByFork[F]["SSEPayloadAttributes"]; + +export type Attestation = TypesByFork[F]["Attestation"]; +export type IndexedAttestation = TypesByFork[F]["IndexedAttestation"]; +export type IndexedAttestationBigint = TypesByFork[F]["IndexedAttestationBigint"]; +export type AttesterSlashing = TypesByFork[F]["AttesterSlashing"]; +export type AggregateAndProof = TypesByFork[F]["AggregateAndProof"]; +export type SignedAggregateAndProof = TypesByFork[F]["SignedAggregateAndProof"]; diff --git a/packages/types/src/utils/typeguards.ts b/packages/types/src/utils/typeguards.ts index f006227e03c9..a892c3a0c9c0 100644 --- a/packages/types/src/utils/typeguards.ts +++ b/packages/types/src/utils/typeguards.ts @@ -1,4 +1,4 @@ -import {ForkBlobs, ForkExecution} from "@lodestar/params"; +import {FINALIZED_ROOT_DEPTH_ELECTRA, ForkBlobs, ForkExecution, ForkPostElectra} from "@lodestar/params"; import { BlockContents, SignedBeaconBlock, @@ -13,6 +13,8 @@ import { BlindedBeaconBlockBody, SignedBlockContents, BeaconBlock, + Attestation, + LightClientUpdate, } from "../types.js"; export function isExecutionPayload( @@ -66,3 +68,15 @@ export function isSignedBlockContents( ): data is SignedBlockContents { return (data as SignedBlockContents).kzgProofs !== undefined; } + +export function isElectraAttestation(attestation: Attestation): attestation is Attestation { + return 
(attestation as Attestation).committeeBits !== undefined; +} + +export function isElectraLightClientUpdate(update: LightClientUpdate): update is LightClientUpdate { + const updatePostElectra = update as LightClientUpdate; + return ( + updatePostElectra.finalityBranch !== undefined && + updatePostElectra.finalityBranch.length === FINALIZED_ROOT_DEPTH_ELECTRA + ); +} diff --git a/packages/types/src/utils/validatorStatus.ts b/packages/types/src/utils/validatorStatus.ts new file mode 100644 index 000000000000..e14a4b14c412 --- /dev/null +++ b/packages/types/src/utils/validatorStatus.ts @@ -0,0 +1,46 @@ +import {FAR_FUTURE_EPOCH} from "@lodestar/params"; +import {Epoch, phase0} from "../types.js"; + +export type ValidatorStatus = + | "active" + | "pending_initialized" + | "pending_queued" + | "active_ongoing" + | "active_exiting" + | "active_slashed" + | "exited_unslashed" + | "exited_slashed" + | "withdrawal_possible" + | "withdrawal_done"; + +/** + * Get the status of the validator + * based on conditions outlined in https://hackmd.io/ofFJ5gOmQpu1jjHilHbdQQ + */ +export function getValidatorStatus(validator: phase0.Validator, currentEpoch: Epoch): ValidatorStatus { + // pending + if (validator.activationEpoch > currentEpoch) { + if (validator.activationEligibilityEpoch === FAR_FUTURE_EPOCH) { + return "pending_initialized"; + } else if (validator.activationEligibilityEpoch < FAR_FUTURE_EPOCH) { + return "pending_queued"; + } + } + // active + if (validator.activationEpoch <= currentEpoch && currentEpoch < validator.exitEpoch) { + if (validator.exitEpoch === FAR_FUTURE_EPOCH) { + return "active_ongoing"; + } else if (validator.exitEpoch < FAR_FUTURE_EPOCH) { + return validator.slashed ? "active_slashed" : "active_exiting"; + } + } + // exited + if (validator.exitEpoch <= currentEpoch && currentEpoch < validator.withdrawableEpoch) { + return validator.slashed ? 
"exited_slashed" : "exited_unslashed"; + } + // withdrawal + if (validator.withdrawableEpoch <= currentEpoch) { + return validator.effectiveBalance !== 0 ? "withdrawal_possible" : "withdrawal_done"; + } + throw new Error("ValidatorStatus unknown"); +} diff --git a/packages/types/test/unit/blinded.test.ts b/packages/types/test/unit/blinded.test.ts new file mode 100644 index 000000000000..3a4b346d29bf --- /dev/null +++ b/packages/types/test/unit/blinded.test.ts @@ -0,0 +1,35 @@ +import {describe, it, expect} from "vitest"; +import {ForkName, isForkExecution} from "@lodestar/params"; +import {ssz} from "../../src/index.js"; + +describe("blinded data structures", function () { + it("should have the same number of fields as non-blinded", () => { + const blindedTypes = [ + {a: "BlindedBeaconBlockBody" as const, b: "BeaconBlockBody" as const}, + {a: "ExecutionPayloadHeader" as const, b: "ExecutionPayload" as const}, + ]; + + for (const {a, b} of blindedTypes) { + for (const fork of Object.keys(ssz.sszTypesFor) as ForkName[]) { + if (!isForkExecution(fork)) { + continue; + } + + const blindedType = ssz[fork][a]; + if (blindedType === undefined) { + expect.fail(`fork: ${fork}, type ${a} is undefined`); + } + + const type = ssz[fork][b]; + if (type === undefined) { + expect.fail(`fork: ${fork}, type ${b} is undefined`); + } + + expect(Object.keys(blindedType.fields).length).toBeWithMessage( + Object.keys(type.fields).length, + `fork: ${fork}, types ${a} and ${b} have different number of fields` + ); + } + } + }); +}); diff --git a/packages/types/test/unit/validatorStatus.test.ts b/packages/types/test/unit/validatorStatus.test.ts new file mode 100644 index 000000000000..8d04c0f98e3d --- /dev/null +++ b/packages/types/test/unit/validatorStatus.test.ts @@ -0,0 +1,100 @@ +import {describe, it, expect} from "vitest"; +import {getValidatorStatus} from "../../src/utils/validatorStatus.js"; +import {phase0} from "../../src/types.js"; + +describe("getValidatorStatus", function () { + 
it("should return PENDING_INITIALIZED", function () { + const validator = { + activationEpoch: 1, + activationEligibilityEpoch: Infinity, + } as phase0.Validator; + const currentEpoch = 0; + const status = getValidatorStatus(validator, currentEpoch); + expect(status).toBe("pending_initialized"); + }); + it("should return PENDING_QUEUED", function () { + const validator = { + activationEpoch: 1, + activationEligibilityEpoch: 101010101101010, + } as phase0.Validator; + const currentEpoch = 0; + const status = getValidatorStatus(validator, currentEpoch); + expect(status).toBe("pending_queued"); + }); + it("should return ACTIVE_ONGOING", function () { + const validator = { + activationEpoch: 1, + exitEpoch: Infinity, + } as phase0.Validator; + const currentEpoch = 1; + const status = getValidatorStatus(validator, currentEpoch); + expect(status).toBe("active_ongoing"); + }); + it("should return ACTIVE_SLASHED", function () { + const validator = { + activationEpoch: 1, + exitEpoch: 101010101101010, + slashed: true, + } as phase0.Validator; + const currentEpoch = 1; + const status = getValidatorStatus(validator, currentEpoch); + expect(status).toBe("active_slashed"); + }); + it("should return ACTIVE_EXITING", function () { + const validator = { + activationEpoch: 1, + exitEpoch: 101010101101010, + slashed: false, + } as phase0.Validator; + const currentEpoch = 1; + const status = getValidatorStatus(validator, currentEpoch); + expect(status).toBe("active_exiting"); + }); + it("should return EXITED_SLASHED", function () { + const validator = { + exitEpoch: 1, + withdrawableEpoch: 3, + slashed: true, + } as phase0.Validator; + const currentEpoch = 2; + const status = getValidatorStatus(validator, currentEpoch); + expect(status).toBe("exited_slashed"); + }); + it("should return EXITED_UNSLASHED", function () { + const validator = { + exitEpoch: 1, + withdrawableEpoch: 3, + slashed: false, + } as phase0.Validator; + const currentEpoch = 2; + const status = 
getValidatorStatus(validator, currentEpoch); + expect(status).toBe("exited_unslashed"); + }); + it("should return WITHDRAWAL_POSSIBLE", function () { + const validator = { + withdrawableEpoch: 1, + effectiveBalance: 32, + } as phase0.Validator; + const currentEpoch = 1; + const status = getValidatorStatus(validator, currentEpoch); + expect(status).toBe("withdrawal_possible"); + }); + it("should return WITHDRAWAL_DONE", function () { + const validator = { + withdrawableEpoch: 1, + effectiveBalance: 0, + } as phase0.Validator; + const currentEpoch = 1; + const status = getValidatorStatus(validator, currentEpoch); + expect(status).toBe("withdrawal_done"); + }); + it("should error", function () { + const validator = {} as phase0.Validator; + const currentEpoch = 0; + try { + getValidatorStatus(validator, currentEpoch); + } catch (error) { + expect(error).toHaveProperty("message", "ValidatorStatus unknown"); + } + }); +}); diff --git a/packages/utils/package.json b/packages/utils/package.json index e3ada3e2901b..0723f5f1815c 100644 --- a/packages/utils/package.json +++ b/packages/utils/package.json @@ -11,7 +11,7 @@ "bugs": { "url": "https://github.com/ChainSafe/lodestar/issues" }, - "version": "1.20.2", + "version": "1.22.0", "type": "module", "exports": "./lib/index.js", "files": [ @@ -39,7 +39,7 @@ }, "types": "lib/index.d.ts", "dependencies": { - "@chainsafe/as-sha256": "^0.4.1", + "@chainsafe/as-sha256": "^0.5.0", "any-signal": "3.0.1", "bigint-buffer": "^1.1.5", "case": "^1.6.3", diff --git a/packages/utils/src/assert.ts b/packages/utils/src/assert.ts index aa86161cca44..91612b0e6407 100644 --- a/packages/utils/src/assert.ts +++ b/packages/utils/src/assert.ts @@ -21,6 +21,18 @@ export const assert = { } }, + /** + * Assert not null + * ``` + * actual !== null + * ``` + */ + notNull(actual: T | null, message?: string): asserts actual is T { + if (!(actual !== null)) { + throw new AssertionError(`${message || "Expected value to be not null"}`); + } + }, + /** * 
Assert less than or equal * ```js diff --git a/packages/utils/src/bytes.ts b/packages/utils/src/bytes.ts index 5395c4315d22..95bb62ebd548 100644 --- a/packages/utils/src/bytes.ts +++ b/packages/utils/src/bytes.ts @@ -3,6 +3,9 @@ import {toBufferLE, toBigIntLE, toBufferBE, toBigIntBE} from "bigint-buffer"; type Endianness = "le" | "be"; const hexByByte: string[] = []; +/** + * @deprecated Use toHex() instead. + */ export function toHexString(bytes: Uint8Array): string { let hex = "0x"; for (const byte of bytes) { @@ -46,17 +49,22 @@ export function bytesToBigInt(value: Uint8Array, endianness: Endianness = "le"): throw new Error("endianness must be either 'le' or 'be'"); } -export function toHex(buffer: Uint8Array | Parameters[0]): string { - if (Buffer.isBuffer(buffer)) { - return "0x" + buffer.toString("hex"); - } else if (buffer instanceof Uint8Array) { - return "0x" + Buffer.from(buffer.buffer, buffer.byteOffset, buffer.length).toString("hex"); - } else { - return "0x" + Buffer.from(buffer).toString("hex"); +export function formatBytes(bytes: number): string { + if (bytes < 0) { + throw new Error("bytes must be a positive number, got " + bytes); } -} -export function fromHex(hex: string): Uint8Array { - const b = Buffer.from(hex.replace("0x", ""), "hex"); - return new Uint8Array(b.buffer, b.byteOffset, b.length); + if (bytes === 0) { + return "0 Bytes"; + } + + // size of a kb + const k = 1024; + + // only support up to GB + const units = ["Bytes", "KB", "MB", "GB"]; + const i = Math.min(Math.floor(Math.log(bytes) / Math.log(k)), units.length - 1); + const formattedSize = (bytes / Math.pow(k, i)).toFixed(2); + + return `${formattedSize} ${units[i]}`; } diff --git a/packages/utils/src/bytes/browser.ts b/packages/utils/src/bytes/browser.ts new file mode 100644 index 000000000000..f610e2912c04 --- /dev/null +++ b/packages/utils/src/bytes/browser.ts @@ -0,0 +1,87 @@ +// "0".charCodeAt(0) = 48 +const CHAR_CODE_0 = 48; +// "x".charCodeAt(0) = 120 +const CHAR_CODE_X = 
120; + +export function toHex(bytes: Uint8Array): string { + const charCodes = new Array(bytes.length * 2 + 2); + charCodes[0] = CHAR_CODE_0; + charCodes[1] = CHAR_CODE_X; + + bytesIntoCharCodes(bytes, charCodes); + return String.fromCharCode(...charCodes); +} + +const rootCharCodes = new Array(32 * 2 + 2); +rootCharCodes[0] = CHAR_CODE_0; +rootCharCodes[1] = CHAR_CODE_X; + +/** + * Convert a Uint8Array, length 32, to 0x-prefixed hex string + */ +export function toRootHex(root: Uint8Array): string { + if (root.length !== 32) { + throw Error(`Expect root to be 32 bytes, got ${root.length}`); + } + + bytesIntoCharCodes(root, rootCharCodes); + return String.fromCharCode(...rootCharCodes); +} + +const pubkeyCharCodes = new Array(48 * 2 + 2); +pubkeyCharCodes[0] = CHAR_CODE_0; +pubkeyCharCodes[1] = CHAR_CODE_X; + +/** + * Convert a Uint8Array, length 48, to 0x-prefixed hex string + */ +export function toPubkeyHex(pubkey: Uint8Array): string { + if (pubkey.length !== CHAR_CODE_0) { + throw Error(`Expect pubkey to be 48 bytes, got ${pubkey.length}`); + } + + bytesIntoCharCodes(pubkey, pubkeyCharCodes); + return String.fromCharCode(...pubkeyCharCodes); +} + +export function fromHex(hex: string): Uint8Array { + if (typeof hex !== "string") { + throw new Error(`hex argument type ${typeof hex} must be of type string`); + } + + if (hex.startsWith("0x")) { + hex = hex.slice(2); + } + + if (hex.length % 2 !== 0) { + throw new Error(`hex string length ${hex.length} must be multiple of 2`); + } + + const byteLen = hex.length / 2; + const bytes = new Uint8Array(byteLen); + for (let i = 0; i < byteLen; i++) { + const byte = parseInt(hex.slice(i * 2, (i + 1) * 2), 16); + bytes[i] = byte; + } + return bytes; +} + +/** + * Populate charCodes from bytes. Note that charCodes index 0 and 1 ("0x") are not populated. 
+ */ +function bytesIntoCharCodes(bytes: Uint8Array, charCodes: number[]): void { + if (bytes.length * 2 + 2 !== charCodes.length) { + throw Error(`Expect charCodes to be of length ${bytes.length * 2 + 2}, got ${charCodes.length}`); + } + + for (let i = 0; i < bytes.length; i++) { + const byte = bytes[i]; + const first = (byte & 0xf0) >> 4; + const second = byte & 0x0f; + + // "0".charCodeAt(0) = 48 + // "a".charCodeAt(0) = 97 => delta = 87 + charCodes[2 + 2 * i] = first < 10 ? first + 48 : first + 87; + charCodes[2 + 2 * i + 1] = second < 10 ? second + 48 : second + 87; + } +} diff --git a/packages/utils/src/bytes/index.ts b/packages/utils/src/bytes/index.ts new file mode 100644 index 000000000000..a079764738b7 --- /dev/null +++ b/packages/utils/src/bytes/index.ts @@ -0,0 +1,26 @@ +import { + toHex as browserToHex, + toRootHex as browserToRootHex, + fromHex as browserFromHex, + toPubkeyHex as browserToPubkeyHex, +} from "./browser.js"; +import { + toHex as nodeToHex, + toRootHex as nodeToRootHex, + fromHex as nodeFromHex, + toPubkeyHex as nodeToPubkeyHex, +} from "./nodejs.js"; + +let toHex = browserToHex; +let toRootHex = browserToRootHex; +let toPubkeyHex = browserToPubkeyHex; +let fromHex = browserFromHex; + +if (typeof Buffer !== "undefined") { + toHex = nodeToHex; + toRootHex = nodeToRootHex; + toPubkeyHex = nodeToPubkeyHex; + fromHex = nodeFromHex; +} + +export {toHex, toRootHex, toPubkeyHex, fromHex}; diff --git a/packages/utils/src/bytes/nodejs.ts b/packages/utils/src/bytes/nodejs.ts new file mode 100644 index 000000000000..4cf8e78c67c6 --- /dev/null +++ b/packages/utils/src/bytes/nodejs.ts @@ -0,0 +1,61 @@ +export function toHex(buffer: Uint8Array | Parameters[0]): string { + if (Buffer.isBuffer(buffer)) { + return "0x" + buffer.toString("hex"); + } else if (buffer instanceof Uint8Array) { + return "0x" + Buffer.from(buffer.buffer, buffer.byteOffset, buffer.length).toString("hex"); + } else { + return "0x" + Buffer.from(buffer).toString("hex"); + } +} + 
+// Shared buffer to convert root to hex +let rootBuf: Buffer | undefined; + +/** + * Convert a Uint8Array, length 32, to 0x-prefixed hex string + */ +export function toRootHex(root: Uint8Array): string { + if (root.length !== 32) { + throw Error(`Expect root to be 32 bytes, got ${root.length}`); + } + + if (rootBuf === undefined) { + rootBuf = Buffer.alloc(32); + } + + rootBuf.set(root); + return `0x${rootBuf.toString("hex")}`; +} + +// Shared buffer to convert pubkey to hex +let pubkeyBuf: Buffer | undefined; + +export function toPubkeyHex(pubkey: Uint8Array): string { + if (pubkey.length !== 48) { + throw Error(`Expect pubkey to be 48 bytes, got ${pubkey.length}`); + } + + if (pubkeyBuf === undefined) { + pubkeyBuf = Buffer.alloc(48); + } + + pubkeyBuf.set(pubkey); + return `0x${pubkeyBuf.toString("hex")}`; +} + +export function fromHex(hex: string): Uint8Array { + if (typeof hex !== "string") { + throw new Error(`hex argument type ${typeof hex} must be of type string`); + } + + if (hex.startsWith("0x")) { + hex = hex.slice(2); + } + + if (hex.length % 2 !== 0) { + throw new Error(`hex string length ${hex.length} must be multiple of 2`); + } + + const b = Buffer.from(hex, "hex"); + return new Uint8Array(b.buffer, b.byteOffset, b.length); +} diff --git a/packages/utils/src/diff.ts b/packages/utils/src/diff.ts new file mode 100644 index 000000000000..204989016b46 --- /dev/null +++ b/packages/utils/src/diff.ts @@ -0,0 +1,232 @@ +/* eslint-disable no-console */ +import fs from "node:fs"; + +const primitiveTypeof = ["number", "string", "bigint", "boolean"]; +export type BufferType = Uint8Array | Uint32Array; +export type PrimitiveType = number | string | bigint | boolean | BufferType; +export type DiffableCollection = Record; +export type Diffable = PrimitiveType | Array | DiffableCollection; + +export interface Diff { + objectPath: string; + errorMessage?: string; + val1: Diffable; + val2: Diffable; +} + +export function diffUint8Array(val1: Uint8Array, val2: 
PrimitiveType, objectPath: string): Diff[] { + if (!(val2 instanceof Uint8Array)) { + return [ + { + objectPath, + errorMessage: `val1${objectPath} is a Uint8Array, but val2${objectPath} is not`, + val1, + val2, + }, + ]; + } + const hex1 = Buffer.from(val1).toString("hex"); + const hex2 = Buffer.from(val2).toString("hex"); + if (hex1 !== hex2) { + return [ + { + objectPath, + val1: `0x${hex1}`, + val2: `0x${hex2}`, + }, + ]; + } + return []; +} + +export function diffUint32Array(val1: Uint32Array, val2: PrimitiveType, objectPath: string): Diff[] { + if (!(val2 instanceof Uint32Array)) { + return [ + { + objectPath, + errorMessage: `val1${objectPath} is a Uint32Array, but val2${objectPath} is not`, + val1, + val2, + }, + ]; + } + const diffs: Diff[] = []; + val1.forEach((value, index) => { + const value2 = val2[index]; + if (value !== value2) { + diffs.push({ + objectPath: `${objectPath}[${index}]`, + val1: `0x${value.toString(16).padStart(8, "0")}`, + val2: value2 ? `0x${val2[index].toString(16).padStart(8, "0")}` : "undefined", + }); + } + }); + return diffs; +} + +function diffPrimitiveValue(val1: PrimitiveType, val2: PrimitiveType, objectPath: string): Diff[] { + if (val1 instanceof Uint8Array) { + return diffUint8Array(val1, val2, objectPath); + } + if (val1 instanceof Uint32Array) { + return diffUint32Array(val1, val2, objectPath); + } + + const diff = {objectPath, val1, val2} as Diff; + const type1 = typeof val1; + if (!primitiveTypeof.includes(type1)) { + diff.errorMessage = `val1${objectPath} is not a supported type`; + } + const type2 = typeof val2; + if (!primitiveTypeof.includes(type2)) { + diff.errorMessage = `val2${objectPath} is not a supported type`; + } + if (type1 !== type2) { + diff.errorMessage = `val1${objectPath} is not the same type as val2${objectPath}`; + } + if (val1 !== val2) { + return [diff]; + } + return []; +} + +function isPrimitiveValue(val: unknown): val is PrimitiveType { + if (Array.isArray(val)) return false; + if (typeof val 
=== "object") { + return val instanceof Uint8Array || val instanceof Uint32Array; + } + return true; +} + +function isDiffable(val: unknown): val is Diffable { + return !(typeof val === "function" || typeof val === "symbol" || typeof val === "undefined" || val === null); +} + +export function getDiffs(val1: Diffable, val2: Diffable, objectPath: string): Diff[] { + if (isPrimitiveValue(val1)) { + if (!isPrimitiveValue(val2)) { + return [ + { + objectPath, + errorMessage: `val1${objectPath} is a primitive value and val2${objectPath} is not`, + val1, + val2, + }, + ]; + } + return diffPrimitiveValue(val1, val2, objectPath); + } + + const isArray = Array.isArray(val1); + let errorMessage: string | undefined; + if (isArray && !Array.isArray(val2)) { + errorMessage = `val1${objectPath} is an array and val2${objectPath} is not`; + } else if (typeof val1 === "object" && typeof val2 !== "object") { + errorMessage = `val1${objectPath} is a nested object and val2${objectPath} is not`; + } + if (errorMessage) { + return [ + { + objectPath, + errorMessage, + val1, + val2, + }, + ]; + } + + const diffs: Diff[] = []; + for (const [index, value] of Object.entries(val1)) { + if (!isDiffable(value)) { + diffs.push({objectPath, val1, val2, errorMessage: `val1${objectPath} is not Diffable`}); + continue; + } + const value2 = (val2 as DiffableCollection)[index]; + if (!isDiffable(value2)) { + diffs.push({objectPath, val1, val2, errorMessage: `val2${objectPath} is not Diffable`}); + continue; + } + const innerPath = isArray ? `${objectPath}[${index}]` : `${objectPath}.${index}`; + diffs.push(...getDiffs(value, value2, innerPath)); + } + return diffs; +} + +/** + * Find the different values on complex, nested objects. Outputs the path through the object to + * each value that does not match from val1 and val2. Optionally can output the values that differ. 
+ * + * For objects that differ greatly, can write to a file instead of the terminal for analysis + * + * ## Example + * ```ts + * const obj1 = { + * key1: { + * key2: [ + * { key3: 1 }, + * { key3: new Uint8Array([1, 2, 3]) } + * ] + * }, + * key4: new Uint32Array([1, 2, 3]), + * key5: 362436 + * }; + * + * const obj2 = { + * key1: { + * key2: [ + * { key3: 1 }, + * { key3: new Uint8Array([1, 2, 4]) } + * ] + * }, + * key4: new Uint32Array([1, 2, 4]) + * key5: true + * }; + * + * diffObjects(obj1, obj2, true); + * + * + * ``` + * + * ## Output + * ```sh + * val.key1.key2[1].key3 + * - 0x010203 + * - 0x010204 + * val.key4[2] + * - 0x00000003 + * - 0x00000004 + * val.key5 + * val1.key5 is not the same type as val2.key5 + * - 362436 + * - true + * ``` + */ +export function diff(val1: unknown, val2: unknown, outputValues = false, filename?: string): void { + if (!isDiffable(val1)) { + console.log("val1 is not Diffable"); + return; + } + if (!isDiffable(val2)) { + console.log("val2 is not Diffable"); + return; + } + const diffs = getDiffs(val1, val2, ""); + let output = ""; + if (diffs.length) { + diffs.forEach((diff) => { + let diffOutput = `value${diff.objectPath}`; + if (diff.errorMessage) { + diffOutput += `\n ${diff.errorMessage}`; + } + if (outputValues) { + diffOutput += `\n - ${diff.val1.toString()}\n - ${diff.val2.toString()}\n`; + } + output += `${diffOutput}\n`; + }); + if (filename) { + fs.writeFileSync(filename, output); + } else { + console.log(output); + } + } +} diff --git a/packages/utils/src/format.ts b/packages/utils/src/format.ts index 8bd8a40273f1..5567eb89cc68 100644 --- a/packages/utils/src/format.ts +++ b/packages/utils/src/format.ts @@ -1,4 +1,4 @@ -import {toHexString} from "./bytes.js"; +import {toRootHex} from "./bytes/index.js"; import {ETH_TO_WEI} from "./ethConversion.js"; /** @@ -6,7 +6,7 @@ import {ETH_TO_WEI} from "./ethConversion.js"; * 4 bytes can represent 4294967296 values, so the chance of collision is low */ export function 
prettyBytes(root: Uint8Array | string): string { - const str = typeof root === "string" ? root : toHexString(root); + const str = typeof root === "string" ? root : toRootHex(root); return `${str.slice(0, 6)}…${str.slice(-4)}`; } @@ -15,7 +15,7 @@ export function prettyBytes(root: Uint8Array | string): string { * Paired with block numbers or slots, it can still act as a decent identify-able format */ export function prettyBytesShort(root: Uint8Array | string): string { - const str = typeof root === "string" ? root : toHexString(root); + const str = typeof root === "string" ? root : toRootHex(root); return `${str.slice(0, 6)}…`; } @@ -25,7 +25,7 @@ export function prettyBytesShort(root: Uint8Array | string): string { * values on explorers like beaconcha.in while improving readability of logs */ export function truncBytes(root: Uint8Array | string): string { - const str = typeof root === "string" ? root : toHexString(root); + const str = typeof root === "string" ? root : toRootHex(root); return str.slice(0, 14); } diff --git a/packages/utils/src/index.ts b/packages/utils/src/index.ts index 2057e50e07bc..4e0be0c592a1 100644 --- a/packages/utils/src/index.ts +++ b/packages/utils/src/index.ts @@ -2,7 +2,9 @@ export * from "./yaml/index.js"; export * from "./assert.js"; export * from "./base64.js"; export * from "./bytes.js"; +export * from "./bytes/index.js"; export * from "./command.js"; +export * from "./diff.js"; export * from "./err.js"; export * from "./errors.js"; export * from "./format.js"; diff --git a/packages/utils/test/perf/bytes.test.ts b/packages/utils/test/perf/bytes.test.ts new file mode 100644 index 000000000000..649ea0b51e3e --- /dev/null +++ b/packages/utils/test/perf/bytes.test.ts @@ -0,0 +1,59 @@ +import {itBench} from "@dapplion/benchmark"; +import {toHex, toRootHex} from "../../src/bytes/nodejs.js"; +import {toHex as browserToHex, toRootHex as browserToRootHex} from "../../src/bytes/browser.js"; +import {toHexString} from "../../src/bytes.js"; + 
+describe("bytes utils", function () { + const runsFactor = 1000; + const blockRoot = new Uint8Array(Array.from({length: 32}, (_, i) => i)); + + itBench({ + id: "nodejs block root to RootHex using toHex", + fn: () => { + for (let i = 0; i < runsFactor; i++) { + toHex(blockRoot); + } + }, + runsFactor, + }); + + itBench({ + id: "nodejs block root to RootHex using toRootHex", + fn: () => { + for (let i = 0; i < runsFactor; i++) { + toRootHex(blockRoot); + } + }, + runsFactor, + }); + + itBench({ + id: "browser block root to RootHex using the deprecated toHexString", + fn: () => { + for (let i = 0; i < runsFactor; i++) { + toHexString(blockRoot); + } + }, + runsFactor, + }); + + itBench({ + id: "browser block root to RootHex using toHex", + fn: () => { + for (let i = 0; i < runsFactor; i++) { + browserToHex(blockRoot); + } + }, + runsFactor, + }); + + itBench({ + id: "browser block root to RootHex using toRootHex", + fn: () => { + for (let i = 0; i < runsFactor; i++) { + browserToRootHex(blockRoot); + } + }, + runsFactor, + }); +}); diff --git a/packages/utils/test/unit/assert.test.ts b/packages/utils/test/unit/assert.test.ts index 0555bcbd01a0..3b413efa11be 100644 --- a/packages/utils/test/unit/assert.test.ts +++ b/packages/utils/test/unit/assert.test.ts @@ -20,8 +20,18 @@ describe("assert", () => { }); }); + describe("notNull with custom message", () => { + it("Should not throw error with not null value", () => { + expect(() => assert.notNull(0)).not.toThrow(); + expect(() => assert.notNull("")).not.toThrow(); + }); + it("Should throw with null value", () => { + expect(() => assert.notNull(null, "something must not be null")).toThrow("something must not be null"); + }); + }); + const cases: { - op: keyof Omit; + op: keyof Omit; args: [number, number]; ok: boolean; }[] = [ diff --git a/packages/utils/test/unit/bytes.test.ts b/packages/utils/test/unit/bytes.test.ts index 8410e667187a..af4df6652f13 100644 --- a/packages/utils/test/unit/bytes.test.ts +++ 
b/packages/utils/test/unit/bytes.test.ts @@ -1,5 +1,14 @@ import {describe, it, expect} from "vitest"; -import {intToBytes, bytesToInt, toHex, fromHex, toHexString} from "../../src/index.js"; +import { + intToBytes, + bytesToInt, + toHex, + fromHex, + toHexString, + toRootHex, + toPubkeyHex, + formatBytes, +} from "../../src/index.js"; describe("intToBytes", () => { const zeroedArray = (length: number): number[] => Array.from({length}, () => 0); @@ -48,7 +57,7 @@ describe("bytesToInt", () => { }); describe("toHex", () => { - const testCases: {input: Buffer | Uint8Array | string; output: string}[] = [ + const testCases: {input: Uint8Array; output: string}[] = [ {input: Buffer.from("Hello, World!", "utf-8"), output: "0x48656c6c6f2c20576f726c6421"}, {input: new Uint8Array([72, 101, 108, 108, 111]), output: "0x48656c6c6f"}, {input: Buffer.from([72, 101, 108, 108, 111]), output: "0x48656c6c6f"}, @@ -61,6 +70,44 @@ describe("toHex", () => { } }); +describe("toRootHex", () => { + const testCases: {input: Uint8Array; output: string}[] = [ + { + input: new Uint8Array(Array.from({length: 32}, (_, i) => i)), + output: "0x000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f", + }, + { + input: new Uint8Array(Array.from({length: 32}, () => 0)), + output: "0x0000000000000000000000000000000000000000000000000000000000000000", + }, + ]; + + for (const {input, output} of testCases) { + it(`should convert root to hex string ${output}`, () => { + expect(toRootHex(input)).toBe(output); + }); + } +}); + +describe("toPubkeyHex", () => { + const testCases: {input: Uint8Array; output: string}[] = [ + { + input: new Uint8Array(Array.from({length: 48}, (_, i) => i)), + output: "0x000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f", + }, + { + input: new Uint8Array(Array.from({length: 48}, () => 0)), + output: "0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + }, + ]; + + for (const 
{input, output} of testCases) { + it(`should convert root to hex string ${output}`, () => { + expect(toPubkeyHex(input)).toBe(output); + }); + } +}); + describe("fromHex", () => { const testCases: {input: string; output: Buffer | Uint8Array}[] = [ { @@ -97,3 +144,24 @@ describe("toHexString", () => { }); } }); + +describe("formatBytes", () => { + const testCases: {input: number; output: string}[] = [ + {input: 0, output: "0 Bytes"}, + {input: 1, output: "1.00 Bytes"}, + {input: 1024, output: "1.00 KB"}, + {input: 1024 + 0.12 * 1024, output: "1.12 KB"}, + {input: 1024 * 1024, output: "1.00 MB"}, + {input: 1024 * 1024 + 0.12 * (1024 * 1024), output: "1.12 MB"}, + {input: 1024 * 1024 * 1024, output: "1.00 GB"}, + {input: 1024 * 1024 * 1024 + 0.12 * 1024 * 1024 * 1024, output: "1.12 GB"}, + // too big + {input: 1024 * 1024 * 1024 * 1024, output: "1024.00 GB"}, + ]; + + for (const {input, output} of testCases) { + it(`should format ${input} bytes as ${output}`, () => { + expect(formatBytes(input)).toBe(output); + }); + } +}); diff --git a/packages/utils/vitest.config.ts b/packages/utils/vitest.config.ts index 7a6069341168..b2b7cc82b0e2 100644 --- a/packages/utils/vitest.config.ts +++ b/packages/utils/vitest.config.ts @@ -6,6 +6,11 @@ export default mergeConfig( defineConfig({ test: { globalSetup: ["./test/globalSetup.ts"], + typecheck: { + // For some reason Vitest tries to run perf test files which causes an error + // as we use Mocha for those. This ignores all errors outside of test files. 
+ ignoreSourceErrors: true, + }, }, }) ); diff --git a/packages/validator/package.json b/packages/validator/package.json index abf5f8797d89..65ed656f010d 100644 --- a/packages/validator/package.json +++ b/packages/validator/package.json @@ -1,6 +1,6 @@ { "name": "@lodestar/validator", - "version": "1.20.2", + "version": "1.22.0", "description": "A Typescript implementation of the validator client", "author": "ChainSafe Systems", "license": "LGPL-3.0", @@ -46,18 +46,18 @@ ], "dependencies": { "@chainsafe/blst": "^2.0.3", - "@chainsafe/ssz": "^0.15.1", - "@lodestar/api": "^1.20.2", - "@lodestar/config": "^1.20.2", - "@lodestar/db": "^1.20.2", - "@lodestar/params": "^1.20.2", - "@lodestar/state-transition": "^1.20.2", - "@lodestar/types": "^1.20.2", - "@lodestar/utils": "^1.20.2", + "@chainsafe/ssz": "^0.17.1", + "@lodestar/api": "^1.22.0", + "@lodestar/config": "^1.22.0", + "@lodestar/db": "^1.22.0", + "@lodestar/params": "^1.22.0", + "@lodestar/state-transition": "^1.22.0", + "@lodestar/types": "^1.22.0", + "@lodestar/utils": "^1.22.0", "strict-event-emitter-types": "^2.0.0" }, "devDependencies": { - "@lodestar/test-utils": "^1.20.2", + "@lodestar/test-utils": "^1.22.0", "bigint-buffer": "^1.1.5", "rimraf": "^4.4.1" } diff --git a/packages/validator/src/metrics.ts b/packages/validator/src/metrics.ts index dc7d1a11ffac..a437328e8d5f 100644 --- a/packages/validator/src/metrics.ts +++ b/packages/validator/src/metrics.ts @@ -8,9 +8,7 @@ export enum MessageSource { export enum BeaconHealth { READY = 0, SYNCING = 1, - NOT_INITIALIZED_OR_ISSUES = 2, - UNKNOWN = 3, - ERROR = 4, + ERROR = 2, } export type Metrics = ReturnType; @@ -279,7 +277,7 @@ export function getMetrics(register: MetricsRegisterExtra, gitData: LodestarGitD beaconHealth: register.gauge({ name: "vc_beacon_health", - help: `Current health status of the beacon(s) the validator is connected too. ${renderEnumNumeric(BeaconHealth)}`, + help: `Current health status of the beacon(s) the validator is connected to. 
${renderEnumNumeric(BeaconHealth)}`, }), restApiClient: { diff --git a/packages/validator/src/services/attestation.ts b/packages/validator/src/services/attestation.ts index 57a8a7621a97..7f0dffa3e970 100644 --- a/packages/validator/src/services/attestation.ts +++ b/packages/validator/src/services/attestation.ts @@ -1,8 +1,9 @@ -import {toHexString} from "@chainsafe/ssz"; -import {BLSSignature, phase0, Slot, ssz} from "@lodestar/types"; +import {BLSSignature, phase0, Slot, ssz, Attestation, SignedAggregateAndProof} from "@lodestar/types"; +import {ForkSeq} from "@lodestar/params"; import {computeEpochAtSlot, isAggregatorFromCommitteeLength} from "@lodestar/state-transition"; -import {sleep} from "@lodestar/utils"; +import {prettyBytes, sleep, toRootHex} from "@lodestar/utils"; import {ApiClient, routes} from "@lodestar/api"; +import {ChainForkConfig} from "@lodestar/config"; import {IClock, LoggerVc} from "../util/index.js"; import {PubkeyHex} from "../types.js"; import {Metrics} from "../metrics.js"; @@ -10,6 +11,7 @@ import {ValidatorStore} from "./validatorStore.js"; import {AttestationDutiesService, AttDutyAndProof} from "./attestationDuties.js"; import {groupAttDutiesByCommitteeIndex} from "./utils.js"; import {ChainHeaderTracker} from "./chainHeaderTracker.js"; +import {SyncingStatusTracker} from "./syncingStatusTracker.js"; import {ValidatorEventEmitter} from "./emitter.js"; export type AttestationServiceOpts = { @@ -40,12 +42,23 @@ export class AttestationService { private readonly validatorStore: ValidatorStore, private readonly emitter: ValidatorEventEmitter, chainHeadTracker: ChainHeaderTracker, + syncingStatusTracker: SyncingStatusTracker, private readonly metrics: Metrics | null, + private readonly config: ChainForkConfig, private readonly opts?: AttestationServiceOpts ) { - this.dutiesService = new AttestationDutiesService(logger, api, clock, validatorStore, chainHeadTracker, metrics, { - distributedAggregationSelection: 
opts?.distributedAggregationSelection, - }); + this.dutiesService = new AttestationDutiesService( + logger, + api, + clock, + validatorStore, + chainHeadTracker, + syncingStatusTracker, + metrics, + { + distributedAggregationSelection: opts?.distributedAggregationSelection, + } + ); // At most every slot, check existing duties from AttestationDutiesService and run tasks clock.runEverySlot(this.runAttestationTasks); @@ -126,7 +139,7 @@ export class AttestationService { // Then download, sign and publish a `SignedAggregateAndProof` for each // validator that is elected to aggregate for this `slot` and `committeeIndex`. - await this.produceAndPublishAggregates(attestation, dutiesSameCommittee); + await this.produceAndPublishAggregates(attestation, index, dutiesSameCommittee); } private async runAttestationTasksGrouped( @@ -146,13 +159,14 @@ export class AttestationService { this.metrics?.attesterStepCallProduceAggregate.observe(this.clock.secFromSlot(slot + 2 / 3)); const dutiesByCommitteeIndex = groupAttDutiesByCommitteeIndex(dutiesAll); + const isPostElectra = this.config.getForkSeq(slot) >= ForkSeq.electra; // Then download, sign and publish a `SignedAggregateAndProof` for each // validator that is elected to aggregate for this `slot` and `committeeIndex`. await Promise.all( Array.from(dutiesByCommitteeIndex.entries()).map(([index, dutiesSameCommittee]) => { - const attestationData: phase0.AttestationData = {...attestationNoCommittee, index}; - return this.produceAndPublishAggregates(attestationData, dutiesSameCommittee); + const attestationData: phase0.AttestationData = {...attestationNoCommittee, index: isPostElectra ? 
0 : index}; + return this.produceAndPublishAggregates(attestationData, index, dutiesSameCommittee); }) ); } @@ -179,13 +193,14 @@ export class AttestationService { attestationNoCommittee: phase0.AttestationData, duties: AttDutyAndProof[] ): Promise { - const signedAttestations: phase0.Attestation[] = []; - const headRootHex = toHexString(attestationNoCommittee.beaconBlockRoot); + const signedAttestations: Attestation[] = []; + const headRootHex = toRootHex(attestationNoCommittee.beaconBlockRoot); const currentEpoch = computeEpochAtSlot(slot); + const isPostElectra = currentEpoch >= this.config.ELECTRA_FORK_EPOCH; await Promise.all( duties.map(async ({duty}) => { - const index = duty.committeeIndex; + const index = isPostElectra ? 0 : duty.committeeIndex; const attestationData: phase0.AttestationData = {...attestationNoCommittee, index}; const logCtxValidator = {slot, index, head: headRootHex, validatorIndex: duty.validatorIndex}; @@ -221,8 +236,16 @@ export class AttestationService { ...(this.opts?.disableAttestationGrouping && {index: attestationNoCommittee.index}), }; try { - (await this.api.beacon.submitPoolAttestations({signedAttestations})).assertOk(); - this.logger.info("Published attestations", {...logCtx, count: signedAttestations.length}); + if (isPostElectra) { + (await this.api.beacon.submitPoolAttestationsV2({signedAttestations})).assertOk(); + } else { + (await this.api.beacon.submitPoolAttestations({signedAttestations})).assertOk(); + } + this.logger.info("Published attestations", { + ...logCtx, + head: prettyBytes(headRootHex), + count: signedAttestations.length, + }); this.metrics?.publishedAttestations.inc(signedAttestations.length); } catch (e) { // Note: metric counts only 1 since we don't know how many signedAttestations are invalid @@ -243,9 +266,11 @@ export class AttestationService { */ private async produceAndPublishAggregates( attestation: phase0.AttestationData, + committeeIndex: number, duties: AttDutyAndProof[] ): Promise { - const 
logCtx = {slot: attestation.slot, index: attestation.index}; + const logCtx = {slot: attestation.slot, index: committeeIndex}; + const isPostElectra = this.config.getForkSeq(attestation.slot) >= ForkSeq.electra; // No validator is aggregator, skip if (duties.every(({selectionProof}) => selectionProof === null)) { @@ -253,14 +278,21 @@ export class AttestationService { } this.logger.verbose("Aggregating attestations", logCtx); - const res = await this.api.validator.getAggregatedAttestation({ - attestationDataRoot: ssz.phase0.AttestationData.hashTreeRoot(attestation), - slot: attestation.slot, - }); + const res = isPostElectra + ? await this.api.validator.getAggregatedAttestationV2({ + attestationDataRoot: ssz.phase0.AttestationData.hashTreeRoot(attestation), + slot: attestation.slot, + committeeIndex, + }) + : await this.api.validator.getAggregatedAttestation({ + attestationDataRoot: ssz.phase0.AttestationData.hashTreeRoot(attestation), + slot: attestation.slot, + }); const aggregate = res.value(); - this.metrics?.numParticipantsInAggregate.observe(aggregate.aggregationBits.getTrueBitIndexes().length); + const participants = aggregate.aggregationBits.getTrueBitIndexes().length; + this.metrics?.numParticipantsInAggregate.observe(participants); - const signedAggregateAndProofs: phase0.SignedAggregateAndProof[] = []; + const signedAggregateAndProofs: SignedAggregateAndProof[] = []; await Promise.all( duties.map(async ({duty, selectionProof}) => { @@ -283,8 +315,16 @@ export class AttestationService { if (signedAggregateAndProofs.length > 0) { try { - (await this.api.validator.publishAggregateAndProofs({signedAggregateAndProofs})).assertOk(); - this.logger.info("Published aggregateAndProofs", {...logCtx, count: signedAggregateAndProofs.length}); + if (isPostElectra) { + (await this.api.validator.publishAggregateAndProofsV2({signedAggregateAndProofs})).assertOk(); + } else { + (await this.api.validator.publishAggregateAndProofs({signedAggregateAndProofs})).assertOk(); + 
} + this.logger.info("Published aggregateAndProofs", { + ...logCtx, + participants, + count: signedAggregateAndProofs.length, + }); this.metrics?.publishedAggregates.inc(signedAggregateAndProofs.length); } catch (e) { this.logger.error("Error publishing aggregateAndProofs", logCtx, e as Error); diff --git a/packages/validator/src/services/attestationDuties.ts b/packages/validator/src/services/attestationDuties.ts index 1f278aebbd89..83838afe1492 100644 --- a/packages/validator/src/services/attestationDuties.ts +++ b/packages/validator/src/services/attestationDuties.ts @@ -1,7 +1,6 @@ -import {toHexString} from "@chainsafe/ssz"; import {SLOTS_PER_EPOCH} from "@lodestar/params"; -import {sleep} from "@lodestar/utils"; -import {computeEpochAtSlot, isAggregatorFromCommitteeLength} from "@lodestar/state-transition"; +import {sleep, toPubkeyHex} from "@lodestar/utils"; +import {computeEpochAtSlot, isAggregatorFromCommitteeLength, isStartSlotOfEpoch} from "@lodestar/state-transition"; import {BLSSignature, Epoch, Slot, ValidatorIndex, RootHex} from "@lodestar/types"; import {ApiClient, routes} from "@lodestar/api"; import {batchItems, IClock, LoggerVc} from "../util/index.js"; @@ -9,6 +8,7 @@ import {PubkeyHex} from "../types.js"; import {Metrics} from "../metrics.js"; import {ValidatorStore} from "./validatorStore.js"; import {ChainHeaderTracker, HeadEventData} from "./chainHeaderTracker.js"; +import {SyncingStatusTracker} from "./syncingStatusTracker.js"; /** Only retain `HISTORICAL_DUTIES_EPOCHS` duties prior to the current epoch. 
*/ const HISTORICAL_DUTIES_EPOCHS = 2; @@ -52,6 +52,7 @@ export class AttestationDutiesService { private clock: IClock, private readonly validatorStore: ValidatorStore, chainHeadTracker: ChainHeaderTracker, + syncingStatusTracker: SyncingStatusTracker, private readonly metrics: Metrics | null, private readonly opts?: AttestationDutiesServiceOpts ) { @@ -60,6 +61,12 @@ export class AttestationDutiesService { clock.runEveryEpoch(this.runDutiesTasks); clock.runEverySlot(this.prepareForNextEpoch); chainHeadTracker.runOnNewHead(this.onNewHead); + syncingStatusTracker.runOnResynced(async (slot) => { + // Skip on first slot of epoch since tasks are already scheduled + if (!isStartSlotOfEpoch(slot)) { + return this.runDutiesTasks(computeEpochAtSlot(slot)); + } + }); if (metrics) { metrics.attesterDutiesCount.addCollect(() => { @@ -89,7 +96,7 @@ export class AttestationDutiesService { removeDutiesForKey(pubkey: PubkeyHex): void { for (const [epoch, attDutiesAtEpoch] of this.dutiesByIndexByEpoch) { for (const [vIndex, attDutyAndProof] of attDutiesAtEpoch.dutiesByIndex) { - if (toHexString(attDutyAndProof.duty.pubkey) === pubkey) { + if (toPubkeyHex(attDutyAndProof.duty.pubkey) === pubkey) { attDutiesAtEpoch.dutiesByIndex.delete(vIndex); if (attDutiesAtEpoch.dutiesByIndex.size === 0) { this.dutiesByIndexByEpoch.delete(epoch); @@ -236,7 +243,7 @@ export class AttestationDutiesService { const attesterDuties = res.value(); const {dependentRoot} = res.meta(); const relevantDuties = attesterDuties.filter((duty) => { - const pubkeyHex = toHexString(duty.pubkey); + const pubkeyHex = toPubkeyHex(duty.pubkey); return this.validatorStore.hasVotingPubkey(pubkeyHex) && this.validatorStore.isDoppelgangerSafe(pubkeyHex); }); diff --git a/packages/validator/src/services/block.ts b/packages/validator/src/services/block.ts index a9dd7654a8fc..c3acf19c1669 100644 --- a/packages/validator/src/services/block.ts +++ b/packages/validator/src/services/block.ts @@ -1,4 +1,3 @@ -import {toHexString} 
from "@chainsafe/ssz"; import { BLSPubkey, Slot, @@ -15,7 +14,7 @@ import { } from "@lodestar/types"; import {ChainForkConfig} from "@lodestar/config"; import {ForkPreBlobs, ForkBlobs, ForkSeq, ForkExecution, ForkName} from "@lodestar/params"; -import {extendError, prettyBytes, prettyWeiToEth} from "@lodestar/utils"; +import {extendError, prettyBytes, prettyWeiToEth, toPubkeyHex} from "@lodestar/utils"; import {ApiClient, routes} from "@lodestar/api"; import {IClock, LoggerVc} from "../util/index.js"; import {PubkeyHex} from "../types.js"; @@ -110,7 +109,7 @@ export class BlockProposingService { /** Produce a block at the given slot for pubkey */ private async createAndPublishBlock(pubkey: BLSPubkey, slot: Slot): Promise { - const pubkeyHex = toHexString(pubkey); + const pubkeyHex = toPubkeyHex(pubkey); const logCtx = {slot, validator: prettyBytes(pubkeyHex)}; // Wrap with try catch here to re-use `logCtx` @@ -160,7 +159,7 @@ export class BlockProposingService { this.logger.debug("Produced block", {...debugLogCtx, ...blockContents.debugLogCtx}); this.metrics?.blocksProduced.inc(); - const signedBlock = await this.validatorStore.signBlock(pubkey, blockContents.block, slot); + const signedBlock = await this.validatorStore.signBlock(pubkey, blockContents.block, slot, this.logger); const {broadcastValidation} = this.opts; const publishOpts = {broadcastValidation}; diff --git a/packages/validator/src/services/blockDuties.ts b/packages/validator/src/services/blockDuties.ts index 3282987f5d9e..d0e16f60e816 100644 --- a/packages/validator/src/services/blockDuties.ts +++ b/packages/validator/src/services/blockDuties.ts @@ -1,8 +1,7 @@ -import {toHexString} from "@chainsafe/ssz"; import {computeEpochAtSlot, computeStartSlotAtEpoch} from "@lodestar/state-transition"; import {BLSPubkey, Epoch, RootHex, Slot} from "@lodestar/types"; import {ApiClient, routes} from "@lodestar/api"; -import {sleep} from "@lodestar/utils"; +import {sleep, toPubkeyHex} from "@lodestar/utils"; 
import {ChainConfig} from "@lodestar/config"; import {IClock, differenceHex, LoggerVc} from "../util/index.js"; import {PubkeyHex} from "../types.js"; @@ -67,7 +66,7 @@ export class BlockDutiesService { if (dutyAtEpoch) { for (const proposer of dutyAtEpoch.data) { if (proposer.slot === slot) { - publicKeys.set(toHexString(proposer.pubkey), proposer.pubkey); + publicKeys.set(toPubkeyHex(proposer.pubkey), proposer.pubkey); } } } @@ -78,7 +77,7 @@ export class BlockDutiesService { removeDutiesForKey(pubkey: PubkeyHex): void { for (const blockDutyAtEpoch of this.proposers.values()) { blockDutyAtEpoch.data = blockDutyAtEpoch.data.filter((proposer) => { - return toHexString(proposer.pubkey) !== pubkey; + return toPubkeyHex(proposer.pubkey) !== pubkey; }); } } @@ -187,7 +186,7 @@ export class BlockDutiesService { const proposerDuties = res.value(); const {dependentRoot} = res.meta(); const relevantDuties = proposerDuties.filter((duty) => { - const pubkeyHex = toHexString(duty.pubkey); + const pubkeyHex = toPubkeyHex(duty.pubkey); return this.validatorStore.hasVotingPubkey(pubkeyHex) && this.validatorStore.isDoppelgangerSafe(pubkeyHex); }); diff --git a/packages/validator/src/services/chainHeaderTracker.ts b/packages/validator/src/services/chainHeaderTracker.ts index 845743264d0b..1c0b0d9a56d8 100644 --- a/packages/validator/src/services/chainHeaderTracker.ts +++ b/packages/validator/src/services/chainHeaderTracker.ts @@ -1,6 +1,5 @@ -import {fromHexString} from "@chainsafe/ssz"; import {ApiClient, routes} from "@lodestar/api"; -import {Logger} from "@lodestar/utils"; +import {Logger, fromHex} from "@lodestar/utils"; import {Slot, Root, RootHex} from "@lodestar/types"; import {GENESIS_SLOT} from "@lodestar/params"; import {ValidatorEvent, ValidatorEventEmitter} from "./emitter.js"; @@ -64,7 +63,7 @@ export class ChainHeaderTracker { const {message} = event; const {slot, block, previousDutyDependentRoot, currentDutyDependentRoot} = message; this.headBlockSlot = slot; - 
this.headBlockRoot = fromHexString(block); + this.headBlockRoot = fromHex(block); const headEventData = { slot: this.headBlockSlot, diff --git a/packages/validator/src/services/doppelgangerService.ts b/packages/validator/src/services/doppelgangerService.ts index 5435c5aed37f..e167ee1d5028 100644 --- a/packages/validator/src/services/doppelgangerService.ts +++ b/packages/validator/src/services/doppelgangerService.ts @@ -1,7 +1,6 @@ -import {fromHexString} from "@chainsafe/ssz"; import {Epoch, ValidatorIndex} from "@lodestar/types"; import {ApiClient, routes} from "@lodestar/api"; -import {Logger, sleep, truncBytes} from "@lodestar/utils"; +import {Logger, fromHex, sleep, truncBytes} from "@lodestar/utils"; import {computeStartSlotAtEpoch} from "@lodestar/state-transition"; import {ISlashingProtection} from "../slashingProtection/index.js"; import {ProcessShutdownCallback, PubkeyHex} from "../types.js"; @@ -69,7 +68,7 @@ export class DoppelgangerService { if (remainingEpochs > 0) { const previousEpoch = currentEpoch - 1; const attestedInPreviousEpoch = await this.slashingProtection.hasAttestedInEpoch( - fromHexString(pubkeyHex), + fromHex(pubkeyHex), previousEpoch ); diff --git a/packages/validator/src/services/externalSignerSync.ts b/packages/validator/src/services/externalSignerSync.ts index 2f1880dda1bb..2f6828d9e09b 100644 --- a/packages/validator/src/services/externalSignerSync.ts +++ b/packages/validator/src/services/externalSignerSync.ts @@ -1,8 +1,7 @@ -import {fromHexString} from "@chainsafe/ssz"; import {PublicKey} from "@chainsafe/blst"; import {ChainForkConfig} from "@lodestar/config"; import {SLOTS_PER_EPOCH} from "@lodestar/params"; -import {toPrintableUrl} from "@lodestar/utils"; +import {fromHex, toPrintableUrl} from "@lodestar/utils"; import {LoggerVc} from "../util/index.js"; import {externalSignerGetKeys} from "../util/externalSignerClient.js"; @@ -77,7 +76,7 @@ export function pollExternalSignerPubkeys( function assertValidPubkeysHex(pubkeysHex: 
string[]): void { for (const pubkeyHex of pubkeysHex) { - const pubkeyBytes = fromHexString(pubkeyHex); + const pubkeyBytes = fromHex(pubkeyHex); PublicKey.fromBytes(pubkeyBytes, true); } } diff --git a/packages/validator/src/services/indices.ts b/packages/validator/src/services/indices.ts index c6ef40b473e5..c3f511669bf1 100644 --- a/packages/validator/src/services/indices.ts +++ b/packages/validator/src/services/indices.ts @@ -1,15 +1,16 @@ -import {toHexString} from "@chainsafe/ssz"; import {ValidatorIndex} from "@lodestar/types"; -import {Logger, MapDef} from "@lodestar/utils"; +import {Logger, MapDef, toPubkeyHex} from "@lodestar/utils"; import {ApiClient, routes} from "@lodestar/api"; import {batchItems} from "../util/index.js"; import {Metrics} from "../metrics.js"; /** - * URLs have a limitation on size, adding an unbounded num of pubkeys will break the request. - * For reasoning on the specific number see: https://github.com/ethereum/beacon-APIs/pull/328 + * This is to prevent the "Request body is too large" issue for http post. + * Typical servers accept up to 1MB (2 ** 20 bytes) of request body, for example fastify and nginx. + * A hex encoded public key with "0x"-prefix has a size of 98 bytes + 2 bytes to account for commas + * and other JSON padding. `Math.floor(2 ** 20 / 100) == 10485`, we can send up to ~10k keys per request. */ -const PUBKEYS_PER_REQUEST = 64; +const PUBKEYS_PER_REQUEST = 10_000; // To assist with readability type PubkeyHex = string; @@ -109,7 +110,7 @@ export class IndicesService { } // Query the remote BN to resolve a pubkey to a validator index. 
- // support up to 1000 pubkeys per poll + // support up to 10k pubkeys per poll const pubkeysHexBatches = batchItems(pubkeysHexToDiscover, {batchSize: PUBKEYS_PER_REQUEST}); const newIndices: number[] = []; @@ -124,7 +125,7 @@ export class IndicesService { } private async fetchValidatorIndices(pubkeysHex: string[]): Promise { - const validators = (await this.api.beacon.getStateValidators({stateId: "head", validatorIds: pubkeysHex})).value(); + const validators = (await this.api.beacon.postStateValidators({stateId: "head", validatorIds: pubkeysHex})).value(); const newIndices = []; @@ -135,7 +136,7 @@ export class IndicesService { const status = statusToSimpleStatusMapping(validator.status); allValidatorStatuses.set(status, allValidatorStatuses.getOrDefault(status) + 1); - const pubkeyHex = toHexString(validator.validator.pubkey); + const pubkeyHex = toPubkeyHex(validator.validator.pubkey); if (!this.pubkey2index.has(pubkeyHex)) { this.logger.info("Validator seen on beacon chain", { validatorIndex: validator.index, diff --git a/packages/validator/src/services/syncCommittee.ts b/packages/validator/src/services/syncCommittee.ts index 06926724141c..c960adc6986b 100644 --- a/packages/validator/src/services/syncCommittee.ts +++ b/packages/validator/src/services/syncCommittee.ts @@ -11,6 +11,7 @@ import {SyncCommitteeDutiesService, SyncDutyAndProofs} from "./syncCommitteeDuti import {groupSyncDutiesBySubcommitteeIndex, SubcommitteeDuty} from "./utils.js"; import {ChainHeaderTracker} from "./chainHeaderTracker.js"; import {ValidatorEventEmitter} from "./emitter.js"; +import {SyncingStatusTracker} from "./syncingStatusTracker.js"; export type SyncCommitteeServiceOpts = { scAfterBlockDelaySlotFraction?: number; @@ -31,12 +32,22 @@ export class SyncCommitteeService { private readonly validatorStore: ValidatorStore, private readonly emitter: ValidatorEventEmitter, private readonly chainHeaderTracker: ChainHeaderTracker, + readonly syncingStatusTracker: SyncingStatusTracker, 
private readonly metrics: Metrics | null, private readonly opts?: SyncCommitteeServiceOpts ) { - this.dutiesService = new SyncCommitteeDutiesService(config, logger, api, clock, validatorStore, metrics, { - distributedAggregationSelection: opts?.distributedAggregationSelection, - }); + this.dutiesService = new SyncCommitteeDutiesService( + config, + logger, + api, + clock, + validatorStore, + syncingStatusTracker, + metrics, + { + distributedAggregationSelection: opts?.distributedAggregationSelection, + } + ); // At most every slot, check existing duties from SyncCommitteeDutiesService and run tasks clock.runEverySlot(this.runSyncCommitteeTasks); diff --git a/packages/validator/src/services/syncCommitteeDuties.ts b/packages/validator/src/services/syncCommitteeDuties.ts index edc62dea575c..ea448add15ec 100644 --- a/packages/validator/src/services/syncCommitteeDuties.ts +++ b/packages/validator/src/services/syncCommitteeDuties.ts @@ -1,14 +1,21 @@ -import {toHexString} from "@chainsafe/ssz"; import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD, SYNC_COMMITTEE_SUBNET_SIZE} from "@lodestar/params"; -import {computeSyncPeriodAtEpoch, computeSyncPeriodAtSlot, isSyncCommitteeAggregator} from "@lodestar/state-transition"; +import { + computeEpochAtSlot, + computeSyncPeriodAtEpoch, + computeSyncPeriodAtSlot, + isStartSlotOfEpoch, + isSyncCommitteeAggregator, +} from "@lodestar/state-transition"; import {ChainForkConfig} from "@lodestar/config"; import {BLSSignature, Epoch, Slot, SyncPeriod, ValidatorIndex} from "@lodestar/types"; import {ApiClient, routes} from "@lodestar/api"; +import {toPubkeyHex} from "@lodestar/utils"; import {IClock, LoggerVc} from "../util/index.js"; import {PubkeyHex} from "../types.js"; import {Metrics} from "../metrics.js"; import {ValidatorStore} from "./validatorStore.js"; import {syncCommitteeIndicesToSubnets} from "./utils.js"; +import {SyncingStatusTracker} from "./syncingStatusTracker.js"; /** Only retain `HISTORICAL_DUTIES_PERIODS` duties prior to the 
current periods. */ const HISTORICAL_DUTIES_PERIODS = 2; @@ -80,12 +87,19 @@ export class SyncCommitteeDutiesService { private readonly api: ApiClient, clock: IClock, private readonly validatorStore: ValidatorStore, + syncingStatusTracker: SyncingStatusTracker, metrics: Metrics | null, private readonly opts?: SyncCommitteeDutiesServiceOpts ) { // Running this task every epoch is safe since a re-org of many epochs is very unlikely // TODO: If the re-org event is reliable consider re-running then clock.runEveryEpoch(this.runDutiesTasks); + syncingStatusTracker.runOnResynced(async (slot) => { + // Skip on first slot of epoch since tasks are already scheduled + if (!isStartSlotOfEpoch(slot)) { + return this.runDutiesTasks(computeEpochAtSlot(slot)); + } + }); if (metrics) { metrics.syncCommitteeDutiesCount.addCollect(() => { @@ -273,7 +287,7 @@ export class SyncCommitteeDutiesService { // Using `alreadyWarnedReorg` avoids excessive logs. // TODO: Use memory-efficient toHexString() - const pubkeyHex = toHexString(duty.pubkey); + const pubkeyHex = toPubkeyHex(duty.pubkey); dutiesByIndex.set(validatorIndex, {duty: {pubkey: pubkeyHex, validatorIndex, subnets}}); } diff --git a/packages/validator/src/services/syncingStatusTracker.ts b/packages/validator/src/services/syncingStatusTracker.ts new file mode 100644 index 000000000000..4c38e670092d --- /dev/null +++ b/packages/validator/src/services/syncingStatusTracker.ts @@ -0,0 +1,74 @@ +import {ApiClient, routes} from "@lodestar/api"; +import {Logger} from "@lodestar/utils"; +import {Slot} from "@lodestar/types"; +import {IClock} from "../util/clock.js"; +import {BeaconHealth, Metrics} from "../metrics.js"; + +export type SyncingStatus = routes.node.SyncingStatus; + +type RunOnResyncedFn = (slot: Slot, signal: AbortSignal) => Promise; + +/** + * Track the syncing status of connected beacon node(s) + */ +export class SyncingStatusTracker { + private prevSyncingStatus?: SyncingStatus | Error; + + private readonly fns: 
RunOnResyncedFn[] = []; + + constructor( + private readonly logger: Logger, + private readonly api: ApiClient, + private readonly clock: IClock, + private readonly metrics: Metrics | null + ) { + this.clock.runEverySlot(this.checkSyncingStatus); + } + + /** + * Run function when node status changes from syncing to synced + * + * Note: does not consider if execution client is offline or syncing and + * hence it is not useful to schedule tasks that require a non-optimistic node. + */ + runOnResynced(fn: RunOnResyncedFn): void { + this.fns.push(fn); + } + + private checkSyncingStatus = async (slot: Slot, signal: AbortSignal): Promise => { + try { + const syncingStatus = (await this.api.node.getSyncingStatus()).value(); + const {isSyncing, headSlot, syncDistance, isOptimistic, elOffline} = syncingStatus; + const prevErrorOrSyncing = this.prevSyncingStatus instanceof Error || this.prevSyncingStatus?.isSyncing === true; + + if (isSyncing === true) { + this.logger.warn("Node is syncing", {slot, headSlot, syncDistance}); + } else if (this.prevSyncingStatus === undefined || prevErrorOrSyncing) { + this.logger.info("Node is synced", {slot, headSlot, isOptimistic, elOffline}); + } + this.logger.verbose("Node syncing status", {slot, ...syncingStatus}); + + this.prevSyncingStatus = syncingStatus; + + this.metrics?.beaconHealth.set( + !isSyncing && !isOptimistic && !elOffline ? BeaconHealth.READY : BeaconHealth.SYNCING + ); + + if (prevErrorOrSyncing && isSyncing === false) { + await Promise.all( + this.fns.map((fn) => + fn(slot, signal).catch((e) => this.logger.error("Error calling resynced event handler", e)) + ) + ); + } + } catch (e) { + // Error likely due to node being offline. 
In any case, handle failure to + // check syncing status the same way as if node was previously syncing + this.prevSyncingStatus = e as Error; + + this.metrics?.beaconHealth.set(BeaconHealth.ERROR); + + this.logger.error("Failed to check syncing status", {slot}, this.prevSyncingStatus); + } + }; +} diff --git a/packages/validator/src/services/validatorStore.ts b/packages/validator/src/services/validatorStore.ts index 53299463ad2f..c2b18bfd7e09 100644 --- a/packages/validator/src/services/validatorStore.ts +++ b/packages/validator/src/services/validatorStore.ts @@ -1,4 +1,4 @@ -import {BitArray, fromHexString, toHexString} from "@chainsafe/ssz"; +import {BitArray} from "@chainsafe/ssz"; import {SecretKey} from "@chainsafe/blst"; import { computeEpochAtSlot, @@ -19,6 +19,8 @@ import { DOMAIN_SYNC_COMMITTEE, DOMAIN_SYNC_COMMITTEE_SELECTION_PROOF, DOMAIN_APPLICATION_BUILDER, + ForkSeq, + MAX_COMMITTEES_PER_SLOT, } from "@lodestar/params"; import { altair, @@ -35,8 +37,12 @@ import { Slot, ssz, ValidatorIndex, + Attestation, + AggregateAndProof, + SignedAggregateAndProof, } from "@lodestar/types"; import {routes} from "@lodestar/api"; +import {fromHex, toPubkeyHex, toRootHex} from "@lodestar/utils"; import {ISlashingProtection} from "../slashingProtection/index.js"; import {PubkeyHex} from "../types.js"; import {externalSignerPostSignature, SignableMessageType, SignableMessage} from "../util/externalSignerClient.js"; @@ -453,8 +459,8 @@ export class ValidatorStore { logger?.debug("Signing the block proposal", { slot: signingSlot, - blockRoot: toHexString(blockRoot), - signingRoot: toHexString(signingRoot), + blockRoot: toRootHex(blockRoot), + signingRoot: toRootHex(signingRoot), }); try { @@ -493,7 +499,7 @@ export class ValidatorStore { duty: routes.validator.AttesterDuty, attestationData: phase0.AttestationData, currentEpoch: Epoch - ): Promise { + ): Promise { // Make sure the target epoch is not higher than the current epoch to avoid potential attacks. 
if (attestationData.target.epoch > currentEpoch) { throw Error( @@ -525,21 +531,30 @@ export class ValidatorStore { data: attestationData, }; - return { - aggregationBits: BitArray.fromSingleBit(duty.committeeLength, duty.validatorCommitteeIndex), - data: attestationData, - signature: await this.getSignature(duty.pubkey, signingRoot, signingSlot, signableMessage), - }; + if (this.config.getForkSeq(signingSlot) >= ForkSeq.electra) { + return { + aggregationBits: BitArray.fromSingleBit(duty.committeeLength, duty.validatorCommitteeIndex), + data: attestationData, + signature: await this.getSignature(duty.pubkey, signingRoot, signingSlot, signableMessage), + committeeBits: BitArray.fromSingleBit(MAX_COMMITTEES_PER_SLOT, duty.committeeIndex), + }; + } else { + return { + aggregationBits: BitArray.fromSingleBit(duty.committeeLength, duty.validatorCommitteeIndex), + data: attestationData, + signature: await this.getSignature(duty.pubkey, signingRoot, signingSlot, signableMessage), + } as phase0.Attestation; + } } async signAggregateAndProof( duty: routes.validator.AttesterDuty, selectionProof: BLSSignature, - aggregate: phase0.Attestation - ): Promise { + aggregate: Attestation + ): Promise { this.validateAttestationDuty(duty, aggregate.data); - const aggregateAndProof: phase0.AggregateAndProof = { + const aggregateAndProof: AggregateAndProof = { aggregate, aggregatorIndex: duty.validatorIndex, selectionProof, @@ -547,10 +562,13 @@ export class ValidatorStore { const signingSlot = aggregate.data.slot; const domain = this.config.getDomain(signingSlot, DOMAIN_AGGREGATE_AND_PROOF); - const signingRoot = computeSigningRoot(ssz.phase0.AggregateAndProof, aggregateAndProof, domain); + const isPostElectra = this.config.getForkSeq(signingSlot) >= ForkSeq.electra; + const signingRoot = isPostElectra + ? 
computeSigningRoot(ssz.electra.AggregateAndProof, aggregateAndProof, domain) + : computeSigningRoot(ssz.phase0.AggregateAndProof, aggregateAndProof, domain); const signableMessage: SignableMessage = { - type: SignableMessageType.AGGREGATE_AND_PROOF, + type: isPostElectra ? SignableMessageType.AGGREGATE_AND_PROOF_V2 : SignableMessageType.AGGREGATE_AND_PROOF, data: aggregateAndProof, }; @@ -675,8 +693,8 @@ export class ValidatorStore { regAttributes: {feeRecipient: Eth1Address; gasLimit: number}, _slot: Slot ): Promise { - const pubkey = typeof pubkeyMaybeHex === "string" ? fromHexString(pubkeyMaybeHex) : pubkeyMaybeHex; - const feeRecipient = fromHexString(regAttributes.feeRecipient); + const pubkey = typeof pubkeyMaybeHex === "string" ? fromHex(pubkeyMaybeHex) : pubkeyMaybeHex; + const feeRecipient = fromHex(regAttributes.feeRecipient); const {gasLimit} = regAttributes; const validatorRegistration: bellatrix.ValidatorRegistrationV1 = { @@ -706,7 +724,7 @@ export class ValidatorStore { regAttributes: {feeRecipient: Eth1Address; gasLimit: number}, slot: Slot ): Promise { - const pubkeyHex = typeof pubkeyMaybeHex === "string" ? pubkeyMaybeHex : toHexString(pubkeyMaybeHex); + const pubkeyHex = typeof pubkeyMaybeHex === "string" ? pubkeyMaybeHex : toPubkeyHex(pubkeyMaybeHex); const {feeRecipient, gasLimit} = regAttributes; const regFullKey = `${feeRecipient}-${gasLimit}`; const validatorData = this.validators.get(pubkeyHex); @@ -730,8 +748,8 @@ export class ValidatorStore { signingSlot: Slot, signableMessage: SignableMessage ): Promise { - // TODO: Refactor indexing to not have to run toHexString() on the pubkey every time - const pubkeyHex = typeof pubkey === "string" ? pubkey : toHexString(pubkey); + // TODO: Refactor indexing to not have to run toHex() on the pubkey every time + const pubkeyHex = typeof pubkey === "string" ? 
pubkey : toPubkeyHex(pubkey); const signer = this.validators.get(pubkeyHex)?.signer; if (!signer) { @@ -757,7 +775,7 @@ export class ValidatorStore { signingSlot, signableMessage ); - return fromHexString(signatureHex); + return fromHex(signatureHex); } catch (e) { this.metrics?.remoteSignErrors.inc(); throw e; @@ -769,8 +787,8 @@ export class ValidatorStore { } private getSignerAndPubkeyHex(pubkey: BLSPubkeyMaybeHex): [Signer, string] { - // TODO: Refactor indexing to not have to run toHexString() on the pubkey every time - const pubkeyHex = typeof pubkey === "string" ? pubkey : toHexString(pubkey); + // TODO: Refactor indexing to not have to run toHex() on the pubkey every time + const pubkeyHex = typeof pubkey === "string" ? pubkey : toPubkeyHex(pubkey); const signer = this.validators.get(pubkeyHex)?.signer; if (!signer) { throw Error(`Validator pubkey ${pubkeyHex} not known`); @@ -783,15 +801,20 @@ export class ValidatorStore { if (duty.slot !== data.slot) { throw Error(`Inconsistent duties during signing: duty.slot ${duty.slot} != att.slot ${data.slot}`); } - if (duty.committeeIndex != data.index) { + + const isPostElectra = this.config.getForkSeq(data.slot) >= ForkSeq.electra; + if (!isPostElectra && duty.committeeIndex != data.index) { throw Error( `Inconsistent duties during signing: duty.committeeIndex ${duty.committeeIndex} != att.committeeIndex ${data.index}` ); } + if (isPostElectra && data.index !== 0) { + throw Error(`Non-zero committee index post-electra during signing: att.committeeIndex ${data.index}`); + } } private assertDoppelgangerSafe(pubKey: PubkeyHex | BLSPubkey): void { - const pubkeyHex = typeof pubKey === "string" ? pubKey : toHexString(pubKey); + const pubkeyHex = typeof pubKey === "string" ? 
pubKey : toPubkeyHex(pubKey); if (!this.isDoppelgangerSafe(pubkeyHex)) { throw new Error(`Doppelganger state for key ${pubkeyHex} is not safe`); } @@ -801,7 +824,7 @@ export class ValidatorStore { function getSignerPubkeyHex(signer: Signer): PubkeyHex { switch (signer.type) { case SignerType.Local: - return toHexString(signer.secretKey.toPublicKey().toBytes()); + return toPubkeyHex(signer.secretKey.toPublicKey().toBytes()); case SignerType.Remote: if (!isValidatePubkeyHex(signer.pubkey)) { diff --git a/packages/validator/src/slashingProtection/index.ts b/packages/validator/src/slashingProtection/index.ts index dedbccf6cf94..bc57b0e51c13 100644 --- a/packages/validator/src/slashingProtection/index.ts +++ b/packages/validator/src/slashingProtection/index.ts @@ -1,6 +1,5 @@ -import {toHexString} from "@chainsafe/ssz"; import {BLSPubkey, Epoch, Root} from "@lodestar/types"; -import {Logger} from "@lodestar/utils"; +import {Logger, toPubkeyHex} from "@lodestar/utils"; import {LodestarValidatorDatabaseController} from "../types.js"; import {uniqueVectorArr} from "../slashingProtection/utils.js"; import {BlockBySlotRepository, SlashingProtectionBlockService} from "./block/index.js"; @@ -63,7 +62,7 @@ export class SlashingProtection implements ISlashingProtection { async importInterchange(interchange: Interchange, genesisValidatorsRoot: Root, logger?: Logger): Promise { const {data} = parseInterchange(interchange, genesisValidatorsRoot); for (const validator of data) { - logger?.info("Importing slashing protection", {pubkey: toHexString(validator.pubkey)}); + logger?.info("Importing slashing protection", {pubkey: toPubkeyHex(validator.pubkey)}); await this.blockService.importBlocks(validator.pubkey, validator.signedBlocks); await this.attestationService.importAttestations(validator.pubkey, validator.signedAttestations); } @@ -77,7 +76,7 @@ export class SlashingProtection implements ISlashingProtection { ): Promise { const validatorData: InterchangeLodestar["data"] = []; 
for (const pubkey of pubkeys) { - logger?.info("Exporting slashing protection", {pubkey: toHexString(pubkey)}); + logger?.info("Exporting slashing protection", {pubkey: toPubkeyHex(pubkey)}); validatorData.push({ pubkey, signedBlocks: await this.blockService.exportBlocks(pubkey), diff --git a/packages/validator/src/slashingProtection/interchange/formats/completeV4.ts b/packages/validator/src/slashingProtection/interchange/formats/completeV4.ts index 26d7f44f2e83..b43ced6c80d3 100644 --- a/packages/validator/src/slashingProtection/interchange/formats/completeV4.ts +++ b/packages/validator/src/slashingProtection/interchange/formats/completeV4.ts @@ -1,5 +1,5 @@ /* eslint-disable @typescript-eslint/naming-convention */ -import {fromHexString, toHexString} from "@chainsafe/ssz"; +import {fromHex, toPubkeyHex, toRootHex} from "@lodestar/utils"; import {InterchangeLodestar} from "../types.js"; import {fromOptionalHexString, numToString, toOptionalHexString} from "../../utils.js"; @@ -90,10 +90,10 @@ export function serializeInterchangeCompleteV4({ metadata: { interchange_format: "complete", interchange_format_version: "4", - genesis_validators_root: toHexString(genesisValidatorsRoot), + genesis_validators_root: toRootHex(genesisValidatorsRoot), }, data: data.map((validator) => ({ - pubkey: toHexString(validator.pubkey), + pubkey: toPubkeyHex(validator.pubkey), signed_blocks: validator.signedBlocks.map((block) => ({ slot: numToString(block.slot), signing_root: toOptionalHexString(block.signingRoot), @@ -109,9 +109,9 @@ export function serializeInterchangeCompleteV4({ export function parseInterchangeCompleteV4(interchange: InterchangeCompleteV4): InterchangeLodestar { return { - genesisValidatorsRoot: fromHexString(interchange.metadata.genesis_validators_root), + genesisValidatorsRoot: fromHex(interchange.metadata.genesis_validators_root), data: interchange.data.map((validator) => ({ - pubkey: fromHexString(validator.pubkey), + pubkey: fromHex(validator.pubkey), 
signedBlocks: validator.signed_blocks.map((block) => ({ slot: parseInt(block.slot, 10), signingRoot: fromOptionalHexString(block.signing_root), diff --git a/packages/validator/src/slashingProtection/interchange/formats/v5.ts b/packages/validator/src/slashingProtection/interchange/formats/v5.ts index c70dc84b1ed0..838ba76c1a57 100644 --- a/packages/validator/src/slashingProtection/interchange/formats/v5.ts +++ b/packages/validator/src/slashingProtection/interchange/formats/v5.ts @@ -1,5 +1,5 @@ /* eslint-disable @typescript-eslint/naming-convention */ -import {fromHexString, toHexString} from "@chainsafe/ssz"; +import {fromHex, toPubkeyHex, toRootHex} from "@lodestar/utils"; import {InterchangeLodestar} from "../types.js"; import {fromOptionalHexString, numToString, toOptionalHexString} from "../../utils.js"; @@ -85,10 +85,10 @@ export function serializeInterchangeV5({data, genesisValidatorsRoot}: Interchang return { metadata: { interchange_format_version: "5", - genesis_validators_root: toHexString(genesisValidatorsRoot), + genesis_validators_root: toRootHex(genesisValidatorsRoot), }, data: data.map((validator) => ({ - pubkey: toHexString(validator.pubkey), + pubkey: toPubkeyHex(validator.pubkey), signed_blocks: validator.signedBlocks.map((block) => ({ slot: numToString(block.slot), signing_root: toOptionalHexString(block.signingRoot), @@ -104,9 +104,9 @@ export function serializeInterchangeV5({data, genesisValidatorsRoot}: Interchang export function parseInterchangeV5(interchange: InterchangeV5): InterchangeLodestar { return { - genesisValidatorsRoot: fromHexString(interchange.metadata.genesis_validators_root), + genesisValidatorsRoot: fromHex(interchange.metadata.genesis_validators_root), data: interchange.data.map((validator) => ({ - pubkey: fromHexString(validator.pubkey), + pubkey: fromHex(validator.pubkey), signedBlocks: validator.signed_blocks.map((block) => ({ slot: parseInt(block.slot, 10), signingRoot: fromOptionalHexString(block.signing_root), diff --git 
a/packages/validator/src/slashingProtection/utils.ts b/packages/validator/src/slashingProtection/utils.ts index f1fc0f24b9ae..adc93bdafee5 100644 --- a/packages/validator/src/slashingProtection/utils.ts +++ b/packages/validator/src/slashingProtection/utils.ts @@ -1,5 +1,5 @@ -import {fromHexString, toHexString} from "@chainsafe/ssz"; import {Epoch, Root, ssz} from "@lodestar/types"; +import {fromHex, toHex, toRootHex} from "@lodestar/utils"; export const blsPubkeyLen = 48; export const ZERO_ROOT = ssz.Root.defaultValue(); @@ -13,11 +13,11 @@ export function isEqualNonZeroRoot(root1: Root, root2: Root): boolean { } export function fromOptionalHexString(hex: string | undefined): Root { - return hex ? fromHexString(hex) : ZERO_ROOT; + return hex ? fromHex(hex) : ZERO_ROOT; } export function toOptionalHexString(root: Root): string | undefined { - return isEqualRoot(root, ZERO_ROOT) ? undefined : toHexString(root); + return isEqualRoot(root, ZERO_ROOT) ? undefined : toRootHex(root); } /** @@ -34,7 +34,7 @@ export function minEpoch(epochs: Epoch[]): Epoch | null { export function uniqueVectorArr(buffers: Uint8Array[]): Uint8Array[] { const bufferStr = new Set(); return buffers.filter((buffer) => { - const str = toHexString(buffer); + const str = toHex(buffer); const seen = bufferStr.has(str); bufferStr.add(str); return !seen; diff --git a/packages/validator/src/util/difference.ts b/packages/validator/src/util/difference.ts index bafc6ff8f42f..b8c3c5e7b089 100644 --- a/packages/validator/src/util/difference.ts +++ b/packages/validator/src/util/difference.ts @@ -1,10 +1,10 @@ -import {toHexString} from "@chainsafe/ssz"; import {Root} from "@lodestar/types"; +import {toHex} from "@lodestar/utils"; /** * Return items included in `next` but not in `prev` */ export function differenceHex(prev: T[], next: T[]): T[] { - const existing = new Set(prev.map((item) => toHexString(item))); - return next.filter((item) => !existing.has(toHexString(item))); + const existing = new 
Set(prev.map((item) => toHex(item))); + return next.filter((item) => !existing.has(toHex(item))); } diff --git a/packages/validator/src/util/externalSignerClient.ts b/packages/validator/src/util/externalSignerClient.ts index dc1d0d0f1dd5..1d9778375b88 100644 --- a/packages/validator/src/util/externalSignerClient.ts +++ b/packages/validator/src/util/externalSignerClient.ts @@ -1,11 +1,24 @@ -import {ContainerType, toHexString, ValueOf} from "@chainsafe/ssz"; +import {ContainerType, ValueOf} from "@chainsafe/ssz"; import {fetch} from "@lodestar/api"; -import {phase0, altair, capella, BeaconBlock, BlindedBeaconBlock} from "@lodestar/types"; -import {ForkSeq} from "@lodestar/params"; +import { + phase0, + altair, + capella, + BeaconBlock, + BlindedBeaconBlock, + AggregateAndProof, + sszTypesFor, + ssz, + Slot, + Epoch, + RootHex, + Root, +} from "@lodestar/types"; +import {ForkPreExecution, ForkSeq} from "@lodestar/params"; import {ValidatorRegistrationV1} from "@lodestar/types/bellatrix"; import {BeaconConfig} from "@lodestar/config"; import {computeEpochAtSlot, blindedOrFullBlockToHeader} from "@lodestar/state-transition"; -import {Epoch, Root, RootHex, Slot, ssz} from "@lodestar/types"; +import {toHex, toRootHex} from "@lodestar/utils"; import {PubkeyHex} from "../types.js"; /* eslint-disable @typescript-eslint/naming-convention */ @@ -13,6 +26,7 @@ import {PubkeyHex} from "../types.js"; export enum SignableMessageType { AGGREGATION_SLOT = "AGGREGATION_SLOT", AGGREGATE_AND_PROOF = "AGGREGATE_AND_PROOF", + AGGREGATE_AND_PROOF_V2 = "AGGREGATE_AND_PROOF_V2", ATTESTATION = "ATTESTATION", BLOCK_V2 = "BLOCK_V2", DEPOSIT = "DEPOSIT", @@ -62,8 +76,9 @@ const SyncAggregatorSelectionDataType = new ContainerType( export type SignableMessage = | {type: SignableMessageType.AGGREGATION_SLOT; data: {slot: Slot}} | {type: SignableMessageType.AGGREGATE_AND_PROOF; data: phase0.AggregateAndProof} + | {type: SignableMessageType.AGGREGATE_AND_PROOF_V2; data: AggregateAndProof} | {type: 
SignableMessageType.ATTESTATION; data: phase0.AttestationData} - | {type: SignableMessageType.BLOCK_V2; data: BeaconBlock | BlindedBeaconBlock} + | {type: SignableMessageType.BLOCK_V2; data: BeaconBlock | BlindedBeaconBlock} | {type: SignableMessageType.DEPOSIT; data: ValueOf} | {type: SignableMessageType.RANDAO_REVEAL; data: {epoch: Epoch}} | {type: SignableMessageType.VOLUNTARY_EXIT; data: phase0.VoluntaryExit} @@ -76,6 +91,7 @@ export type SignableMessage = const requiresForkInfo: Record = { [SignableMessageType.AGGREGATION_SLOT]: true, [SignableMessageType.AGGREGATE_AND_PROOF]: true, + [SignableMessageType.AGGREGATE_AND_PROOF_V2]: true, [SignableMessageType.ATTESTATION]: true, [SignableMessageType.BLOCK_V2]: true, [SignableMessageType.DEPOSIT]: false, @@ -131,17 +147,17 @@ export async function externalSignerPostSignature( const requestObj = serializerSignableMessagePayload(config, signableMessage) as Web3SignerSerializedRequest; requestObj.type = signableMessage.type; - requestObj.signingRoot = toHexString(signingRoot); + requestObj.signingRoot = toRootHex(signingRoot); if (requiresForkInfo[signableMessage.type]) { const forkInfo = config.getForkInfo(signingSlot); requestObj.fork_info = { fork: { - previous_version: toHexString(forkInfo.prevVersion), - current_version: toHexString(forkInfo.version), + previous_version: toHex(forkInfo.prevVersion), + current_version: toHex(forkInfo.version), epoch: String(computeEpochAtSlot(signingSlot)), }, - genesis_validators_root: toHexString(config.genesisValidatorsRoot), + genesis_validators_root: toRootHex(config.genesisValidatorsRoot), }; } @@ -202,6 +218,16 @@ function serializerSignableMessagePayload(config: BeaconConfig, payload: Signabl case SignableMessageType.AGGREGATE_AND_PROOF: return {aggregate_and_proof: ssz.phase0.AggregateAndProof.toJson(payload.data)}; + case SignableMessageType.AGGREGATE_AND_PROOF_V2: { + const fork = config.getForkName(payload.data.aggregate.data.slot); + return { + aggregate_and_proof: { 
+ version: fork.toUpperCase(), + data: sszTypesFor(fork).AggregateAndProof.toJson(payload.data), + }, + }; + } + case SignableMessageType.ATTESTATION: return {attestation: ssz.phase0.AttestationData.toJson(payload.data)}; diff --git a/packages/validator/src/util/params.ts b/packages/validator/src/util/params.ts index 8ccaf9fe75ba..6d6705f512df 100644 --- a/packages/validator/src/util/params.ts +++ b/packages/validator/src/util/params.ts @@ -73,6 +73,7 @@ function getSpecCriticalParams(localConfig: ChainConfig): Record - this.fetchBeaconHealth() - .then((health) => metrics.beaconHealth.set(health)) - .catch((e) => this.logger.error("Error on fetchBeaconHealth", {}, e)) - ); } // "start" the validator @@ -225,6 +223,7 @@ export class Validator { emitter.setMaxListeners(Infinity); const chainHeaderTracker = new ChainHeaderTracker(logger, api, emitter); + const syncingStatusTracker = new SyncingStatusTracker(logger, api, clock, metrics); const blockProposingService = new BlockProposingService(config, loggerVc, api, clock, validatorStore, metrics, { useProduceBlockV3: opts.useProduceBlockV3, @@ -239,7 +238,9 @@ export class Validator { validatorStore, emitter, chainHeaderTracker, + syncingStatusTracker, metrics, + config, { afterBlockDelaySlotFraction: opts.afterBlockDelaySlotFraction, disableAttestationGrouping: opts.disableAttestationGrouping || opts.distributed, @@ -255,6 +256,7 @@ export class Validator { validatorStore, emitter, chainHeaderTracker, + syncingStatusTracker, metrics, { scAfterBlockDelaySlotFraction: opts.scAfterBlockDelaySlotFraction, @@ -274,6 +276,7 @@ export class Validator { api, clock, chainHeaderTracker, + syncingStatusTracker, logger, db, metrics, @@ -369,7 +372,9 @@ export class Validator { * Create a signed voluntary exit message for the given validator by its key. 
*/ async signVoluntaryExit(publicKey: string, exitEpoch?: number): Promise { - const validators = (await this.api.beacon.getStateValidators({stateId: "head", validatorIds: [publicKey]})).value(); + const validators = ( + await this.api.beacon.postStateValidators({stateId: "head", validatorIds: [publicKey]}) + ).value(); const validator = validators[0]; if (validator === undefined) { @@ -382,21 +387,6 @@ export class Validator { return this.validatorStore.signVoluntaryExit(publicKey, validator.index, exitEpoch); } - - private async fetchBeaconHealth(): Promise { - try { - const {status: healthCode} = await this.api.node.getHealth(); - - if (healthCode === routes.node.NodeHealth.READY) return BeaconHealth.READY; - if (healthCode === routes.node.NodeHealth.SYNCING) return BeaconHealth.SYNCING; - if (healthCode === routes.node.NodeHealth.NOT_INITIALIZED_OR_ISSUES) - return BeaconHealth.NOT_INITIALIZED_OR_ISSUES; - else return BeaconHealth.UNKNOWN; - } catch (e) { - // TODO: Filter by network error type - return BeaconHealth.ERROR; - } - } } /** Assert the same genesisValidatorRoot and genesisTime */ @@ -408,14 +398,14 @@ async function assertEqualGenesis(opts: ValidatorOptions, genesis: Genesis): Pro if (!ssz.Root.equals(genesisValidatorsRoot, nodeGenesisValidatorRoot)) { // this happens when the existing validator db served another network before opts.logger.error("Not the same genesisValidatorRoot", { - expected: toHexString(nodeGenesisValidatorRoot), - actual: toHexString(genesisValidatorsRoot), + expected: toRootHex(nodeGenesisValidatorRoot), + actual: toRootHex(genesisValidatorsRoot), }); throw new NotEqualParamsError("Not the same genesisValidatorRoot"); } } else { await metaDataRepository.setGenesisValidatorsRoot(nodeGenesisValidatorRoot); - opts.logger.info("Persisted genesisValidatorRoot", toHexString(nodeGenesisValidatorRoot)); + opts.logger.info("Persisted genesisValidatorRoot", toRootHex(nodeGenesisValidatorRoot)); } const nodeGenesisTime = 
genesis.genesisTime; diff --git a/packages/validator/test/e2e/web3signer.test.ts b/packages/validator/test/e2e/web3signer.test.ts index 326dbae8c4f7..ae22bc31446b 100644 --- a/packages/validator/test/e2e/web3signer.test.ts +++ b/packages/validator/test/e2e/web3signer.test.ts @@ -5,7 +5,7 @@ import {computeStartSlotAtEpoch, interopSecretKey, interopSecretKeys} from "@lod import {createBeaconConfig} from "@lodestar/config"; import {genesisData} from "@lodestar/config/networks"; import {getClient, routes} from "@lodestar/api"; -import {ssz} from "@lodestar/types"; +import {ssz, sszTypesFor} from "@lodestar/types"; import {ForkSeq} from "@lodestar/params"; import {getKeystoresStr, StartedExternalSigner, startExternalSigner} from "@lodestar/test-utils"; import {Interchange, ISlashingProtection, Signer, SignerType, ValidatorStore} from "../../src/index.js"; @@ -93,17 +93,27 @@ describe("web3signer signature test", function () { await assertSameSignature("signAttestation", duty, attestationData, epoch); }); - it("signAggregateAndProof", async () => { - const aggregateAndProof = ssz.phase0.AggregateAndProof.defaultValue(); - aggregateAndProof.aggregate.data.slot = duty.slot; - aggregateAndProof.aggregate.data.index = duty.committeeIndex; - await assertSameSignature( - "signAggregateAndProof", - duty, - aggregateAndProof.selectionProof, - aggregateAndProof.aggregate - ); - }); + for (const fork of config.forksAscendingEpochOrder) { + it(`signAggregateAndProof ${fork.name}`, async ({skip}) => { + // Only test till the fork the signer version supports + if (ForkSeq[fork.name] > externalSigner.supportedForkSeq) { + skip(); + return; + } + + const aggregateAndProof = sszTypesFor(fork.name).AggregateAndProof.defaultValue(); + const slot = computeStartSlotAtEpoch(fork.epoch); + aggregateAndProof.aggregate.data.slot = slot; + aggregateAndProof.aggregate.data.index = duty.committeeIndex; + + await assertSameSignature( + "signAggregateAndProof", + {...duty, slot}, + 
aggregateAndProof.selectionProof, + aggregateAndProof.aggregate + ); + }); + } it("signSyncCommitteeSignature", async () => { const beaconBlockRoot = ssz.phase0.BeaconBlockHeader.defaultValue().bodyRoot; diff --git a/packages/validator/test/unit/services/attestation.test.ts b/packages/validator/test/unit/services/attestation.test.ts index 0ffec323ee30..66b722273102 100644 --- a/packages/validator/test/unit/services/attestation.test.ts +++ b/packages/validator/test/unit/services/attestation.test.ts @@ -3,6 +3,9 @@ import {toHexString} from "@chainsafe/ssz"; import {SecretKey} from "@chainsafe/blst"; import {ssz} from "@lodestar/types"; import {routes} from "@lodestar/api"; +import {ChainConfig, createChainForkConfig} from "@lodestar/config"; +import {config as defaultConfig} from "@lodestar/config/default"; +import {ForkName} from "@lodestar/params"; import {AttestationService, AttestationServiceOpts} from "../../../src/services/attestation.js"; import {AttDutyAndProof} from "../../../src/services/attestationDuties.js"; import {ValidatorStore} from "../../../src/services/validatorStore.js"; @@ -10,12 +13,14 @@ import {getApiClientStub, mockApiResponse} from "../../utils/apiStub.js"; import {loggerVc} from "../../utils/logger.js"; import {ClockMock} from "../../utils/clock.js"; import {ChainHeaderTracker} from "../../../src/services/chainHeaderTracker.js"; +import {SyncingStatusTracker} from "../../../src/services/syncingStatusTracker.js"; import {ValidatorEventEmitter} from "../../../src/services/emitter.js"; import {ZERO_HASH, ZERO_HASH_HEX} from "../../utils/types.js"; vi.mock("../../../src/services/validatorStore.js"); vi.mock("../../../src/services/emitter.js"); vi.mock("../../../src/services/chainHeaderTracker.js"); +vi.mock("../../../src/services/syncingStatusTracker.js"); describe("AttestationService", function () { const api = getApiClientStub(); @@ -24,6 +29,8 @@ describe("AttestationService", function () { const emitter = vi.mocked(new 
ValidatorEventEmitter()); // @ts-expect-error - Mocked class don't need parameters const chainHeadTracker = vi.mocked(new ChainHeaderTracker()); + // @ts-expect-error - Mocked class don't need parameters + const syncingStatusTracker = vi.mocked(new SyncingStatusTracker()); let pubkeys: Uint8Array[]; // Initialize pubkeys in before() so bls is already initialized @@ -45,16 +52,23 @@ describe("AttestationService", function () { vi.resetAllMocks(); }); - const testContexts: [string, AttestationServiceOpts][] = [ - ["With default configuration", {}], - ["With attestation grouping disabled", {disableAttestationGrouping: true}], - ["With distributed aggregation selection enabled", {distributedAggregationSelection: true}], + // eslint-disable-next-line @typescript-eslint/naming-convention + const electraConfig: Partial = {ELECTRA_FORK_EPOCH: 0}; + + const testContexts: [string, AttestationServiceOpts, Partial][] = [ + ["With default configuration", {}, {}], + ["With default configuration post-electra", {}, electraConfig], + ["With attestation grouping disabled", {disableAttestationGrouping: true}, {}], + ["With attestation grouping disabled post-electra", {disableAttestationGrouping: true}, electraConfig], + ["With distributed aggregation selection enabled", {distributedAggregationSelection: true}, {}], ]; - for (const [title, opts] of testContexts) { + for (const [title, opts, chainConfig] of testContexts) { describe(title, () => { it("Should produce, sign, and publish an attestation + aggregate", async () => { const clock = new ClockMock(); + const config = createChainForkConfig({...defaultConfig, ...chainConfig}); + const isPostElectra = chainConfig.ELECTRA_FORK_EPOCH === 0; const attestationService = new AttestationService( loggerVc, api, @@ -62,12 +76,18 @@ describe("AttestationService", function () { validatorStore, emitter, chainHeadTracker, + syncingStatusTracker, null, + config, opts ); - const attestation = ssz.phase0.Attestation.defaultValue(); - const 
aggregate = ssz.phase0.SignedAggregateAndProof.defaultValue(); + const attestation = isPostElectra + ? ssz.electra.Attestation.defaultValue() + : ssz.phase0.Attestation.defaultValue(); + const aggregate = isPostElectra + ? ssz.electra.SignedAggregateAndProof.defaultValue() + : ssz.phase0.SignedAggregateAndProof.defaultValue(); const duties: AttDutyAndProof[] = [ { duty: { @@ -85,7 +105,7 @@ describe("AttestationService", function () { ]; // Return empty replies to duties service - api.beacon.getStateValidators.mockResolvedValue( + api.beacon.postStateValidators.mockResolvedValue( mockApiResponse({data: [], meta: {executionOptimistic: false, finalized: false}}) ); api.validator.getAttesterDuties.mockResolvedValue( @@ -97,10 +117,17 @@ describe("AttestationService", function () { // Mock beacon's attestation and aggregates endpoints api.validator.produceAttestationData.mockResolvedValue(mockApiResponse({data: attestation.data})); - api.validator.getAggregatedAttestation.mockResolvedValue(mockApiResponse({data: attestation})); - - api.beacon.submitPoolAttestations.mockResolvedValue(mockApiResponse({})); - api.validator.publishAggregateAndProofs.mockResolvedValue(mockApiResponse({})); + if (isPostElectra) { + api.validator.getAggregatedAttestationV2.mockResolvedValue( + mockApiResponse({data: attestation, meta: {version: ForkName.electra}}) + ); + api.beacon.submitPoolAttestationsV2.mockResolvedValue(mockApiResponse({})); + api.validator.publishAggregateAndProofsV2.mockResolvedValue(mockApiResponse({})); + } else { + api.validator.getAggregatedAttestation.mockResolvedValue(mockApiResponse({data: attestation})); + api.beacon.submitPoolAttestations.mockResolvedValue(mockApiResponse({})); + api.validator.publishAggregateAndProofs.mockResolvedValue(mockApiResponse({})); + } if (opts.distributedAggregationSelection) { // Mock distributed validator middleware client selections endpoint @@ -141,13 +168,25 @@ describe("AttestationService", function () { 
expect(api.validator.prepareBeaconCommitteeSubnet).toHaveBeenCalledWith({subscriptions: [subscription]}); } - // Must submit the attestation received through produceAttestationData() - expect(api.beacon.submitPoolAttestations).toHaveBeenCalledOnce(); - expect(api.beacon.submitPoolAttestations).toHaveBeenCalledWith({signedAttestations: [attestation]}); - - // Must submit the aggregate received through getAggregatedAttestation() then createAndSignAggregateAndProof() - expect(api.validator.publishAggregateAndProofs).toHaveBeenCalledOnce(); - expect(api.validator.publishAggregateAndProofs).toHaveBeenCalledWith({signedAggregateAndProofs: [aggregate]}); + if (isPostElectra) { + // Must submit the attestation received through produceAttestationData() + expect(api.beacon.submitPoolAttestationsV2).toHaveBeenCalledOnce(); + expect(api.beacon.submitPoolAttestationsV2).toHaveBeenCalledWith({signedAttestations: [attestation]}); + + // Must submit the aggregate received through getAggregatedAttestationV2() then createAndSignAggregateAndProof() + expect(api.validator.publishAggregateAndProofsV2).toHaveBeenCalledOnce(); + expect(api.validator.publishAggregateAndProofsV2).toHaveBeenCalledWith({ + signedAggregateAndProofs: [aggregate], + }); + } else { + // Must submit the attestation received through produceAttestationData() + expect(api.beacon.submitPoolAttestations).toHaveBeenCalledOnce(); + expect(api.beacon.submitPoolAttestations).toHaveBeenCalledWith({signedAttestations: [attestation]}); + + // Must submit the aggregate received through getAggregatedAttestation() then createAndSignAggregateAndProof() + expect(api.validator.publishAggregateAndProofs).toHaveBeenCalledOnce(); + expect(api.validator.publishAggregateAndProofs).toHaveBeenCalledWith({signedAggregateAndProofs: [aggregate]}); + } }); }); } diff --git a/packages/validator/test/unit/services/attestationDuties.test.ts b/packages/validator/test/unit/services/attestationDuties.test.ts index ad54c0735eea..a9d50eaf42c6 100644 
--- a/packages/validator/test/unit/services/attestationDuties.test.ts +++ b/packages/validator/test/unit/services/attestationDuties.test.ts @@ -13,6 +13,7 @@ import {loggerVc} from "../../utils/logger.js"; import {ClockMock} from "../../utils/clock.js"; import {initValidatorStore} from "../../utils/validatorStore.js"; import {ChainHeaderTracker} from "../../../src/services/chainHeaderTracker.js"; +import {SyncingStatusTracker} from "../../../src/services/syncingStatusTracker.js"; import {ZERO_HASH_HEX} from "../../utils/types.js"; vi.mock("../../../src/services/chainHeaderTracker.js"); @@ -45,20 +46,22 @@ describe("AttestationDutiesService", function () { let controller: AbortController; // To stop clock beforeEach(() => { controller = new AbortController(); - }); - afterEach(() => controller.abort()); - - it("Should fetch indexes and duties", async function () { // Reply with an active validator that has an index const validatorResponse = { ...defaultValidator, index, validator: {...defaultValidator.validator, pubkey: pubkeys[0]}, }; - api.beacon.getStateValidators.mockResolvedValue( + api.beacon.postStateValidators.mockResolvedValue( mockApiResponse({data: [validatorResponse], meta: {executionOptimistic: false, finalized: false}}) ); + }); + afterEach(() => { + vi.restoreAllMocks(); + controller.abort(); + }); + it("Should fetch indexes and duties", async function () { // Reply with some duties const slot = 1; const epoch = computeEpochAtSlot(slot); @@ -78,9 +81,18 @@ describe("AttestationDutiesService", function () { // Accept all subscriptions api.validator.prepareBeaconCommitteeSubnet.mockResolvedValue(mockApiResponse({})); - // Clock will call runAttesterDutiesTasks() immediately + // Clock will call runDutiesTasks() immediately const clock = new ClockMock(); - const dutiesService = new AttestationDutiesService(loggerVc, api, clock, validatorStore, chainHeadTracker, null); + const syncingStatusTracker = new SyncingStatusTracker(loggerVc, api, clock, null); + 
const dutiesService = new AttestationDutiesService( + loggerVc, + api, + clock, + validatorStore, + chainHeadTracker, + syncingStatusTracker, + null + ); // Trigger clock onSlot for slot 0 await clock.tickEpochFns(0, controller.signal); @@ -107,16 +119,6 @@ describe("AttestationDutiesService", function () { }); it("Should remove signer from attestation duties", async function () { - // Reply with an active validator that has an index - const validatorResponse = { - ...defaultValidator, - index, - validator: {...defaultValidator.validator, pubkey: pubkeys[0]}, - }; - api.beacon.getStateValidators.mockResolvedValue( - mockApiResponse({data: [validatorResponse], meta: {executionOptimistic: false, finalized: false}}) - ); - // Reply with some duties const slot = 1; const duty: routes.validator.AttesterDuty = { @@ -135,9 +137,18 @@ describe("AttestationDutiesService", function () { // Accept all subscriptions api.validator.prepareBeaconCommitteeSubnet.mockResolvedValue(mockApiResponse({})); - // Clock will call runAttesterDutiesTasks() immediately + // Clock will call runDutiesTasks() immediately const clock = new ClockMock(); - const dutiesService = new AttestationDutiesService(loggerVc, api, clock, validatorStore, chainHeadTracker, null); + const syncingStatusTracker = new SyncingStatusTracker(loggerVc, api, clock, null); + const dutiesService = new AttestationDutiesService( + loggerVc, + api, + clock, + validatorStore, + chainHeadTracker, + syncingStatusTracker, + null + ); // Trigger clock onSlot for slot 0 await clock.tickEpochFns(0, controller.signal); @@ -153,4 +164,81 @@ describe("AttestationDutiesService", function () { dutiesService.removeDutiesForKey(toHexString(pubkeys[0])); expect(Object.fromEntries(dutiesService["dutiesByIndexByEpoch"])).toEqual({}); }); + + it("Should fetch duties when node is resynced", async function () { + // Node is syncing + api.node.getSyncingStatus.mockResolvedValue( + mockApiResponse({data: {headSlot: 0, syncDistance: 1, 
isSyncing: true, isOptimistic: false, elOffline: false}}) + ); + api.validator.getAttesterDuties.mockRejectedValue(Error("Node is syncing")); + api.validator.prepareBeaconCommitteeSubnet.mockRejectedValue(Error("Node is syncing")); + + // Clock will call runDutiesTasks() immediately + const clock = new ClockMock(); + const syncingStatusTracker = new SyncingStatusTracker(loggerVc, api, clock, null); + const dutiesService = new AttestationDutiesService( + loggerVc, + api, + clock, + validatorStore, + chainHeadTracker, + syncingStatusTracker, + null + ); + + // Trigger clock for slot and epoch + await clock.tickEpochFns(0, controller.signal); + await clock.tickSlotFns(1, controller.signal); + + const dutySlot = 3; + const epoch = computeEpochAtSlot(dutySlot); + + // Duties for slot should be empty as node is still syncing + expect(dutiesService.getDutiesAtSlot(dutySlot)).toEqual([]); + + // Node is synced now + api.node.getSyncingStatus.mockResolvedValue( + mockApiResponse({data: {headSlot: 1, syncDistance: 0, isSyncing: false, isOptimistic: false, elOffline: false}}) + ); + + // Reply with some duties on next call + const duty: routes.validator.AttesterDuty = { + slot: dutySlot, + committeeIndex: 1, + committeeLength: 120, + committeesAtSlot: 120, + validatorCommitteeIndex: 1, + validatorIndex: index, + pubkey: pubkeys[0], + }; + api.validator.getAttesterDuties.mockResolvedValue( + mockApiResponse({data: [duty], meta: {dependentRoot: ZERO_HASH_HEX, executionOptimistic: false}}) + ); + + // Accept all subscriptions + api.validator.prepareBeaconCommitteeSubnet.mockResolvedValue(mockApiResponse({})); + + // Only tick clock for slot to not trigger regular polling + await clock.tickSlotFns(2, controller.signal); + + // Validator index should be persisted + expect(validatorStore.getAllLocalIndices()).toEqual([index]); + expect(validatorStore.getPubkeyOfIndex(index)).toBe(toHexString(pubkeys[0])); + + // Duties for this and next epoch should be persisted + 
expect(Object.fromEntries(dutiesService["dutiesByIndexByEpoch"].get(epoch)?.dutiesByIndex || new Map())).toEqual({ + // Since the ZERO_HASH won't pass the isAggregator test, selectionProof is null + [index]: {duty, selectionProof: null}, + }); + expect( + Object.fromEntries(dutiesService["dutiesByIndexByEpoch"].get(epoch + 1)?.dutiesByIndex || new Map()) + ).toEqual({ + // Since the ZERO_HASH won't pass the isAggregator test, selectionProof is null + [index]: {duty, selectionProof: null}, + }); + + expect(dutiesService.getDutiesAtSlot(dutySlot)).toEqual([{duty, selectionProof: null}]); + + expect(api.validator.prepareBeaconCommitteeSubnet).toHaveBeenCalledOnce(); + }); }); diff --git a/packages/validator/test/unit/services/syncCommitteDuties.test.ts b/packages/validator/test/unit/services/syncCommitteDuties.test.ts index 52f2071f9102..dc43502d5b57 100644 --- a/packages/validator/test/unit/services/syncCommitteDuties.test.ts +++ b/packages/validator/test/unit/services/syncCommitteDuties.test.ts @@ -1,4 +1,4 @@ -import {describe, it, expect, beforeAll, beforeEach, afterEach} from "vitest"; +import {describe, it, expect, beforeAll, beforeEach, afterEach, vi} from "vitest"; import {when} from "vitest-when"; import {toBufferBE} from "bigint-buffer"; import {toHexString} from "@chainsafe/ssz"; @@ -13,6 +13,7 @@ import { SyncDutySubnet, } from "../../../src/services/syncCommitteeDuties.js"; import {ValidatorStore} from "../../../src/services/validatorStore.js"; +import {SyncingStatusTracker} from "../../../src/services/syncingStatusTracker.js"; import {getApiClientStub, mockApiResponse} from "../../utils/apiStub.js"; import {loggerVc} from "../../utils/logger.js"; import {ClockMock} from "../../utils/clock.js"; @@ -59,11 +60,14 @@ describe("SyncCommitteeDutiesService", function () { index: indices[i], validator: {...defaultValidator.validator, pubkey: pubkeys[i]}, })); - api.beacon.getStateValidators.mockResolvedValue( + api.beacon.postStateValidators.mockResolvedValue( 
mockApiResponse({data: validatorResponses, meta: {executionOptimistic: false, finalized: false}}) ); }); - afterEach(() => controller.abort()); + afterEach(() => { + vi.restoreAllMocks(); + controller.abort(); + }); it("Should fetch indexes and duties", async function () { // Reply with some duties @@ -80,14 +84,22 @@ describe("SyncCommitteeDutiesService", function () { // Accept all subscriptions api.validator.prepareSyncCommitteeSubnets.mockResolvedValue(mockApiResponse({})); - // Clock will call runAttesterDutiesTasks() immediately + // Clock will call runDutiesTasks() immediately const clock = new ClockMock(); - const dutiesService = new SyncCommitteeDutiesService(altair0Config, loggerVc, api, clock, validatorStore, null); + const syncingStatusTracker = new SyncingStatusTracker(loggerVc, api, clock, null); + const dutiesService = new SyncCommitteeDutiesService( + altair0Config, + loggerVc, + api, + clock, + validatorStore, + syncingStatusTracker, + null + ); // Trigger clock onSlot for slot 0 await clock.tickEpochFns(0, controller.signal); - // Validator index should be persisted // Validator index should be persisted expect(validatorStore.getAllLocalIndices()).toEqual(indices); for (let i = 0; i < indices.length; i++) { @@ -107,9 +119,9 @@ describe("SyncCommitteeDutiesService", function () { 1: {[indices[0]]: {duty: toSyncDutySubnet(duty)}}, } as typeof dutiesByIndexByPeriodObj); - expect(await dutiesService.getDutiesAtSlot(slot)).toEqual([ + expect(await dutiesService.getDutiesAtSlot(slot)).toEqual([ {duty: toSyncDutySubnet(duty), selectionProofs: [{selectionProof: null, subcommitteeIndex: 0}]}, - ] as SyncDutyAndProofs[]); + ]); expect(api.validator.prepareSyncCommitteeSubnets).toHaveBeenCalledOnce(); }); @@ -143,9 +155,18 @@ describe("SyncCommitteeDutiesService", function () { .calledWith({epoch: 1, indices}) .thenResolve(mockApiResponse({data: [duty2], meta: {executionOptimistic: false}})); - // Clock will call runAttesterDutiesTasks() immediately + // 
Clock will call runDutiesTasks() immediately const clock = new ClockMock(); - const dutiesService = new SyncCommitteeDutiesService(altair0Config, loggerVc, api, clock, validatorStore, null); + const syncingStatusTracker = new SyncingStatusTracker(loggerVc, api, clock, null); + const dutiesService = new SyncCommitteeDutiesService( + altair0Config, + loggerVc, + api, + clock, + validatorStore, + syncingStatusTracker, + null + ); // Trigger clock onSlot for slot 0 await clock.tickEpochFns(0, controller.signal); @@ -196,9 +217,18 @@ describe("SyncCommitteeDutiesService", function () { // Accept all subscriptions api.validator.prepareSyncCommitteeSubnets.mockResolvedValue(mockApiResponse({})); - // Clock will call runAttesterDutiesTasks() immediately + // Clock will call runDutiesTasks() immediately const clock = new ClockMock(); - const dutiesService = new SyncCommitteeDutiesService(altair0Config, loggerVc, api, clock, validatorStore, null); + const syncingStatusTracker = new SyncingStatusTracker(loggerVc, api, clock, null); + const dutiesService = new SyncCommitteeDutiesService( + altair0Config, + loggerVc, + api, + clock, + validatorStore, + syncingStatusTracker, + null + ); // Trigger clock onSlot for slot 0 await clock.tickEpochFns(0, controller.signal); @@ -236,6 +266,83 @@ describe("SyncCommitteeDutiesService", function () { 1: {[indices[1]]: {duty: toSyncDutySubnet(duty2)}}, } as typeof dutiesByIndexByPeriodObjAfterRemoval); }); + + it("Should fetch duties when node is resynced", async function () { + // Node is syncing + api.node.getSyncingStatus.mockResolvedValue( + mockApiResponse({data: {headSlot: 0, syncDistance: 1, isSyncing: true, isOptimistic: false, elOffline: false}}) + ); + api.validator.getSyncCommitteeDuties.mockRejectedValue(Error("Node is syncing")); + api.validator.prepareSyncCommitteeSubnets.mockRejectedValue(Error("Node is syncing")); + + // Clock will call runDutiesTasks() immediately + const clock = new ClockMock(); + const 
syncingStatusTracker = new SyncingStatusTracker(loggerVc, api, clock, null); + const dutiesService = new SyncCommitteeDutiesService( + altair0Config, + loggerVc, + api, + clock, + validatorStore, + syncingStatusTracker, + null + ); + + // Trigger clock for slot and epoch + await clock.tickEpochFns(0, controller.signal); + await clock.tickSlotFns(1, controller.signal); + + const dutySlot = 1; + + // Duties for slot should be empty as node is still syncing + expect(await dutiesService.getDutiesAtSlot(dutySlot)).toEqual([]); + + // Node is synced now + api.node.getSyncingStatus.mockResolvedValue( + mockApiResponse({data: {headSlot: 1, syncDistance: 0, isSyncing: false, isOptimistic: false, elOffline: false}}) + ); + + // Reply with some duties + const duty: routes.validator.SyncDuty = { + pubkey: pubkeys[0], + validatorIndex: indices[0], + validatorSyncCommitteeIndices: [7], + }; + api.validator.getSyncCommitteeDuties.mockResolvedValue( + mockApiResponse({data: [duty], meta: {executionOptimistic: false}}) + ); + + // Accept all subscriptions + api.validator.prepareSyncCommitteeSubnets.mockResolvedValue(mockApiResponse({})); + + // Only tick clock for slot to not trigger regular polling + await clock.tickSlotFns(2, controller.signal); + + // Validator index should be persisted + expect(validatorStore.getAllLocalIndices()).toEqual(indices); + for (let i = 0; i < indices.length; i++) { + expect(validatorStore.getPubkeyOfIndex(indices[i])).toBe(toHexString(pubkeys[i])); + } + + // Duties for this and next epoch should be persisted + const dutiesByIndexByPeriodObj = Object.fromEntries( + Array.from(dutiesService["dutiesByIndexByPeriod"].entries()).map(([period, dutiesByIndex]) => [ + period, + Object.fromEntries(dutiesByIndex), + ]) + ); + + expect(dutiesByIndexByPeriodObj).toEqual({ + 0: {[indices[0]]: {duty: toSyncDutySubnet(duty)}}, + 1: {[indices[0]]: {duty: toSyncDutySubnet(duty)}}, + } as typeof dutiesByIndexByPeriodObj); + + expect(await 
dutiesService.getDutiesAtSlot(dutySlot)).toEqual([ + {duty: toSyncDutySubnet(duty), selectionProofs: [{selectionProof: null, subcommitteeIndex: 0}]}, + ]); + + expect(api.validator.prepareSyncCommitteeSubnets).toHaveBeenCalledOnce(); + }); }); function toSyncDutySubnet(duty: routes.validator.SyncDuty): SyncDutySubnet { diff --git a/packages/validator/test/unit/services/syncCommittee.test.ts b/packages/validator/test/unit/services/syncCommittee.test.ts index c65912f12e9a..449f826c3806 100644 --- a/packages/validator/test/unit/services/syncCommittee.test.ts +++ b/packages/validator/test/unit/services/syncCommittee.test.ts @@ -12,12 +12,14 @@ import {getApiClientStub, mockApiResponse} from "../../utils/apiStub.js"; import {loggerVc} from "../../utils/logger.js"; import {ClockMock} from "../../utils/clock.js"; import {ChainHeaderTracker} from "../../../src/services/chainHeaderTracker.js"; +import {SyncingStatusTracker} from "../../../src/services/syncingStatusTracker.js"; import {ZERO_HASH} from "../../utils/types.js"; import {ValidatorEventEmitter} from "../../../src/services/emitter.js"; vi.mock("../../../src/services/validatorStore.js"); vi.mock("../../../src/services/emitter.js"); vi.mock("../../../src/services/chainHeaderTracker.js"); +vi.mock("../../../src/services/syncingStatusTracker.js"); /* eslint-disable @typescript-eslint/naming-convention */ @@ -28,6 +30,8 @@ describe("SyncCommitteeService", function () { const emitter = vi.mocked(new ValidatorEventEmitter()); // @ts-expect-error - Mocked class don't need parameters const chainHeaderTracker = vi.mocked(new ChainHeaderTracker()); + // @ts-expect-error - Mocked class don't need parameters + const syncingStatusTracker = vi.mocked(new SyncingStatusTracker()); let pubkeys: Uint8Array[]; // Initialize pubkeys in before() so bls is already initialized const config = createChainForkConfig({ @@ -71,6 +75,7 @@ describe("SyncCommitteeService", function () { validatorStore, emitter, chainHeaderTracker, + 
syncingStatusTracker, null, opts ); @@ -97,7 +102,7 @@ describe("SyncCommitteeService", function () { ]; // Return empty replies to duties service - api.beacon.getStateValidators.mockResolvedValue( + api.beacon.postStateValidators.mockResolvedValue( mockApiResponse({data: [], meta: {executionOptimistic: false, finalized: false}}) ); api.validator.getSyncCommitteeDuties.mockResolvedValue( diff --git a/packages/validator/test/unit/services/syncingStatusTracker.test.ts b/packages/validator/test/unit/services/syncingStatusTracker.test.ts new file mode 100644 index 000000000000..59029e1b9c51 --- /dev/null +++ b/packages/validator/test/unit/services/syncingStatusTracker.test.ts @@ -0,0 +1,149 @@ +import {describe, it, expect, vi, beforeEach, afterEach, MockedFunction} from "vitest"; +import {getApiClientStub, mockApiResponse} from "../../utils/apiStub.js"; +import {getMockedLogger} from "../../utils/logger.js"; +import {ClockMock} from "../../utils/clock.js"; +import {SyncingStatus, SyncingStatusTracker} from "../../../src/services/syncingStatusTracker.js"; + +describe("SyncingStatusTracker", function () { + const api = getApiClientStub(); + const logger = getMockedLogger(); + + let controller: AbortController; + let clock: ClockMock; + let syncingStatusTracker: SyncingStatusTracker; + let callOnResynced: MockedFunction<() => Promise>; + + beforeEach(() => { + controller = new AbortController(); + clock = new ClockMock(); + syncingStatusTracker = new SyncingStatusTracker(logger, api, clock, null); + callOnResynced = vi.fn().mockResolvedValue(undefined); + syncingStatusTracker.runOnResynced(callOnResynced); + }); + + afterEach(() => { + vi.resetAllMocks(); + controller.abort(); + }); + + it("should handle transition from syncing to synced", async function () { + // Node is syncing + const syncing: SyncingStatus = { + headSlot: 0, + syncDistance: 1, + isSyncing: true, + isOptimistic: false, + elOffline: false, + }; + 
api.node.getSyncingStatus.mockResolvedValue(mockApiResponse({data: syncing})); + + await clock.tickSlotFns(1, controller.signal); + + expect(logger.warn).toHaveBeenCalledWith("Node is syncing", {slot: 1, headSlot: 0, syncDistance: 1}); + expect(logger.verbose).toHaveBeenCalledWith("Node syncing status", { + slot: 1, + headSlot: 0, + syncDistance: 1, + isSyncing: true, + isOptimistic: false, + elOffline: false, + }); + expect(syncingStatusTracker["prevSyncingStatus"]).toBe(syncing); + + // Transition to synced + const synced: SyncingStatus = { + headSlot: 2, + syncDistance: 0, + isSyncing: false, + isOptimistic: false, + elOffline: false, + }; + api.node.getSyncingStatus.mockResolvedValue(mockApiResponse({data: synced})); + + await clock.tickSlotFns(2, controller.signal); + + expect(logger.info).toHaveBeenCalledWith("Node is synced", { + slot: 2, + headSlot: 2, + isOptimistic: false, + elOffline: false, + }); + expect(logger.verbose).toHaveBeenCalledWith("Node syncing status", { + slot: 2, + headSlot: 2, + syncDistance: 0, + isSyncing: false, + isOptimistic: false, + elOffline: false, + }); + expect(syncingStatusTracker["prevSyncingStatus"]).toBe(synced); + expect(callOnResynced).toHaveBeenCalledOnce(); + }); + + it("should handle errors when checking syncing status", async function () { + // Node is offline + const error = new Error("ECONNREFUSED"); + api.node.getSyncingStatus.mockRejectedValue(error); + + await clock.tickSlotFns(1, controller.signal); + + expect(logger.error).toHaveBeenCalledWith("Failed to check syncing status", {slot: 1}, error); + expect(syncingStatusTracker["prevSyncingStatus"]).toBe(error); + expect(callOnResynced).not.toHaveBeenCalled(); + }); + + it("should not call scheduled tasks if already synced", async function () { + // Node is already synced + const syncedHead1: SyncingStatus = { + headSlot: 1, + syncDistance: 0, + isSyncing: false, + isOptimistic: false, + elOffline: false, + }; + 
api.node.getSyncingStatus.mockResolvedValue(mockApiResponse({data: syncedHead1})); + + await clock.tickSlotFns(1, controller.signal); + + expect(logger.info).toHaveBeenCalledWith("Node is synced", { + slot: 1, + headSlot: 1, + isOptimistic: false, + elOffline: false, + }); + expect(logger.verbose).toHaveBeenCalledWith("Node syncing status", { + slot: 1, + headSlot: 1, + syncDistance: 0, + isSyncing: false, + isOptimistic: false, + elOffline: false, + }); + expect(syncingStatusTracker["prevSyncingStatus"]).toBe(syncedHead1); + + // Still synced on next tick + const syncedHead2: SyncingStatus = { + headSlot: 2, + syncDistance: 0, + isSyncing: false, + isOptimistic: false, + elOffline: false, + }; + api.node.getSyncingStatus.mockResolvedValue(mockApiResponse({data: syncedHead2})); + + await clock.tickSlotFns(2, controller.signal); + + // info log should only be printed out once, not every slot + expect(logger.info).toHaveBeenCalledOnce(); + expect(logger.verbose).toHaveBeenCalledWith("Node syncing status", { + slot: 2, + headSlot: 2, + syncDistance: 0, + isSyncing: false, + isOptimistic: false, + elOffline: false, + }); + expect(syncingStatusTracker["prevSyncingStatus"]).toBe(syncedHead2); + expect(callOnResynced).not.toHaveBeenCalled(); + }); +}); diff --git a/packages/validator/test/unit/utils/interopConfigs.ts b/packages/validator/test/unit/utils/interopConfigs.ts index 2c05203660f6..d263fa8c1d8f 100644 --- a/packages/validator/test/unit/utils/interopConfigs.ts +++ b/packages/validator/test/unit/utils/interopConfigs.ts @@ -30,6 +30,7 @@ export const lighthouseHoleskyConfig = { EJECTION_BALANCE: "28000000000", MIN_PER_EPOCH_CHURN_LIMIT: "4", MAX_PER_EPOCH_ACTIVATION_CHURN_LIMIT: "8", + MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: "128000000000", CHURN_LIMIT_QUOTIENT: "65536", PROPOSER_SCORE_BOOST: "40", DEPOSIT_CHAIN_ID: "17000", @@ -120,6 +121,14 @@ export const lighthouseHoleskyConfig = { DOMAIN_VOLUNTARY_EXIT: "0x04000000", DOMAIN_CONTRIBUTION_AND_PROOF: "0x09000000", 
DOMAIN_SYNC_COMMITTEE_SELECTION_PROOF: "0x08000000", + DOMAIN_BLS_TO_EXECUTION_CHANGE: "0x0A000000", + MAX_EFFECTIVE_BALANCE_ELECTRA: "2048000000000", + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA: "65536", + MIN_ACTIVATION_BALANCE: "32000000000", + PENDING_BALANCE_DEPOSITS_LIMIT: "134217728", + PENDING_PARTIAL_WITHDRAWALS_LIMIT: "134217728", + PENDING_CONSOLIDATIONS_LIMIT: "262144", + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: "1", }; export const prysmHoleskyConfig = { @@ -207,10 +216,12 @@ export const prysmHoleskyConfig = { MIN_GENESIS_ACTIVE_VALIDATOR_COUNT: "16384", MIN_GENESIS_TIME: "1695902100", MIN_PER_EPOCH_CHURN_LIMIT: "4", + MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: "128000000000", MIN_SEED_LOOKAHEAD: "1", MIN_SLASHING_PENALTY_QUOTIENT: "128", MIN_SLASHING_PENALTY_QUOTIENT_ALTAIR: "64", MIN_SLASHING_PENALTY_QUOTIENT_BELLATRIX: "32", + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA: "65536", MIN_SYNC_COMMITTEE_PARTICIPANTS: "1", MIN_VALIDATOR_WITHDRAWABILITY_DELAY: "256", NODE_ID_BITS: "256", @@ -252,6 +263,12 @@ export const prysmHoleskyConfig = { VALIDATOR_REGISTRY_LIMIT: "1099511627776", WEIGHT_DENOMINATOR: "64", WHISTLEBLOWER_REWARD_QUOTIENT: "512", + MAX_EFFECTIVE_BALANCE_ELECTRA: "2048000000000", + MIN_ACTIVATION_BALANCE: "32000000000", + PENDING_BALANCE_DEPOSITS_LIMIT: "134217728", + PENDING_PARTIAL_WITHDRAWALS_LIMIT: "134217728", + PENDING_CONSOLIDATIONS_LIMIT: "262144", + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: "1", }; export const tekuHoleskyConfig = { @@ -348,6 +365,8 @@ export const tekuHoleskyConfig = { MAX_BLOB_COMMITMENTS_PER_BLOCK: "4096", DOMAIN_RANDAO: "0x02000000", CAPELLA_FORK_VERSION: "0x04017000", + EPOCHS_PER_RANDOM_SUBNET_SUBSCRIPTION: "256", + MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: "128000000000", MIN_SLASHING_PENALTY_QUOTIENT_ALTAIR: "64", EPOCHS_PER_ETH1_VOTING_PERIOD: "64", MAX_DEPOSIT_RECEIPTS_PER_PAYLOAD: "8192", @@ -383,6 +402,13 @@ export const tekuHoleskyConfig = { DOMAIN_AGGREGATE_AND_PROOF: "0x06000000", CHURN_LIMIT_QUOTIENT: "65536", 
BLS_WITHDRAWAL_PREFIX: "0x00", + MAX_EFFECTIVE_BALANCE_ELECTRA: "2048000000000", + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA: "65536", + MIN_ACTIVATION_BALANCE: "32000000000", + PENDING_BALANCE_DEPOSITS_LIMIT: "134217728", + PENDING_PARTIAL_WITHDRAWALS_LIMIT: "134217728", + PENDING_CONSOLIDATIONS_LIMIT: "262144", + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: "1", }; export const nimbusHoleskyConfig = { @@ -465,6 +491,7 @@ export const nimbusHoleskyConfig = { INACTIVITY_SCORE_RECOVERY_RATE: "16", EJECTION_BALANCE: "28000000000", MIN_PER_EPOCH_CHURN_LIMIT: "4", + MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: "128000000000", CHURN_LIMIT_QUOTIENT: "65536", MAX_PER_EPOCH_ACTIVATION_CHURN_LIMIT: "8", PROPOSER_SCORE_BOOST: "40", @@ -518,4 +545,11 @@ export const nimbusHoleskyConfig = { TARGET_AGGREGATORS_PER_COMMITTEE: "16", TARGET_AGGREGATORS_PER_SYNC_SUBCOMMITTEE: "16", SYNC_COMMITTEE_SUBNET_COUNT: "4", + MAX_EFFECTIVE_BALANCE_ELECTRA: "2048000000000", + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA: "65536", + MIN_ACTIVATION_BALANCE: "32000000000", + PENDING_BALANCE_DEPOSITS_LIMIT: "134217728", + PENDING_PARTIAL_WITHDRAWALS_LIMIT: "134217728", + PENDING_CONSOLIDATIONS_LIMIT: "262144", + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: "1", }; diff --git a/packages/validator/test/unit/utils/metrics.test.ts b/packages/validator/test/unit/utils/metrics.test.ts index 695e8731b7f6..de4010761001 100644 --- a/packages/validator/test/unit/utils/metrics.test.ts +++ b/packages/validator/test/unit/utils/metrics.test.ts @@ -3,6 +3,6 @@ import {BeaconHealth, renderEnumNumeric} from "../../../src/metrics.js"; describe("renderEnumNumeric", () => { it("BeaconHealth", () => { - expect(renderEnumNumeric(BeaconHealth)).toBe("READY=0, SYNCING=1, NOT_INITIALIZED_OR_ISSUES=2, UNKNOWN=3, ERROR=4"); + expect(renderEnumNumeric(BeaconHealth)).toBe("READY=0, SYNCING=1, ERROR=2"); }); }); diff --git a/packages/validator/test/utils/apiStub.ts b/packages/validator/test/utils/apiStub.ts index ac41c7145128..4443dab5c4ac 100644 --- 
a/packages/validator/test/utils/apiStub.ts +++ b/packages/validator/test/utils/apiStub.ts @@ -16,10 +16,15 @@ export function getApiClientStub(): ApiClientStub { return { beacon: { getStateValidators: vi.fn(), + postStateValidators: vi.fn(), publishBlindedBlockV2: vi.fn(), publishBlockV2: vi.fn(), submitPoolSyncCommitteeSignatures: vi.fn(), submitPoolAttestations: vi.fn(), + submitPoolAttestationsV2: vi.fn(), + }, + node: { + getSyncingStatus: vi.fn(), }, validator: { getProposerDuties: vi.fn(), @@ -33,7 +38,9 @@ export function getApiClientStub(): ApiClientStub { submitSyncCommitteeSelections: vi.fn(), produceAttestationData: vi.fn(), getAggregatedAttestation: vi.fn(), + getAggregatedAttestationV2: vi.fn(), publishAggregateAndProofs: vi.fn(), + publishAggregateAndProofsV2: vi.fn(), submitBeaconCommitteeSelections: vi.fn(), }, httpClient: httpClientStub, diff --git a/packages/validator/test/utils/logger.ts b/packages/validator/test/utils/logger.ts index 8eaed6dfe6c1..44f5190dfe79 100644 --- a/packages/validator/test/utils/logger.ts +++ b/packages/validator/test/utils/logger.ts @@ -1,3 +1,5 @@ +import {vi, Mocked} from "vitest"; +import {Logger} from "@lodestar/logger"; import {getEnvLogger} from "@lodestar/logger/env"; import {getLoggerVc} from "../../src/util/index.js"; import {ClockMock} from "./clock.js"; @@ -13,3 +15,15 @@ import {ClockMock} from "./clock.js"; export const testLogger = getEnvLogger; export const loggerVc = getLoggerVc(getEnvLogger(), new ClockMock()); + +export type MockedLogger = Mocked; + +export function getMockedLogger(): MockedLogger { + return { + debug: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + verbose: vi.fn(), + }; +} diff --git a/scripts/generate_changelog.mjs b/scripts/generate_changelog.mjs index cd809622c5ea..de697e23612b 100644 --- a/scripts/generate_changelog.mjs +++ b/scripts/generate_changelog.mjs @@ -24,7 +24,7 @@ import fs from "node:fs"; const knownAuthors = { "caymannava@gmail.com": "wemeetagain", 
"develop@g11tech.io": "g11tech", - "tuyen@chainsafe.io": "tuyennhv", + "tuyen@chainsafe.io": "twoeths", "35266934+dapplion@users.noreply.github.com": "dapplion", "41898282+github-actions[bot]@users.noreply.github.com": "github-actions[bot]", "49699333+dependabot[bot]@users.noreply.github.com": "dependabot[bot]", @@ -42,6 +42,7 @@ const knownAuthors = { "nflaig@protonmail.com": "nflaig", "nazarhussain@gmail.com": "nazarhussain", "me@matthewkeil.com": "matthewkeil", + "17676176+ensi321@users.noreply.github.com": "ensi321", }; const fromTag = process.argv[2]; diff --git a/scripts/lint-grafana-dashboard.mjs b/scripts/lint-grafana-dashboard.mjs index e5e43fc3d9cf..84eb92e94040 100644 --- a/scripts/lint-grafana-dashboard.mjs +++ b/scripts/lint-grafana-dashboard.mjs @@ -244,7 +244,7 @@ export function lintGrafanaDashboard(json) { condition: "", key: "instance", operator: "=", - value: "unstable-lg1k-hzax41", + value: "unstable-lg1k-hzax41-dkr", }, ], hide: 0, diff --git a/vite.base.config.ts b/vite.base.config.ts index 65e1bad01500..f9030d9edb7f 100644 --- a/vite.base.config.ts +++ b/vite.base.config.ts @@ -40,6 +40,7 @@ export function getBaseViteConfig( esbuild: { banner, legalComments: "none", + sourcemap: "inline", }, build: { // "modules" refer to ['es2020', 'edge88', 'firefox78', 'chrome87', 'safari14'] diff --git a/yarn.lock b/yarn.lock index eadac87bdb9a..9c749a977827 100644 --- a/yarn.lock +++ b/yarn.lock @@ -308,6 +308,11 @@ resolved "https://registry.yarnpkg.com/@chainsafe/as-chacha20poly1305/-/as-chacha20poly1305-0.1.0.tgz#7da6f8796f9b42dac6e830a086d964f1f9189e09" integrity sha512-BpNcL8/lji/GM3+vZ/bgRWqJ1q5kwvTFmGPk7pxm/QQZDbaMI98waOHjEymTjq2JmdD/INdNBFOVSyJofXg7ew== +"@chainsafe/as-sha256@0.5.0", "@chainsafe/as-sha256@^0.5.0": + version "0.5.0" + resolved "https://registry.yarnpkg.com/@chainsafe/as-sha256/-/as-sha256-0.5.0.tgz#2523fbef2b80b5000f9aa71f4a76e5c2c5c076bb" + integrity 
sha512-dTIY6oUZNdC5yDTVP5Qc9hAlKAsn0QTQ2DnQvvsbTnKSTbYs3p5RPN0aIUqN0liXei/9h24c7V0dkV44cnWIQA== + "@chainsafe/as-sha256@^0.4.1": version "0.4.1" resolved "https://registry.yarnpkg.com/@chainsafe/as-sha256/-/as-sha256-0.4.1.tgz#cfc0737e25f8c206767bdb6703e7943e5d44513e" @@ -453,6 +458,30 @@ optionalDependencies: "@node-rs/crc32" "^1.6.0" +"@chainsafe/hashtree-darwin-arm64@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@chainsafe/hashtree-darwin-arm64/-/hashtree-darwin-arm64-1.0.1.tgz#e2c60090c56a1c8dc8bdff329856184ad32e4cd5" + integrity sha512-+KmEgQMpO7FDL3klAcpXbQ4DPZvfCe0qSaBBrtT4vLF8V1JGm3sp+j7oibtxtOsLKz7nJMiK1pZExi7vjXu8og== + +"@chainsafe/hashtree-linux-arm64-gnu@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@chainsafe/hashtree-linux-arm64-gnu/-/hashtree-linux-arm64-gnu-1.0.1.tgz#49d2604a6c9106219448af3eaf76f4da6e44daca" + integrity sha512-p1hnhGq2aFY+Zhdn1Q6L/6yLYNKjqXfn/Pc8jiM0e3+Lf/hB+yCdqYVu1pto26BrZjugCFZfupHaL4DjUTDttw== + +"@chainsafe/hashtree-linux-x64-gnu@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@chainsafe/hashtree-linux-x64-gnu/-/hashtree-linux-x64-gnu-1.0.1.tgz#31c5a2bb196b78f04f2bf4bfb5c1bf1f3331f071" + integrity sha512-uCIGuUWuWV0LiB4KLMy6JFa7Jp6NmPl3hKF5BYWu8TzUBe7vSXMZfqTzGxXPggFYN2/0KymfRdG9iDCOJfGRqg== + +"@chainsafe/hashtree@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@chainsafe/hashtree/-/hashtree-1.0.1.tgz#587666a261e1da6a37904095ce875fddc53c7c89" + integrity sha512-bleu9FjqBeR/l6W1u2Lz+HsS0b0LLJX2eUt3hOPBN7VqOhidx8wzkVh2S7YurS+iTQtfdK4K5QU9tcTGNrGwDg== + optionalDependencies: + "@chainsafe/hashtree-darwin-arm64" "1.0.1" + "@chainsafe/hashtree-linux-arm64-gnu" "1.0.1" + "@chainsafe/hashtree-linux-x64-gnu" "1.0.1" + "@chainsafe/is-ip@^2.0.1": version "2.0.1" resolved "https://registry.yarnpkg.com/@chainsafe/is-ip/-/is-ip-2.0.1.tgz#62cb285669d91f88fd9fa285048dde3882f0993b" @@ -530,6 +559,15 @@ dependencies: "@chainsafe/is-ip" "^2.0.1" 
+"@chainsafe/persistent-merkle-tree@0.8.0", "@chainsafe/persistent-merkle-tree@^0.8.0": + version "0.8.0" + resolved "https://registry.yarnpkg.com/@chainsafe/persistent-merkle-tree/-/persistent-merkle-tree-0.8.0.tgz#18e2f0a5de3a0b59c6e5be8797a78e0d209dd7dc" + integrity sha512-hh6C1JO6SKlr0QGNTNtTLqgGVMA/Bc20wD6CeMHp+wqbFKCULRJuBUxhF4WDx/7mX8QlqF3nFriF/Eo8oYJ4/A== + dependencies: + "@chainsafe/as-sha256" "0.5.0" + "@chainsafe/hashtree" "1.0.1" + "@noble/hashes" "^1.3.0" + "@chainsafe/persistent-merkle-tree@^0.6.1": version "0.6.1" resolved "https://registry.yarnpkg.com/@chainsafe/persistent-merkle-tree/-/persistent-merkle-tree-0.6.1.tgz#37bde25cf6cbe1660ad84311aa73157dc86ec7f2" @@ -538,14 +576,6 @@ "@chainsafe/as-sha256" "^0.4.1" "@noble/hashes" "^1.3.0" -"@chainsafe/persistent-merkle-tree@^0.7.1": - version "0.7.1" - resolved "https://registry.yarnpkg.com/@chainsafe/persistent-merkle-tree/-/persistent-merkle-tree-0.7.1.tgz#bfe6b3f4296ed3a578bb4fe69f9a7c232822a1dc" - integrity sha512-GUomb8DgkbHyKVBoLY9wMBe67oyAK9HKMjPImIocGOJuwqqxvDbVwh/ihdzyOrhEyhISqD/TxhCBDEXzLM52Vg== - dependencies: - "@chainsafe/as-sha256" "^0.4.1" - "@noble/hashes" "^1.3.0" - "@chainsafe/persistent-ts@^0.19.1": version "0.19.1" resolved "https://registry.npmjs.org/@chainsafe/persistent-ts/-/persistent-ts-0.19.1.tgz" @@ -556,6 +586,42 @@ resolved "https://registry.yarnpkg.com/@chainsafe/prometheus-gc-stats/-/prometheus-gc-stats-1.0.2.tgz#585f8f1555251db156d7e50ef8c86dd4f3e78f70" integrity sha512-h3mFKduSX85XMVbOdWOYvx9jNq99jGcRVNyW5goGOqju1CsI+ZJLhu5z4zBb/G+ksL0R4uLVulu/mIMe7Y0rNg== +"@chainsafe/pubkey-index-map-darwin-arm64@2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@chainsafe/pubkey-index-map-darwin-arm64/-/pubkey-index-map-darwin-arm64-2.0.0.tgz#e468e772787f2411ecab8e8316da6c801356b72d" + integrity sha512-7eROFdQvwN1b0zJy0YJd1wBSv8j+Sp8tc3HsyaLQvjX7w93LcPPe+2Y5QpMkECBFzD2BcvKFpYxIvkDzV2v8rw== + +"@chainsafe/pubkey-index-map-darwin-x64@2.0.0": + version "2.0.0" + 
resolved "https://registry.yarnpkg.com/@chainsafe/pubkey-index-map-darwin-x64/-/pubkey-index-map-darwin-x64-2.0.0.tgz#995755f71bcb49e5393a6af122c11a850aef4ce4" + integrity sha512-HfKIV83Y+AOugw0jaeUIHqe4Ikfwo47baFg97fpdcpUwPfWnw4Blej5C1zAyEX2IuUo2S1D450neTBSUgSdNCA== + +"@chainsafe/pubkey-index-map-linux-arm64-gnu@2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@chainsafe/pubkey-index-map-linux-arm64-gnu/-/pubkey-index-map-linux-arm64-gnu-2.0.0.tgz#0c25ffb451d9861515e26e68006aa08e18ebc42d" + integrity sha512-t7Tdy+m9lZF2gqs0LmxFTAztNe6tDuSxje0xS8LTYanBSWQ6ADbWjTxcp/63yBbIYGzncigePZG2iis9nxB95Q== + +"@chainsafe/pubkey-index-map-linux-x64-gnu@2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@chainsafe/pubkey-index-map-linux-x64-gnu/-/pubkey-index-map-linux-x64-gnu-2.0.0.tgz#26c3628faaeb1ef9b47952cc03ae209d7e9656d8" + integrity sha512-1DKoITe7ZwjClhCBpIZq7SOIOJbUNLxgsFuV7e0ZcBq+tz5UqhKB8SSRzNn7THoo+XRg1mJiDyFPzDKGxHxRkg== + +"@chainsafe/pubkey-index-map-win32-x64-msvc@2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@chainsafe/pubkey-index-map-win32-x64-msvc/-/pubkey-index-map-win32-x64-msvc-2.0.0.tgz#0e35c67ed9dcaaee6ff9e582ed6a733d852473e9" + integrity sha512-hnEZBtTFxTl52lytogexOtzqPQyUKKB28mLbLTZnl2OicsEfNcczJpgF6o1uQ0O0zktAn/m1Tc6/iHmQg2VuhQ== + +"@chainsafe/pubkey-index-map@2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@chainsafe/pubkey-index-map/-/pubkey-index-map-2.0.0.tgz#a8262b353e8335e9acf5e750353a53c55e5cf9be" + integrity sha512-2mVvWrHGApF3mPS7ecp8k3dI/C3QF5824bpQNSRWDsmZEU9H3HzITIj256v14QiB+22MIitpWkBc6hl2bjhJ+Q== + optionalDependencies: + "@chainsafe/pubkey-index-map-darwin-arm64" "2.0.0" + "@chainsafe/pubkey-index-map-darwin-x64" "2.0.0" + "@chainsafe/pubkey-index-map-linux-arm64-gnu" "2.0.0" + "@chainsafe/pubkey-index-map-linux-x64-gnu" "2.0.0" + "@chainsafe/pubkey-index-map-win32-x64-msvc" "2.0.0" + "@chainsafe/snappy-wasm@^0.5.0": version "0.5.0" resolved 
"https://registry.yarnpkg.com/@chainsafe/snappy-wasm/-/snappy-wasm-0.5.0.tgz#067e534341ef746706e2dbf255bd7604c849be73" @@ -569,13 +635,13 @@ "@chainsafe/as-sha256" "^0.4.1" "@chainsafe/persistent-merkle-tree" "^0.6.1" -"@chainsafe/ssz@^0.15.1": - version "0.15.1" - resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.15.1.tgz#008a711c3bcdc0d207cd4be15108870b0b1c60c0" - integrity sha512-f09UKTyYwWA1nr1BwrwsFpkXMspDDIZtwWXK1pM5mpPMnexmuPVstnN+P0M4YJ2aHcfqJXG7QOqnOwGj5Z7bUw== +"@chainsafe/ssz@^0.17.1": + version "0.17.1" + resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.17.1.tgz#7986afbcad5e6971006d596fdb7dfa34bc195131" + integrity sha512-1ay46QqYcVTBvUnDXTPTi5WTiENu7tIxpZGMDpUWps1/nYBmh/We/UoCF/jO+o/fkcDD3p8xQPlHbcCfy+jyjA== dependencies: - "@chainsafe/as-sha256" "^0.4.1" - "@chainsafe/persistent-merkle-tree" "^0.7.1" + "@chainsafe/as-sha256" "0.5.0" + "@chainsafe/persistent-merkle-tree" "0.8.0" "@chainsafe/threads@^1.11.1": version "1.11.1" @@ -1260,108 +1326,104 @@ "@ethersproject/properties" "^5.7.0" "@ethersproject/strings" "^5.7.0" -"@fastify/accept-negotiator@^1.0.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@fastify/accept-negotiator/-/accept-negotiator-1.1.0.tgz#c1c66b3b771c09742a54dd5bc87c582f6b0630ff" - integrity sha512-OIHZrb2ImZ7XG85HXOONLcJWGosv7sIvM2ifAPQVhg9Lv7qdmMBNVaai4QTdyuaqbKM5eO6sLSQOYI7wEQeCJQ== +"@fastify/accept-negotiator@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@fastify/accept-negotiator/-/accept-negotiator-2.0.0.tgz#efce76b4d658e7ee669e681c2d79bffc9a654fdb" + integrity sha512-/Sce/kBzuTxIq5tJh85nVNOq9wKD8s+viIgX0fFMDBdw95gnpf53qmF1oBgJym3cPFliWUuSloVg/1w/rH0FcQ== -"@fastify/ajv-compiler@^3.5.0": - version "3.5.0" - resolved "https://registry.yarnpkg.com/@fastify/ajv-compiler/-/ajv-compiler-3.5.0.tgz#459bff00fefbf86c96ec30e62e933d2379e46670" - integrity sha512-ebbEtlI7dxXF5ziNdr05mOY8NnDiPB1XvAlLHctRt/Rc+C3LCOVW5imUVX+mhvUhnNzmPBHewUkOFgGlCxgdAA== 
+"@fastify/ajv-compiler@^4.0.0": + version "4.0.1" + resolved "https://registry.yarnpkg.com/@fastify/ajv-compiler/-/ajv-compiler-4.0.1.tgz#9567b4c09149a0f342e931c7196a8ed9dc292954" + integrity sha512-DxrBdgsjNLP0YM6W5Hd6/Fmj43S8zMKiFJYgi+Ri3htTGAowPVG/tG1wpnWLMjufEnehRivUCKZ1pLDIoZdTuw== dependencies: - ajv "^8.11.0" - ajv-formats "^2.1.1" - fast-uri "^2.0.0" + ajv "^8.12.0" + ajv-formats "^3.0.1" + fast-uri "^3.0.0" -"@fastify/bearer-auth@^9.0.0": - version "9.0.0" - resolved "https://registry.yarnpkg.com/@fastify/bearer-auth/-/bearer-auth-9.0.0.tgz#9a75abcb1d54751dc04e97b37db5363e325c0f90" - integrity sha512-I1egwg1LRdIvhjL/P+3UEfyK7A3YTnN3goTyf8MJ+v7vVkwyyd8ieccFKI0SzEuxMmfQh/p4yNyLZWcMVwpInA== +"@fastify/bearer-auth@^10.0.1": + version "10.0.1" + resolved "https://registry.yarnpkg.com/@fastify/bearer-auth/-/bearer-auth-10.0.1.tgz#893466052fa566c24eb4f44a3c6aacb50db96ecd" + integrity sha512-i2snRkAJsMmfFcsRS/fFIovcLL3WeZtxJP9pprx2NvB8N/l+fjMNmKeWWyX0hDS2Q0zEPqLz/G0DK92nqJYAJQ== dependencies: - fastify-plugin "^4.0.0" + "@fastify/error" "^4.0.0" + fastify-plugin "^5.0.0" "@fastify/busboy@^2.0.0": version "2.1.1" resolved "https://registry.yarnpkg.com/@fastify/busboy/-/busboy-2.1.1.tgz#b9da6a878a371829a0502c9b6c1c143ef6663f4d" integrity sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA== -"@fastify/cors@^8.2.1": - version "8.2.1" - resolved "https://registry.yarnpkg.com/@fastify/cors/-/cors-8.2.1.tgz#dd348162bcbfb87dff4b492e2bef32d41244006a" - integrity sha512-2H2MrDD3ea7g707g1CNNLWb9/tYbmw7HS+MK2SDcgjxwzbOFR93JortelTIO8DBFsZqFtEpKNxiZfSyrGgYcbw== +"@fastify/cors@^10.0.1": + version "10.0.1" + resolved "https://registry.yarnpkg.com/@fastify/cors/-/cors-10.0.1.tgz#c208fa5f672db31a8383400349e9852762903d64" + integrity sha512-O8JIf6448uQbOgzSkCqhClw6gFTAqrdfeA6R3fc/3gwTJGUp7gl8/3tbNB+6INuu4RmgVOq99BmvdGbtu5pgOA== dependencies: - fastify-plugin "^4.0.0" - mnemonist "0.39.5" + fastify-plugin "^5.0.0" + mnemonist "0.39.8" 
-"@fastify/deepmerge@^1.0.0": - version "1.3.0" - resolved "https://registry.yarnpkg.com/@fastify/deepmerge/-/deepmerge-1.3.0.tgz#8116858108f0c7d9fd460d05a7d637a13fe3239a" - integrity sha512-J8TOSBq3SoZbDhM9+R/u77hP93gz/rajSA+K2kGyijPpORPWUXHUpTaleoj+92As0S9uPRP7Oi8IqMf0u+ro6A== - -"@fastify/error@^3.3.0", "@fastify/error@^3.4.0": - version "3.4.1" - resolved "https://registry.yarnpkg.com/@fastify/error/-/error-3.4.1.tgz#b14bb4cac3dd4ec614becbc643d1511331a6425c" - integrity sha512-wWSvph+29GR783IhmvdwWnN4bUxTD01Vm5Xad4i7i1VuAOItLvbPAb69sb0IQ2N57yprvhNIwAP5B6xfKTmjmQ== +"@fastify/error@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@fastify/error/-/error-4.0.0.tgz#7842d6161fbce78953638318be99033a0c2d5070" + integrity sha512-OO/SA8As24JtT1usTUTKgGH7uLvhfwZPwlptRi2Dp5P4KKmJI3gvsZ8MIHnNwDs4sLf/aai5LzTyl66xr7qMxA== -"@fastify/fast-json-stringify-compiler@^4.3.0": - version "4.3.0" - resolved "https://registry.yarnpkg.com/@fastify/fast-json-stringify-compiler/-/fast-json-stringify-compiler-4.3.0.tgz#5df89fa4d1592cbb8780f78998355feb471646d5" - integrity sha512-aZAXGYo6m22Fk1zZzEUKBvut/CIIQe/BapEORnxiD5Qr0kPHqqI69NtEMCme74h+at72sPhbkb4ZrLd1W3KRLA== +"@fastify/fast-json-stringify-compiler@^5.0.0": + version "5.0.1" + resolved "https://registry.yarnpkg.com/@fastify/fast-json-stringify-compiler/-/fast-json-stringify-compiler-5.0.1.tgz#659c74f3181fb4f984fe27dcc95d14366ae85ca0" + integrity sha512-f2d3JExJgFE3UbdFcpPwqNUEoHWmt8pAKf8f+9YuLESdefA0WgqxeT6DrGL4Yrf/9ihXNSKOqpjEmurV405meA== dependencies: - fast-json-stringify "^5.7.0" + fast-json-stringify "^6.0.0" -"@fastify/merge-json-schemas@^0.1.0": +"@fastify/merge-json-schemas@^0.1.1": version "0.1.1" resolved "https://registry.yarnpkg.com/@fastify/merge-json-schemas/-/merge-json-schemas-0.1.1.tgz#3551857b8a17a24e8c799e9f51795edb07baa0bc" integrity sha512-fERDVz7topgNjtXsJTTW1JKLy0rhuLRcquYqNR9rF7OcVpCa2OVW49ZPDIhaRRCaUuvVxI+N416xUoF76HNSXA== dependencies: fast-deep-equal "^3.1.3" -"@fastify/send@^2.0.0": - 
version "2.1.0" - resolved "https://registry.yarnpkg.com/@fastify/send/-/send-2.1.0.tgz#1aa269ccb4b0940a2dadd1f844443b15d8224ea0" - integrity sha512-yNYiY6sDkexoJR0D8IDy3aRP3+L4wdqCpvx5WP+VtEU58sn7USmKynBzDQex5X42Zzvw2gNzzYgP90UfWShLFA== +"@fastify/send@^3.1.0": + version "3.1.1" + resolved "https://registry.yarnpkg.com/@fastify/send/-/send-3.1.1.tgz#455a8fa56ae005c4c387ddf111364f346b848117" + integrity sha512-LdiV2mle/2tH8vh6GwGl0ubfUAgvY+9yF9oGI1iiwVyNUVOQamvw5n+OFu6iCNNoyuCY80FFURBn4TZCbTe8LA== dependencies: - "@lukeed/ms" "^2.0.1" + "@lukeed/ms" "^2.0.2" escape-html "~1.0.3" fast-decode-uri-component "^1.0.1" - http-errors "2.0.0" - mime "^3.0.0" - -"@fastify/static@^6.0.0": - version "6.11.2" - resolved "https://registry.yarnpkg.com/@fastify/static/-/static-6.11.2.tgz#1fe40c40daf055a28d29db807b459fcff431d9b6" - integrity sha512-EH7mh7q4MfNdT7N07ZVlwsX/ObngMvQ7KBP0FXAuPov99Fjn80KSJMdxQhhYKAKWW1jXiFdrk8X7d6uGWdZFxg== - dependencies: - "@fastify/accept-negotiator" "^1.0.0" - "@fastify/send" "^2.0.0" - content-disposition "^0.5.3" - fastify-plugin "^4.0.0" - glob "^8.0.1" - p-limit "^3.1.0" + http-errors "^2.0.0" + mime "^3" -"@fastify/swagger-ui@^1.9.3": - version "1.9.3" - resolved "https://registry.yarnpkg.com/@fastify/swagger-ui/-/swagger-ui-1.9.3.tgz#1ec03ea2595cb2e7d6de6ae7c949bebcff8370a5" - integrity sha512-YYqce4CydjDIEry6Zo4JLjVPe5rjS8iGnk3fHiIQnth9sFSLeyG0U1DCH+IyYmLddNDg1uWJOuErlVqnu/jI3w== +"@fastify/static@^8.0.0": + version "8.0.1" + resolved "https://registry.yarnpkg.com/@fastify/static/-/static-8.0.1.tgz#137059a4625c64cce8ee7eb513961c5e23018805" + integrity sha512-7idyhbcgf14v4bjWzUeHEFvnVxvNJ1n5cyGPgFtwTZjnjUQ1wgC7a2FQai7OGKqCKywDEjzbPhAZRW+uEK1LMg== dependencies: - "@fastify/static" "^6.0.0" - fastify-plugin "^4.0.0" - openapi-types "^12.0.2" - rfdc "^1.3.0" - yaml "^2.2.2" + "@fastify/accept-negotiator" "^2.0.0" + "@fastify/send" "^3.1.0" + content-disposition "^0.5.4" + fastify-plugin "^5.0.0" + fastq "^1.17.1" + glob "^11.0.0" 
-"@fastify/swagger@^8.10.0": - version "8.10.0" - resolved "https://registry.yarnpkg.com/@fastify/swagger/-/swagger-8.10.0.tgz#d978ae9f2d802ab652955d02be7a125f7f6d9f05" - integrity sha512-0o6nd0qWpJbVSv/vbK4bzHSYe7l+PTGPqrQVwWIXVGd7CvXr585SBx+h8EgrMOY80bcOnGreqnjYFOV0osGP5A== +"@fastify/swagger-ui@^5.0.1": + version "5.0.1" + resolved "https://registry.yarnpkg.com/@fastify/swagger-ui/-/swagger-ui-5.0.1.tgz#76c348bbaf7e49e3cfb62ebe3cc3fb15ef0eefb3" + integrity sha512-nCDV5l0OTziK8nIeHaLZ30ENFFftZ4Pcs7GHDcqOO6Jp3qSnyOsqBg1/EosM+d1mrCvH4vSlM09xolkjrbuJQQ== + dependencies: + "@fastify/static" "^8.0.0" + fastify-plugin "^5.0.0" + openapi-types "^12.1.3" + rfdc "^1.3.1" + yaml "^2.4.1" + +"@fastify/swagger@^9.0.0": + version "9.0.0" + resolved "https://registry.yarnpkg.com/@fastify/swagger/-/swagger-9.0.0.tgz#a6013ee3cf4ec0f2562e1455face9eb6ef787d89" + integrity sha512-E7TQbBCbhvS2djGLxJ7t2OFbhc2F+KCsOZCNhh6xQIlJxq9H4ZR5KuLKG+vn6COVqkLxRVUOZ9qtbbzdf5Jfqw== dependencies: - fastify-plugin "^4.0.0" + fastify-plugin "^5.0.0" json-schema-resolver "^2.0.0" - openapi-types "^12.0.0" - rfdc "^1.3.0" - yaml "^2.2.2" + openapi-types "^12.1.3" + rfdc "^1.3.1" + yaml "^2.4.2" "@gar/promisify@^1.0.1", "@gar/promisify@^1.1.3": version "1.1.3" @@ -1832,10 +1894,10 @@ race-signal "^1.0.2" uint8arraylist "^2.4.8" -"@lukeed/ms@^2.0.1": - version "2.0.1" - resolved "https://registry.yarnpkg.com/@lukeed/ms/-/ms-2.0.1.tgz#3c2bbc258affd9cc0e0cc7828477383c73afa6ee" - integrity sha512-Xs/4RZltsAL7pkvaNStUQt7netTkyxrS0K+RILcVr3TRMS/ToOg4I6uNfhB9SlGsnWBym4U+EaXq0f0cEMNkHA== +"@lukeed/ms@^2.0.2": + version "2.0.2" + resolved "https://registry.yarnpkg.com/@lukeed/ms/-/ms-2.0.2.tgz#07f09e59a74c52f4d88c6db5c1054e819538e2a8" + integrity sha512-9I2Zn6+NJLfaGoz9jN3lpwDgAYvfGeNYdbAIjJOqzs4Tpc+VU3Jqq4IofSUBKajiDS8k9fZIg18/z13mpk1bsA== "@microsoft/api-extractor-model@7.28.13": version "7.28.13" @@ -2611,15 +2673,15 @@ integrity 
sha512-2LuNTFBIO0m7kKIQvvPHN6UE63VjpmL9rnEEaOOaiSPbZK+zUOYIzBAWcED+3XYzhYsd/0mD57VdxAEqqV52CQ== "@puppeteer/browsers@1.4.6", "@puppeteer/browsers@^1.6.0", "@puppeteer/browsers@^2.1.0": - version "2.2.4" - resolved "https://registry.yarnpkg.com/@puppeteer/browsers/-/browsers-2.2.4.tgz#4307245d881aa5a79743050be66568bad0f6ffbb" - integrity sha512-BdG2qiI1dn89OTUUsx2GZSpUzW+DRffR1wlMJyKxVHYrhnKoELSDxDd+2XImUkuWPEKk76H5FcM/gPFrEK1Tfw== + version "2.3.0" + resolved "https://registry.yarnpkg.com/@puppeteer/browsers/-/browsers-2.3.0.tgz#791ea7d80450fea24eb19fb1d70c367ad4e08cae" + integrity sha512-ioXoq9gPxkss4MYhD+SFaU9p1IHFUX0ILAWFPyjGaBdjLsYAlZw6j1iLA0N/m12uVHLFDfSYNF7EQccjinIMDA== dependencies: debug "^4.3.5" extract-zip "^2.0.1" progress "^2.0.3" proxy-agent "^6.4.0" - semver "^7.6.2" + semver "^7.6.3" tar-fs "^3.0.6" unbzip2-stream "^1.4.3" yargs "^17.7.2" @@ -3862,10 +3924,10 @@ aggregate-error@^3.0.0: clean-stack "^2.0.0" indent-string "^4.0.0" -ajv-formats@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" - integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== +ajv-formats@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-3.0.1.tgz#3d5dc762bca17679c3c2ea7e90ad6b7532309578" + integrity sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ== dependencies: ajv "^8.0.0" @@ -3879,7 +3941,7 @@ ajv@^6.12.4, ajv@~6.12.6: json-schema-traverse "^0.4.1" uri-js "^4.2.2" -ajv@^8.0.0, ajv@^8.10.0, ajv@^8.11.0, ajv@^8.12.0: +ajv@^8.0.0, ajv@^8.12.0: version "8.12.0" resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.12.0.tgz#d1a0527323e22f53562c567c00991577dfbe19d1" integrity sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA== @@ -4035,11 +4097,6 @@ archiver@^7.0.0: tar-stream "^3.0.0" zip-stream "^6.0.1" 
-archy@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz" - integrity sha1-+cjBN1fMHde8N5rHeyxipcKGjEA= - are-we-there-yet@^3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-3.0.0.tgz" @@ -4262,14 +4319,12 @@ available-typed-arrays@^1.0.5: resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7" integrity sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw== -avvio@^8.3.0: - version "8.3.0" - resolved "https://registry.yarnpkg.com/avvio/-/avvio-8.3.0.tgz#1e019433d935730b814978a583eefac41a65082f" - integrity sha512-VBVH0jubFr9LdFASy/vNtm5giTrnbVquWBhT0fyizuNK2rQ7e7ONU2plZQWUNqtE1EmxFEb+kbSkFRkstiaS9Q== +avvio@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/avvio/-/avvio-9.0.0.tgz#3ae02fb318377006e0e06a3f47842c98d8668607" + integrity sha512-UbYrOXgE/I+knFG+3kJr9AgC7uNo8DG+FGGODpH9Bj1O1kL/QDjBXnTem9leD3VdQKtaHjV3O85DQ7hHh4IIHw== dependencies: - "@fastify/error" "^3.3.0" - archy "^1.0.0" - debug "^4.0.0" + "@fastify/error" "^4.0.0" fastq "^1.17.1" aws-sdk@^2.932.0: @@ -4288,11 +4343,11 @@ aws-sdk@^2.932.0: xml2js "0.4.19" axios@^1.0.0, axios@^1.3.4: - version "1.6.0" - resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.0.tgz#f1e5292f26b2fd5c2e66876adc5b06cdbd7d2102" - integrity sha512-EZ1DYihju9pwVB+jg67ogm+Tmqc6JmhamRN6I4Zt8DfZu5lbcQGw3ozH9lFejSJgs/ibaef3A9PMXPLeefFGJg== + version "1.7.4" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.7.4.tgz#4c8ded1b43683c8dd362973c393f3ede24052aa2" + integrity sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw== dependencies: - follow-redirects "^1.15.0" + follow-redirects "^1.15.6" form-data "^4.0.0" proxy-from-env "^1.1.0" @@ -4481,7 +4536,14 @@ brace-expansion@^2.0.1: dependencies: balanced-match "^1.0.0" -braces@^3.0.2, braces@~3.0.2: 
+braces@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789" + integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA== + dependencies: + fill-range "^7.1.1" + +braces@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== @@ -5204,7 +5266,7 @@ constants-browserify@^1.0.0: resolved "https://registry.npmjs.org/constants-browserify/-/constants-browserify-1.0.0.tgz" integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U= -content-disposition@^0.5.3: +content-disposition@^0.5.4: version "0.5.4" resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== @@ -5526,7 +5588,7 @@ de-indent@^1.0.2: resolved "https://registry.yarnpkg.com/de-indent/-/de-indent-1.0.2.tgz#b2038e846dc33baa5796128d0804b455b8c1e21d" integrity sha512-e/1zu3xH5MQryN2zdVaF0OrdNLUbvWxzMbi+iNA6Bky7l1RoP8a2fIbRocyHclXt/arDrrR6lL3TqFD9pMQTsg== -debug@4, debug@4.3.4, debug@^4.0.0, debug@^4.1.0, debug@^4.1.1, debug@^4.2.0, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3, debug@^4.3.4: +debug@4, debug@4.3.4, debug@^4.1.0, debug@^4.1.1, debug@^4.2.0, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3, debug@^4.3.4: version "4.3.4" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== @@ -6536,11 +6598,6 @@ extract-zip@^2.0.1: optionalDependencies: "@types/yauzl" "^2.9.1" -fast-content-type-parse@^1.1.0: - version "1.1.0" - resolved 
"https://registry.yarnpkg.com/fast-content-type-parse/-/fast-content-type-parse-1.1.0.tgz#4087162bf5af3294d4726ff29b334f72e3a1092c" - integrity sha512-fBHHqSTFLVnR61C+gltJuE5GkVQMV0S2nqUO8TJ+5Z3qAKG8vAx4FKai1s5jq/inV1+sREynIWSuQ6HgoSXpDQ== - fast-decode-uri-component@^1.0.1: version "1.0.1" resolved "https://registry.npmjs.org/fast-decode-uri-component/-/fast-decode-uri-component-1.0.1.tgz" @@ -6582,28 +6639,16 @@ fast-json-stable-stringify@^2.0.0: resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== -fast-json-stringify@^5.7.0: - version "5.7.0" - resolved "https://registry.yarnpkg.com/fast-json-stringify/-/fast-json-stringify-5.7.0.tgz#b0a04c848fdeb6ecd83440c71a4db35067023bed" - integrity sha512-sBVPTgnAZseLu1Qgj6lUbQ0HfjFhZWXAmpZ5AaSGkyLh5gAXBga/uPJjQPHpDFjC9adWIpdOcCLSDTgrZ7snoQ== - dependencies: - "@fastify/deepmerge" "^1.0.0" - ajv "^8.10.0" - ajv-formats "^2.1.1" - fast-deep-equal "^3.1.3" - fast-uri "^2.1.0" - rfdc "^1.2.0" - -fast-json-stringify@^5.8.0: - version "5.13.0" - resolved "https://registry.yarnpkg.com/fast-json-stringify/-/fast-json-stringify-5.13.0.tgz#3eafc02168713ef934d75000a8cf749492729fe8" - integrity sha512-XjTDWKHP3GoMQUOfnjYUbqeHeEt+PvYgvBdG2fRSmYaORILbSr8xTJvZX+w1YSAP5pw2NwKrGRmQleYueZEoxw== +fast-json-stringify@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/fast-json-stringify/-/fast-json-stringify-6.0.0.tgz#15c5e85b567ead695773bf55938b56aaaa57d805" + integrity sha512-FGMKZwniMTgZh7zQp9b6XnBVxUmKVahQLQeRQHqwYmPDqDhcEKZ3BaQsxelFFI5PY7nN71OEeiL47/zUWcYe1A== dependencies: - "@fastify/merge-json-schemas" "^0.1.0" - ajv "^8.10.0" - ajv-formats "^2.1.1" + "@fastify/merge-json-schemas" "^0.1.1" + ajv "^8.12.0" + ajv-formats "^3.0.1" fast-deep-equal "^3.1.3" - fast-uri "^2.1.0" + fast-uri "^2.3.0" json-schema-ref-resolver 
"^1.0.1" rfdc "^1.2.0" @@ -6629,37 +6674,41 @@ fast-safe-stringify@^2.1.1: resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz#c406a83b6e70d9e35ce3b30a81141df30aeba884" integrity sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA== -fast-uri@^2.0.0, fast-uri@^2.1.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/fast-uri/-/fast-uri-2.2.0.tgz#519a0f849bef714aad10e9753d69d8f758f7445a" - integrity sha512-cIusKBIt/R/oI6z/1nyfe2FvGKVTohVRfvkOhvx0nCEW+xf5NoCXjAHcWp93uOUBchzYcsvPlrapAdX1uW+YGg== +fast-uri@^2.3.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/fast-uri/-/fast-uri-2.4.0.tgz#67eae6fbbe9f25339d5d3f4c4234787b65d7d55e" + integrity sha512-ypuAmmMKInk5q7XcepxlnUWDLWv4GFtaJqAzWKqn62IpQ3pejtr5dTVbt3vwqVaMKmkNR55sTT+CqUKIaT21BA== -fastify-plugin@^4.0.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/fastify-plugin/-/fastify-plugin-4.5.0.tgz#8b853923a0bba6ab6921bb8f35b81224e6988d91" - integrity sha512-79ak0JxddO0utAXAQ5ccKhvs6vX2MGyHHMMsmZkBANrq3hXc1CHzvNPHOcvTsVMEPl5I+NT+RO4YKMGehOfSIg== +fast-uri@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/fast-uri/-/fast-uri-3.0.1.tgz#cddd2eecfc83a71c1be2cc2ef2061331be8a7134" + integrity sha512-MWipKbbYiYI0UC7cl8m/i/IWTqfC8YXsqjzybjddLsFjStroQzsHXkc73JutMvBiXmOvapk+axIl79ig5t55Bw== -fastify@^4.27.0: - version "4.27.0" - resolved "https://registry.yarnpkg.com/fastify/-/fastify-4.27.0.tgz#e4a9b2a0a7b9efaeaf1140d47fdd4f91b5fcacb1" - integrity sha512-ci9IXzbigB8dyi0mSy3faa3Bsj0xWAPb9JeT4KRzubdSb6pNhcADRUaXCBml6V1Ss/a05kbtQls5LBmhHydoTA== +fastify-plugin@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/fastify-plugin/-/fastify-plugin-5.0.1.tgz#82d44e6fe34d1420bb5a4f7bee434d501e41939f" + integrity sha512-HCxs+YnRaWzCl+cWRYFnHmeRFyR5GVnJTAaCJQiYzQSDwK9MgJdyAsuL3nh0EWRCYMgQ5MeziymvmAhUHYHDUQ== + +fastify@^5.0.0: + version "5.0.0" + resolved 
"https://registry.yarnpkg.com/fastify/-/fastify-5.0.0.tgz#f8f80bd741bde2de1997c25dbe31e61c91978111" + integrity sha512-Qe4dU+zGOzg7vXjw4EvcuyIbNnMwTmcuOhlOrOJsgwzvjEZmsM/IeHulgJk+r46STjdJS/ZJbxO8N70ODXDMEQ== dependencies: - "@fastify/ajv-compiler" "^3.5.0" - "@fastify/error" "^3.4.0" - "@fastify/fast-json-stringify-compiler" "^4.3.0" + "@fastify/ajv-compiler" "^4.0.0" + "@fastify/error" "^4.0.0" + "@fastify/fast-json-stringify-compiler" "^5.0.0" abstract-logging "^2.0.1" - avvio "^8.3.0" - fast-content-type-parse "^1.1.0" - fast-json-stringify "^5.8.0" - find-my-way "^8.0.0" - light-my-request "^5.11.0" + avvio "^9.0.0" + fast-json-stringify "^6.0.0" + find-my-way "^9.0.0" + light-my-request "^6.0.0" pino "^9.0.0" - process-warning "^3.0.0" + process-warning "^4.0.0" proxy-addr "^2.0.7" - rfdc "^1.3.0" + rfdc "^1.3.1" secure-json-parse "^2.7.0" - semver "^7.5.4" - toad-cache "^3.3.0" + semver "^7.6.0" + toad-cache "^3.7.0" fastq@^1.17.1: version "1.17.1" @@ -6743,14 +6792,21 @@ fill-range@^7.0.1: dependencies: to-regex-range "^5.0.1" -find-my-way@^8.0.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/find-my-way/-/find-my-way-8.1.0.tgz#cc05e8e4b145322299d0de0a839b5be528c2083e" - integrity sha512-41QwjCGcVTODUmLLqTMeoHeiozbMXYMAE1CKFiDyi9zVZ2Vjh0yz3MF0WQZoIb+cmzP/XlbFjlF2NtJmvZHznA== +fill-range@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.1.1.tgz#44265d3cac07e3ea7dc247516380643754a05292" + integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg== + dependencies: + to-regex-range "^5.0.1" + +find-my-way@^9.0.0: + version "9.0.1" + resolved "https://registry.yarnpkg.com/find-my-way/-/find-my-way-9.0.1.tgz#991c3a7af36734480d48cd4ad0889ed168ed6c40" + integrity sha512-/5NN/R0pFWuff16TMajeKt2JyiW+/OE8nOO8vo1DwZTxLaIURb7lcBYPIgRPh61yCNh9l8voeKwcrkUzmB00vw== dependencies: fast-deep-equal "^3.1.3" fast-querystring "^1.0.0" - safe-regex2 "^2.0.0" + safe-regex2 "^4.0.0" 
find-up@5.0.0, find-up@^5.0.0: version "5.0.0" @@ -6806,7 +6862,7 @@ fn.name@1.x.x: resolved "https://registry.npmjs.org/fn.name/-/fn.name-1.1.0.tgz" integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw== -follow-redirects@^1.0.0, follow-redirects@^1.15.0: +follow-redirects@^1.0.0, follow-redirects@^1.15.6: version "1.15.6" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b" integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA== @@ -7032,7 +7088,7 @@ get-caller-file@^2.0.5: resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== -get-func-name@^2.0.0: +get-func-name@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.2.tgz#0d7cf20cd13fda808669ffa88f4ffc7a3943fc41" integrity sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ== @@ -7238,6 +7294,18 @@ glob@^10.4.1: package-json-from-dist "^1.0.0" path-scurry "^1.11.1" +glob@^11.0.0: + version "11.0.0" + resolved "https://registry.yarnpkg.com/glob/-/glob-11.0.0.tgz#6031df0d7b65eaa1ccb9b29b5ced16cea658e77e" + integrity sha512-9UiX/Bl6J2yaBbxKoEBRm4Cipxgok8kQYcOPEhScPwebu2I0HoQOuYdIO6S3hLuWoZgpDpwQZMzTFxgpkyT76g== + dependencies: + foreground-child "^3.1.0" + jackspeak "^4.0.1" + minimatch "^10.0.0" + minipass "^7.1.2" + package-json-from-dist "^1.0.0" + path-scurry "^2.0.0" + glob@^7.1.3, glob@^7.1.4: version "7.2.3" resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" @@ -7534,7 +7602,7 @@ http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0, http-cache-semantics@^ resolved 
"https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a" integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ== -http-errors@2.0.0: +http-errors@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== @@ -7617,15 +7685,7 @@ https-proxy-agent@^5.0.0: agent-base "6" debug "4" -https-proxy-agent@^7.0.2: - version "7.0.2" - resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.2.tgz#e2645b846b90e96c6e6f347fb5b2e41f1590b09b" - integrity sha512-NmLNjm6ucYwtcUmL7JQC1ZQ57LmHP4lT15FQ8D61nak1rO6DH+fz5qNK2Ap5UN4ZapYICE3/0KodcLYSPsPbaA== - dependencies: - agent-base "^7.0.2" - debug "4" - -https-proxy-agent@^7.0.3: +https-proxy-agent@^7.0.2, https-proxy-agent@^7.0.3, https-proxy-agent@^7.0.4: version "7.0.5" resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz#9e8b5013873299e11fab6fd548405da2d6c602b2" integrity sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw== @@ -7633,14 +7693,6 @@ https-proxy-agent@^7.0.3: agent-base "^7.0.2" debug "4" -https-proxy-agent@^7.0.4: - version "7.0.4" - resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz#8e97b841a029ad8ddc8731f26595bad868cb4168" - integrity sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg== - dependencies: - agent-base "^7.0.2" - debug "4" - human-signals@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" @@ -7701,6 +7753,11 @@ ignore@^5.0.4, ignore@^5.1.1, ignore@^5.2.0, ignore@^5.2.4: resolved 
"https://registry.yarnpkg.com/ignore/-/ignore-5.3.0.tgz#67418ae40d34d6999c95ff56016759c718c82f78" integrity sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg== +immutable@^4.3.2: + version "4.3.5" + resolved "https://registry.yarnpkg.com/immutable/-/immutable-4.3.5.tgz#f8b436e66d59f99760dc577f5c99a4fd2a5cc5a0" + integrity sha512-8eabxkth9gZatlwl5TBuJnCsoTADlL6ftEr7A4qgdaTsPyreilDSnUk57SO+jfKcNtxPa22U5KK6DSeAYhpBJw== + import-fresh@^3.2.1, import-fresh@^3.3.0: version "3.3.0" resolved "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz" @@ -8412,6 +8469,15 @@ jackspeak@^3.1.2: optionalDependencies: "@pkgjs/parseargs" "^0.11.0" +jackspeak@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-4.0.1.tgz#9fca4ce961af6083e259c376e9e3541431f5287b" + integrity sha512-cub8rahkh0Q/bw1+GxP7aeSe29hHHn2V4m29nnDlvCdlgU+3UGxkZp7Z53jLUdpX3jdTO0nJZUDl3xvbWc2Xog== + dependencies: + "@isaacs/cliui" "^8.0.2" + optionalDependencies: + "@pkgjs/parseargs" "^0.11.0" + jake@^10.8.5: version "10.8.5" resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.5.tgz#f2183d2c59382cb274226034543b9c03b8164c46" @@ -8801,14 +8867,14 @@ libp2p@1.4.3: multiformats "^13.1.0" uint8arrays "^5.0.3" -light-my-request@^5.11.0: - version "5.12.0" - resolved "https://registry.yarnpkg.com/light-my-request/-/light-my-request-5.12.0.tgz#e42ed02ddbfa587f82031b21459c6841a6948dfa" - integrity sha512-P526OX6E7aeCIfw/9UyJNsAISfcFETghysaWHQAlQYayynShT08MOj4c6fBCvTWBrHXSvqBAKDp3amUPSCQI4w== +light-my-request@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/light-my-request/-/light-my-request-6.0.0.tgz#97c6d0d5448ea2fc37836f0aefe94298f5a87dde" + integrity sha512-kFkFXrmKCL0EEeOmJybMH5amWFd+AFvlvMlvFTRxCUwbhfapZqDmeLMPoWihntnYY6JpoQDE9k+vOzObF1fDqg== dependencies: cookie "^0.6.0" - process-warning "^3.0.0" - set-cookie-parser "^2.4.1" + process-warning "^4.0.0" + set-cookie-parser "^2.6.0" 
lines-and-columns@^1.1.6: version "1.2.4" @@ -9012,11 +9078,11 @@ loglevel@^1.6.0: integrity sha512-tCRIJM51SHjAayKwC+QAg8hT8vg6z7GSgLJKGvzuPb1Wc+hLzqtuVLxp6/HzSPOozuK+8ErAhy7U/sVzw8Dgfg== loupe@^2.3.6, loupe@^3.1.0, loupe@^3.1.1: - version "2.3.6" - resolved "https://registry.yarnpkg.com/loupe/-/loupe-2.3.6.tgz#76e4af498103c532d1ecc9be102036a21f787b53" - integrity sha512-RaPMZKiMy8/JruncMU5Bt6na1eftNoo++R4Y+N2FrxkDVTrGvcyzFTsaGif4QTeKESheMGegbhw6iUAq+5A8zA== + version "2.3.7" + resolved "https://registry.yarnpkg.com/loupe/-/loupe-2.3.7.tgz#6e69b7d4db7d3ab436328013d37d1c8c3540c697" + integrity sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA== dependencies: - get-func-name "^2.0.0" + get-func-name "^2.0.1" lowercase-keys@^2.0.0: version "2.0.0" @@ -9038,6 +9104,11 @@ lru-cache@^10.2.0: resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.4.3.tgz#410fc8a17b70e598013df257c2446b7f3383f119" integrity sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ== +lru-cache@^11.0.0: + version "11.0.1" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-11.0.1.tgz#3a732fbfedb82c5ba7bca6564ad3f42afcb6e147" + integrity sha512-CgeuL5uom6j/ZVrg7G/+1IXqRY8JXX4Hghfy5YE0EhoYQWvndP1kufu58cmZLNIDKnRhZrXfdS9urVWx98AipQ== + lru-cache@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" @@ -9260,11 +9331,11 @@ micro-ftch@^0.3.1: integrity sha512-/0LLxhzP0tfiR5hcQebtudP56gUurs2CLkGarnCiB/OqEyUFQ6U3paQi/tgLv0hBJYt2rnr9MNpxz4fiiugstg== micromatch@^4.0.4: - version "4.0.5" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" - integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + version "4.0.8" + resolved 
"https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.8.tgz#d66fa18f3a47076789320b9b1af32bd86d9fa202" + integrity sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA== dependencies: - braces "^3.0.2" + braces "^3.0.3" picomatch "^2.3.1" miller-rabin@^4.0.0: @@ -9292,7 +9363,7 @@ mime@2.6.0: resolved "https://registry.yarnpkg.com/mime/-/mime-2.6.0.tgz#a2a682a95cd4d0cb1d6257e28f83da7e35800367" integrity sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg== -mime@^3.0.0: +mime@^3: version "3.0.0" resolved "https://registry.yarnpkg.com/mime/-/mime-3.0.0.tgz#b374550dca3a0c18443b0c950a6a58f1931cf7a7" integrity sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A== @@ -9358,6 +9429,13 @@ minimatch@9.0.3, minimatch@^9.0.0, minimatch@^9.0.1: dependencies: brace-expansion "^2.0.1" +minimatch@^10.0.0: + version "10.0.1" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-10.0.1.tgz#ce0521856b453c86e25f2c4c0d03e6ff7ddc440b" + integrity sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ== + dependencies: + brace-expansion "^2.0.1" + minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: version "3.1.2" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" @@ -9540,10 +9618,10 @@ mkdirp@^1.0.3, mkdirp@^1.0.4: resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== -mnemonist@0.39.5: - version "0.39.5" - resolved "https://registry.yarnpkg.com/mnemonist/-/mnemonist-0.39.5.tgz#5850d9b30d1b2bc57cc8787e5caa40f6c3420477" - integrity sha512-FPUtkhtJ0efmEFGpU14x7jGbTB+s18LrzRL2KgoWz9YvcY3cPomz8tih01GbHwnGk/OmkOKfqd/RAQoc8Lm7DQ== +mnemonist@0.39.8: + version "0.39.8" + resolved 
"https://registry.yarnpkg.com/mnemonist/-/mnemonist-0.39.8.tgz#9078cd8386081afd986cca34b52b5d84ea7a4d38" + integrity sha512-vyWo2K3fjrUw8YeeZ1zF0fy6Mu59RHokURlld8ymdUPjMlD9EC9ov1/YPqTgqRvUN9nTr3Gqfz29LYAmu0PHPQ== dependencies: obliterator "^2.0.1" @@ -9685,12 +9763,7 @@ n12@0.4.0: resolved "https://registry.yarnpkg.com/n12/-/n12-0.4.0.tgz#363058560b435e6857b5e039ed5eab08c5122e5e" integrity sha512-p/hj4zQ8d3pbbFLQuN1K9honUxiDDhueOWyFLw/XgBv+wZCE44bcLH4CIcsolOceJQduh4Jf7m/LfaTxyGmGtQ== -nan@^2.16.0, nan@^2.17.0: - version "2.19.0" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.19.0.tgz#bb58122ad55a6c5bc973303908d5b16cfdd5a8c0" - integrity sha512-nO1xXxfh/RWNxfd/XPfbIfFk5vgLsAxUR9y5O0cHMJu/AW9U95JLXqthYHjEp+8gQ5p96K9jUp8nbVOxCdRbtw== - -nan@^2.19.0: +nan@^2.16.0, nan@^2.17.0, nan@^2.19.0: version "2.20.0" resolved "https://registry.yarnpkg.com/nan/-/nan-2.20.0.tgz#08c5ea813dd54ed16e5bd6505bf42af4f7838ca3" integrity sha512-bk3gXBZDGILuuo/6sKtr0DQmSThYHLtNCdSdXk9YkxD/jK6X2vmCyyXBBxyqZ4XcnzTyYEAThfX3DCEnLf6igw== @@ -10253,7 +10326,7 @@ open@^8.4.0: is-docker "^2.1.1" is-wsl "^2.2.0" -openapi-types@^12.0.0, openapi-types@^12.0.2: +openapi-types@^12.1.3: version "12.1.3" resolved "https://registry.yarnpkg.com/openapi-types/-/openapi-types-12.1.3.tgz#471995eb26c4b97b7bd356aacf7b91b73e777dd3" integrity sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw== @@ -10349,7 +10422,7 @@ p-limit@^2.2.0: dependencies: p-try "^2.0.0" -p-limit@^3.0.2, p-limit@^3.1.0: +p-limit@^3.0.2: version "3.1.0" resolved "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz" integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== @@ -10637,10 +10710,18 @@ path-scurry@^1.11.1: lru-cache "^10.2.0" minipass "^5.0.0 || ^6.0.2 || ^7.0.0" +path-scurry@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-2.0.0.tgz#9f052289f23ad8bf9397a2a0425e7b8615c58580" + integrity 
sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg== + dependencies: + lru-cache "^11.0.0" + minipass "^7.1.2" + path-to-regexp@^6.2.0: - version "6.2.2" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.2.2.tgz#324377a83e5049cbecadc5554d6a63a9a4866b36" - integrity sha512-GQX3SSMokngb36+whdpRXE+3f9V8UzyAorlYvOGx87ufGHehNTn5lCxrKtLyZ4Yl/wEKnNnr98ZzOwwDZV5ogw== + version "6.3.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.3.0.tgz#2b6a26a337737a8e1416f9272ed0766b1c0389f4" + integrity sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ== path-type@^3.0.0: version "3.0.0" @@ -10823,6 +10904,11 @@ process-warning@^3.0.0: resolved "https://registry.yarnpkg.com/process-warning/-/process-warning-3.0.0.tgz#96e5b88884187a1dce6f5c3166d611132058710b" integrity sha512-mqn0kFRl0EoqhnL0GQ0veqFHyIN1yig9RHh/InzORTUiZHFRAur+aMtRkELNwGs9aNwKS6tg/An4NYBPGwvtzQ== +process-warning@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/process-warning/-/process-warning-4.0.0.tgz#581e3a7a1fb456c5f4fd239f76bce75897682d5a" + integrity sha512-/MyYDxttz7DfGMMHiysAsFE4qF+pQYAA8ziO/3NcRVrQ5fSk+Mns4QZA/oRPFzvcqNoVJXQNWNAsdwBXLUkQKw== + process@^0.11.10: version "0.11.10" resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" @@ -11335,10 +11421,10 @@ restore-cursor@^4.0.0: onetime "^5.1.0" signal-exit "^3.0.2" -ret@~0.2.0: - version "0.2.2" - resolved "https://registry.npmjs.org/ret/-/ret-0.2.2.tgz" - integrity sha512-M0b3YWQs7R3Z917WRQy1HHA7Ba7D8hvZg6UE5mLykJxQVE2ju0IXbGlaHPPlkY+WN7wFP+wUMXmBFA0aV6vYGQ== +ret@~0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/ret/-/ret-0.5.0.tgz#30a4d38a7e704bd96dc5ffcbe7ce2a9274c41c95" + integrity sha512-I1XxrZSQ+oErkRR4jYbAyEEu2I0avBvvMM5JN+6EBprOGRCs63ENqZ3vjavq8fBw2+62G5LF5XelKwuJpcvcxw== retry@^0.12.0: version "0.12.0" @@ -11364,10 
+11450,10 @@ rewiremock@^3.14.5: wipe-node-cache "^2.1.2" wipe-webpack-cache "^2.1.0" -rfdc@^1.1.4, rfdc@^1.2.0, rfdc@^1.3.0: - version "1.3.0" - resolved "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz" - integrity sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA== +rfdc@^1.1.4, rfdc@^1.2.0, rfdc@^1.3.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.4.1.tgz#778f76c4fb731d93414e8f925fbecf64cce7f6ca" + integrity sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA== rgb2hex@0.2.5: version "0.2.5" @@ -11527,12 +11613,12 @@ safe-regex-test@^1.0.0: get-intrinsic "^1.1.3" is-regex "^1.1.4" -safe-regex2@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/safe-regex2/-/safe-regex2-2.0.0.tgz" - integrity sha512-PaUSFsUaNNuKwkBijoAPHAK6/eM6VirvyPWlZ7BAQy4D+hCvh4B6lIG+nPdhbFfIbP+gTGBcrdsOaUs0F+ZBOQ== +safe-regex2@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/safe-regex2/-/safe-regex2-4.0.0.tgz#5e04d8362cd4884753c8bce9715d4759a5239c0a" + integrity sha512-Hvjfv25jPDVr3U+4LDzBuZPPOymELG3PYcSk5hcevooo1yxxamQL/bHs/GrEPGmMoMEwRrHVGiCA1pXi97B8Ew== dependencies: - ret "~0.2.0" + ret "~0.5.0" safe-stable-stringify@^2.3.1: version "2.4.2" @@ -11593,12 +11679,10 @@ semver@^6.1.0, semver@^6.2.0, semver@^6.3.1: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -semver@^7.0.0, semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, semver@^7.5.4, semver@~7.5.4: - version "7.5.4" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" - integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== - dependencies: - lru-cache "^6.0.0" 
+semver@^7.0.0, semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, semver@^7.5.4, semver@^7.6.3: + version "7.6.3" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.3.tgz#980f7b5550bc175fb4dc09403085627f9eb33143" + integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A== semver@^7.6.0: version "7.6.0" @@ -11607,10 +11691,12 @@ semver@^7.6.0: dependencies: lru-cache "^6.0.0" -semver@^7.6.2: - version "7.6.3" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.3.tgz#980f7b5550bc175fb4dc09403085627f9eb33143" - integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A== +semver@~7.5.4: + version "7.5.4" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" + integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== + dependencies: + lru-cache "^6.0.0" serialize-error@^11.0.1: version "11.0.3" @@ -11638,10 +11724,10 @@ set-blocking@^2.0.0: resolved "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= -set-cookie-parser@^2.4.1: - version "2.4.8" - resolved "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.4.8.tgz" - integrity sha512-edRH8mBKEWNVIVMKejNnuJxleqYE/ZSdcT8/Nem9/mmosx12pctd80s2Oy00KNZzrogMZS5mauK2/ymL1bvlvg== +set-cookie-parser@^2.6.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/set-cookie-parser/-/set-cookie-parser-2.7.0.tgz#ef5552b56dc01baae102acb5fc9fb8cd060c30f9" + integrity sha512-lXLOiqpkUumhRdFF3k1osNXCy9akgx/dyPZ5p8qAg9seJzXr5ZrlqZuWIMuY6ejOsVLE6flJ5/h3lsn57fQ/PQ== set-function-length@^1.1.1: version "1.2.0" @@ -12302,27 +12388,7 @@ tapable@^2.2.0: resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" integrity 
sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== -tar-fs@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-3.0.4.tgz#a21dc60a2d5d9f55e0089ccd78124f1d3771dbbf" - integrity sha512-5AFQU8b9qLfZCX9zp2duONhPmZv0hGYiBPJsyUdqMjzq/mqVpy/rEUSeHk1+YitmxugaptgBh5oDGU3VsAJq4w== - dependencies: - mkdirp-classic "^0.5.2" - pump "^3.0.0" - tar-stream "^3.1.5" - -tar-fs@^3.0.5: - version "3.0.5" - resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-3.0.5.tgz#f954d77767e4e6edf973384e1eb95f8f81d64ed9" - integrity sha512-JOgGAmZyMgbqpLwct7ZV8VzkEB6pxXFBVErLtb+XCOqzc6w1xiWKI9GVd6bwk68EX7eJ4DWmfXVmq8K2ziZTGg== - dependencies: - pump "^3.0.0" - tar-stream "^3.1.5" - optionalDependencies: - bare-fs "^2.1.1" - bare-path "^2.1.0" - -tar-fs@^3.0.6: +tar-fs@^3.0.4, tar-fs@^3.0.5, tar-fs@^3.0.6: version "3.0.6" resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-3.0.6.tgz#eaccd3a67d5672f09ca8e8f9c3d2b89fa173f217" integrity sha512-iokBDQQkUyeXhgPYaZxmczGPhnhXZ0CmrqI+MOb/WFGS9DW5wnfrLgtjUJBvz50vQ3qfRwJ62QVoCFu8mPVu5w== @@ -12533,7 +12599,7 @@ to-regex-range@^5.0.1: dependencies: is-number "^7.0.0" -toad-cache@^3.3.0: +toad-cache@^3.7.0: version "3.7.0" resolved "https://registry.yarnpkg.com/toad-cache/-/toad-cache-3.7.0.tgz#b9b63304ea7c45ec34d91f1d2fa513517025c441" integrity sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw== @@ -13140,9 +13206,9 @@ vite-plugin-top-level-await@^1.4.2: uuid "^10.0.0" vite@^5.0.0, vite@^5.3.4: - version "5.3.4" - resolved "https://registry.yarnpkg.com/vite/-/vite-5.3.4.tgz#b36ebd47c8a5e3a8727046375d5f10bf9fdf8715" - integrity sha512-Cw+7zL3ZG9/NZBB8C+8QbQZmR54GwqIz+WMI4b3JgdYJvX+ny9AjJXqkGQlDXSXRP9rP0B4tbciRMOVEKulVOA== + version "5.3.6" + resolved "https://registry.yarnpkg.com/vite/-/vite-5.3.6.tgz#e097c0a7b79adb2e60bec9ef7907354f09d027bd" + integrity 
sha512-es78AlrylO8mTVBygC0gTC0FENv0C6T496vvd33ydbjF/mIi9q3XQ9A3NWo5qLGFKywvz10J26813OkLvcQleA== dependencies: esbuild "^0.21.3" postcss "^8.4.39" @@ -13847,10 +13913,10 @@ yallist@^4.0.0: resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== -yaml@^2.2.2: - version "2.3.2" - resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.2.tgz#f522db4313c671a0ca963a75670f1c12ea909144" - integrity sha512-N/lyzTPaJasoDmfV7YTrYCI0G/3ivm/9wdG0aHuheKowWQwGTsK0Eoiw6utmzAnI6pkJa0DUVygvp3spqqEKXg== +yaml@^2.2.2, yaml@^2.4.1, yaml@^2.4.2: + version "2.5.1" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.5.1.tgz#c9772aacf62cb7494a95b0c4f1fb065b563db130" + integrity sha512-bLQOjaX/ADgQ20isPJRvF0iRUHIxVhYvr53Of7wGcWlO2jvtUlH5m87DsmulFVxRpNLOnI4tB6p/oh8D7kpn9Q== yargs-parser@20.2.4: version "20.2.4"