diff --git a/.prettierignore b/.prettierignore index a37ce996..58df28e2 100644 --- a/.prettierignore +++ b/.prettierignore @@ -23,6 +23,5 @@ zp-relayer/poolTxs.db *.md *.yml -**/lib/ **/build/ **/*.json \ No newline at end of file diff --git a/.prettierrc.json b/.prettierrc.json index 4a19f691..82263d9f 100644 --- a/.prettierrc.json +++ b/.prettierrc.json @@ -4,5 +4,6 @@ "singleQuote": true, "arrowParens": "avoid", "bracketSpacing": true, - "quoteProps": "consistent" + "quoteProps": "consistent", + "plugins": ["prettier-plugin-organize-imports"] } diff --git a/CONFIGURATION.md b/CONFIGURATION.md index 2dd1a591..6ed8869d 100644 --- a/CONFIGURATION.md +++ b/CONFIGURATION.md @@ -40,7 +40,7 @@ These environment variables are required for all services. | RELAYER_MIN_GAS_PRICE_BUMP_FACTOR | Minimum `gasPrice` bump factor to meet RPC node requirements. Default `0.1`. | float | | RELAYER_MAX_FEE_PER_GAS_LIMIT | Max limit on `maxFeePerGas` parameter for each transaction in wei | integer | | RELAYER_MAX_SENT_QUEUE_SIZE | Maximum number of jobs waiting in the `sentTxQueue` at a time. | integer | -| RELAYER_TX_REDUNDANCY | If set to `true`, instructs relayer to send `eth_sendRawTransaction` requests through all available RPC urls defined in `RPC_URL` variables instead of using first available one. Defaults to `false` | boolean | +| TX_REDUNDANCY | If set to `true`, instructs relayer to send `eth_sendRawTransaction` requests through all available RPC urls defined in `RPC_URL` variables instead of using first available one. Defaults to `false` | boolean | | RELAYER_INSUFFICIENT_BALANCE_CHECK_TIMEOUT | Interval in milliseconds to check for relayer balance update if transaction send failed with insufficient balance error. Default `60000` | integer | | RELAYER_SENT_TX_DELAY | Delay in milliseconds for sentTxWorker to verify submitted transactions | integer | | RELAYER_SENT_TX_ERROR_THRESHOLD | Maximum number of re-sends which is considered to be normal. 
After this threshold each re-send will log a corresponding error (but re-send loop will continue). Defaults to `3`. | integer | diff --git a/docker-compose.yaml b/docker-compose.yaml index 333139f7..4b43d7b3 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -1,47 +1,68 @@ version: '3.8' services: - ganache: - image: trufflesuite/ganache-cli - command: > - --deterministic - --gasLimit 20000000 - --allowUnlimitedContractSize - --networkId 1337 - --chainId 1337 - ports: - - 8545:8545 - contracts: - image: lok52/zp-contracts:latest - verifier: - image: lok52/verifier:latest - build: - context: ./docker - dockerfile: Dockerfile.verifier - redis: + redis_indexer: command: [redis-server, --appendonly, 'yes'] image: redis:6.2.6 + indexer: + image: ghcr.io/zkbob/zkbob-relayer:${RELAYER_VERSION:-latest} + build: + context: . + dockerfile: docker/Dockerfile.relayer + command: yarn run start:indexer:prod + container_name: indexer + volumes: + - ./zp-relayer/params:/app/zp-relayer/params/ + env_file: ./zp-relayer/indexer.env + environment: + - INDEXER_PORT=80 + - COMMON_REDIS_URL=redis_indexer:6379 ports: - - 6379:6379 + - 8000:80 + restart: always + depends_on: + - redis_indexer + redis_relayer: + command: [redis-server, --appendonly, 'yes'] + image: redis:6.2.6 relayer: image: ghcr.io/zkbob/zkbob-relayer:${RELAYER_VERSION:-latest} build: context: . 
dockerfile: docker/Dockerfile.relayer + container_name: relayer volumes: - - relayer_tree:/app/tree.db - - relayer_txs:/app/txs.db - - $PARAMS_PATH:/app/zp-relayer/params/ - env_file: ./zp-relayer/relayer.env + - ./zp-relayer/params:/app/zp-relayer/params/ + env_file: ./zp-relayer/relayer-decentralized.env + environment: + - RELAYER_PORT=80 + - RELAYER_PROVER_URL=http://commitment_watcher:80 + - COMMON_INDEXER_URL=http://indexer:80 + - COMMON_REDIS_URL=redis_relayer:6379 ports: - - 8000:8000 - watcher: + - 8001:80 + restart: always + depends_on: + - redis_relayer + redis_prover: + command: [redis-server, --appendonly, 'yes'] + image: redis:6.2.6 + commitment_watcher: image: ghcr.io/zkbob/zkbob-relayer:${RELAYER_VERSION:-latest} build: context: . dockerfile: docker/Dockerfile.relayer - command: yarn run start:direct-deposit-watcher:prod - env_file: ./zp-relayer/watcher.env -volumes: - relayer_tree: - relayer_txs: + command: yarn run start:commitment-watcher:prod + container_name: commitment_watcher + volumes: + - ./zp-relayer/params:/app/zp-relayer/params/ + env_file: ./zp-relayer/commitment-watcher.env + environment: + - COMMITMENT_WATCHER_PORT=80 + - COMMON_INDEXER_URL=http://indexer:80 + - COMMON_REDIS_URL=redis_prover:6379 + ports: + - 8002:80 + restart: always + depends_on: + - redis_prover diff --git a/docker/Dockerfile.relayer b/docker/Dockerfile.relayer index 828c22e0..f44afc12 100644 --- a/docker/Dockerfile.relayer +++ b/docker/Dockerfile.relayer @@ -1,10 +1,10 @@ -FROM rust:1.69.0-slim-buster as base +FROM rust:1.77.0-slim-buster as base WORKDIR /app RUN apt-get update && \ apt-get -y install curl && \ - curl -sL https://deb.nodesource.com/setup_18.x | bash && \ + curl -sL https://deb.nodesource.com/setup_20.x | bash && \ apt-get -y install nodejs libclang-dev clang && \ npm install -g yarn cargo-cp-artifact @@ -29,7 +29,7 @@ RUN yarn build:relayer RUN yarn install --frozen-lockfile --production -FROM node:18 +FROM node:20 ARG RELAYER_REF ARG RELAYER_SHA 
diff --git a/package.json b/package.json index 6c838133..f50e326d 100644 --- a/package.json +++ b/package.json @@ -5,6 +5,7 @@ "devDependencies": { "@types/node": "^18.11.17", "prettier": "^2.7.1", + "prettier-plugin-organize-imports": "^3.2.4", "ts-loader": "9.4.2", "tsc-alias": "^1.7.0", "tsconfig-paths": "^4.1.0", diff --git a/yarn.lock b/yarn.lock index aad8c79d..df97cac7 100644 --- a/yarn.lock +++ b/yarn.lock @@ -28,6 +28,13 @@ chalk "^2.0.0" js-tokens "^4.0.0" +"@babel/runtime@^7.0.0": + version "7.22.6" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.22.6.tgz#57d64b9ae3cff1d67eb067ae117dac087f5bd438" + integrity sha512-wDb5pWm4WDdF6LFUde3Jl8WzPA+3ZbxYqkC6xAXuD3irdEHN1k0NfTRrJD8ZD378SJ61miMLCqIOXYhd8x+AJQ== + dependencies: + regenerator-runtime "^0.13.11" + "@cspotcode/source-map-support@^0.8.0": version "0.8.1" resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" @@ -80,6 +87,21 @@ "@ethersproject/properties" "^5.6.0" "@ethersproject/strings" "^5.6.1" +"@ethersproject/abi@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.7.0.tgz#b3f3e045bbbeed1af3947335c247ad625a44e449" + integrity sha512-351ktp42TiRcYB3H1OP8yajPeAQstMW/yCFokj/AthP9bLHzQFPlOrxOcwYEDkUAICmOHljvN4K39OMTMUa9RA== + dependencies: + "@ethersproject/address" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/hash" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + "@ethersproject/abstract-provider@^5.6.1": version "5.6.1" resolved "https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.6.1.tgz#02ddce150785caf0c77fe036a0ebfcee61878c59" @@ -93,6 +115,19 @@ "@ethersproject/transactions" "^5.6.2" "@ethersproject/web" "^5.6.1" 
+"@ethersproject/abstract-provider@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.7.0.tgz#b0a8550f88b6bf9d51f90e4795d48294630cb9ef" + integrity sha512-R41c9UkchKCpAqStMYUpdunjo3pkEvZC3FAwZn5S5MGbXoMQOHIdHItezTETxAO5bevtMApSyEhn9+CHcDsWBw== + dependencies: + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/networks" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + "@ethersproject/web" "^5.7.0" + "@ethersproject/abstract-signer@^5.6.2": version "5.6.2" resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.6.2.tgz#491f07fc2cbd5da258f46ec539664713950b0b33" @@ -104,6 +139,17 @@ "@ethersproject/logger" "^5.6.0" "@ethersproject/properties" "^5.6.0" +"@ethersproject/abstract-signer@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.7.0.tgz#13f4f32117868452191a4649723cb086d2b596b2" + integrity sha512-a16V8bq1/Cz+TGCkE2OPMTOUDLS3grCpdjoJCYNnVBbdYEMSgKrU0+B90s8b6H+ByYTBZN7a3g76jdIJi7UfKQ== + dependencies: + "@ethersproject/abstract-provider" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/address@^5.6.1": version "5.6.1" resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.6.1.tgz#ab57818d9aefee919c5721d28cd31fd95eff413d" @@ -115,6 +161,17 @@ "@ethersproject/logger" "^5.6.0" "@ethersproject/rlp" "^5.6.1" +"@ethersproject/address@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.7.0.tgz#19b56c4d74a3b0a46bfdbb6cfcc0a153fc697f37" + integrity sha512-9wYhYt7aghVGo758POM5nqcOMaE168Q6aRLJZwUmiqSrAungkG74gSSeKEIR7ukixesdRZGPgVqme6vmxs1fkA== + dependencies: + "@ethersproject/bignumber" "^5.7.0" + 
"@ethersproject/bytes" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/rlp" "^5.7.0" + "@ethersproject/base64@^5.6.1": version "5.6.1" resolved "https://registry.yarnpkg.com/@ethersproject/base64/-/base64-5.6.1.tgz#2c40d8a0310c9d1606c2c37ae3092634b41d87cb" @@ -122,6 +179,13 @@ dependencies: "@ethersproject/bytes" "^5.6.1" +"@ethersproject/base64@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/base64/-/base64-5.7.0.tgz#ac4ee92aa36c1628173e221d0d01f53692059e1c" + integrity sha512-Dr8tcHt2mEbsZr/mwTPIQAf3Ai0Bks/7gTw9dSqk1mQvhW3XvRlmDJr/4n+wg1JmCl16NZue17CDh8xb/vZ0sQ== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/bignumber@^5.6.2": version "5.6.2" resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.6.2.tgz#72a0717d6163fab44c47bcc82e0c550ac0315d66" @@ -131,6 +195,15 @@ "@ethersproject/logger" "^5.6.0" bn.js "^5.2.1" +"@ethersproject/bignumber@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.7.0.tgz#e2f03837f268ba655ffba03a57853e18a18dc9c2" + integrity sha512-n1CAdIHRWjSucQO3MC1zPSVgV/6dy/fjL9pMrPP9peL+QxEg9wOsVqwD4+818B6LUEtaXzVHQiuivzRoxPxUGw== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + bn.js "^5.2.1" + "@ethersproject/bytes@^5.6.1": version "5.6.1" resolved "https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.6.1.tgz#24f916e411f82a8a60412344bf4a813b917eefe7" @@ -138,6 +211,13 @@ dependencies: "@ethersproject/logger" "^5.6.0" +"@ethersproject/bytes@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.7.0.tgz#a00f6ea8d7e7534d6d87f47188af1148d71f155d" + integrity sha512-nsbxwgFXWh9NyYWo+U8atvmMsSdKJprTcICAkvbBffT75qDocbuggBU0SJiVK2MuTrp0q+xvLkTnGMPK1+uA9A== + dependencies: + "@ethersproject/logger" "^5.7.0" + "@ethersproject/constants@^5.6.1": version "5.6.1" resolved 
"https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.6.1.tgz#e2e974cac160dd101cf79fdf879d7d18e8cb1370" @@ -145,6 +225,13 @@ dependencies: "@ethersproject/bignumber" "^5.6.2" +"@ethersproject/constants@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.7.0.tgz#df80a9705a7e08984161f09014ea012d1c75295e" + integrity sha512-DHI+y5dBNvkpYUMiRQyxRBYBefZkJfo70VUkUAsRjcPs47muV9evftfZ0PJVCXYbAiCgght0DtcF9srFQmIgWA== + dependencies: + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/hash@^5.6.1": version "5.6.1" resolved "https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.6.1.tgz#224572ea4de257f05b4abf8ae58b03a67e99b0f4" @@ -159,6 +246,21 @@ "@ethersproject/properties" "^5.6.0" "@ethersproject/strings" "^5.6.1" +"@ethersproject/hash@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.7.0.tgz#eb7aca84a588508369562e16e514b539ba5240a7" + integrity sha512-qX5WrQfnah1EFnO5zJv1v46a8HW0+E5xuBBDTwMFZLuVTx0tbU2kkx15NqdjxecrLGatQN9FGQKpb1FKdHCt+g== + dependencies: + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/address" "^5.7.0" + "@ethersproject/base64" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + "@ethersproject/keccak256@^5.6.1": version "5.6.1" resolved "https://registry.yarnpkg.com/@ethersproject/keccak256/-/keccak256-5.6.1.tgz#b867167c9b50ba1b1a92bccdd4f2d6bd168a91cc" @@ -167,11 +269,24 @@ "@ethersproject/bytes" "^5.6.1" js-sha3 "0.8.0" +"@ethersproject/keccak256@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/keccak256/-/keccak256-5.7.0.tgz#3186350c6e1cd6aba7940384ec7d6d9db01f335a" + integrity sha512-2UcPboeL/iW+pSg6vZ6ydF8tCnv3Iu/8tUmLLzWWGzxWKFFqOBQFLo6uLUv6BDrLgCDfN28RJ/wtByx+jZ4KBg== + dependencies: + 
"@ethersproject/bytes" "^5.7.0" + js-sha3 "0.8.0" + "@ethersproject/logger@^5.6.0": version "5.6.0" resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.6.0.tgz#d7db1bfcc22fd2e4ab574cba0bb6ad779a9a3e7a" integrity sha512-BiBWllUROH9w+P21RzoxJKzqoqpkyM1pRnEKG69bulE9TSQD8SAIvTQqIMZmmCO8pUNkgLP1wndX1gKghSpBmg== +"@ethersproject/logger@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.7.0.tgz#6ce9ae168e74fecf287be17062b590852c311892" + integrity sha512-0odtFdXu/XHtjQXJYA3u9G0G8btm0ND5Cu8M7i5vhEcE8/HmF4Lbdqanwyv4uQTr2tx6b7fQRmgLrsnpQlmnig== + "@ethersproject/networks@^5.6.3": version "5.6.4" resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.6.4.tgz#51296d8fec59e9627554f5a8a9c7791248c8dc07" @@ -179,6 +294,13 @@ dependencies: "@ethersproject/logger" "^5.6.0" +"@ethersproject/networks@^5.7.0": + version "5.7.1" + resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.7.1.tgz#118e1a981d757d45ccea6bb58d9fd3d9db14ead6" + integrity sha512-n/MufjFYv3yFcUyfhnXotyDlNdFb7onmkSy8aQERi2PjNcnWQ66xXxa3XlS8nCcA8aJKJjIIMNJTC7tu80GwpQ== + dependencies: + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties@^5.6.0": version "5.6.0" resolved "https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.6.0.tgz#38904651713bc6bdd5bdd1b0a4287ecda920fa04" @@ -186,6 +308,13 @@ dependencies: "@ethersproject/logger" "^5.6.0" +"@ethersproject/properties@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.7.0.tgz#a6e12cb0439b878aaf470f1902a176033067ed30" + integrity sha512-J87jy8suntrAkIZtecpxEPxY//szqr1mlBaYlQ0r4RCaiD2hjheqF9s1LVE8vVuJCXisjIP+JgtK/Do54ej4Sw== + dependencies: + "@ethersproject/logger" "^5.7.0" + "@ethersproject/rlp@^5.6.1": version "5.6.1" resolved "https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.6.1.tgz#df8311e6f9f24dcb03d59a2bac457a28a4fe2bd8" @@ -194,6 +323,14 @@ 
"@ethersproject/bytes" "^5.6.1" "@ethersproject/logger" "^5.6.0" +"@ethersproject/rlp@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.7.0.tgz#de39e4d5918b9d74d46de93af80b7685a9c21304" + integrity sha512-rBxzX2vK8mVF7b0Tol44t5Tb8gomOHkj5guL+HhzQ1yBh/ydjGnpw6at+X6Iw0Kp3OzzzkcKp8N9r0W4kYSs9w== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/signing-key@^5.6.2": version "5.6.2" resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.6.2.tgz#8a51b111e4d62e5a62aee1da1e088d12de0614a3" @@ -206,6 +343,18 @@ elliptic "6.5.4" hash.js "1.1.7" +"@ethersproject/signing-key@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.7.0.tgz#06b2df39411b00bc57c7c09b01d1e41cf1b16ab3" + integrity sha512-MZdy2nL3wO0u7gkB4nA/pEf8lu1TlFswPNmy8AiYkfKTdO6eXBJyUdmHO/ehm/htHw9K/qF8ujnTyUAD+Ry54Q== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + bn.js "^5.2.1" + elliptic "6.5.4" + hash.js "1.1.7" + "@ethersproject/strings@^5.6.1": version "5.6.1" resolved "https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.6.1.tgz#dbc1b7f901db822b5cafd4ebf01ca93c373f8952" @@ -215,6 +364,15 @@ "@ethersproject/constants" "^5.6.1" "@ethersproject/logger" "^5.6.0" +"@ethersproject/strings@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.7.0.tgz#54c9d2a7c57ae8f1205c88a9d3a56471e14d5ed2" + integrity sha512-/9nu+lj0YswRNSH0NXYqrh8775XNyEdUQAuf3f+SmOrnVewcJ5SBNAjF7lpgehKi4abvNNXyf+HX86czCdJ8Mg== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/transactions@^5.6.2": version "5.6.2" resolved 
"https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.6.2.tgz#793a774c01ced9fe7073985bb95a4b4e57a6370b" @@ -230,6 +388,21 @@ "@ethersproject/rlp" "^5.6.1" "@ethersproject/signing-key" "^5.6.2" +"@ethersproject/transactions@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.7.0.tgz#91318fc24063e057885a6af13fdb703e1f993d3b" + integrity sha512-kmcNicCp1lp8qanMTC3RIikGgoJ80ztTyvtsFvCYpSCfkjhD0jZ2LOrnbcuxuToLIUYYf+4XwD1rP+B/erDIhQ== + dependencies: + "@ethersproject/address" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/rlp" "^5.7.0" + "@ethersproject/signing-key" "^5.7.0" + "@ethersproject/web@^5.6.1": version "5.6.1" resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.6.1.tgz#6e2bd3ebadd033e6fe57d072db2b69ad2c9bdf5d" @@ -241,6 +414,17 @@ "@ethersproject/properties" "^5.6.0" "@ethersproject/strings" "^5.6.1" +"@ethersproject/web@^5.7.0": + version "5.7.1" + resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.7.1.tgz#de1f285b373149bee5928f4eb7bcb87ee5fbb4ae" + integrity sha512-Gueu8lSvyjBWL4cYsWsjh6MtMwM0+H4HvqFPZfB6dV8ctbP9zFAO73VG1cMWae0FLPCtz0peKPpZY8/ugJJX2w== + dependencies: + "@ethersproject/base64" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + "@findeth/abi@^0.7.1": version "0.7.1" resolved "https://registry.yarnpkg.com/@findeth/abi/-/abi-0.7.1.tgz#60d0801cb252e587dc3228f00c00581bb748aebc" @@ -326,11 +510,23 @@ dependencies: "@mycrypto/eth-scan" "3.5.2" +"@noble/curves@1.1.0", "@noble/curves@~1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@noble/curves/-/curves-1.1.0.tgz#f13fc667c89184bc04cccb9b11e8e7bae27d8c3d" + integrity 
sha512-091oBExgENk/kGj3AZmtBDMpxQPDtxQABR2B9lb1JbVTs6ytdzZNwvhxQ4MWasRNEzlbEH8jCWFCwhF/Obj5AA== + dependencies: + "@noble/hashes" "1.3.1" + "@noble/hashes@1.1.2": version "1.1.2" resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.1.2.tgz#e9e035b9b166ca0af657a7848eb2718f0f22f183" integrity sha512-KYRCASVTv6aeUi1tsF8/vpyR7zpfs3FUzy2Jqm+MU+LmUKhQ0y2FpfwqkCcxSg2ua4GALJd8k2R76WxwZGbQpA== +"@noble/hashes@1.3.1", "@noble/hashes@~1.3.0", "@noble/hashes@~1.3.1": + version "1.3.1" + resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.3.1.tgz#8831ef002114670c603c458ab8b11328406953a9" + integrity sha512-EbqwksQwz9xDRGfDST86whPBgM65E0OH/pCgqW0GBVzO22bNE+NuIbeTb714+IfSjU3aRk47EUvXIb5bTsenKA== + "@noble/secp256k1@1.7.1": version "1.7.1" resolved "https://registry.yarnpkg.com/@noble/secp256k1/-/secp256k1-1.7.1.tgz#b251c70f824ce3ca7f8dc3df08d58f005cc0507c" @@ -357,6 +553,28 @@ "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" +"@scure/base@~1.1.0": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@scure/base/-/base-1.1.1.tgz#ebb651ee52ff84f420097055f4bf46cfba403938" + integrity sha512-ZxOhsSyxYwLJj3pLZCefNitxsj093tb2vq90mp2txoYeBqbcjDjqFhyM8eUjq/uFm6zJ+mUuqxlS2FkuSY1MTA== + +"@scure/bip32@1.3.1": + version "1.3.1" + resolved "https://registry.yarnpkg.com/@scure/bip32/-/bip32-1.3.1.tgz#7248aea723667f98160f593d621c47e208ccbb10" + integrity sha512-osvveYtyzdEVbt3OfwwXFr4P2iVBL5u1Q3q4ONBfDY/UpOuXmOlbgwc1xECEboY8wIays8Yt6onaWMUdUbfl0A== + dependencies: + "@noble/curves" "~1.1.0" + "@noble/hashes" "~1.3.1" + "@scure/base" "~1.1.0" + +"@scure/bip39@1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@scure/bip39/-/bip39-1.2.1.tgz#5cee8978656b272a917b7871c981e0541ad6ac2a" + integrity sha512-Z3/Fsz1yr904dduJD0NpiyRHhRYHdcnyh73FZWiV+/qhWi83wNJ3NWolYqCEN+ZWsUz2TWwajJggcRE9r1zUYg== + dependencies: + "@noble/hashes" "~1.3.0" + "@scure/base" "~1.1.0" + "@sindresorhus/is@^0.14.0": version "0.14.0" resolved 
"https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" @@ -369,6 +587,11 @@ dependencies: defer-to-connect "^1.0.1" +"@tronweb3/google-protobuf@^3.21.2": + version "3.21.2" + resolved "https://registry.yarnpkg.com/@tronweb3/google-protobuf/-/google-protobuf-3.21.2.tgz#0964cf83ed7826d31c3cb4e4ecf07655681631c9" + integrity sha512-IVcT2GfWX3K6tHUVhs14NP5uzKhQt4KeDya1g9ACxuZsUzsaoGUIGzceK2Ltu7xp1YV94AaHOf4yxLAivlvEkQ== + "@tsconfig/node10@^1.0.7": version "1.0.8" resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.8.tgz#c1e4e80d6f964fbecb3359c43bd48b40f7cadad9" @@ -980,6 +1203,13 @@ axios@^0.21.2: dependencies: follow-redirects "^1.14.0" +axios@^0.26.1: + version "0.26.1" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.26.1.tgz#1ede41c51fcf51bbbd6fd43669caaa4f0495aaa9" + integrity sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA== + dependencies: + follow-redirects "^1.14.8" + balanced-match@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" @@ -1026,6 +1256,11 @@ bignumber.js@^9.0.0: resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.0.1.tgz#8d7ba124c882bfd8e43260c67475518d0689e4e5" integrity sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA== +bignumber.js@^9.0.1: + version "9.1.1" + resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.1.1.tgz#c4df7dc496bd849d4c9464344c1aa74228b4dac6" + integrity sha512-pHm4LsMJ6lzgNGVfZHjMoO8sdoRhOzOH4MLmY65Jg70bpxCKu5iOHNJyfF6OyvYw7t8Fpf35RuzUyqnQsj8Vig== + binary-extensions@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" @@ -2231,6 +2466,16 @@ ethereum-cryptography@^0.1.3: secp256k1 "^4.0.1" setimmediate "^1.0.5" 
+ethereum-cryptography@^2.0.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ethereum-cryptography/-/ethereum-cryptography-2.1.2.tgz#18fa7108622e56481157a5cb7c01c0c6a672eb67" + integrity sha512-Z5Ba0T0ImZ8fqXrJbpHcbpAvIswRte2wGNR/KePnu8GbbvgJ47lMxT/ZZPG6i9Jaht4azPDop4HaM00J0J59ug== + dependencies: + "@noble/curves" "1.1.0" + "@noble/hashes" "1.3.1" + "@scure/bip32" "1.3.1" + "@scure/bip39" "1.2.1" + ethereumjs-abi@^0.6.8: version "0.6.8" resolved "https://registry.yarnpkg.com/ethereumjs-abi/-/ethereumjs-abi-0.6.8.tgz#71bc152db099f70e62f108b7cdfca1b362c6fcae" @@ -2276,6 +2521,19 @@ ethers@^6.5.1: tslib "2.4.0" ws "8.5.0" +ethers@^6.6.0: + version "6.7.0" + resolved "https://registry.yarnpkg.com/ethers/-/ethers-6.7.0.tgz#0f772c31a9450de28aa518b181c8cb269bbe7fd1" + integrity sha512-pxt5hK82RNwcTX2gOZP81t6qVPVspnkpeivwEgQuK9XUvbNtghBnT8GNIb/gPh+WnVSfi8cXC9XlfT8sqc6D6w== + dependencies: + "@adraffy/ens-normalize" "1.9.2" + "@noble/hashes" "1.1.2" + "@noble/secp256k1" "1.7.1" + "@types/node" "18.15.13" + aes-js "4.0.0-beta.5" + tslib "2.4.0" + ws "8.5.0" + ethjs-unit@0.1.6: version "0.1.6" resolved "https://registry.yarnpkg.com/ethjs-unit/-/ethjs-unit-0.1.6.tgz#c665921e476e87bce2a9d588a6fe0405b2c41699" @@ -2297,6 +2555,11 @@ eventemitter3@4.0.4: resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.4.tgz#b5463ace635a083d018bdc7c917b4c5f10a85384" integrity sha512-rlaVLnVxtxvoyLsQQFBx53YmXHDxRIzzTLbdfxqi4yocpSjAxXwkU0cScM5JgSKMqEhrZpnvQ2D9gjylR0AimQ== +eventemitter3@^3.1.0: + version "3.1.2" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-3.1.2.tgz#2d3d48f9c346698fce83a85d7d664e98535df6e7" + integrity sha512-tvtQIeLVHjDkJYnzf2dgVMxfuSGJeM/7UCG17TT4EumTfNtF+0nebF/4zWOIkCreAbtNqhGEboB6BWrwqNaw4Q== + eventemitter3@^4.0.0: version "4.0.7" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" @@ -2519,6 +2782,11 @@ follow-redirects@^1.14.0: resolved 
"https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.1.tgz#0ca6a452306c9b276e4d3127483e29575e207ad5" integrity sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA== +follow-redirects@^1.14.8: + version "1.15.2" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" + integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== + foreach@^2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/foreach/-/foreach-2.0.5.tgz#0bee005018aeb260d0a3af3ae658dd0136ec1b99" @@ -3040,6 +3308,11 @@ inherits@2.0.3: resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= +injectpromise@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/injectpromise/-/injectpromise-1.0.0.tgz#c621f7df2bbfc1164d714f1fb229adec2079da39" + integrity sha512-qNq5wy4qX4uWHcVFOEU+RqZkoVG65FhvGkyDWbuBxILMjK6A1LFf5A1mgXZkD4nRx5FCorD81X/XvPKp/zVfPA== + internal-slot@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" @@ -4558,6 +4831,11 @@ prepend-http@^2.0.0: resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= +prettier-plugin-organize-imports@^3.2.4: + version "3.2.4" + resolved "https://registry.yarnpkg.com/prettier-plugin-organize-imports/-/prettier-plugin-organize-imports-3.2.4.tgz#77967f69d335e9c8e6e5d224074609309c62845e" + integrity sha512-6m8WBhIp0dfwu0SkgfOxJqh+HpdyfqSSLfKKRZSFbDuEQXDDndb8fTpRWkUrX/uBenkex3MgnVk0J3b3Y5byog== + prettier@^2.7.1: version "2.7.1" resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.7.1.tgz#e235806850d057f97bb08368a4f7d899f7760c64" @@ -4821,6 +5099,11 @@ 
redis-parser@^3.0.0: dependencies: redis-errors "^1.0.0" +regenerator-runtime@^0.13.11: + version "0.13.11" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz#f6dca3e7ceec20590d07ada785636a90cdca17f9" + integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg== + remove-array-items@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/remove-array-items/-/remove-array-items-1.1.1.tgz#fd745ff73d0822e561ea910bf1b401fc7843e693" @@ -5031,6 +5314,11 @@ semver@7.3.8: dependencies: lru-cache "^6.0.0" +semver@^5.6.0: + version "5.7.2" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" + integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== + semver@^7.3.4: version "7.3.5" resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7" @@ -5557,6 +5845,25 @@ triple-beam@^1.2.0, triple-beam@^1.3.0: resolved "https://registry.yarnpkg.com/triple-beam/-/triple-beam-1.3.0.tgz#a595214c7298db8339eeeee083e4d10bd8cb8dd9" integrity sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw== +tronweb@^5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/tronweb/-/tronweb-5.3.0.tgz#b40c4aa68f81b70bac4d8de52960b82b61f9ab04" + integrity sha512-i03+3UviQacqdrr3VgXHDL8h/2E24BeULak4w6+yRkJaCuEyxjWOtEn1dq87ulTkHzS/vKK0zIyvW7rSxuISOA== + dependencies: + "@babel/runtime" "^7.0.0" + "@ethersproject/abi" "^5.7.0" + "@tronweb3/google-protobuf" "^3.21.2" + axios "^0.26.1" + bignumber.js "^9.0.1" + ethereum-cryptography "^2.0.0" + ethers "^6.6.0" + eventemitter3 "^3.1.0" + injectpromise "^1.0.0" + lodash "^4.17.21" + querystring-es3 "^0.2.1" + semver "^5.6.0" + validator "^13.7.0" + ts-loader@9.4.2: version "9.4.2" resolved 
"https://registry.yarnpkg.com/ts-loader/-/ts-loader-9.4.2.tgz#80a45eee92dd5170b900b3d00abcfa14949aeb78" @@ -5868,6 +6175,11 @@ validate-npm-package-license@^3.0.1: spdx-correct "^3.0.0" spdx-expression-parse "^3.0.0" +validator@^13.7.0: + version "13.11.0" + resolved "https://registry.yarnpkg.com/validator/-/validator-13.11.0.tgz#23ab3fd59290c61248364eabf4067f04955fbb1b" + integrity sha512-Ii+sehpSfZy+At5nPdnyMhx78fEoPDkR2XW/zimHEL3MyGJQOCQ7WeP20jPYRz7ZCpcKLB21NxuXHF3bxjStBQ== + varint@^5.0.0: version "5.0.2" resolved "https://registry.yarnpkg.com/varint/-/varint-5.0.2.tgz#5b47f8a947eb668b848e034dcfa87d0ff8a7f7a4" @@ -6555,6 +6867,11 @@ yocto-queue@^0.1.0: resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== +zod@^3.21.4: + version "3.21.4" + resolved "https://registry.yarnpkg.com/zod/-/zod-3.21.4.tgz#10882231d992519f0a10b5dd58a38c9dabbb64db" + integrity sha512-m46AKbrzKVzOzs/DZgVnG5H55N1sv1M8qZU3A8RIKbs3mrACDNeIOeilDymVb2HdmP8uwshOCF4uJ8uM9rCqJw== + "zp-memo-parser@link:zp-memo-parser": version "0.0.3" dependencies: diff --git a/zp-memo-parser/memo.ts b/zp-memo-parser/memo.ts index a377d07b..bf498a56 100644 --- a/zp-memo-parser/memo.ts +++ b/zp-memo-parser/memo.ts @@ -1,5 +1,5 @@ +import { BinaryReader, deserialize } from 'borsh' import { Buffer } from 'buffer' -import { deserialize, BinaryReader } from 'borsh' type Option = T | null @@ -10,25 +10,30 @@ export enum TxType { PERMITTABLE_DEPOSIT = '0003', } -interface DefaultTxData { - fee: string +interface BaseTxData { + transactFee: string } -export interface WithdrawTxData extends DefaultTxData { +export interface WithdrawTxData { nativeAmount: string receiver: Uint8Array } -export interface PermittableDepositTxData extends DefaultTxData { +export interface PermittableDepositTxData { deadline: string holder: Uint8Array } -export type TxData = T 
extends TxType.WITHDRAWAL - ? WithdrawTxData - : T extends TxType.PERMITTABLE_DEPOSIT - ? PermittableDepositTxData - : DefaultTxData +export type TxData = BaseTxData & + (T extends TxType.WITHDRAWAL ? WithdrawTxData : T extends TxType.PERMITTABLE_DEPOSIT ? PermittableDepositTxData : {}) + +interface BaseTxDataProverV2 { + proxyAddress: Uint8Array + proverAddress: Uint8Array + treeUpdateFee: string +} + +export type TxDataProverV2 = BaseTxDataProverV2 & TxData // Size in bytes const U256_SIZE = 32 @@ -100,34 +105,79 @@ function getAddress(data: Buffer, offset: number): Uint8Array { return new Uint8Array(data.subarray(offset, offset + 20)) } -export function getTxData(data: Buffer, txType: Option): TxData { - function readU64(offset: number) { - let uint = data.readBigUInt64BE(offset) - return uint.toString(10) +function readU64(data: Buffer, offset: number) { + let uint = data.readBigUInt64BE(offset) + return uint.toString(10) +} + +export function getTxData(m: Buffer, txType: Option): TxData { + let offset = 0 + const transactFee = readU64(m, offset) + offset += 8 + if (txType === TxType.WITHDRAWAL) { + const nativeAmount = readU64(m, offset) + offset += 8 + const receiver = getAddress(m, offset) + return { + transactFee, + nativeAmount, + receiver, + } as unknown as TxData + } else if (txType === TxType.PERMITTABLE_DEPOSIT) { + const deadline = readU64(m, offset) + offset += 8 + const holder = getAddress(m, offset) + return { + transactFee, + deadline, + holder, + } as unknown as TxData } + return { transactFee } as TxData +} + +export function getTxDataProverV2(m: Buffer, txType: Option): TxDataProverV2 { let offset = 0 - const fee = readU64(offset) + + const proxyAddress = getAddress(m, offset) + offset += 20 + + const proverAddress = getAddress(m, offset) + offset += 20 + + const transactFee = readU64(m, offset) + offset += 8 + + const treeUpdateFee = readU64(m, offset) offset += 8 + + const base = { + proxyAddress, + proverAddress, + transactFee, + 
treeUpdateFee, + } + if (txType === TxType.WITHDRAWAL) { - const nativeAmount = readU64(offset) + const nativeAmount = readU64(m, offset) offset += 8 - const receiver = getAddress(data, offset) + const receiver = getAddress(m, offset) return { - fee, + ...base, nativeAmount, receiver, - } as TxData + } as unknown as TxDataProverV2 } else if (txType === TxType.PERMITTABLE_DEPOSIT) { - const deadline = readU64(offset) + const deadline = readU64(m, offset) offset += 8 - const holder = getAddress(data, offset) + const holder = getAddress(m, offset) return { - fee, + ...base, deadline, holder, - } as TxData + } as unknown as TxDataProverV2 } - return { fee } as TxData + return base as unknown as TxDataProverV2 } export function decodeMemo(data: Buffer, maxNotes = 127) { diff --git a/zp-relayer/abi/accounting-abi.json b/zp-relayer/abi/accounting-abi.json new file mode 100644 index 00000000..c8bddf3a --- /dev/null +++ b/zp-relayer/abi/accounting-abi.json @@ -0,0 +1,487 @@ +[ + { + "inputs": [ + { + "internalType": "address", + "name": "_pool", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_precision", + "type": "uint256" + } + ], + "stateMutability": "nonpayable", + "type": "constructor" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "previousOwner", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "newOwner", + "type": "address" + } + ], + "name": "OwnershipTransferred", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "manager", + "type": "address" + } + ], + "name": "UpdateKYCProvidersManager", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint8", + "name": "tier", + "type": "uint8" + }, + { + "components": [ + { + "internalType": "uint56", + "name": "tvlCap", + "type": "uint56" + }, + { + "internalType": "uint32", + 
"name": "dailyDepositCap", + "type": "uint32" + }, + { + "internalType": "uint32", + "name": "dailyWithdrawalCap", + "type": "uint32" + }, + { + "internalType": "uint32", + "name": "dailyUserDepositCap", + "type": "uint32" + }, + { + "internalType": "uint32", + "name": "depositCap", + "type": "uint32" + }, + { + "internalType": "uint32", + "name": "directDepositCap", + "type": "uint32" + }, + { + "internalType": "uint32", + "name": "dailyUserDirectDepositCap", + "type": "uint32" + } + ], + "indexed": false, + "internalType": "struct ZkBobAccounting.TierLimits", + "name": "limits", + "type": "tuple" + } + ], + "name": "UpdateLimits", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "user", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint8", + "name": "tier", + "type": "uint8" + } + ], + "name": "UpdateTier", + "type": "event" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_user", + "type": "address" + } + ], + "name": "getLimitsFor", + "outputs": [ + { + "components": [ + { + "internalType": "uint256", + "name": "tvlCap", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "tvl", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "dailyDepositCap", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "dailyDepositCapUsage", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "dailyWithdrawalCap", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "dailyWithdrawalCapUsage", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "dailyUserDepositCap", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "dailyUserDepositCapUsage", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "depositCap", + "type": "uint256" + }, + { + "internalType": "uint8", + "name": "tier", + "type": "uint8" + }, + { + "internalType": "uint256", + 
"name": "dailyUserDirectDepositCap", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "dailyUserDirectDepositCapUsage", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "directDepositCap", + "type": "uint256" + } + ], + "internalType": "struct IZkBobAccounting.Limits", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint32", + "name": "_txCount", + "type": "uint32" + }, + { + "internalType": "uint72", + "name": "_tvl", + "type": "uint72" + }, + { + "internalType": "uint88", + "name": "_cumTvl", + "type": "uint88" + }, + { + "internalType": "uint32", + "name": "_maxWeeklyTxCount", + "type": "uint32" + }, + { + "internalType": "uint56", + "name": "_maxWeeklyAvgTvl", + "type": "uint56" + } + ], + "name": "initialize", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "kycProvidersManager", + "outputs": [ + { + "internalType": "contract IKycProvidersManager", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "owner", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "pool", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "enum IZkBobAccounting.TxType", + "name": "_txType", + "type": "uint8" + }, + { + "internalType": "address", + "name": "_user", + "type": "address" + }, + { + "internalType": "int256", + "name": "_txAmount", + "type": "int256" + } + ], + "name": "recordOperation", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "renounceOwnership", + "outputs": [], + "stateMutability": 
"nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint8", + "name": "_tier", + "type": "uint8" + } + ], + "name": "resetDailyLimits", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "contract IKycProvidersManager", + "name": "_kycProvidersManager", + "type": "address" + } + ], + "name": "setKycProvidersManager", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint8", + "name": "_tier", + "type": "uint8" + }, + { + "internalType": "uint256", + "name": "_tvlCap", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_dailyDepositCap", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_dailyWithdrawalCap", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_dailyUserDepositCap", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_depositCap", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_dailyUserDirectDepositCap", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_directDepositCap", + "type": "uint256" + } + ], + "name": "setLimits", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint8", + "name": "_tier", + "type": "uint8" + }, + { + "internalType": "address", + "name": "_user", + "type": "address" + } + ], + "name": "setUserTier", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint8", + "name": "_tier", + "type": "uint8" + }, + { + "internalType": "address[]", + "name": "_users", + "type": "address[]" + } + ], + "name": "setUsersTier", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "slot0", + "outputs": [ + { + "internalType": "uint56", + "name": "maxWeeklyAvgTvl", + "type": "uint56" 
+ }, + { + "internalType": "uint32", + "name": "maxWeeklyTxCount", + "type": "uint32" + }, + { + "internalType": "uint24", + "name": "tailSlot", + "type": "uint24" + }, + { + "internalType": "uint24", + "name": "headSlot", + "type": "uint24" + }, + { + "internalType": "uint88", + "name": "cumTvl", + "type": "uint88" + }, + { + "internalType": "uint32", + "name": "txCount", + "type": "uint32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "slot1", + "outputs": [ + { + "internalType": "uint72", + "name": "tvl", + "type": "uint72" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "newOwner", + "type": "address" + } + ], + "name": "transferOwnership", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + } +] \ No newline at end of file diff --git a/zp-relayer/abi/pool-abi.json b/zp-relayer/abi/pool-abi.json index 17c40810..f94a9e4f 100644 --- a/zp-relayer/abi/pool-abi.json +++ b/zp-relayer/abi/pool-abi.json @@ -1,750 +1,764 @@ [ - { - "inputs": [ - { - "internalType": "uint256", - "name": "__pool_id", - "type": "uint256" - }, - { - "internalType": "address", - "name": "_token", - "type": "address" - }, - { - "internalType": "contract ITransferVerifier", - "name": "_transfer_verifier", - "type": "address" - }, - { - "internalType": "contract ITreeVerifier", - "name": "_tree_verifier", - "type": "address" - }, - { - "internalType": "contract IBatchDepositVerifier", - "name": "_batch_deposit_verifier", - "type": "address" - }, - { - "internalType": "address", - "name": "_direct_deposit_queue", - "type": "address" - } - ], - "stateMutability": "nonpayable", - "type": "constructor" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "uint256", - "name": "index", - "type": "uint256" - }, - { - "indexed": true, - "internalType": "bytes32", - "name": "hash", - "type": "bytes32" - }, - { - "indexed": 
false, - "internalType": "bytes", - "name": "message", - "type": "bytes" - } - ], - "name": "Message", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "previousOwner", - "type": "address" - }, - { - "indexed": true, - "internalType": "address", - "name": "newOwner", - "type": "address" - } - ], - "name": "OwnershipTransferred", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "uint8", - "name": "tier", - "type": "uint8" - }, - { - "components": [ - { - "internalType": "uint56", - "name": "tvlCap", - "type": "uint56" - }, - { - "internalType": "uint32", - "name": "dailyDepositCap", - "type": "uint32" - }, - { - "internalType": "uint32", - "name": "dailyWithdrawalCap", - "type": "uint32" - }, - { - "internalType": "uint32", - "name": "dailyUserDepositCap", - "type": "uint32" - }, - { - "internalType": "uint32", - "name": "depositCap", - "type": "uint32" - }, - { - "internalType": "uint32", - "name": "directDepositCap", - "type": "uint32" - }, - { - "internalType": "uint32", - "name": "dailyUserDirectDepositCap", - "type": "uint32" - } - ], - "indexed": false, - "internalType": "struct ZkBobAccounting.TierLimits", - "name": "limits", - "type": "tuple" - } - ], - "name": "UpdateLimits", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "address", - "name": "manager", - "type": "address" - } - ], - "name": "UpdateOperatorManager", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "address", - "name": "user", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint8", - "name": "tier", - "type": "uint8" - } - ], - "name": "UpdateTier", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "address", - "name": "seller", - "type": "address" - } - ], - "name": 
"UpdateTokenSeller", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "operator", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "fee", - "type": "uint256" - } - ], - "name": "WithdrawFee", - "type": "event" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "name": "accumulatedFee", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "all_messages_hash", - "outputs": [ - { - "internalType": "bytes32", - "name": "", - "type": "bytes32" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_root_after", - "type": "uint256" - }, - { - "internalType": "uint256[]", - "name": "_indices", - "type": "uint256[]" - }, - { - "internalType": "uint256", - "name": "_out_commit", - "type": "uint256" - }, - { - "internalType": "uint256[8]", - "name": "_batch_deposit_proof", - "type": "uint256[8]" - }, - { - "internalType": "uint256[8]", - "name": "_tree_proof", - "type": "uint256[8]" - } - ], - "name": "appendDirectDeposits", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "batch_deposit_verifier", - "outputs": [ - { - "internalType": "contract IBatchDepositVerifier", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "denominator", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "pure", - "type": "function" - }, - { - "inputs": [], - "name": "direct_deposit_queue", - "outputs": [ - { - "internalType": "contract IZkBobDirectDepositQueue", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": 
"function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_user", - "type": "address" - } - ], - "name": "getLimitsFor", - "outputs": [ - { - "components": [ - { - "internalType": "uint256", - "name": "tvlCap", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "tvl", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "dailyDepositCap", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "dailyDepositCapUsage", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "dailyWithdrawalCap", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "dailyWithdrawalCapUsage", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "dailyUserDepositCap", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "dailyUserDepositCapUsage", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "depositCap", - "type": "uint256" - }, - { - "internalType": "uint8", - "name": "tier", - "type": "uint8" - }, - { - "internalType": "uint256", - "name": "dailyUserDirectDepositCap", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "dailyUserDirectDepositCapUsage", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "directDepositCap", - "type": "uint256" - } - ], - "internalType": "struct ZkBobAccounting.Limits", - "name": "", - "type": "tuple" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_root", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_tvlCap", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_dailyDepositCap", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_dailyWithdrawalCap", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_dailyUserDepositCap", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_depositCap", - "type": "uint256" - 
}, - { - "internalType": "uint256", - "name": "_dailyUserDirectDepositCap", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_directDepositCap", - "type": "uint256" - } - ], - "name": "initialize", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "name": "nullifiers", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "operatorManager", - "outputs": [ - { - "internalType": "contract IOperatorManager", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "owner", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "pool_id", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "pool_index", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_sender", - "type": "address" - }, - { - "internalType": "uint256", - "name": "_amount", - "type": "uint256" - } - ], - "name": "recordDirectDeposit", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "renounceOwnership", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint8", - "name": "_tier", - "type": "uint8" - } - ], - "name": "resetDailyLimits", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": 
"uint256", - "name": "", - "type": "uint256" - } - ], - "name": "roots", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint8", - "name": "_tier", - "type": "uint8" - }, - { - "internalType": "uint256", - "name": "_tvlCap", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_dailyDepositCap", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_dailyWithdrawalCap", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_dailyUserDepositCap", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_depositCap", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_dailyUserDirectDepositCap", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_directDepositCap", - "type": "uint256" - } - ], - "name": "setLimits", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "contract IOperatorManager", - "name": "_operatorManager", - "type": "address" - } - ], - "name": "setOperatorManager", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_seller", - "type": "address" - } - ], - "name": "setTokenSeller", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint8", - "name": "_tier", - "type": "uint8" - }, - { - "internalType": "address[]", - "name": "_users", - "type": "address[]" - } - ], - "name": "setUsersTier", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "token", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "tokenSeller", - 
"outputs": [ - { - "internalType": "contract ITokenSeller", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "transact", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "newOwner", - "type": "address" - } - ], - "name": "transferOwnership", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "transfer_verifier", - "outputs": [ - { - "internalType": "contract ITransferVerifier", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "tree_verifier", - "outputs": [ - { - "internalType": "contract ITreeVerifier", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_operator", - "type": "address" - }, - { - "internalType": "address", - "name": "_to", - "type": "address" - } - ], - "name": "withdrawFee", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - } -] \ No newline at end of file + { + "type": "constructor", + "inputs": [ + { + "name": "__pool_id", + "type": "uint256", + "internalType": "uint256" + }, + { + "name": "_token", + "type": "address", + "internalType": "address" + }, + { + "name": "_transfer_verifier", + "type": "address", + "internalType": "contract ITransferVerifier" + }, + { + "name": "_tree_verifier", + "type": "address", + "internalType": "contract ITreeVerifier" + }, + { + "name": "_batch_deposit_verifier", + "type": "address", + "internalType": "contract IBatchDepositVerifier" + }, + { + "name": "_direct_deposit_queue", + "type": "address", + "internalType": "address" + } + ], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "accounting", + "inputs": [], + "outputs": [ + { + "name": "", + "type": 
"address", + "internalType": "contract IZkBobAccounting" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "accumulatedFee", + "inputs": [ + { + "name": "", + "type": "address", + "internalType": "address" + } + ], + "outputs": [ + { + "name": "", + "type": "uint256", + "internalType": "uint256" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "all_messages_hash", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "bytes32", + "internalType": "bytes32" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "appendDirectDeposits", + "inputs": [ + { + "name": "_indices", + "type": "uint256[]", + "internalType": "uint256[]" + }, + { + "name": "_out_commit", + "type": "uint256", + "internalType": "uint256" + }, + { + "name": "_batch_deposit_proof", + "type": "uint256[8]", + "internalType": "uint256[8]" + }, + { + "name": "_prover", + "type": "address", + "internalType": "address" + } + ], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "batch_deposit_verifier", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "address", + "internalType": "contract IBatchDepositVerifier" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "denominator", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "uint256", + "internalType": "uint256" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "direct_deposit_queue", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "address", + "internalType": "contract IZkBobDirectDepositQueue" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "extsload", + "inputs": [ + { + "name": "slot", + "type": "bytes32", + "internalType": "bytes32" + } + ], + "outputs": [ + { + "name": "value", + "type": "bytes32", + "internalType": "bytes32" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": 
"extsload", + "inputs": [ + { + "name": "startSlot", + "type": "bytes32", + "internalType": "bytes32" + }, + { + "name": "nSlots", + "type": "uint256", + "internalType": "uint256" + } + ], + "outputs": [ + { + "name": "", + "type": "bytes", + "internalType": "bytes" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "gracePeriod", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "uint64", + "internalType": "uint64" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "initialize", + "inputs": [ + { + "name": "_root", + "type": "uint256", + "internalType": "uint256" + } + ], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "initializePoolIndex", + "inputs": [ + { + "name": "_poolIndex", + "type": "uint96", + "internalType": "uint96" + } + ], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "minTreeUpdateFee", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "uint64", + "internalType": "uint64" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "nullifiers", + "inputs": [ + { + "name": "", + "type": "uint256", + "internalType": "uint256" + } + ], + "outputs": [ + { + "name": "", + "type": "uint256", + "internalType": "uint256" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "operatorManager", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "address", + "internalType": "contract IOperatorManager" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "owner", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "address", + "internalType": "address" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "pendingCommitment", + "inputs": [], + "outputs": [ + { + "name": "commitment", + "type": "uint256", + "internalType": "uint256" + }, + { + "name": "privilegedProver", + "type": "address", + 
"internalType": "address" + }, + { + "name": "fee", + "type": "uint64", + "internalType": "uint64" + }, + { + "name": "timestamp", + "type": "uint64", + "internalType": "uint64" + }, + { + "name": "gracePeriodEnd", + "type": "uint64", + "internalType": "uint64" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "pool_id", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "uint256", + "internalType": "uint256" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "pool_index", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "uint96", + "internalType": "uint96" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "proveTreeUpdate", + "inputs": [ + { + "name": "_commitment", + "type": "uint256", + "internalType": "uint256" + }, + { + "name": "_proof", + "type": "uint256[8]", + "internalType": "uint256[8]" + }, + { + "name": "_rootAfter", + "type": "uint256", + "internalType": "uint256" + } + ], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "recordDirectDeposit", + "inputs": [ + { + "name": "_sender", + "type": "address", + "internalType": "address" + }, + { + "name": "_amount", + "type": "uint256", + "internalType": "uint256" + } + ], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "redeemer", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "address", + "internalType": "contract IEnergyRedeemer" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "renounceOwnership", + "inputs": [], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "roots", + "inputs": [ + { + "name": "", + "type": "uint256", + "internalType": "uint256" + } + ], + "outputs": [ + { + "name": "", + "type": "uint256", + "internalType": "uint256" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "setAccounting", + 
"inputs": [ + { + "name": "_accounting", + "type": "address", + "internalType": "contract IZkBobAccounting" + } + ], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "setEnergyRedeemer", + "inputs": [ + { + "name": "_redeemer", + "type": "address", + "internalType": "contract IEnergyRedeemer" + } + ], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "setGracePeriod", + "inputs": [ + { + "name": "_gracePeriod", + "type": "uint64", + "internalType": "uint64" + } + ], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "setMinTreeUpdateFee", + "inputs": [ + { + "name": "_minTreeUpdateFee", + "type": "uint64", + "internalType": "uint64" + } + ], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "setOperatorManager", + "inputs": [ + { + "name": "_operatorManager", + "type": "address", + "internalType": "contract IOperatorManager" + } + ], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "setTokenSeller", + "inputs": [ + { + "name": "_seller", + "type": "address", + "internalType": "address" + } + ], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "token", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "address", + "internalType": "address" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "tokenSeller", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "address", + "internalType": "contract ITokenSeller" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "transactV2", + "inputs": [], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "transferOwnership", + "inputs": [ + { + "name": "newOwner", + "type": "address", + "internalType": "address" + } + ], + "outputs": [], + "stateMutability": "nonpayable" + }, 
+ { + "type": "function", + "name": "transfer_verifier", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "address", + "internalType": "contract ITransferVerifier" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "tree_verifier", + "inputs": [], + "outputs": [ + { + "name": "", + "type": "address", + "internalType": "contract ITreeVerifier" + } + ], + "stateMutability": "view" + }, + { + "type": "function", + "name": "withdrawFee", + "inputs": [ + { + "name": "_operator", + "type": "address", + "internalType": "address" + }, + { + "name": "_to", + "type": "address", + "internalType": "address" + } + ], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "event", + "name": "Message", + "inputs": [ + { + "name": "index", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + }, + { + "name": "hash", + "type": "bytes32", + "indexed": true, + "internalType": "bytes32" + }, + { + "name": "message", + "type": "bytes", + "indexed": false, + "internalType": "bytes" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "OwnershipTransferred", + "inputs": [ + { + "name": "previousOwner", + "type": "address", + "indexed": true, + "internalType": "address" + }, + { + "name": "newOwner", + "type": "address", + "indexed": true, + "internalType": "address" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "RootUpdated", + "inputs": [ + { + "name": "index", + "type": "uint256", + "indexed": true, + "internalType": "uint256" + }, + { + "name": "root", + "type": "uint256", + "indexed": false, + "internalType": "uint256" + }, + { + "name": "commitment", + "type": "uint256", + "indexed": false, + "internalType": "uint256" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "UpdateAccounting", + "inputs": [ + { + "name": "accounting", + "type": "address", + "indexed": false, + "internalType": "address" + } + ], + "anonymous": false + }, + { + "type": "event", + 
"name": "UpdateGracePeriod", + "inputs": [ + { + "name": "gracePeriod", + "type": "uint64", + "indexed": false, + "internalType": "uint64" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "UpdateMinTreeUpdateFee", + "inputs": [ + { + "name": "minTreeUpdateFee", + "type": "uint64", + "indexed": false, + "internalType": "uint64" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "UpdateOperatorManager", + "inputs": [ + { + "name": "manager", + "type": "address", + "indexed": false, + "internalType": "address" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "UpdateRedeemer", + "inputs": [ + { + "name": "redeemer", + "type": "address", + "indexed": false, + "internalType": "address" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "UpdateTokenSeller", + "inputs": [ + { + "name": "seller", + "type": "address", + "indexed": false, + "internalType": "address" + } + ], + "anonymous": false + }, + { + "type": "event", + "name": "WithdrawFee", + "inputs": [ + { + "name": "operator", + "type": "address", + "indexed": true, + "internalType": "address" + }, + { + "name": "fee", + "type": "uint256", + "indexed": false, + "internalType": "uint256" + } + ], + "anonymous": false + } + ] + \ No newline at end of file diff --git a/zp-relayer/common/serviceUtils.ts b/zp-relayer/common/serviceUtils.ts new file mode 100644 index 00000000..649e7642 --- /dev/null +++ b/zp-relayer/common/serviceUtils.ts @@ -0,0 +1,111 @@ +import { BaseConfig } from '@/configs/baseConfig' +import { GasPriceConfig } from '@/configs/common/gasPriceConfig' +import { NetworkConfig } from '@/configs/common/networkConfig' +import { PriceFeedConfig } from '@/configs/common/priceFeedConfig' +import { TxManagerConfig } from '@/configs/common/txManagerConfig' +import { GasPrice } from '@/lib/gas-price' +import { EvmBackend, isEthereum, Network, NetworkBackend, TransactionManager } from '@/lib/network' +import { EvmTxManager } from 
'@/lib/network/evm/EvmTxManager' +import { IPriceFeed, NativePriceFeed, OneInchPriceFeed, PriceFeedType } from '@/lib/price-feed' +import { Circuit, IProver, LocalProver, ProverType, RemoteProver } from '@/prover' +import { Redis } from 'ioredis' +import { Params } from 'libzkbob-rs-node' + +export function buildProver( + circuit: T, + type: ProverType, + path: string, + precompute: boolean = false +): IProver { + switch (type) { + case ProverType.Local: { + const params = Params.fromFile(path, precompute) + return new LocalProver(circuit, params) + } + case ProverType.Remote: + return new RemoteProver(path) + default: + throw new Error('Unsupported prover type') + } +} + +export function buildPriceFeed( + network: NetworkBackend, + config: PriceFeedConfig, + poolToken: string +): IPriceFeed { + switch (config.PRICE_FEED_TYPE) { + case PriceFeedType.OneInch: + return new OneInchPriceFeed(network, config.PRICE_FEED_CONTRACT_ADDRESS, { + poolTokenAddress: poolToken, + customBaseTokenAddress: config.PRICE_FEED_BASE_TOKEN_ADDRESS, + }) + case PriceFeedType.Native: + return new NativePriceFeed() + default: + throw new Error('Unsupported price feed') + } +} + +export function buildNetworkBackend( + config: BaseConfig, + networkConfig: NetworkConfig, + poolToken: string +): NetworkBackend { + let networkBackend: NetworkBackend + const baseConfig = { + poolAddress: config.COMMON_POOL_ADDRESS, + tokenAddress: poolToken, + rpcUrls: config.COMMON_RPC_URL, + requireHTTPS: config.COMMON_REQUIRE_RPC_HTTPS, + } + if (networkConfig.NETWORK === Network.Ethereum) { + const evmBackend = new EvmBackend({ + ...baseConfig, + rpcRequestTimeout: config.COMMON_RPC_REQUEST_TIMEOUT, + rpcSyncCheckInterval: config.COMMON_RPC_SYNC_STATE_CHECK_INTERVAL, + jsonRpcErrorCodes: config.COMMON_JSONRPC_ERROR_CODES, + withRedundantProvider: networkConfig.TX_REDUNDANCY, + }) + networkBackend = evmBackend + } else if (networkConfig.NETWORK === Network.Tron) { + throw new Error('Unsupported network 
backend') + } else { + throw new Error('Unsupported network backend') + } + return networkBackend +} + +export function buildTxManager( + redis: Redis, + network: NetworkBackend, + gasPriceConfig: GasPriceConfig, + txManagerConfig: TxManagerConfig +): TransactionManager { + let txManager: TransactionManager + if (isEthereum(network)) { + const gpConfig = gasPriceConfig as GasPriceConfig + const gasPrice = new GasPrice( + (network as EvmBackend).web3, + { gasPrice: gpConfig.GAS_PRICE_FALLBACK }, + gpConfig.GAS_PRICE_UPDATE_INTERVAL, + gpConfig.GAS_PRICE_ESTIMATION_TYPE, + { + speedType: gpConfig.GAS_PRICE_SPEED_TYPE, + factor: gpConfig.GAS_PRICE_FACTOR, + maxFeeLimit: gpConfig.MAX_FEE_PER_GAS_LIMIT, + } + ) + const tmConfig = txManagerConfig as TxManagerConfig + txManager = new EvmTxManager((network as EvmBackend).web3Redundant, txManagerConfig.TX_PRIVATE_KEY, gasPrice, { + redis, + gasPriceBumpFactor: tmConfig.TX_MIN_GAS_PRICE_BUMP_FACTOR, + gasPriceSurplus: tmConfig.TX_GAS_PRICE_SURPLUS, + gasPriceMaxFeeLimit: tmConfig.TX_MAX_FEE_PER_GAS_LIMIT, + waitingFundsTimeout: tmConfig.BALANCE_CHECK_TIMEOUT + }) + } else { + throw new Error('Unsupported network backend') + } + return txManager +} diff --git a/zp-relayer/configs/baseConfig.ts b/zp-relayer/configs/baseConfig.ts index a3c0da46..fcfa3ef8 100644 --- a/zp-relayer/configs/baseConfig.ts +++ b/zp-relayer/configs/baseConfig.ts @@ -1,20 +1,31 @@ -const config = { - poolAddress: process.env.COMMON_POOL_ADDRESS as string, - startBlock: parseInt(process.env.COMMON_START_BLOCK || '0'), - colorizeLogs: process.env.COMMON_COLORIZE_LOGS === 'true', - logLevel: process.env.COMMON_LOG_LEVEL || 'debug', - redisUrl: process.env.COMMON_REDIS_URL as string, - rpcUrls: (process.env.COMMON_RPC_URL as string).split(' ').filter(url => url.length > 0), - requireHTTPS: process.env.COMMON_REQUIRE_RPC_HTTPS === 'true', - rpcSyncCheckInterval: parseInt(process.env.COMMON_RPC_SYNC_STATE_CHECK_INTERVAL || '0'), - rpcRequestTimeout: 
parseInt(process.env.COMMON_RPC_REQUEST_TIMEOUT || '1000'), - jsonRpcErrorCodes: (process.env.COMMON_JSONRPC_ERROR_CODES || '-32603 -32002 -32005') - .split(' ') - .filter(s => s.length > 0) - .map(s => parseInt(s, 10)), - eventsProcessingBatchSize: parseInt(process.env.COMMON_EVENTS_PROCESSING_BATCH_SIZE || '10000'), - screenerUrl: process.env.COMMON_SCREENER_URL || null, - screenerToken: process.env.COMMON_SCREENER_TOKEN || null, -} +import { z } from 'zod' +import { zBooleanString, zNullishString } from './common/utils' + +const schema = z.object({ + COMMON_POOL_ADDRESS: z.string(), + COMMON_START_BLOCK: z.coerce.number().default(0), + COMMON_INDEXER_URL: z.string().optional(), + COMMON_REDIS_URL: z.string(), + COMMON_RPC_URL: z.string().transform(us => us.split(' ').filter(url => url.length > 0)), + COMMON_REQUIRE_RPC_HTTPS: zBooleanString().default('false'), + COMMON_RPC_SYNC_STATE_CHECK_INTERVAL: z.coerce.number().default(0), + COMMON_RPC_REQUEST_TIMEOUT: z.coerce.number().default(1000), + COMMON_JSONRPC_ERROR_CODES: z + .string() + .transform(s => + s + .split(' ') + .filter(s => s.length > 0) + .map(s => parseInt(s, 10)) + ) + .default('-32603 -32002 -32005'), + COMMON_EVENTS_PROCESSING_BATCH_SIZE: z.coerce.number().default(10000), + COMMON_SCREENER_URL: zNullishString(), + COMMON_SCREENER_TOKEN: zNullishString(), +}) -export default config +export type BaseConfig = z.infer<typeof schema> + +export function getBaseConfig(): BaseConfig { + return schema.parse(process.env) +} diff --git a/zp-relayer/configs/commitmentWatcherConfig.ts b/zp-relayer/configs/commitmentWatcherConfig.ts new file mode 100644 index 00000000..f1ad73da --- /dev/null +++ b/zp-relayer/configs/commitmentWatcherConfig.ts @@ -0,0 +1,28 @@ +import { z } from 'zod' +import { getBaseConfig } from './baseConfig' +import { getGasPriceConfig } from './common/gasPriceConfig' +import { getNetworkConfig } from './common/networkConfig' +import { getTxManagerConfig } from './common/txManagerConfig' +import { zBN, 
zBooleanString } from './common/utils' + +const zSchema = z.object({ + COMMITMENT_WATCHER_PORT: z.coerce.number().default(8000), + COMMITMENT_WATCHER_TOKEN_ADDRESS: z.string(), + COMMITMENT_WATCHER_PRECOMPUTE_PARAMS: zBooleanString().default('false'), + COMMITMENT_WATCHER_TREE_UPDATE_PARAMS_PATH: z.string().default('../params/tree_params.bin'), + COMMITMENT_WATCHER_DIRECT_DEPOSIT_PARAMS_PATH: z.string().default('../params/delegated_deposit_params.bin'), + COMMITMENT_WATCHER_STATE_DIR_PATH: z.string().default('./POOL_STATE'), + COMMITMENT_WATCHER_TX_VK_PATH: z.string().default('../params/transfer_verification_key.json'), + COMMITMENT_WATCHER_FETCH_INTERVAL: z.coerce.number().default(10000), + COMMITMENT_WATCHER_FEE: zBN().default('100000000'), +}) + +const network = getNetworkConfig() + +export default { + ...zSchema.parse(process.env), + network, + base: getBaseConfig(), + txManager: getTxManagerConfig(network.NETWORK), + gasPrice: getGasPriceConfig(network.NETWORK), +} diff --git a/zp-relayer/configs/common/gasPriceConfig.ts b/zp-relayer/configs/common/gasPriceConfig.ts new file mode 100644 index 00000000..aac9612f --- /dev/null +++ b/zp-relayer/configs/common/gasPriceConfig.ts @@ -0,0 +1,31 @@ +import { EstimationType } from '@/lib/gas-price' +import { Network } from '@/lib/network' +import { z } from 'zod' +import { zBN } from './utils' + +export const zGasPrice = z.object({ + GAS_PRICE_ESTIMATION_TYPE: z.nativeEnum(EstimationType).default(EstimationType.Web3), + GAS_PRICE_UPDATE_INTERVAL: z.coerce.number().default(5000), + GAS_PRICE_SURPLUS: z.coerce.number().default(0.1), + MIN_GAS_PRICE_BUMP_FACTOR: z.coerce.number().default(0.1), + GAS_PRICE_FACTOR: z.coerce.number().default(1), + GAS_PRICE_SPEED_TYPE: z.string().default('fast'), + GAS_PRICE_FALLBACK: z.string(), + MAX_FEE_PER_GAS_LIMIT: zBN().nullable().default(null), +}) + +export type GasPriceConfig<N extends Network> = N extends Network.Ethereum + ? z.infer<typeof zGasPrice> + : N extends Network.Tron + ? 
{} + : never + +export function getGasPriceConfig<N extends Network>(network: N): GasPriceConfig<N> { + if (network === Network.Ethereum) { + return zGasPrice.parse(process.env) as GasPriceConfig<N> + } else if (network === Network.Tron) { + return {} as GasPriceConfig<N> + } else { + throw new Error('Unsupported network') + } +} diff --git a/zp-relayer/configs/common/networkConfig.ts b/zp-relayer/configs/common/networkConfig.ts new file mode 100644 index 00000000..88e7fb5f --- /dev/null +++ b/zp-relayer/configs/common/networkConfig.ts @@ -0,0 +1,19 @@ +import { Network } from '@/lib/network' +import { z } from 'zod' +import { zBooleanString } from './utils' + +const zNetwork = z.discriminatedUnion('NETWORK', [ + z.object({ + NETWORK: z.literal(Network.Ethereum), + TX_REDUNDANCY: zBooleanString().default('false'), + }), + z.object({ + NETWORK: z.literal(Network.Tron), + }), +]) + +export type NetworkConfig = z.infer<typeof zNetwork> + +export function getNetworkConfig(): NetworkConfig { + return zNetwork.parse(process.env) +} diff --git a/zp-relayer/configs/common/priceFeedConfig.ts b/zp-relayer/configs/common/priceFeedConfig.ts new file mode 100644 index 00000000..2ca5381a --- /dev/null +++ b/zp-relayer/configs/common/priceFeedConfig.ts @@ -0,0 +1,17 @@ +import { PriceFeedType } from '@/lib/price-feed' +import { z } from 'zod' + +const zPriceFeed = z.discriminatedUnion('PRICE_FEED_TYPE', [ + z.object({ PRICE_FEED_TYPE: z.literal(PriceFeedType.Native) }), + z.object({ + PRICE_FEED_TYPE: z.literal(PriceFeedType.OneInch), + PRICE_FEED_CONTRACT_ADDRESS: z.string(), + PRICE_FEED_BASE_TOKEN_ADDRESS: z.string(), + }), +]) + +export type PriceFeedConfig = z.infer<typeof zPriceFeed> + +export function getPriceFeedConfig(): PriceFeedConfig { + return zPriceFeed.parse(process.env) +} diff --git a/zp-relayer/configs/common/txManagerConfig.ts b/zp-relayer/configs/common/txManagerConfig.ts new file mode 100644 index 00000000..54a03f71 --- /dev/null +++ b/zp-relayer/configs/common/txManagerConfig.ts @@ -0,0 +1,61 @@ +import { Network } from 
'@/lib/network/types' +import Web3 from 'web3' +import { z } from 'zod' +import { TxType } from 'zp-memo-parser' +import { zBN } from './utils' + +const zBaseConfig = z + .object({ + TX_PRIVATE_KEY: z.string(), + RELAYER_INSUFFICIENT_BALANCE_CHECK_TIMEOUT: z.coerce.number().default(5000), + }) + .transform(o => ({ + TX_ADDRESS: new Web3().eth.accounts.privateKeyToAccount(o.TX_PRIVATE_KEY).address, + TX_PRIVATE_KEY: o.TX_PRIVATE_KEY, + BALANCE_CHECK_TIMEOUT: o.RELAYER_INSUFFICIENT_BALANCE_CHECK_TIMEOUT, + })) + +const zTxGas = z + .object({ + BASE_TX_GAS_DEPOSIT: zBN().default('650000'), + BASE_TX_GAS_PERMITTABLE_DEPOSIT: zBN().default('650000'), + BASE_TX_GAS_TRANSFER: zBN().default('650000'), + BASE_TX_GAS_WITHDRAWAL: zBN().default('650000'), + BASE_TX_GAS_NATIVE_CONVERT: zBN().default('200000'), + }) + .transform(o => ({ + baseTxGas: { + [TxType.DEPOSIT]: o.BASE_TX_GAS_DEPOSIT, + [TxType.PERMITTABLE_DEPOSIT]: o.BASE_TX_GAS_PERMITTABLE_DEPOSIT, + [TxType.TRANSFER]: o.BASE_TX_GAS_TRANSFER, + [TxType.WITHDRAWAL]: o.BASE_TX_GAS_WITHDRAWAL, + RELAYER_BASE_TX_GAS_NATIVE_CONVERT: o.BASE_TX_GAS_NATIVE_CONVERT, + }, + })) + +const zEvmConfig = z + .object({ + TX_MIN_GAS_PRICE_BUMP_FACTOR: z.coerce.number().default(0.1), + TX_GAS_PRICE_SURPLUS: z.coerce.number().default(0.1), + TX_MAX_FEE_PER_GAS_LIMIT: zBN().nullable().default(null), + }) + .and(zTxGas) + .and(zBaseConfig) + +const zTronConfig = z.object({}).and(zBaseConfig) + +export type TxManagerConfig<N extends Network> = N extends Network.Ethereum + ? z.infer<typeof zEvmConfig> + : N extends Network.Tron + ? 
z.infer<typeof zTronConfig> + : never + +export function getTxManagerConfig<N extends Network>(network: N): TxManagerConfig<N> { + if (network === Network.Ethereum) { + return zEvmConfig.parse(process.env) as TxManagerConfig<N> + } else if (network === Network.Tron) { + return zTronConfig.parse(process.env) as TxManagerConfig<N> + } else { + throw new Error('Unsupported network') + } +} diff --git a/zp-relayer/configs/common/utils.ts b/zp-relayer/configs/common/utils.ts new file mode 100644 index 00000000..3d707e95 --- /dev/null +++ b/zp-relayer/configs/common/utils.ts @@ -0,0 +1,10 @@ +import { toBN } from 'web3-utils' +import { z } from 'zod' + +export const zBN = () => z.string().transform(toBN) +export const zBooleanString = () => z.enum(['true', 'false']).transform(value => value === 'true') +export const zNullishString = () => + z + .string() + .optional() + .transform(x => x ?? null) diff --git a/zp-relayer/configs/guardConfig.ts b/zp-relayer/configs/guardConfig.ts new file mode 100644 index 00000000..0dd451ba --- /dev/null +++ b/zp-relayer/configs/guardConfig.ts @@ -0,0 +1,21 @@ +import { Network } from '@/lib/network/types' +import { z } from 'zod' + +export const zBooleanString = () => z.enum(['true', 'false']).transform(value => value === 'true') + +const schema = z.object({ + GUARD_PORT: z.coerce.number(), + GUARD_NETWORK: z.nativeEnum(Network), + COMMON_RPC_URL: z.string().transform(us => us.split(' ').filter(url => url.length > 0)), + GUARD_ADDRESS_PRIVATE_KEY: z.string(), + COMMON_REQUIRE_RPC_HTTPS: zBooleanString().default('false'), + COMMON_POOL_ADDRESS: z.string(), + GUARD_TX_VK_PATH: z.string().default('../params/transfer_verification_key.json'), + GUARD_TREE_VK_PATH: z.string().default('../params/tree_verification_key.json'), +}) + +const config = schema.parse(process.env) + +export default { + ...config, +} diff --git a/zp-relayer/configs/indexerConfig.ts b/zp-relayer/configs/indexerConfig.ts new file mode 100644 index 00000000..943a2bd0 --- /dev/null +++ 
b/zp-relayer/configs/indexerConfig.ts @@ -0,0 +1,22 @@ +import { z } from 'zod' +import { getBaseConfig } from './baseConfig' +import { getNetworkConfig } from './common/networkConfig' +import { zBooleanString } from './common/utils' + +const schema = z.object({ + INDEXER_PORT: z.coerce.number().default(8000), + INDEXER_REQUEST_LOG_PATH: z.string().default('./indexer.log'), + INDEXER_EXPRESS_TRUST_PROXY: zBooleanString().default('false'), + INDEXER_STATE_DIR_PATH: z.string().default('./INDEXER_STATE'), + INDEXER_TX_VK_PATH: z.string().default('../params/transfer_verification_key.json'), + INDEXER_TOKEN_ADDRESS: z.string(), + INDEXER_BLOCK_CONFIRMATIONS: z.coerce.number().default(1), +}) + +const config = schema.parse(process.env) + +export default { + ...config, + base: getBaseConfig(), + network: getNetworkConfig(), +} diff --git a/zp-relayer/configs/loggerConfig.ts b/zp-relayer/configs/loggerConfig.ts new file mode 100644 index 00000000..c4bed915 --- /dev/null +++ b/zp-relayer/configs/loggerConfig.ts @@ -0,0 +1,11 @@ +import { z } from 'zod' +import { zBooleanString } from './common/utils' + +const schema = z.object({ + LOGGER_COLORIZE_LOGS: zBooleanString().default('false'), + LOGGER_LOG_LEVEL: z.string().default('debug'), +}) + +const config = schema.parse(process.env) + +export default config diff --git a/zp-relayer/configs/relayerConfig.ts b/zp-relayer/configs/relayerConfig.ts index e3a56575..6f94ab2f 100644 --- a/zp-relayer/configs/relayerConfig.ts +++ b/zp-relayer/configs/relayerConfig.ts @@ -1,88 +1,107 @@ -import Web3 from 'web3' -import { toBN } from 'web3-utils' -import baseConfig from './baseConfig' -import { FeeManagerType } from '@/services/fee' -import { PriceFeedType } from '@/services/price-feed' -import type { EstimationType, GasPriceKey } from '@/services/gas-price' +import { logger } from '@/lib/appLogger' +import { FeeManagerType } from '@/lib/fee' import { ProverType } from '@/prover' import { countryCodes } from '@/utils/countryCodes' 
-import { logger } from '@/services/appLogger' import { PermitType } from '@/utils/permit/types' +import { z } from 'zod' import { TxType } from 'zp-memo-parser' - -const relayerAddress = new Web3().eth.accounts.privateKeyToAccount( - process.env.RELAYER_ADDRESS_PRIVATE_KEY as string -).address +import { getBaseConfig } from './baseConfig' +import { getGasPriceConfig } from './common/gasPriceConfig' +import { getNetworkConfig } from './common/networkConfig' +import { getPriceFeedConfig } from './common/priceFeedConfig' +import { getTxManagerConfig } from './common/txManagerConfig' +import { zBN, zBooleanString, zNullishString } from './common/utils' const defaultHeaderBlacklist = 'accept accept-language accept-encoding connection content-length content-type postman-token referer upgrade-insecure-requests' -const config = { - ...baseConfig, - relayerRef: process.env.RELAYER_REF || null, - relayerSHA: process.env.RELAYER_SHA || null, - port: parseInt(process.env.RELAYER_PORT || '8000'), - relayerAddress, - relayerPrivateKey: process.env.RELAYER_ADDRESS_PRIVATE_KEY as string, - tokenAddress: process.env.RELAYER_TOKEN_ADDRESS as string, - relayerGasLimit: toBN(process.env.RELAYER_GAS_LIMIT as string), - minBaseFee: toBN(process.env.RELAYER_MIN_BASE_FEE || '0'), - relayerFee: process.env.RELAYER_FEE ? 
toBN(process.env.RELAYER_FEE) : null, - maxNativeAmount: toBN(process.env.RELAYER_MAX_NATIVE_AMOUNT || '0'), - treeUpdateParamsPath: process.env.RELAYER_TREE_UPDATE_PARAMS_PATH || './params/tree_params.bin', - transferParamsPath: process.env.RELAYER_TRANSFER_PARAMS_PATH || './params/transfer_params.bin', - directDepositParamsPath: process.env.RELAYER_DIRECT_DEPOSIT_PARAMS_PATH || './params/delegated_deposit_params.bin', - txVKPath: process.env.RELAYER_TX_VK_PATH || './params/transfer_verification_key.json', - requestLogPath: process.env.RELAYER_REQUEST_LOG_PATH || './zp.log', - stateDirPath: process.env.RELAYER_STATE_DIR_PATH || './POOL_STATE', - gasPriceFallback: process.env.RELAYER_GAS_PRICE_FALLBACK as string, - gasPriceEstimationType: (process.env.RELAYER_GAS_PRICE_ESTIMATION_TYPE as EstimationType) || 'web3', - gasPriceSpeedType: (process.env.RELAYER_GAS_PRICE_SPEED_TYPE as GasPriceKey) || 'fast', - gasPriceFactor: parseInt(process.env.RELAYER_GAS_PRICE_FACTOR || '1'), - gasPriceUpdateInterval: parseInt(process.env.RELAYER_GAS_PRICE_UPDATE_INTERVAL || '5000'), - gasPriceSurplus: parseFloat(process.env.RELAYER_GAS_PRICE_SURPLUS || '0.1'), - minGasPriceBumpFactor: parseFloat(process.env.RELAYER_MIN_GAS_PRICE_BUMP_FACTOR || '0.1'), - maxFeeLimit: process.env.RELAYER_MAX_FEE_PER_GAS_LIMIT ? 
toBN(process.env.RELAYER_MAX_FEE_PER_GAS_LIMIT) : null, - maxSentQueueSize: parseInt(process.env.RELAYER_MAX_SENT_QUEUE_SIZE || '20'), - relayerTxRedundancy: process.env.RELAYER_TX_REDUNDANCY === 'true', - sentTxDelay: parseInt(process.env.RELAYER_SENT_TX_DELAY || '30000'), - sentTxLogErrorThreshold: parseInt(process.env.RELAYER_SENT_TX_ERROR_THRESHOLD || '3'), - insufficientBalanceCheckTimeout: parseInt(process.env.RELAYER_INSUFFICIENT_BALANCE_CHECK_TIMEOUT || '60000'), - permitDeadlineThresholdInitial: parseInt(process.env.RELAYER_PERMIT_DEADLINE_THRESHOLD_INITIAL || '300'), - requireTraceId: process.env.RELAYER_REQUIRE_TRACE_ID === 'true', - requireLibJsVersion: process.env.RELAYER_REQUIRE_LIBJS_VERSION === 'true', - logIgnoreRoutes: (process.env.RELAYER_LOG_IGNORE_ROUTES || '').split(' ').filter(r => r.length > 0), - logHeaderBlacklist: (process.env.RELAYER_LOG_HEADER_BLACKLIST || defaultHeaderBlacklist) - .split(' ') - .filter(r => r.length > 0), - blockedCountries: (process.env.RELAYER_BLOCKED_COUNTRIES || '').split(' ').filter(c => { - if (c.length === 0) return false +const zBaseTxGas = z + .object({ + RELAYER_BASE_TX_GAS_DEPOSIT: zBN().default('650000'), + RELAYER_BASE_TX_GAS_PERMITTABLE_DEPOSIT: zBN().default('650000'), + RELAYER_BASE_TX_GAS_TRANSFER: zBN().default('650000'), + RELAYER_BASE_TX_GAS_WITHDRAWAL: zBN().default('650000'), + RELAYER_BASE_TX_GAS_NATIVE_CONVERT: zBN().default('200000'), + }) + .transform(o => ({ + baseTxGas: { + [TxType.DEPOSIT]: o.RELAYER_BASE_TX_GAS_DEPOSIT, + [TxType.PERMITTABLE_DEPOSIT]: o.RELAYER_BASE_TX_GAS_PERMITTABLE_DEPOSIT, + [TxType.TRANSFER]: o.RELAYER_BASE_TX_GAS_TRANSFER, + [TxType.WITHDRAWAL]: o.RELAYER_BASE_TX_GAS_WITHDRAWAL, + RELAYER_BASE_TX_GAS_NATIVE_CONVERT: o.RELAYER_BASE_TX_GAS_NATIVE_CONVERT, + }, + })) - const exists = countryCodes.has(c) - if (!exists) { - logger.error(`Country code ${c} is not valid, skipping`) - } - return exists - }), - trustProxy: process.env.RELAYER_EXPRESS_TRUST_PROXY === 'true', - 
treeProverType: (process.env.RELAYER_TREE_PROVER_TYPE || ProverType.Local) as ProverType, - directDepositProverType: (process.env.RELAYER_DD_PROVER_TYPE || ProverType.Local) as ProverType, - feeManagerType: (process.env.RELAYER_FEE_MANAGER_TYPE || FeeManagerType.Dynamic) as FeeManagerType, - feeManagerUpdateInterval: parseInt(process.env.RELAYER_FEE_MANAGER_UPDATE_INTERVAL || '10000'), - feeMarginFactor: toBN(process.env.RELAYER_FEE_MARGIN_FACTOR || '100'), - feeScalingFactor: toBN(process.env.RELAYER_FEE_SCALING_FACTOR || '100'), - priceFeedType: (process.env.RELAYER_PRICE_FEED_TYPE || PriceFeedType.Native) as PriceFeedType, - priceFeedContractAddress: process.env.RELAYER_PRICE_FEED_CONTRACT_ADDRESS || null, - priceFeedBaseTokenAddress: process.env.RELAYER_PRICE_FEED_BASE_TOKEN_ADDRESS || null, - precomputeParams: process.env.RELAYER_PRECOMPUTE_PARAMS === 'true', - permitType: (process.env.RELAYER_PERMIT_TYPE || PermitType.SaltedPermit) as PermitType, - baseTxGas: { - [TxType.DEPOSIT]: toBN(process.env.RELAYER_BASE_TX_GAS_DEPOSIT || '650000'), - [TxType.PERMITTABLE_DEPOSIT]: toBN(process.env.RELAYER_BASE_TX_GAS_PERMITTABLE_DEPOSIT || '650000'), - [TxType.TRANSFER]: toBN(process.env.RELAYER_BASE_TX_GAS_TRANSFER || '650000'), - [TxType.WITHDRAWAL]: toBN(process.env.RELAYER_BASE_TX_GAS_WITHDRAWAL || '650000'), - nativeConvertOverhead: toBN(process.env.RELAYER_BASE_TX_GAS_NATIVE_CONVERT || '200000'), - }, -} +const zFeeManager = z + .object({ + RELAYER_FEE_MARGIN_FACTOR: zBN().default('100'), + RELAYER_FEE_SCALING_FACTOR: zBN().default('100'), + RELAYER_FEE_MANAGER_UPDATE_INTERVAL: z.coerce.number().default(10000), + }) + .and( + z.discriminatedUnion('RELAYER_FEE_MANAGER_TYPE', [ + z.object({ RELAYER_FEE_MANAGER_TYPE: z.literal(FeeManagerType.Optimism) }), + z.object({ RELAYER_FEE_MANAGER_TYPE: z.literal(FeeManagerType.Dynamic) }), + z.object({ RELAYER_FEE_MANAGER_TYPE: z.literal(FeeManagerType.Static), RELAYER_FEE: zBN() }), + ]) + ) + +const zSchema = z + .object({ 
+ RELAYER_REF: zNullishString(), + RELAYER_SHA: zNullishString(), + RELAYER_PORT: z.coerce.number().default(8000), + RELAYER_TOKEN_ADDRESS: z.string(), + RELAYER_GAS_LIMIT: zBN(), + RELAYER_MIN_BASE_FEE: zBN().default('0'), + RELAYER_MAX_NATIVE_AMOUNT: zBN().default('0'), + RELAYER_TREE_UPDATE_PARAMS_PATH: z.string().default('../params/tree_params.bin'), + RELAYER_TRANSFER_PARAMS_PATH: z.string().default('../params/transfer_params.bin'), + RELAYER_TX_VK_PATH: z.string().default('../params/transfer_verification_key.json'), + RELAYER_REQUEST_LOG_PATH: z.string().default('./zp.log'), + RELAYER_STATE_DIR_PATH: z.string().default('./POOL_STATE'), + RELAYER_SENT_TX_DELAY: z.coerce.number().default(30000), // NOT USED + RELAYER_SENT_TX_ERROR_THRESHOLD: z.coerce.number().default(3), // NOT USED + RELAYER_PERMIT_DEADLINE_THRESHOLD_INITIAL: z.coerce.number().default(300), + RELAYER_REQUIRE_TRACE_ID: zBooleanString().default('false'), + RELAYER_REQUIRE_LIBJS_VERSION: zBooleanString().default('false'), + RELAYER_EXPRESS_TRUST_PROXY: zBooleanString().default('false'), + RELAYER_PROVER_URL: z.string(), + RELAYER_LOG_IGNORE_ROUTES: z + .string() + .default('') + .transform(rs => rs.split(' ').filter(r => r.length > 0)), + RELAYER_LOG_HEADER_BLACKLIST: z + .string() + .default(defaultHeaderBlacklist) + .transform(hs => hs.split(' ').filter(r => r.length > 0)), + RELAYER_PERMIT_TYPE: z.nativeEnum(PermitType).default(PermitType.SaltedPermit), + RELAYER_BLOCKED_COUNTRIES: z + .string() + .default('') + .transform(cs => + cs.split(' ').filter(c => { + if (c.length === 0) return false -export default config + const exists = countryCodes.has(c) + if (!exists) { + logger.error(`Country code ${c} is not valid, skipping`) + } + return exists + }) + ), + }) + .and(zBaseTxGas) + .and(zFeeManager) + +const network = getNetworkConfig() + +export default { + ...zSchema.parse(process.env), + network, + base: getBaseConfig(), + txManager: getTxManagerConfig(network.NETWORK), + gasPrice: 
getGasPriceConfig(network.NETWORK), + priceFeed: getPriceFeedConfig(), +} diff --git a/zp-relayer/configs/watcherConfig.ts b/zp-relayer/configs/watcherConfig.ts index 483ec582..51e172ff 100644 --- a/zp-relayer/configs/watcherConfig.ts +++ b/zp-relayer/configs/watcherConfig.ts @@ -1,11 +1,19 @@ -import baseConfig from './baseConfig' +import { z } from 'zod' +import { getBaseConfig } from './baseConfig' +import { getNetworkConfig } from './common/networkConfig' -const config = { - ...baseConfig, - blockConfirmations: parseInt(process.env.WATCHER_BLOCK_CONFIRMATIONS || '1'), - eventPollingInterval: parseInt(process.env.WATCHER_EVENT_POLLING_INTERVAL || '600000'), - directDepositBatchSize: parseInt(process.env.DIRECT_DEPOSIT_BATCH_SIZE || '16'), - directDepositBatchTtl: parseInt(process.env.DIRECT_DEPOSIT_BATCH_TTL || '3600000'), -} +const zSchema = z.object({ + RELAYER_TOKEN_ADDRESS: z.string(), + WATCHER_BLOCK_CONFIRMATIONS: z.coerce.number().default(1), + WATCHER_EVENT_POLLING_INTERVAL: z.coerce.number().default(600000), + DIRECT_DEPOSIT_BATCH_SIZE: z.coerce.number().default(16), + DIRECT_DEPOSIT_BATCH_TTL: z.coerce.number().default(3600000), +}) + +const network = getNetworkConfig() -export default config +export default { + ...zSchema.parse(process.env), + network, + base: getBaseConfig(), +} diff --git a/zp-relayer/direct-deposit/watcher.ts b/zp-relayer/direct-deposit/watcher.ts deleted file mode 100644 index cad73a84..00000000 --- a/zp-relayer/direct-deposit/watcher.ts +++ /dev/null @@ -1,89 +0,0 @@ -// Reference implementation: -// https://github.com/omni/tokenbridge/blob/master/oracle/src/watcher.js -import type Web3 from 'web3' -import type { AbiItem } from 'web3-utils' -import type { DirectDeposit } from '@/queue/poolTxQueue' -import { web3 } from '@/services/web3' -import PoolAbi from '@/abi/pool-abi.json' -import DirectDepositQueueAbi from '@/abi/direct-deposit-queue-abi.json' -import config from '@/configs/watcherConfig' -import { logger } from 
'@/services/appLogger' -import { redis } from '@/services/redisClient' -import { lastProcessedBlock, getLastProcessedBlock, updateLastProcessedBlock, parseDirectDepositEvent } from './utils' -import { BatchCache } from './BatchCache' -import { validateDirectDeposit } from '@/validation/tx/validateDirectDeposit' -import { getBlockNumber, getEvents } from '@/utils/web3' -import { directDepositQueue } from '@/queue/directDepositQueue' - -const PoolInstance = new web3.eth.Contract(PoolAbi as AbiItem[], config.poolAddress) -const DirectDepositQueueInstance = new web3.eth.Contract(DirectDepositQueueAbi as AbiItem[]) - -const eventName = 'SubmitDirectDeposit' - -const batch = new BatchCache( - config.directDepositBatchSize, - config.directDepositBatchTtl, - ds => { - logger.info('Adding direct-deposit events to queue', { count: ds.length }) - directDepositQueue.add('', ds) - }, - dd => validateDirectDeposit(dd, DirectDepositQueueInstance), - redis -) - -async function init() { - await getLastProcessedBlock() - await batch.init() - const queueAddress = await PoolInstance.methods.direct_deposit_queue().call() - DirectDepositQueueInstance.options.address = queueAddress - runWatcher() -} - -async function getLastBlockToProcess(web3: Web3) { - const lastBlockNumber = await getBlockNumber(web3) - return lastBlockNumber - config.blockConfirmations -} - -async function watch() { - const lastBlockToProcess = await getLastBlockToProcess(web3) - - if (lastBlockToProcess <= lastProcessedBlock) { - logger.debug('All blocks already processed') - return - } - - const fromBlock = lastProcessedBlock + 1 - const rangeEndBlock = fromBlock + config.eventsProcessingBatchSize - let toBlock = Math.min(lastBlockToProcess, rangeEndBlock) - - let events = await getEvents(DirectDepositQueueInstance, eventName, { - fromBlock, - toBlock, - }) - logger.info(`Found ${events.length} direct-deposit events`) - - const directDeposits: [string, DirectDeposit][] = [] - for (let event of events) { - const dd 
= parseDirectDepositEvent(event.returnValues) - directDeposits.push([dd.nonce, dd]) - } - - await batch.add(directDeposits) - - logger.debug('Updating last processed block', { lastProcessedBlock: toBlock.toString() }) - await updateLastProcessedBlock(toBlock) -} - -async function runWatcher() { - try { - await watch() - } catch (e) { - logger.error(e) - } - - setTimeout(() => { - runWatcher() - }, config.eventPollingInterval) -} - -init() diff --git a/zp-relayer/endpoints.ts b/zp-relayer/endpoints.ts deleted file mode 100644 index 5fec68bc..00000000 --- a/zp-relayer/endpoints.ts +++ /dev/null @@ -1,298 +0,0 @@ -import type { Queue } from 'bullmq' -import { Request, Response } from 'express' -import { LimitsFetch, pool, PoolTx } from './pool' -import { poolTxQueue } from './queue/poolTxQueue' -import config from './configs/relayerConfig' -import { - validateCountryIP, - checkGetLimits, - checkGetSiblings, - checkGetTransactionsV2, - checkMerkleRootErrors, - checkSendTransactionsErrors, - checkTraceId, - validateBatch, -} from './validation/api/validation' -import { sentTxQueue, SentTxState } from './queue/sentTxQueue' -import { HEADER_TRACE_ID } from './utils/constants' -import { getFileHash } from './utils/helpers' -import type { FeeManager } from './services/fee' - -async function sendTransactions(req: Request, res: Response) { - validateBatch([ - [checkTraceId, req.headers], - [checkSendTransactionsErrors, req.body], - ]) - - await validateCountryIP(req.ip) - - const rawTxs = req.body as PoolTx[] - const traceId = req.headers[HEADER_TRACE_ID] as string - - const txs = rawTxs.map(tx => { - const { proof, memo, txType, depositSignature } = tx - return { - proof, - memo, - txType, - depositSignature, - } - }) - const jobId = await pool.transact(txs, traceId) - res.json({ jobId }) -} - -async function merkleRoot(req: Request, res: Response) { - validateBatch([ - [checkTraceId, req.headers], - [checkMerkleRootErrors, req.params], - ]) - - const index = 
req.params.index - const root = await pool.getContractMerkleRoot(index) - res.json(root) -} - -async function getTransactionsV2(req: Request, res: Response) { - validateBatch([ - [checkTraceId, req.headers], - [checkGetTransactionsV2, req.query], - ]) - - const toV2Format = (prefix: string) => (tx: string) => { - const outCommit = tx.slice(0, 64) - const txHash = tx.slice(64, 128) - const memo = tx.slice(128) - return prefix + txHash + outCommit + memo - } - - // Types checked in validation stage - const limit = req.query.limit as unknown as number - const offset = req.query.offset as unknown as number - - const txs: string[] = [] - const { txs: poolTxs, nextOffset } = await pool.state.getTransactions(limit, offset) - txs.push(...poolTxs.map(toV2Format('1'))) - - if (txs.length < limit) { - const { txs: optimisticTxs } = await pool.optimisticState.getTransactions(limit - txs.length, nextOffset) - txs.push(...optimisticTxs.map(toV2Format('0'))) - } - - res.json(txs) -} - -async function getJob(req: Request, res: Response) { - enum JobStatus { - WAITING = 'waiting', - FAILED = 'failed', - SENT = 'sent', - REVERTED = 'reverted', - COMPLETED = 'completed', - } - - interface GetJobResponse { - resolvedJobId: string - createdOn: number - failedReason: null | string - finishedOn: null | number - state: JobStatus - txHash: null | string - } - - validateBatch([[checkTraceId, req.headers]]) - - const jobId = req.params.id - - async function getPoolJobState(requestedJobId: string): Promise { - const INCONSISTENCY_ERR = 'Internal job inconsistency' - - // Should be used in places where job is expected to exist - const safeGetJob = async (queue: Queue, id: string) => { - const job = await queue.getJob(id) - if (!job) { - throw new Error(INCONSISTENCY_ERR) - } - return job - } - - const jobId = await pool.state.jobIdsMapping.get(requestedJobId) - - const poolJobState = await poolTxQueue.getJobState(jobId) - if (poolJobState === 'unknown') return null - - const job = await 
safeGetJob(poolTxQueue, jobId) - - // Default result object - let result: GetJobResponse = { - resolvedJobId: jobId, - createdOn: job.timestamp, - failedReason: null, - finishedOn: null, - state: JobStatus.WAITING, - txHash: null, - } - - if (poolJobState === 'completed') { - // Transaction was included in optimistic state, waiting to be mined - - // Sanity check - if (job.returnvalue === null) throw new Error(INCONSISTENCY_ERR) - const sentJobId = job.returnvalue[0][1] - - const sentJobState = await sentTxQueue.getJobState(sentJobId) - // Should not happen here, but need to verify to be sure - if (sentJobState === 'unknown') throw new Error('Sent job not found') - - const sentJob = await safeGetJob(sentTxQueue, sentJobId) - if (sentJobState === 'waiting' || sentJobState === 'active' || sentJobState === 'delayed') { - // Transaction is in re-send loop - const txHash = sentJob.data.prevAttempts.at(-1)?.[0] - result.state = JobStatus.SENT - result.txHash = txHash || null - } else if (sentJobState === 'completed') { - // Sanity check - if (sentJob.returnvalue === null) throw new Error(INCONSISTENCY_ERR) - - const [txState, txHash] = sentJob.returnvalue - if (txState === SentTxState.MINED) { - // Transaction mined successfully - result.state = JobStatus.COMPLETED - result.txHash = txHash - result.finishedOn = sentJob.finishedOn || null - } else if (txState === SentTxState.REVERT) { - // Transaction reverted - result.state = JobStatus.REVERTED - result.txHash = txHash - result.finishedOn = sentJob.finishedOn || null - } - } - } else if (poolJobState === 'failed') { - // Either validation or tx sending failed - - // Sanity check - if (!job.finishedOn) throw new Error(INCONSISTENCY_ERR) - - result.state = JobStatus.FAILED - result.failedReason = job.failedReason - result.finishedOn = job.finishedOn || null - } - // Other states mean that transaction is either waiting in queue - // or being processed by worker - // So, no need to update `result` object - - return result - 
} - - const jobState = await getPoolJobState(jobId) - if (jobState) { - res.json(jobState) - } else { - res.json(`Job ${jobId} not found`) - } -} - -function relayerInfo(req: Request, res: Response) { - const deltaIndex = pool.state.getNextIndex() - const optimisticDeltaIndex = pool.optimisticState.getNextIndex() - const root = pool.state.getMerkleRoot() - const optimisticRoot = pool.optimisticState.getMerkleRoot() - - res.json({ - root, - optimisticRoot, - deltaIndex, - optimisticDeltaIndex, - }) -} - -function getFeeBuilder(feeManager: FeeManager) { - return async (req: Request, res: Response) => { - validateBatch([[checkTraceId, req.headers]]) - - const feeOptions = await feeManager.getFeeOptions() - const fees = feeOptions.denominate(pool.denominator).getObject() - - res.json(fees) - } -} - -async function getLimits(req: Request, res: Response) { - validateBatch([ - [checkTraceId, req.headers], - [checkGetLimits, req.query], - ]) - - const address = req.query.address as unknown as string - - let limitsFetch: LimitsFetch - try { - const limits = await pool.getLimitsFor(address) - limitsFetch = pool.processLimits(limits) - } catch (e) { - throw new Error(`Error while fetching limits for ${address}`) - } - - res.json(limitsFetch) -} - -function getMaxNativeAmount(req: Request, res: Response) { - validateBatch([[checkTraceId, req.headers]]) - - res.json({ - maxNativeAmount: config.maxNativeAmount.toString(10), - }) -} - -function getSiblings(req: Request, res: Response) { - validateBatch([ - [checkTraceId, req.headers], - [checkGetSiblings, req.query], - ]) - - const index = req.query.index as unknown as number - - if (index >= pool.state.getNextIndex()) { - res.status(400).json({ errors: ['Index out of range'] }) - return - } - - const siblings = pool.state.getSiblings(index) - res.json(siblings) -} - -function getParamsHashBuilder(path: string | null) { - let hash: string | null = null - if (path) { - hash = getFileHash(path) - } - return (req: Request, res: 
Response) => { - res.json({ hash }) - } -} - -function relayerVersion(req: Request, res: Response) { - res.json({ - ref: config.relayerRef, - commitHash: config.relayerSHA, - }) -} - -function root(req: Request, res: Response) { - return res.sendStatus(200) -} - -export default { - sendTransactions, - merkleRoot, - getTransactionsV2, - getJob, - relayerInfo, - getFeeBuilder, - getLimits, - getMaxNativeAmount, - getSiblings, - getParamsHashBuilder, - relayerVersion, - root, -} diff --git a/zp-relayer/index.ts b/zp-relayer/index.ts deleted file mode 100644 index bc96453d..00000000 --- a/zp-relayer/index.ts +++ /dev/null @@ -1,21 +0,0 @@ -import express from 'express' -import { createRouter } from './router' -import { logger } from './services/appLogger' -import { createConsoleLoggerMiddleware, createPersistentLoggerMiddleware } from './services/loggerMiddleware' -import config from './configs/relayerConfig' -import { init } from './init' - -init().then(({ feeManager }) => { - const app = express() - - if (config.trustProxy) { - app.set('trust proxy', true) - } - - app.use(createPersistentLoggerMiddleware(config.requestLogPath)) - app.use(createConsoleLoggerMiddleware()) - - app.use(createRouter({ feeManager })) - const PORT = config.port - app.listen(PORT, () => logger.info(`Started relayer on port ${PORT}`)) -}) diff --git a/zp-relayer/init.ts b/zp-relayer/init.ts deleted file mode 100644 index 4513cff1..00000000 --- a/zp-relayer/init.ts +++ /dev/null @@ -1,134 +0,0 @@ -import type Web3 from 'web3' -import { Mutex } from 'async-mutex' -import { Params } from 'libzkbob-rs-node' -import { pool } from './pool' -import { EstimationType, GasPrice } from './services/gas-price' -import { web3 } from './services/web3' -import { web3Redundant } from './services/web3Redundant' -import config from './configs/relayerConfig' -import { createPoolTxWorker } from './workers/poolTxWorker' -import { createSentTxWorker } from './workers/sentTxWorker' -import { 
createDirectDepositWorker } from './workers/directDepositWorker' -import { redis } from './services/redisClient' -import { validateTx } from './validation/tx/validateTx' -import { TxManager } from './tx/TxManager' -import { Circuit, IProver, LocalProver, ProverType, RemoteProver } from './prover' -import { FeeManagerType, FeeManager, StaticFeeManager, DynamicFeeManager, OptimismFeeManager } from './services/fee' -import type { IPriceFeed } from './services/price-feed/IPriceFeed' -import type { IWorkerBaseConfig } from './workers/workerTypes' -import { NativePriceFeed, OneInchPriceFeed, PriceFeedType } from './services/price-feed' - -function buildProver(circuit: T, type: ProverType, path: string): IProver { - if (type === ProverType.Local) { - const params = Params.fromFile(path, config.precomputeParams) - return new LocalProver(circuit, params) - } else if (type === ProverType.Remote) { - // TODO: test relayer with remote prover - return new RemoteProver('') - } else { - throw new Error('Unsupported prover type') - } -} - -function buildFeeManager( - type: FeeManagerType, - priceFeed: IPriceFeed, - gasPrice: GasPrice, - web3: Web3 -): FeeManager { - const managerConfig = { - priceFeed, - scaleFactor: config.feeScalingFactor, - marginFactor: config.feeMarginFactor, - updateInterval: config.feeManagerUpdateInterval, - } - if (type === FeeManagerType.Static) { - if (config.relayerFee === null) throw new Error('Static relayer fee is not set') - return new StaticFeeManager(managerConfig, config.relayerFee) - } - if (type === FeeManagerType.Dynamic) { - return new DynamicFeeManager(managerConfig, gasPrice) - } else if (type === FeeManagerType.Optimism) { - return new OptimismFeeManager(managerConfig, gasPrice, web3) - } else { - throw new Error('Unsupported fee manager') - } -} - -function buildPriceFeed(type: PriceFeedType, web3: Web3): IPriceFeed { - if (type === PriceFeedType.OneInch) { - if (!config.priceFeedContractAddress) throw new Error('Price feed contract 
address is not set') - return new OneInchPriceFeed(web3, config.priceFeedContractAddress, { - poolTokenAddress: config.tokenAddress, - customBaseTokenAddress: config.priceFeedBaseTokenAddress, - }) - } else if (type === PriceFeedType.Native) { - return new NativePriceFeed() - } else { - throw new Error('Unsupported price feed') - } -} - -export async function init() { - await pool.init() - - const gasPriceService = new GasPrice( - web3, - { gasPrice: config.gasPriceFallback }, - config.gasPriceUpdateInterval, - config.gasPriceEstimationType, - { - speedType: config.gasPriceSpeedType, - factor: config.gasPriceFactor, - maxFeeLimit: config.maxFeeLimit, - } - ) - await gasPriceService.start() - - const txManager = new TxManager(web3Redundant, config.relayerPrivateKey, gasPriceService) - await txManager.init() - - const mutex = new Mutex() - - const baseConfig: IWorkerBaseConfig = { - redis, - } - - const treeProver = buildProver(Circuit.Tree, config.treeProverType, config.treeUpdateParamsPath as string) - - const directDepositProver = buildProver( - Circuit.DirectDeposit, - config.directDepositProverType, - config.directDepositParamsPath as string - ) - - const priceFeed = buildPriceFeed(config.priceFeedType, web3) - await priceFeed.init() - const feeManager = buildFeeManager(config.feeManagerType, priceFeed, gasPriceService, web3) - await feeManager.start() - - const workerPromises = [ - createPoolTxWorker({ - ...baseConfig, - validateTx, - treeProver, - mutex, - txManager, - feeManager, - }), - createSentTxWorker({ - ...baseConfig, - mutex, - txManager, - }), - createDirectDepositWorker({ - ...baseConfig, - directDepositProver, - }), - ] - - const workers = await Promise.all(workerPromises) - workers.forEach(w => w.run()) - - return { feeManager } -} diff --git a/zp-relayer/services/appLogger.ts b/zp-relayer/lib/appLogger.ts similarity index 73% rename from zp-relayer/services/appLogger.ts rename to zp-relayer/lib/appLogger.ts index 1c8a629c..613b7ada 100644 --- 
a/zp-relayer/services/appLogger.ts +++ b/zp-relayer/lib/appLogger.ts @@ -1,13 +1,13 @@ +import config from '@/configs/loggerConfig' import { createLogger, format, transports } from 'winston' -import config from '@/configs/baseConfig' let logFormat = format.combine(format.timestamp(), format.splat(), format.simple()) -if (config.colorizeLogs) { +if (config.LOGGER_COLORIZE_LOGS) { logFormat = format.combine(format.colorize(), logFormat) } export const logger = createLogger({ - level: config.logLevel, + level: config.LOGGER_LOG_LEVEL, format: logFormat, transports: [new transports.Console()], }) diff --git a/zp-relayer/services/fee/DynamicFeeManager.ts b/zp-relayer/lib/fee/DynamicFeeManager.ts similarity index 87% rename from zp-relayer/services/fee/DynamicFeeManager.ts rename to zp-relayer/lib/fee/DynamicFeeManager.ts index 6bb1568f..99562e77 100644 --- a/zp-relayer/services/fee/DynamicFeeManager.ts +++ b/zp-relayer/lib/fee/DynamicFeeManager.ts @@ -1,8 +1,8 @@ -import { toBN } from 'web3-utils' -import { FeeManager, FeeEstimate, IFeeEstimateParams, IFeeManagerConfig, DynamicFeeOptions } from './FeeManager' -import { NZERO_BYTE_GAS } from '@/utils/constants' import relayerConfig from '@/configs/relayerConfig' +import { NZERO_BYTE_GAS } from '@/utils/constants' +import { toBN } from 'web3-utils' import type { EstimationType, GasPrice } from '../gas-price' +import { DynamicFeeOptions, FeeEstimate, FeeManager, IFeeEstimateParams, IFeeManagerConfig } from './FeeManager' export class DynamicFeeManager extends FeeManager { constructor(config: IFeeManagerConfig, private gasPrice: GasPrice) { @@ -25,6 +25,6 @@ export class DynamicFeeManager extends FeeManager { async _fetchFeeOptions(): Promise { const gasPrice = await this.gasPrice.fetchOnce() const oneByteFee = FeeManager.executionFee(gasPrice, toBN(NZERO_BYTE_GAS)) - return DynamicFeeOptions.fromGasPice(gasPrice, oneByteFee, relayerConfig.minBaseFee) + return DynamicFeeOptions.fromGasPice(gasPrice, oneByteFee, 
relayerConfig.RELAYER_MIN_BASE_FEE) } } diff --git a/zp-relayer/services/fee/FeeManager.ts b/zp-relayer/lib/fee/FeeManager.ts similarity index 98% rename from zp-relayer/services/fee/FeeManager.ts rename to zp-relayer/lib/fee/FeeManager.ts index 8200f937..0da5b73e 100644 --- a/zp-relayer/services/fee/FeeManager.ts +++ b/zp-relayer/lib/fee/FeeManager.ts @@ -1,11 +1,11 @@ +import config from '@/configs/relayerConfig' +import { applyDenominator, setIntervalAndRun } from '@/utils/helpers' import BN from 'bn.js' import { toBN } from 'web3-utils' -import type { IPriceFeed } from '../price-feed/IPriceFeed' -import { getMaxRequiredGasPrice, GasPriceValue } from '../gas-price' -import { applyDenominator, setIntervalAndRun } from '@/utils/helpers' -import { logger } from '../appLogger' import { TxType } from 'zp-memo-parser' -import config from '@/configs/relayerConfig' +import { logger } from '../appLogger' +import { GasPriceValue, getMaxRequiredGasPrice } from '../gas-price' +import type { IPriceFeed } from '../price-feed/IPriceFeed' export interface IFeeEstimateParams { txType: TxType @@ -103,7 +103,7 @@ export class DynamicFeeOptions extends FeeOptions { [TxType.TRANSFER]: getFee(TxType.TRANSFER), [TxType.WITHDRAWAL]: getFee(TxType.WITHDRAWAL), oneByteFee, - nativeConvertFee: FeeManager.executionFee(gasPrice, config.baseTxGas.nativeConvertOverhead), + nativeConvertFee: FeeManager.executionFee(gasPrice, config.baseTxGas.RELAYER_BASE_TX_GAS_NATIVE_CONVERT), } const minFees: Fees = { [TxType.DEPOSIT]: minFee, diff --git a/zp-relayer/services/fee/OptimismFeeManager.ts b/zp-relayer/lib/fee/OptimismFeeManager.ts similarity index 64% rename from zp-relayer/services/fee/OptimismFeeManager.ts rename to zp-relayer/lib/fee/OptimismFeeManager.ts index f1e8cf4e..a9983926 100644 --- a/zp-relayer/services/fee/OptimismFeeManager.ts +++ b/zp-relayer/lib/fee/OptimismFeeManager.ts @@ -1,30 +1,31 @@ -import type Web3 from 'web3' -import type BN from 'bn.js' -import type { Contract } from 
'web3-eth-contract' -import { OP_GAS_ORACLE_ADDRESS } from '@/utils/constants' -import { AbiItem, toBN, hexToBytes } from 'web3-utils' import OracleAbi from '@/abi/op-oracle.json' -import { contractCallRetry } from '@/utils/helpers' -import { FeeManager, FeeEstimate, IFeeEstimateParams, IFeeManagerConfig, DynamicFeeOptions } from './FeeManager' import relayerConfig from '@/configs/relayerConfig' -import { ZERO_BYTE_GAS, NZERO_BYTE_GAS } from '@/utils/constants' +import { NZERO_BYTE_GAS, OP_GAS_ORACLE_ADDRESS, ZERO_BYTE_GAS } from '@/utils/constants' +import type BN from 'bn.js' +import { hexToBytes, toBN } from 'web3-utils' import type { EstimationType, GasPrice } from '../gas-price' +import { NetworkBackend } from '../network/NetworkBackend' +import { Network, NetworkContract } from '../network/types' +import { DynamicFeeOptions, FeeEstimate, FeeManager, IFeeEstimateParams, IFeeManagerConfig } from './FeeManager' export class OptimismFeeManager extends FeeManager { - private oracle: Contract + private oracle: NetworkContract private overhead!: BN private decimals!: BN private scalar!: BN + private gasPrice: GasPrice - constructor(config: IFeeManagerConfig, private gasPrice: GasPrice, web3: Web3) { + constructor(config: IFeeManagerConfig, network: NetworkBackend) { super(config) - this.oracle = new web3.eth.Contract(OracleAbi as AbiItem[], OP_GAS_ORACLE_ADDRESS) + // @ts-ignore + this.gasPrice = network.gasPrice + this.oracle = network.contract(OracleAbi, OP_GAS_ORACLE_ADDRESS) } async init() { - this.overhead = await contractCallRetry(this.oracle, 'overhead').then(toBN) - this.decimals = await contractCallRetry(this.oracle, 'decimals').then(toBN) - this.scalar = await contractCallRetry(this.oracle, 'scalar').then(toBN) + this.overhead = await this.oracle.callRetry('overhead').then(toBN) + this.decimals = await this.oracle.callRetry('decimals').then(toBN) + this.scalar = await this.oracle.callRetry('scalar').then(toBN) } private getL1GasUsed(data: string): BN { @@ 
-63,10 +64,10 @@ export class OptimismFeeManager extends FeeManager { async _fetchFeeOptions(): Promise { const gasPrice = await this.gasPrice.fetchOnce() - const l1BaseFee = await contractCallRetry(this.oracle, 'l1BaseFee').then(toBN) + const l1BaseFee = await this.oracle.callRetry('l1BaseFee').then(toBN) const oneByteFee = l1BaseFee.muln(NZERO_BYTE_GAS) - return DynamicFeeOptions.fromGasPice(gasPrice, oneByteFee, relayerConfig.minBaseFee) + return DynamicFeeOptions.fromGasPice(gasPrice, oneByteFee, relayerConfig.RELAYER_MIN_BASE_FEE) } } diff --git a/zp-relayer/services/fee/StaticFeeManager.ts b/zp-relayer/lib/fee/StaticFeeManager.ts similarity index 86% rename from zp-relayer/services/fee/StaticFeeManager.ts rename to zp-relayer/lib/fee/StaticFeeManager.ts index 19830dd7..e3daf3b1 100644 --- a/zp-relayer/services/fee/StaticFeeManager.ts +++ b/zp-relayer/lib/fee/StaticFeeManager.ts @@ -1,5 +1,5 @@ import type BN from 'bn.js' -import { FeeManager, FeeEstimate, IFeeManagerConfig, FeeOptions } from './FeeManager' +import { FeeEstimate, FeeManager, FeeOptions, IFeeManagerConfig } from './FeeManager' export class StaticFeeManager extends FeeManager { constructor(config: IFeeManagerConfig, private readonly staticFee: BN) { diff --git a/zp-relayer/services/fee/index.ts b/zp-relayer/lib/fee/index.ts similarity index 100% rename from zp-relayer/services/fee/index.ts rename to zp-relayer/lib/fee/index.ts index c24fdc68..59d64a97 100644 --- a/zp-relayer/services/fee/index.ts +++ b/zp-relayer/lib/fee/index.ts @@ -1,7 +1,7 @@ -export * from './FeeManager' -export * from './StaticFeeManager' export * from './DynamicFeeManager' +export * from './FeeManager' export * from './OptimismFeeManager' +export * from './StaticFeeManager' export enum FeeManagerType { Static = 'static', diff --git a/zp-relayer/services/gas-price/GasPrice.ts b/zp-relayer/lib/gas-price/GasPrice.ts similarity index 97% rename from zp-relayer/services/gas-price/GasPrice.ts rename to 
zp-relayer/lib/gas-price/GasPrice.ts index 39c033c3..e1488473 100644 --- a/zp-relayer/services/gas-price/GasPrice.ts +++ b/zp-relayer/lib/gas-price/GasPrice.ts @@ -1,24 +1,24 @@ -import BN from 'bn.js' -import type Web3 from 'web3' -import type { TransactionConfig } from 'web3-core' -import { AbiItem, toWei, toBN } from 'web3-utils' -import BigNumber from 'bignumber.js' +import OracleAbi from '@/abi/op-oracle.json' +import { logger } from '@/lib/appLogger' import constants from '@/utils/constants' import { contractCallRetry, setIntervalAndRun } from '@/utils/helpers' import { estimateFees } from '@mycrypto/gas-estimation' +import BigNumber from 'bignumber.js' +import BN from 'bn.js' import { GasPriceOracle } from 'gas-price-oracle' -import { logger } from '@/services/appLogger' -import OracleAbi from '@/abi/op-oracle.json' +import type Web3 from 'web3' +import type { TransactionConfig } from 'web3-core' +import { AbiItem, toBN, toWei } from 'web3-utils' import { + EIP1559GasPrice, + EstimationOptions, EstimationType, FetchFunc, - EstimationOptions, - GasPriceValue, - PolygonGSV2Response, - PolygonGSV2GasPriceKey, GasPriceKey, + GasPriceValue, LegacyGasPrice, - EIP1559GasPrice, + PolygonGSV2GasPriceKey, + PolygonGSV2Response, } from './types' const polygonGasPriceKeyMapping: Record = { @@ -200,6 +200,7 @@ export class GasPrice { [EstimationType.Oracle]: this.fetchGasPriceOracle, [EstimationType.PolygonGSV2]: this.fetchPolygonGasStationV2, [EstimationType.OptimismOracle]: this.fetchOptimismOracle, + [EstimationType.Tron]: this.fetchTron, } return funcs[estimationType] } @@ -256,6 +257,10 @@ export class GasPrice { return { gasPrice } } + private fetchTron: FetchFunc = async () => { + return { gasPrice: '0' } + } + static normalizeGasPrice(rawGasPrice: number, factor = 1) { const gasPrice = rawGasPrice * factor return toWei(gasPrice.toFixed(2).toString(), 'gwei') diff --git a/zp-relayer/services/gas-price/index.ts b/zp-relayer/lib/gas-price/index.ts similarity index 
100% rename from zp-relayer/services/gas-price/index.ts rename to zp-relayer/lib/gas-price/index.ts diff --git a/zp-relayer/services/gas-price/types.ts b/zp-relayer/lib/gas-price/types.ts similarity index 98% rename from zp-relayer/services/gas-price/types.ts rename to zp-relayer/lib/gas-price/types.ts index 09651012..3fe01155 100644 --- a/zp-relayer/services/gas-price/types.ts +++ b/zp-relayer/lib/gas-price/types.ts @@ -38,6 +38,7 @@ export enum EstimationType { Web3 = 'web3', PolygonGSV2 = 'polygon-gasstation-v2', OptimismOracle = 'optimism-gas-price-oracle', + Tron = 'tron', } export type EstimationOracleOptions = { speedType: GasPriceKey; factor: number } diff --git a/zp-relayer/services/loggerMiddleware.ts b/zp-relayer/lib/loggerMiddleware.ts similarity index 72% rename from zp-relayer/services/loggerMiddleware.ts rename to zp-relayer/lib/loggerMiddleware.ts index 213dd52f..7b14bd45 100644 --- a/zp-relayer/services/loggerMiddleware.ts +++ b/zp-relayer/lib/loggerMiddleware.ts @@ -1,6 +1,5 @@ -import { format, transports } from 'winston' import expressWinston from 'express-winston' -import config from '@/configs/relayerConfig' +import { format, transports } from 'winston' import { logger } from './appLogger' export function createPersistentLoggerMiddleware(filename: string = 'zp.log') { @@ -10,12 +9,12 @@ export function createPersistentLoggerMiddleware(filename: string = 'zp.log') { }) } -export function createConsoleLoggerMiddleware() { +export function createConsoleLoggerMiddleware(ignoredRoutes: string[] = [], headerBlacklist: string[] = []) { return expressWinston.logger({ winstonInstance: logger, level: 'debug', - ignoredRoutes: config.logIgnoreRoutes, - headerBlacklist: config.logHeaderBlacklist, + ignoredRoutes, + headerBlacklist, requestWhitelist: ['headers', 'httpVersion'], }) } diff --git a/zp-relayer/lib/network/NetworkBackend.ts b/zp-relayer/lib/network/NetworkBackend.ts new file mode 100644 index 00000000..873f6da5 --- /dev/null +++ 
b/zp-relayer/lib/network/NetworkBackend.ts @@ -0,0 +1,38 @@ +import { Network, NetworkContract } from './types' + +export function isTron(n: NetworkBackend): n is NetworkBackend { + return n.type === Network.Tron +} + +export function isEthereum(n: NetworkBackend): n is NetworkBackend { + return n.type === Network.Ethereum +} + +export interface Event { + txHash: string + values: Record + blockNumber: number +} + +export interface GetEventsConfig { + contract: NetworkContract + event: string + startBlock: number + lastBlock: number + batchSize: number +} +export interface INetworkBackend { + type: N + pool: NetworkContract + token: NetworkContract + accounting: NetworkContract + + init(): Promise + contract(abi: any[], address: string): NetworkContract + recover(msg: string, sig: string): Promise + getBlockNumber(): Promise + getTxCalldata(hash: string): Promise + getEvents(config: GetEventsConfig): AsyncGenerator<{ events: Event[]; fromBlock: number; toBlock: number }, void> +} + +export type NetworkBackend = INetworkBackend diff --git a/zp-relayer/lib/network/evm/EvmBackend.ts b/zp-relayer/lib/network/evm/EvmBackend.ts new file mode 100644 index 00000000..49079918 --- /dev/null +++ b/zp-relayer/lib/network/evm/EvmBackend.ts @@ -0,0 +1,103 @@ +import AccountingAbi from '@/abi/accounting-abi.json' +import PoolAbi from '@/abi/pool-abi.json' +import TokenAbi from '@/abi/token-abi.json' +import RedundantHttpListProvider from '@/lib/providers/RedundantHttpListProvider' +import { RETRY_CONFIG } from '@/utils/constants' +import { checkHTTPS } from '@/utils/helpers' +import { getEvents } from '@/utils/web3' +import promiseRetry from 'promise-retry' +import Web3 from 'web3' +import type { HttpProvider } from 'web3-core' +import { AbiItem } from 'web3-utils' +import HttpListProvider from '../../providers/HttpListProvider' +import { SafeEthLogsProvider } from '../../providers/SafeEthLogsProvider' +import type { GetEventsConfig, INetworkBackend } from '../NetworkBackend' 
+import { Network, NetworkBackendConfig } from '../types' +import { EthereumContract } from './EvmContract' + +export class EvmBackend implements INetworkBackend { + type: Network.Ethereum = Network.Ethereum + web3: Web3 + web3Redundant: Web3 + pool: EthereumContract + token: EthereumContract + accounting: EthereumContract + + constructor(config: NetworkBackendConfig) { + const providerOptions = { + requestTimeout: config.rpcRequestTimeout, + retry: RETRY_CONFIG, + } + config.rpcUrls.forEach(checkHTTPS(config.requireHTTPS)) + const provider = new HttpListProvider(config.rpcUrls, providerOptions, config.jsonRpcErrorCodes) + provider.startSyncStateChecker(config.rpcSyncCheckInterval) + + this.web3 = new Web3(SafeEthLogsProvider(provider as HttpProvider)) + this.web3Redundant = this.web3 + + if (config.withRedundantProvider && config.rpcUrls.length > 1) { + const redundantProvider = new RedundantHttpListProvider(config.rpcUrls, { + ...providerOptions, + name: 'redundant', + }) + this.web3Redundant = new Web3(redundantProvider) + } + + this.pool = this.contract(PoolAbi as AbiItem[], config.poolAddress) + this.token = this.contract(TokenAbi as AbiItem[], config.tokenAddress) + this.accounting = this.contract(AccountingAbi as AbiItem[], config.poolAddress) + } + + async *getEvents({ startBlock, lastBlock, event, batchSize, contract }: GetEventsConfig) { + let toBlock = startBlock + for (let fromBlock = startBlock; fromBlock <= lastBlock; fromBlock = toBlock + 1) { + toBlock = Math.min(toBlock + batchSize, lastBlock) + const res = await promiseRetry( + async retry => { + const events = await getEvents(contract.instance, event, { + fromBlock, + toBlock, + }).catch(retry) + return { + events: events.map(e => ({ + txHash: e.transactionHash, + values: e.returnValues, + blockNumber: e.blockNumber, + })), + fromBlock, + toBlock, + } + }, + { + forever: true, + factor: 2, + } + ) + yield res + } + } + + async init() { + try { + const accountingAddress = await 
this.pool.call('accounting') + this.accounting = this.contract(AccountingAbi as AbiItem[], accountingAddress) + } catch (_) {} + } + + recover(msg: string, sig: string): Promise { + return Promise.resolve(this.web3.eth.accounts.recover(msg, sig)) + } + + contract(abi: any[], address: string) { + return new EthereumContract(this.web3, abi, address) + } + + public getBlockNumber() { + return this.web3.eth.getBlockNumber() + } + + public async getTxCalldata(hash: string): Promise { + const tx = await this.web3.eth.getTransaction(hash) + return tx.input + } +} diff --git a/zp-relayer/lib/network/evm/EvmContract.ts b/zp-relayer/lib/network/evm/EvmContract.ts new file mode 100644 index 00000000..855b61c9 --- /dev/null +++ b/zp-relayer/lib/network/evm/EvmContract.ts @@ -0,0 +1,46 @@ +import { logger } from '@/lib/appLogger' +import { isContractCallError } from '@/utils/web3Errors' +import promiseRetry from 'promise-retry' +import Web3 from 'web3' +import type { Contract } from 'web3-eth-contract' +import { INetworkContract } from '../types' + +export class EthereumContract implements INetworkContract { + instance: Contract + + constructor(web3: Web3, public abi: any[], address: string) { + this.instance = new web3.eth.Contract(abi, address) + } + + address(): string { + return this.instance.options.address + } + + call(method: string, args: any[] = []): Promise { + return this.instance.methods[method](...args).call() + } + + callRetry(method: string, args: any[] = []): Promise { + return promiseRetry( + async retry => { + try { + return await this.instance.methods[method](...args).call() + } catch (e) { + if (isContractCallError(e as Error)) { + logger.warn('Retrying failed contract call', { method, args }) + retry(e) + } else { + logger.debug('Unknown contract call error', { method, args, error: e }) + throw e + } + } + }, + { + retries: 2, + minTimeout: 500, + maxTimeout: 500, + } + ) + } + +} diff --git a/zp-relayer/lib/network/evm/EvmTxManager.ts 
b/zp-relayer/lib/network/evm/EvmTxManager.ts new file mode 100644 index 00000000..74cb39c1 --- /dev/null +++ b/zp-relayer/lib/network/evm/EvmTxManager.ts @@ -0,0 +1,291 @@ +import { logger } from '@/lib/appLogger' +import { + addExtraGasPrice, + chooseGasPriceOptions, + EstimationType, + GasPrice, + GasPriceValue, + getGasPriceValue, +} from '@/lib/gas-price' +import { + SendError, + type PreparedTx, + type SendAttempt, + type SendTx, + type TransactionManager, + type TxInfo, +} from '@/lib/network/types' +import { readNonce, updateNonce } from '@/utils/redisFields' +import { getChainId } from '@/utils/web3' +import { isGasPriceError, isInsufficientBalanceError, isNonceError, isSameTransactionError } from '@/utils/web3Errors' +import { Mutex } from 'async-mutex' +import BN from 'bn.js' +import type { Redis } from 'ioredis' +import Web3 from 'web3' +import { toBN } from 'web3-utils' +import type { TransactionConfig } from 'web3-core' +import { Logger } from 'winston' +import promiseRetry from 'promise-retry' + +export interface EvmTxManagerConfig { + redis: Redis + gasPriceBumpFactor: number + gasPriceSurplus: number + gasPriceMaxFeeLimit: BN | null + waitingFundsTimeout: number +} + +type ExtraInfo = TransactionConfig + +export class EvmTxManager implements TransactionManager { + nonce!: number + chainId!: number + mutex: Mutex + logger!: Logger + address: string + + constructor( + private web3: Web3, + private pk: string, + public gasPrice: GasPrice, + private config: EvmTxManagerConfig + ) { + this.mutex = new Mutex() + this.address = new Web3().eth.accounts.privateKeyToAccount(pk).address + } + + async init() { + this.nonce = await readNonce(this.config.redis, this.web3, this.address)(true) + await updateNonce(this.config.redis, this.nonce) + this.chainId = await getChainId(this.web3) + } + + async updateAndBumpGasPrice( + txConfig: TransactionConfig, + newGasPrice: GasPriceValue + ): Promise<[GasPriceValue | null, GasPriceValue]> { + const oldGasPrice = 
getGasPriceValue(txConfig) + if (oldGasPrice) { + const oldGasPriceWithExtra = addExtraGasPrice(oldGasPrice, this.config.gasPriceBumpFactor, null) + return [oldGasPrice, chooseGasPriceOptions(oldGasPriceWithExtra, newGasPrice)] + } else { + return [null, newGasPrice] + } + } + + async prepareTx({ txDesc, options, extraData }: SendTx): Promise<[PreparedTx, SendAttempt]> { + const txConfig = { + ...txDesc, + ...extraData, + gas: extraData?.gas, + } + + const release = await this.mutex.acquire() + try { + const gasPriceValue = options.shouldUpdateGasPrice ? await this.gasPrice.fetchOnce() : this.gasPrice.getPrice() + const newGasPriceWithExtra = addExtraGasPrice( + gasPriceValue, + this.config.gasPriceSurplus, + this.config.gasPriceMaxFeeLimit + ) + + let updatedTxConfig: TransactionConfig = {} + let newGasPrice: GasPriceValue + + if (options.isResend) { + if (typeof txConfig.nonce === 'undefined') { + throw new Error('Nonce should be set for re-send') + } + const [oldGasPrice, updatedGasPrice] = await this.updateAndBumpGasPrice(txConfig, newGasPriceWithExtra) + newGasPrice = updatedGasPrice + logger.info('Updating tx gasPrice: %o -> %o', oldGasPrice, newGasPrice) + } else { + logger.info('Nonce', { nonce: this.nonce }) + newGasPrice = newGasPriceWithExtra + updatedTxConfig.nonce = this.nonce++ + updatedTxConfig.chainId = this.chainId + await updateNonce(this.config.redis, this.nonce) + } + + updatedTxConfig = { + ...updatedTxConfig, + ...txConfig, + ...newGasPrice, + } + + const { transactionHash, rawTransaction } = await this.web3.eth.accounts.signTransaction(updatedTxConfig, this.pk) + return [ + { + rawTransaction: rawTransaction as string, + }, + { + txHash: transactionHash as string, + extraData: updatedTxConfig, + }, + ] + } finally { + release() + } + } + + async resendTx(prevAttempts: SendAttempt[]) { + if (prevAttempts.length === 0) { + throw new Error('No previous attempts') + } + + const { txHash, extraData } = prevAttempts.at(-1)! 
+ logger.info('Resending tx %s ', txHash) + + const preparedTx = await this.prepareTx({ + txDesc: { + to: extraData.to as string, + value: extraData.value as number, + data: extraData.data as string, + }, + extraData, + options: { + isResend: true, + shouldUpdateGasPrice: true, + }, + }) + + try { + await new Promise((res, rej) => + // prettier-ignore + this.web3.eth.sendSignedTransaction(preparedTx[0].rawTransaction) + .once('transactionHash', () => res(preparedTx)) + .once('error', e => { + // Consider 'already known' errors as a successful send + if (isSameTransactionError(e)){ + res(preparedTx) + } else { + rej(e) + } + }) + ) + return { + attempt: preparedTx[1], + } + } catch (e) { + const err = e as Error + // jobLogger.warn('Tx resend failed', { error: err.message, txHash }) + if (isGasPriceError(err) || isSameTransactionError(err)) { + // Tx wasn't sent successfully, but still update last attempt's + // gasPrice to be accounted in the next iteration + return { + attempt: preparedTx[1], + error: SendError.GAS_PRICE_ERROR, + } + // await job.update({ + // ...job.data, + // }) + } else if (isInsufficientBalanceError(err)) { + return { + attempt: preparedTx[1], + error: SendError.INSUFFICIENT_BALANCE, + } + // We don't want to take into account last gasPrice increase + // job.data.prevAttempts.at(-1)![1] = lastGasPrice + + // const minimumBalance = toBN(txConfig.gas!).mul(toBN(getMaxRequiredGasPrice(gasPrice))) + // jobLogger.error('Insufficient balance, waiting for funds', { minimumBalance: minimumBalance.toString(10) }) + } else if (isNonceError(err)) { + logger.warn('Nonce error', { error: err.message, nonce: preparedTx[1].extraData.nonce }) + return { + attempt: preparedTx[1], + error: SendError.NONCE_ERROR, + } + } + } + + return { + attempt: preparedTx[1], + error: SendError.GAS_PRICE_ERROR, + } + } + + async confirmTx(txHashes: string[]): Promise<[TxInfo | null, boolean]> { + // Transaction was not mined + // const actualNonce = await getNonce(this.web3, 
this.address) + // logger.info('Nonce value from RPC: %d; tx nonce: %d', actualNonce, txNonce) + // if (actualNonce <= txNonce) { + // return null + // } + + let tx = null + // Iterate in reverse order to check the latest hash first + for (let i = txHashes.length - 1; i >= 0; i--) { + const txHash = txHashes[i] + logger.info('Verifying tx', { txHash }) + try { + tx = await this.web3.eth.getTransactionReceipt(txHash) + } catch (e) { + logger.warn('Cannot get tx receipt; RPC response: %s', (e as Error).message, { txHash }) + // Exception should be caught by `withLoop` to re-run job + throw e + } + if (tx && tx.blockNumber) + return [{ txHash: tx.transactionHash, success: tx.status, blockNumber: tx.blockNumber }, false] + } + return [null, false] + } + + async _sendTx(rawTransaction: string): Promise { + return new Promise((res, rej) => + // prettier-ignore + this.web3.eth.sendSignedTransaction(rawTransaction) + .once('transactionHash', () => res()) + .once('error', e => { + // Consider 'already known' errors as a successful send + if (isSameTransactionError(e)){ + res() + } else { + rej(e) + } + }) + ) + } + async sendTx({ txDesc, options, extraData }: SendTx): Promise<[PreparedTx, SendAttempt]> { + const preparedTx = await this.prepareTx({ txDesc, options, extraData }) + return this.sendPreparedTx(preparedTx) + } + + sendPreparedTx(preparedTx: [PreparedTx, SendAttempt]): Promise<[PreparedTx, SendAttempt]> { + return new Promise((res, rej) => + // prettier-ignore + this.web3.eth.sendSignedTransaction(preparedTx[0].rawTransaction) + .once('transactionHash', () => res(preparedTx)) + .once('error', e => { + // Consider 'already known' errors as a successful send + if (isSameTransactionError(e)){ + res(preparedTx) + } else { + rej(e) + } + }) + ) + } + + waitingForFunds(minimumBalance: BN, cb: (balance: BN) => void): Promise { + return promiseRetry( + async retry => { + logger.debug('Getting manager balance') + const newBalance = toBN(await 
this.web3.eth.getBalance(this.address)) + const balanceLog = { balance: newBalance.toString(10), minimumBalance: minimumBalance.toString(10) } + if (newBalance.gte(minimumBalance)) { + logger.info('Relayer has minimum necessary balance', balanceLog) + cb(newBalance) + } else { + logger.warn('Relayer balance is still less than the minimum', balanceLog) + retry(new Error('Not enough balance')) + } + }, + { + forever: true, + factor: 1, + maxTimeout: this.config.waitingFundsTimeout, + minTimeout: this.config.waitingFundsTimeout, + } + ) + } +} diff --git a/zp-relayer/lib/network/index.ts b/zp-relayer/lib/network/index.ts new file mode 100644 index 00000000..1fe5c98d --- /dev/null +++ b/zp-relayer/lib/network/index.ts @@ -0,0 +1,4 @@ +export * from './evm/EvmBackend' +export * from './NetworkBackend' +export * from './tron/TronBackend' +export * from './types' diff --git a/zp-relayer/lib/network/tron/TronBackend.ts b/zp-relayer/lib/network/tron/TronBackend.ts new file mode 100644 index 00000000..a20de4ab --- /dev/null +++ b/zp-relayer/lib/network/tron/TronBackend.ts @@ -0,0 +1,85 @@ +import AccountingAbi from '@/abi/accounting-abi.json' +import PoolAbi from '@/abi/pool-abi.json' +import TokenAbi from '@/abi/token-abi.json' +// @ts-ignore +import TronWeb from 'tronweb' +import { hexToBytes } from 'web3-utils' +import type { GetEventsConfig, INetworkBackend } from '../NetworkBackend' +import { Network, NetworkBackendConfig } from '../types' +import { TronContract } from './TronContract' + +export class TronBackend implements INetworkBackend { + type: Network.Tron = Network.Tron + tronWeb: any + pool: TronContract + token: TronContract + accounting: TronContract + + constructor(config: NetworkBackendConfig) { + this.tronWeb = new TronWeb(config.rpcUrls[0], config.rpcUrls[0], config.rpcUrls[0]) + + // TODO: Workaround for https://github.com/tronprotocol/tronweb/issues/90 + // Example: + // const pk = config.pk.slice(2) + // const callerAddress = 
this.tronWeb.address.fromPrivateKey(pk) + // this.tronWeb.setAddress(callerAddress) + + this.pool = new TronContract(this.tronWeb, PoolAbi, config.poolAddress) + this.token = new TronContract(this.tronWeb, TokenAbi, config.tokenAddress) + this.accounting = new TronContract(this.tronWeb, AccountingAbi, config.poolAddress) + } + + async *getEvents({ startBlock, event, batchSize, contract }: GetEventsConfig) { + const block = await this.tronWeb.trx.getBlockByNumber(startBlock) + const sinceTimestamp = block.block_header.raw_data.timestamp + + let fingerprint = null + do { + const events = await this.tronWeb.getEventResult(contract.address(), { + sinceTimestamp, + eventName: event, + onlyConfirmed: true, + sort: 'block_timestamp', + size: batchSize, + }) + if (events.length === 0) { + break + } + + yield events.map((e: any) => ({ + txHash: e.transaction, + values: e.result, + blockNumber: e.block, + })) + + fingerprint = events[events.length - 1].fingerprint || null + } while (fingerprint !== null) + } + + async init() { + try { + const accountingAddress = await this.pool.call('accounting') + this.accounting = this.contract(AccountingAbi, accountingAddress) + } catch (_) {} + } + + async recover(msg: string, sig: string): Promise { + const bytes = hexToBytes(msg) + const address = await this.tronWeb.trx.verifyMessageV2(bytes, sig) + return address + } + + contract(abi: any[], address: string) { + return new TronContract(this.tronWeb, abi, address) + } + + async getBlockNumber(): Promise { + const block = await this.tronWeb.trx.getCurrentBlock() + return block.block_header.raw_data.number + } + + public async getTxCalldata(hash: string): Promise { + const tx = await this.tronWeb.trx.getTransaction(hash) + return '0x' + tx.raw_data.contract[0].parameter.value.data + } +} diff --git a/zp-relayer/lib/network/tron/TronContract.ts b/zp-relayer/lib/network/tron/TronContract.ts new file mode 100644 index 00000000..b7b167e8 --- /dev/null +++ 
b/zp-relayer/lib/network/tron/TronContract.ts @@ -0,0 +1,24 @@ +// @ts-ignore +import TronWeb from 'tronweb' +import { INetworkContract } from '../types' + +export class TronContract implements INetworkContract { + instance: any + + constructor(tron: TronWeb, public abi: any[], address: string) { + this.instance = tron.contract(abi, address) + } + + address(): string { + return this.instance.address + } + + call(method: string, args: any[] = []): Promise { + return this.instance[method](...args).call() + } + + callRetry(method: string, args: any[] = []): Promise { + return this.instance[method](...args).call() + } + +} diff --git a/zp-relayer/lib/network/tron/TronTxManager.ts b/zp-relayer/lib/network/tron/TronTxManager.ts new file mode 100644 index 00000000..0dd0b344 --- /dev/null +++ b/zp-relayer/lib/network/tron/TronTxManager.ts @@ -0,0 +1,77 @@ +import { PreparedTx, SendAttempt, SendError, SendTx, TransactionManager, TxInfo } from '../types' +import BN from 'bn.js' + +interface ExtraInfo {} + +export class TronTxManager implements TransactionManager { + constructor(private tronWeb: any, private pk: string) {} + + async init() {} + + async confirmTx(txHashes: string[]): Promise<[TxInfo | null, boolean]> { + for (let i = txHashes.length - 1; i >= 0; i--) { + const txHash = txHashes[i] + const info = await this.tronWeb.trx.getTransactionInfo(txHash) + if (typeof info.blockNumber !== 'number') { + return [null, false] + } + return [ + { + blockNumber: info.blockNumber, + txHash: info.id, + success: info.receipt.result === 'SUCCESS', + }, + false, + ] + } + + return [null, false] + } + + async prepareTx({ + txDesc, + options: { maxFeeLimit, func }, + }: SendTx): Promise<[PreparedTx, SendAttempt]> { + const options = { + feeLimit: maxFeeLimit, + callValue: txDesc.value, + rawParameter: txDesc.data.slice(10), + } + const txObject = await this.tronWeb.transactionBuilder.triggerSmartContract(txDesc.to, func, options, []) + // XXX: this is not a string, but an object + 
const signedTx = await this.tronWeb.trx.sign(txObject.transaction, this.pk) + + return [ + { + rawTransaction: signedTx, + }, + { + txHash: signedTx.txID, + extraData: {}, + }, + ] + } + + async sendPreparedTx( + preparedTx: [PreparedTx, SendAttempt] + ): Promise<[PreparedTx, SendAttempt]> { + await this.tronWeb.trx.sendRawTransaction(preparedTx[0].rawTransaction) + return preparedTx + } + + async resendTx( + sendAttempts: SendAttempt[] + ): Promise<{ attempt?: SendAttempt | undefined; error?: SendError | undefined }> { + // TODO: check tx timestamp to resend + throw new Error('Method not implemented.') + } + + async sendTx(sendTx: SendTx) { + const preparedTx = await this.prepareTx(sendTx) + return this.sendPreparedTx(preparedTx) + } + + waitingForFunds(minimumBalance: BN, cb: (balance: BN) => void): Promise { + throw new Error('Method not implemented'); + } +} diff --git a/zp-relayer/lib/network/types.ts b/zp-relayer/lib/network/types.ts new file mode 100644 index 00000000..fe56b8cc --- /dev/null +++ b/zp-relayer/lib/network/types.ts @@ -0,0 +1,88 @@ +import type { TransactionConfig } from 'web3-core' +import type { EthereumContract } from './evm/EvmContract' +import type { TronContract } from './tron/TronContract' +import BN from 'bn.js' + +export enum Network { + Tron = 'tron', + Ethereum = 'ethereum', +} + +interface BaseBackendConfig { + poolAddress: string + tokenAddress: string + rpcUrls: string[] + requireHTTPS: boolean +} + +interface EvmBackendConfig extends BaseBackendConfig { + rpcRequestTimeout: number + rpcSyncCheckInterval: number + jsonRpcErrorCodes: number[] + withRedundantProvider: boolean +} + +interface TronBackendConfig extends BaseBackendConfig {} + +export type NetworkBackendConfig = N extends Network.Tron ? TronBackendConfig : EvmBackendConfig + +export type NetworkContract = N extends Network.Tron ? 
TronContract : EthereumContract + +type BaseTxDesc = Required> + +export type TxDesc = BaseTxDesc + +export type TxOptions = { + func?: string + isResend?: boolean + shouldUpdateGasPrice?: boolean + maxFeeLimit?: number +} + +export type PreparedTx = { + rawTransaction: string +} + +export interface SendTx { + txDesc: TxDesc + extraData: E | null + options: TxOptions +} + +export interface TxInfo { + blockNumber: number + txHash: string + success: boolean +} + +export interface SendAttempt { + txHash: string + extraData: E +} + +export enum SendError { + INSUFFICIENT_BALANCE = 'INSUFFICIENT_BALANCE', + NONCE_ERROR = 'NONCE_ERROR', + GAS_PRICE_ERROR = 'GAS_PRICE_ERROR', +} + +export interface TransactionManager { + init(): Promise + confirmTx(txHashes: string[]): Promise<[TxInfo | null, boolean]> + prepareTx(sendTx: SendTx): Promise<[PreparedTx, SendAttempt]> + sendTx(sendTx: SendTx): Promise<[PreparedTx, SendAttempt]> + sendPreparedTx(preparedTx: [PreparedTx, SendAttempt]): Promise<[PreparedTx, SendAttempt]> + resendTx(sendAttempts: SendAttempt[]): Promise<{ + attempt?: SendAttempt + error?: SendError + }> + waitingForFunds(minimumBalance: BN, cb: (balance: BN) => void): Promise; +} + +export interface INetworkContract { + abi: any[] + instance: any + address(): string + call(method: string, args: any[]): Promise + callRetry(method: string, args: any[]): Promise +} diff --git a/zp-relayer/services/price-feed/IPriceFeed.ts b/zp-relayer/lib/price-feed/IPriceFeed.ts similarity index 100% rename from zp-relayer/services/price-feed/IPriceFeed.ts rename to zp-relayer/lib/price-feed/IPriceFeed.ts diff --git a/zp-relayer/services/price-feed/NativePriceFeed.ts b/zp-relayer/lib/price-feed/NativePriceFeed.ts similarity index 100% rename from zp-relayer/services/price-feed/NativePriceFeed.ts rename to zp-relayer/lib/price-feed/NativePriceFeed.ts index fbff7345..f89c7b6e 100644 --- a/zp-relayer/services/price-feed/NativePriceFeed.ts +++ 
b/zp-relayer/lib/price-feed/NativePriceFeed.ts @@ -1,6 +1,6 @@ import type BN from 'bn.js' -import type { IPriceFeed } from './IPriceFeed' import { toBN } from 'web3-utils' +import type { IPriceFeed } from './IPriceFeed' export class NativePriceFeed implements IPriceFeed { constructor() {} diff --git a/zp-relayer/services/price-feed/OneInchPriceFeed.ts b/zp-relayer/lib/price-feed/OneInchPriceFeed.ts similarity index 69% rename from zp-relayer/services/price-feed/OneInchPriceFeed.ts rename to zp-relayer/lib/price-feed/OneInchPriceFeed.ts index a709c9b9..8512bb38 100644 --- a/zp-relayer/services/price-feed/OneInchPriceFeed.ts +++ b/zp-relayer/lib/price-feed/OneInchPriceFeed.ts @@ -1,21 +1,21 @@ -import type Web3 from 'web3' -import type BN from 'bn.js' -import type { Contract } from 'web3-eth-contract' -import type { IPriceFeed } from './IPriceFeed' -import { toBN, toWei, AbiItem } from 'web3-utils' -import { ZERO_ADDRESS } from '@/utils/constants' import Erc20Abi from '@/abi/erc20.json' import OracleAbi from '@/abi/one-inch-oracle.json' +import { ZERO_ADDRESS } from '@/utils/constants' +import type BN from 'bn.js' +import { toBN, toWei } from 'web3-utils' +import { NetworkBackend } from '../network/NetworkBackend' +import { Network, NetworkContract } from '../network/types' +import type { IPriceFeed } from './IPriceFeed' // 1Inch price feed oracle: https://github.com/1inch/spot-price-aggregator export class OneInchPriceFeed implements IPriceFeed { - private contract: Contract + private contract: NetworkContract private baseTokenAddress: string private baseTokenDecimals!: BN private poolTokenAddress: string constructor( - private web3: Web3, + private network: NetworkBackend, contractAddress: string, config: { poolTokenAddress: string @@ -24,7 +24,7 @@ export class OneInchPriceFeed implements IPriceFeed { ) { this.poolTokenAddress = config.poolTokenAddress this.baseTokenAddress = config.customBaseTokenAddress || ZERO_ADDRESS - this.contract = new 
web3.eth.Contract(OracleAbi as AbiItem[], contractAddress) + this.contract = network.contract(OracleAbi, contractAddress) } async init() { @@ -36,13 +36,13 @@ export class OneInchPriceFeed implements IPriceFeed { } private async getContractDecimals(contractAddress: string): Promise { - const contract = new this.web3.eth.Contract(Erc20Abi as AbiItem[], contractAddress) - const decimals = await contract.methods.decimals().call() + const contract = this.network.contract(Erc20Abi, contractAddress) + const decimals = await contract.call('decimals') return toBN(10).pow(toBN(decimals)) } getRate(): Promise { - return this.contract.methods.getRate(this.baseTokenAddress, this.poolTokenAddress, true).call().then(toBN) + return this.contract.call('getRate', [this.baseTokenAddress, this.poolTokenAddress, true]).then(toBN) } convert(rate: BN, baseTokenAmount: BN): BN { diff --git a/zp-relayer/services/price-feed/index.ts b/zp-relayer/lib/price-feed/index.ts similarity index 100% rename from zp-relayer/services/price-feed/index.ts rename to zp-relayer/lib/price-feed/index.ts index 954281e5..2be3a728 100644 --- a/zp-relayer/services/price-feed/index.ts +++ b/zp-relayer/lib/price-feed/index.ts @@ -1,6 +1,6 @@ export * from './IPriceFeed' -export * from './OneInchPriceFeed' export * from './NativePriceFeed' +export * from './OneInchPriceFeed' export enum PriceFeedType { OneInch = '1inch', diff --git a/zp-relayer/services/providers/BaseHttpProvider.ts b/zp-relayer/lib/providers/BaseHttpProvider.ts similarity index 100% rename from zp-relayer/services/providers/BaseHttpProvider.ts rename to zp-relayer/lib/providers/BaseHttpProvider.ts index 1617c5a0..fdb97478 100644 --- a/zp-relayer/services/providers/BaseHttpProvider.ts +++ b/zp-relayer/lib/providers/BaseHttpProvider.ts @@ -1,5 +1,5 @@ -import { HttpProvider } from 'web3-core' import type { OperationOptions } from 'retry' +import { HttpProvider } from 'web3-core' export interface ProviderOptions { name: string diff --git 
a/zp-relayer/services/providers/HttpListProvider.ts b/zp-relayer/lib/providers/HttpListProvider.ts similarity index 100% rename from zp-relayer/services/providers/HttpListProvider.ts rename to zp-relayer/lib/providers/HttpListProvider.ts index 17955a1c..39f3f54b 100644 --- a/zp-relayer/services/providers/HttpListProvider.ts +++ b/zp-relayer/lib/providers/HttpListProvider.ts @@ -1,8 +1,8 @@ // Reference implementation: // https://github.com/omni/tokenbridge/blob/master/oracle/src/services/HttpListProvider.js -import { hexToNumber } from 'web3-utils' -import promiseRetry from 'promise-retry' import { FALLBACK_RPC_URL_SWITCH_TIMEOUT } from '@/utils/constants' +import promiseRetry from 'promise-retry' +import { hexToNumber } from 'web3-utils' import { logger } from '../appLogger' import BaseHttpProvider, { ProviderOptions } from './BaseHttpProvider' diff --git a/zp-relayer/services/providers/RedundantHttpListProvider.ts b/zp-relayer/lib/providers/RedundantHttpListProvider.ts similarity index 100% rename from zp-relayer/services/providers/RedundantHttpListProvider.ts rename to zp-relayer/lib/providers/RedundantHttpListProvider.ts index 0304f834..bd0f8911 100644 --- a/zp-relayer/services/providers/RedundantHttpListProvider.ts +++ b/zp-relayer/lib/providers/RedundantHttpListProvider.ts @@ -1,8 +1,8 @@ // Reference implementation: // https://github.com/omni/tokenbridge/blob/master/oracle/src/services/RedundantHttpListProvider.js import promiseRetry from 'promise-retry' -import { HttpListProviderError } from './HttpListProvider' import BaseHttpProvider, { ProviderOptions } from './BaseHttpProvider' +import { HttpListProviderError } from './HttpListProvider' export default class RedundantHttpListProvider extends BaseHttpProvider { urls: string[] diff --git a/zp-relayer/services/providers/SafeEthLogsProvider.ts b/zp-relayer/lib/providers/SafeEthLogsProvider.ts similarity index 100% rename from zp-relayer/services/providers/SafeEthLogsProvider.ts rename to 
zp-relayer/lib/providers/SafeEthLogsProvider.ts diff --git a/zp-relayer/lib/redisClient.ts b/zp-relayer/lib/redisClient.ts new file mode 100644 index 00000000..30e646fa --- /dev/null +++ b/zp-relayer/lib/redisClient.ts @@ -0,0 +1,5 @@ +import Redis from 'ioredis' + +export const redis = new Redis(process.env.COMMON_REDIS_URL as string, { + maxRetriesPerRequest: null, +}) diff --git a/zp-relayer/package.json b/zp-relayer/package.json index bdd39a2b..d6cbe259 100644 --- a/zp-relayer/package.json +++ b/zp-relayer/package.json @@ -6,10 +6,16 @@ "scripts": { "initialize": "yarn install --frozen-lockfile", "build": "tsc --project ./ && tsc-alias", - "start:dev": "DOTENV_CONFIG_PATH=relayer.env ts-node -r dotenv/config index.ts", - "start:prod": "node index.js", - "start:direct-deposit-watcher:dev": "DOTENV_CONFIG_PATH=watcher.env ts-node -r dotenv/config direct-deposit/watcher.ts", - "start:direct-deposit-watcher:prod": "node direct-deposit/watcher.js", + "start:dev": "DOTENV_CONFIG_PATH=relayer-decentralized.env ts-node -r dotenv/config services/relayer/index.ts", + "start:prod": "node services/relayer/index.js", + "start:mpc:guard:dev": "DOTENV_CONFIG_PATH=guard.env ts-node -r dotenv/config services/guard/guard.ts", + "start:mpc:guard:prod": "node services/guard/guard.js", + "start:direct-deposit-watcher:dev": "DOTENV_CONFIG_PATH=watcher.env ts-node -r dotenv/config services/direct-deposit/watcher.ts", + "start:direct-deposit-watcher:prod": "node services/direct-deposit/watcher.js", + "start:commitment-watcher:dev": "DOTENV_CONFIG_PATH=commitment-watcher.env ts-node -r dotenv/config services/commitment-watcher/index.ts", + "start:commitment-watcher:prod": "node services/commitment-watcher/index.js", + "start:indexer:dev": "DOTENV_CONFIG_PATH=indexer.env ts-node -r dotenv/config services/indexer/index.ts", + "start:indexer:prod": "node services/indexer/index.js", "deploy:local": "ts-node test/deploy.ts", "deploy:local:rm": "rm -rf test/STATE_DIR && ts-node 
test/clear.ts", "test:unit": "ts-mocha -r dotenv/config --paths --timeout 1000000 test/unit-tests/*.test.ts", @@ -31,8 +37,10 @@ "libzkbob-rs-node": "1.1.0", "promise-retry": "^2.0.1", "semver": "7.3.8", + "tronweb": "^5.3.0", "web3": "1.7.4", "winston": "3.3.3", + "zod": "^3.21.4", "zp-memo-parser": "link:../zp-memo-parser" }, "devDependencies": { diff --git a/zp-relayer/pool.ts b/zp-relayer/pool.ts deleted file mode 100644 index 314d17de..00000000 --- a/zp-relayer/pool.ts +++ /dev/null @@ -1,329 +0,0 @@ -import BN from 'bn.js' -import PoolAbi from './abi/pool-abi.json' -import TokenAbi from './abi/token-abi.json' -import { AbiItem, toBN } from 'web3-utils' -import type { Contract } from 'web3-eth-contract' -import config from './configs/relayerConfig' -import { web3 } from './services/web3' -import { logger } from './services/appLogger' -import { redis } from './services/redisClient' -import { poolTxQueue, WorkerTxType, WorkerTxTypePriority } from './queue/poolTxQueue' -import { getBlockNumber, getEvents, getTransaction } from './utils/web3' -import { Helpers, Proof, SnarkProof, VK } from 'libzkbob-rs-node' -import { PoolState } from './state/PoolState' - -import type { TxType } from 'zp-memo-parser' -import { contractCallRetry, numToHex, toTxType, truncateHexPrefix, truncateMemoTxPrefix } from './utils/helpers' -import { PoolCalldataParser } from './utils/PoolCalldataParser' -import { OUTPLUSONE, PERMIT2_CONTRACT } from './utils/constants' -import { Permit2Recover, SaltedPermitRecover, TransferWithAuthorizationRecover } from './utils/permit' -import { PermitRecover, PermitType } from './utils/permit/types' - -export interface PoolTx { - proof: Proof - memo: string - txType: TxType - depositSignature: string | null -} - -export interface Limits { - tvlCap: BN - tvl: BN - dailyDepositCap: BN - dailyDepositCapUsage: BN - dailyWithdrawalCap: BN - dailyWithdrawalCapUsage: BN - dailyUserDepositCap: BN - dailyUserDepositCapUsage: BN - depositCap: BN - tier: BN - 
dailyUserDirectDepositCap: BN - dailyUserDirectDepositCapUsage: BN - directDepositCap: BN -} - -export interface LimitsFetch { - deposit: { - singleOperation: string - dailyForAddress: { - total: string - available: string - } - dailyForAll: { - total: string - available: string - } - poolLimit: { - total: string - available: string - } - } - withdraw: { - dailyForAll: { - total: string - available: string - } - } - dd: { - singleOperation: string - dailyForAddress: { - total: string - available: string - } - } - tier: string -} - -class Pool { - public PoolInstance: Contract - public TokenInstance: Contract - private txVK: VK - public state: PoolState - public optimisticState: PoolState - public denominator: BN = toBN(1) - public poolId: BN = toBN(0) - public isInitialized = false - public permitRecover!: PermitRecover - - constructor() { - this.PoolInstance = new web3.eth.Contract(PoolAbi as AbiItem[], config.poolAddress) - this.TokenInstance = new web3.eth.Contract(TokenAbi as AbiItem[], config.tokenAddress) - - const txVK = require(config.txVKPath) - this.txVK = txVK - - this.state = new PoolState('pool', redis, config.stateDirPath) - this.optimisticState = new PoolState('optimistic', redis, config.stateDirPath) - } - - loadState(states: { poolState: PoolState; optimisticState: PoolState }) { - this.state = states.poolState - this.optimisticState = states.optimisticState - } - - async init() { - if (this.isInitialized) return - - this.denominator = toBN(await this.PoolInstance.methods.denominator().call()) - this.poolId = toBN(await this.PoolInstance.methods.pool_id().call()) - - if (config.permitType === PermitType.SaltedPermit) { - this.permitRecover = new SaltedPermitRecover(web3, config.tokenAddress) - } else if (config.permitType === PermitType.Permit2) { - this.permitRecover = new Permit2Recover(web3, PERMIT2_CONTRACT) - } else if (config.permitType === PermitType.TransferWithAuthorization) { - this.permitRecover = new 
TransferWithAuthorizationRecover(web3, config.tokenAddress) - } else { - throw new Error("Cannot infer pool's permit standard") - } - await this.permitRecover.initializeDomain() - - await this.syncState(config.startBlock) - this.isInitialized = true - } - - async transact(txs: PoolTx[], traceId?: string) { - const queueTxs = txs.map(({ proof, txType, memo, depositSignature }) => { - return { - amount: '0', - gas: config.relayerGasLimit.toString(), - txProof: proof, - txType, - rawMemo: memo, - depositSignature, - } - }) - const job = await poolTxQueue.add( - 'tx', - { type: WorkerTxType.Normal, transactions: queueTxs, traceId }, - { - priority: WorkerTxTypePriority[WorkerTxType.Normal], - } - ) - logger.debug(`Added poolTxWorker job: ${job.id}`) - return job.id - } - - async getLastBlockToProcess() { - const lastBlockNumber = await getBlockNumber(web3) - return lastBlockNumber - } - - async syncState(startBlock: number) { - logger.debug('Syncing state; starting from block %d', startBlock) - - const localIndex = this.state.getNextIndex() - const localRoot = this.state.getMerkleRoot() - - const contractIndex = await this.getContractIndex() - const contractRoot = await this.getContractMerkleRoot(contractIndex) - - logger.debug(`LOCAL ROOT: ${localRoot}; LOCAL INDEX: ${localIndex}`) - logger.debug(`CONTRACT ROOT: ${contractRoot}; CONTRACT INDEX: ${contractIndex}`) - - if (contractRoot === localRoot && contractIndex === localIndex) { - logger.info('State is ok, no need to resync') - return - } - - const numTxs = Math.floor((contractIndex - localIndex) / OUTPLUSONE) - const missedIndices = Array(numTxs) - for (let i = 0; i < numTxs; i++) { - missedIndices[i] = localIndex + (i + 1) * OUTPLUSONE - } - - const transactSelector = '0xaf989083' - const directDepositSelector = '0x1dc4cb33' - - const lastBlockNumber = (await this.getLastBlockToProcess()) + 1 - let toBlock = startBlock - for (let fromBlock = startBlock; toBlock < lastBlockNumber; fromBlock = toBlock) { - toBlock 
= Math.min(toBlock + config.eventsProcessingBatchSize, lastBlockNumber) - const events = await getEvents(this.PoolInstance, 'Message', { - fromBlock, - toBlock: toBlock - 1, - filter: { - index: missedIndices, - }, - }) - - for (let i = 0; i < events.length; i++) { - const { returnValues, transactionHash } = events[i] - const { input } = await getTransaction(web3, transactionHash) - - const newPoolIndex = Number(returnValues.index) - const prevPoolIndex = newPoolIndex - OUTPLUSONE - const prevCommitIndex = Math.floor(Number(prevPoolIndex) / OUTPLUSONE) - - let outCommit: string - let memo: string - - if (input.startsWith(directDepositSelector)) { - // Direct deposit case - const res = web3.eth.abi.decodeParameters( - [ - 'uint256', // Root after - 'uint256[]', // Indices - 'uint256', // Out commit - 'uint256[8]', // Deposit proof - 'uint256[8]', // Tree proof - ], - input.slice(10) // Cut off selector - ) - outCommit = res[2] - memo = truncateHexPrefix(returnValues.message || '') - } else if (input.startsWith(transactSelector)) { - // Normal tx case - const calldata = Buffer.from(truncateHexPrefix(input), 'hex') - - const parser = new PoolCalldataParser(calldata) - - const outCommitRaw = parser.getField('outCommit') - outCommit = web3.utils.hexToNumberString(outCommitRaw) - - const txTypeRaw = parser.getField('txType') - const txType = toTxType(txTypeRaw) - - const memoSize = web3.utils.hexToNumber(parser.getField('memoSize')) - const memoRaw = truncateHexPrefix(parser.getField('memo', memoSize)) - - memo = truncateMemoTxPrefix(memoRaw, txType) - - // Save nullifier in confirmed state - const nullifier = parser.getField('nullifier') - await this.state.nullifiers.add([web3.utils.hexToNumberString(nullifier)]) - } else { - throw new Error(`Unknown transaction type: ${input}`) - } - - const commitAndMemo = numToHex(toBN(outCommit)).concat(transactionHash.slice(2)).concat(memo) - for (let state of [this.state, this.optimisticState]) { - 
state.addCommitment(prevCommitIndex, Helpers.strToNum(outCommit)) - state.addTx(prevPoolIndex, Buffer.from(commitAndMemo, 'hex')) - } - } - } - - const newLocalRoot = this.state.getMerkleRoot() - logger.debug(`LOCAL ROOT AFTER UPDATE ${newLocalRoot}`) - if (newLocalRoot !== contractRoot) { - logger.error('State is corrupted, roots mismatch') - } - } - - verifyProof(proof: SnarkProof, inputs: Array) { - return Proof.verify(this.txVK, proof, inputs) - } - - async getContractIndex() { - const poolIndex = await contractCallRetry(this.PoolInstance, 'pool_index') - return Number(poolIndex) - } - - async getContractMerkleRoot(index: string | number | undefined): Promise { - if (!index) { - index = await this.getContractIndex() - logger.info('CONTRACT INDEX %d', index) - } - const root = await contractCallRetry(this.PoolInstance, 'roots', [index]) - return root.toString() - } - - async getLimitsFor(address: string): Promise { - const limits = await contractCallRetry(this.PoolInstance, 'getLimitsFor', [address]) - return { - tvlCap: toBN(limits.tvlCap), - tvl: toBN(limits.tvl), - dailyDepositCap: toBN(limits.dailyDepositCap), - dailyDepositCapUsage: toBN(limits.dailyDepositCapUsage), - dailyWithdrawalCap: toBN(limits.dailyWithdrawalCap), - dailyWithdrawalCapUsage: toBN(limits.dailyWithdrawalCapUsage), - dailyUserDepositCap: toBN(limits.dailyUserDepositCap), - dailyUserDepositCapUsage: toBN(limits.dailyUserDepositCapUsage), - depositCap: toBN(limits.depositCap), - tier: toBN(limits.tier), - dailyUserDirectDepositCap: toBN(limits.dailyUserDirectDepositCap), - dailyUserDirectDepositCapUsage: toBN(limits.dailyUserDirectDepositCapUsage), - directDepositCap: toBN(limits.directDepositCap), - } - } - - processLimits(limits: Limits): LimitsFetch { - const limitsFetch = { - deposit: { - singleOperation: limits.depositCap.toString(10), - dailyForAddress: { - total: limits.dailyUserDepositCap.toString(10), - available: 
limits.dailyUserDepositCap.sub(limits.dailyUserDepositCapUsage).toString(10), - }, - dailyForAll: { - total: limits.dailyDepositCap.toString(10), - available: limits.dailyDepositCap.sub(limits.dailyDepositCapUsage).toString(10), - }, - poolLimit: { - total: limits.tvlCap.toString(10), - available: limits.tvlCap.sub(limits.tvl).toString(10), - }, - }, - withdraw: { - dailyForAll: { - total: limits.dailyWithdrawalCap.toString(10), - available: limits.dailyWithdrawalCap.sub(limits.dailyWithdrawalCapUsage).toString(10), - }, - }, - dd: { - singleOperation: limits.directDepositCap.toString(10), - dailyForAddress: { - total: limits.dailyUserDirectDepositCap.toString(10), - available: limits.dailyUserDirectDepositCap.sub(limits.dailyUserDirectDepositCapUsage).toString(10), - }, - }, - tier: limits.tier.toString(10), - } - return limitsFetch - } -} - -export let pool: Pool = new Pool() - -export type { Pool } diff --git a/zp-relayer/pool/BasePool.ts b/zp-relayer/pool/BasePool.ts new file mode 100644 index 00000000..ece62d18 --- /dev/null +++ b/zp-relayer/pool/BasePool.ts @@ -0,0 +1,469 @@ +import { logger } from '@/lib/appLogger' +import { NetworkBackend } from '@/lib/network/NetworkBackend' +import { Network } from '@/lib/network/types' +import { redis } from '@/lib/redisClient' +import { BasePoolTx, JobState, PoolTx, poolTxQueue, WorkerTxType, WorkerTxTypePriority } from '@/queue/poolTxQueue' +import { PoolState } from '@/state/PoolState' +import { OUTPLUSONE } from '@/utils/constants' +import { + buildPrefixedMemo, + fetchJson, + toTxType, + truncateHexPrefix, + truncateMemoTxPrefix, + truncateMemoTxPrefixProverV2, +} from '@/utils/helpers' +import { PoolCalldataParser, PoolCalldataV2Parser } from '@/utils/PoolCalldataParser' +import { getBlockNumber } from '@/utils/web3' +import BN from 'bn.js' +import { Helpers, Proof, SnarkProof, VK } from 'libzkbob-rs-node' +import AbiCoder from 'web3-eth-abi' +import { hexToNumber, hexToNumberString, toBN } from 'web3-utils' 
+import type { BasePoolConfig, Limits, LimitsFetch, OptionalChecks, ProcessResult } from './types' + +export abstract class BasePool { + public txVK: VK + public state: PoolState + public optimisticState: PoolState + public denominator: BN = toBN(1) + public poolId: BN = toBN(0) + public isInitialized = false + + protected poolName(): string { return 'base-pool'; } + + constructor(public network: NetworkBackend, private config: BasePoolConfig) { + this.txVK = require(config.txVkPath) + + this.state = new PoolState('pool', redis, config.statePath) + this.optimisticState = new PoolState('optimistic', redis, config.statePath) + } + + loadState(states: { poolState: PoolState; optimisticState: PoolState }) { + this.state = states.poolState + this.optimisticState = states.optimisticState + } + + abstract init(...args: any): Promise + + abstract onSend(p: ProcessResult, txHash: string): Promise + abstract onConfirmed(p: ProcessResult, txHash: string, callback?: () => Promise, jobId?: string): Promise + + async onFailed(txHash: string, jobId: string): Promise { + logger.error('Transaction reverted', { txHash }) + + await this.clearOptimisticState() + } + + validateTx(tx: PoolTx, optionalChecks: OptionalChecks, traceId?: string): Promise { + switch (tx.type) { + case WorkerTxType.Normal: + return this.validateNormalTx(tx as PoolTx, optionalChecks, traceId) + case WorkerTxType.DirectDeposit: + return this.validateDirectDepositTx(tx as PoolTx, optionalChecks, traceId) + case WorkerTxType.Finalize: + return this.validateFinalizeTx(tx as PoolTx, optionalChecks, traceId) + default: + throw new Error(`Unknown tx type: ${tx.type}`) + } + } + validateDirectDepositTx( + _tx: PoolTx, + _optionalChecks: OptionalChecks, + _traceId: string | undefined + ): Promise { + throw new Error('Method not implemented.') + } + validateFinalizeTx( + _tx: PoolTx, + _optionalChecks: OptionalChecks, + _traceId: string | undefined + ): Promise { + throw new Error('Method not implemented.') + } + 
validateNormalTx( + _tx: PoolTx, + _optionalChecks: OptionalChecks, + _traceId?: string + ): Promise { + throw new Error('Method not implemented.') + } + + buildTx(tx: PoolTx): Promise> { + switch (tx.type) { + case WorkerTxType.Normal: + return this.buildNormalTx(tx as PoolTx) + case WorkerTxType.DirectDeposit: + return this.buildDirectDepositTx(tx as PoolTx) + case WorkerTxType.Finalize: + return this.buildFinalizeTx(tx as PoolTx) + default: + throw new Error(`Unknown tx type: ${tx.type}`) + } + } + buildNormalTx(tx: PoolTx): Promise> { + throw new Error('Method not implemented.') + } + buildDirectDepositTx(tx: PoolTx): Promise> { + throw new Error('Method not implemented.') + } + buildFinalizeTx(tx: PoolTx): Promise> { + throw new Error('Method not implemented.') + } + + async transact(tx: BasePoolTx, traceId?: string) { + const queueTx = { + ...tx, + txHash: null, + sentJobId: null, + state: JobState.WAITING, + } + + const job = await poolTxQueue.add( + 'tx', + { type: WorkerTxType.Normal, transaction: queueTx, traceId }, + { + priority: WorkerTxTypePriority[WorkerTxType.Normal], + } + ) + logger.debug(`Added poolTxWorker job: ${job.id}`) + return job.id + } + + async clearOptimisticState() { + logger.info('Rollback optimistic state...') + this.optimisticState.rollbackTo(this.state) + logger.info('Clearing optimistic nullifiers...') + await this.optimisticState.nullifiers.clear() + + const root1 = this.state.getMerkleRoot() + const root2 = this.optimisticState.getMerkleRoot() + logger.info(`Assert roots are equal: ${root1}, ${root2}, ${root1 === root2}`) + } + + async getLastBlockToProcess() { + const lastBlockNumber = await getBlockNumber(this.network) + return lastBlockNumber + } + + async syncState(startBlock?: number, lastBlock?: number, indexerUrl?: string) { + logger.debug('Syncing state; starting from block %d', startBlock) + + let localIndex = this.state.getNextIndex() + let localRoot = this.state.getMerkleRoot() + + let contractIndex = await 
this.getContractIndex() + let contractRoot = await this.getContractMerkleRoot(contractIndex) + + logger.debug('State info', { + localRoot, + localIndex, + contractRoot, + contractIndex, + }) + + if (contractRoot === localRoot && contractIndex === localIndex) { + logger.info('State is ok, no need to resync') + return + } + + while (localIndex < contractIndex) { + if (indexerUrl) { + await this.syncStateFromIndexer(indexerUrl) + } else if (startBlock && lastBlock) { + const savedBlockNumberOfLastConfirmedTx = await this.getLastConfirmedTxBlock(); + const actualStartBlock = Math.max(startBlock, savedBlockNumberOfLastConfirmedTx); + + logger.debug('Syncing from contract; starting from block %d', actualStartBlock) + + await this.syncStateFromContract(actualStartBlock, lastBlock, contractIndex, localIndex); + } else { + throw new Error('Either (startBlock, lastBlock) or indexerUrl should be provided for sync') + } + + localIndex = this.state.getNextIndex() + localRoot = this.state.getMerkleRoot() + logger.debug('Local state after update', { + localRoot, + localIndex, + }) + } + + if (localRoot !== contractRoot) { + await this.state.wipe(); + await this.optimisticState.wipe(); + await this.setLastConfirmedTxBlockForced(0); + + throw new Error('State is corrupted, roots mismatch. 
State was wiped') + } + } + + async syncStateFromIndexer(indexerUrl: string) { + let txs = [] + let offset = this.state.getNextIndex() + let commitIndex = offset / OUTPLUSONE + do { + txs = await this.fetchTransactionsFromIndexer(indexerUrl, offset, 200) + for (const tx of txs) { + const outCommit = hexToNumberString('0x' + tx.commitment) + this.optimisticState.addCommitment(commitIndex, Helpers.strToNum(outCommit)) + if (tx.isMined) { + this.state.addCommitment(commitIndex, Helpers.strToNum(outCommit)) + } + commitIndex++ + } + offset = this.optimisticState.getNextIndex() + } while (txs.length !== 0) + } + + async fetchTransactionsFromIndexer(indexerUrl: string, offset: number, limit: number) { + const txs: string[] = await fetchJson(indexerUrl, '/transactions/v2', [ + ['offset', offset.toString()], + ['limit', limit.toString()], + ]) + + return txs.map((tx, txIdx) => { + // mined flag + txHash(32 bytes) + commitment(32 bytes) + memo + return { + isMined: tx.slice(0, 1) === '1', + txHash: '0x' + tx.slice(1, 65), + commitment: tx.slice(65, 129), + index: offset + txIdx * OUTPLUSONE, + memo: tx.slice(129), + } + }) + } + + async syncStateFromContract(startBlock: number, lastBlock: number, contractIndex: number, localIndex: number) { + const numTxs = Math.floor((contractIndex - localIndex) / OUTPLUSONE) + if (numTxs < 0) { + // TODO: rollback state + throw new Error('State is corrupted, contract index is less than local index') + } + + for await (const batch of this.network.getEvents({ + contract: this.network.pool, + startBlock, + lastBlock, + event: 'Message', + batchSize: this.config.eventsBatchSize, + })) { + for (const e of batch.events) { + // Filter pending txs in case of decentralized relay pool + const state = toBN(e.values.index).lte(toBN(contractIndex)) ? 
'all' : 'optimistic' + await this.addTxToState(e.txHash, e.values.index, e.values.message, state, e.blockNumber) + } + } + } + + async addTxToState(txHash: string, newPoolIndex: number, message: string, state: 'optimistic' | 'confirmed' | 'all', blockNumber: number) { + const transactSelector = '0xaf989083' + const transactV2Selector = '0x5fd28f8c' + + const directDepositOldSelector = '0x1dc4cb33' + const directDepositSelector = '0xe6b14272' + + const input = await this.network.getTxCalldata(txHash) + + const prevPoolIndex = newPoolIndex - OUTPLUSONE + const prevCommitIndex = Math.floor(Number(prevPoolIndex) / OUTPLUSONE) + + let outCommit: string + let memo: string + + if (input.startsWith(directDepositSelector)) { + // Direct deposit case + const res = AbiCoder.decodeParameters( + [ + 'uint256[]', // Indices + 'uint256', // Out commit + 'uint256[8]', // Deposit proof + 'address', // Prover + ], + input.slice(10) // Cut off selector + ) + outCommit = res[1] + memo = truncateHexPrefix(message || '') + } else if (input.startsWith(directDepositOldSelector)) { + // Old direct deposit case + const res = AbiCoder.decodeParameters( + [ + 'uint256', // Root after + 'uint256[]', // Indices + 'uint256', // Out commit + 'uint256[8]', // Deposit proof + 'uint256[8]', // Tree proof + ], + input.slice(10) // Cut off selector + ) + outCommit = res[2] + memo = truncateHexPrefix(message || '') + } else if (input.startsWith(transactSelector)) { + // Normal tx case + const calldata = Buffer.from(truncateHexPrefix(input), 'hex') + + const parser = new PoolCalldataParser(calldata) + + outCommit = hexToNumberString(parser.getField('outCommit')) + + const txType = toTxType(parser.getField('txType')) + + const memoSize = hexToNumber(parser.getField('memoSize')) + const memoRaw = truncateHexPrefix(parser.getField('memo', memoSize)) + + memo = truncateMemoTxPrefix(memoRaw, txType) + + // Save nullifier and tx's block number in confirmed state + if (state !== 'optimistic') { + const 
nullifier = parser.getField('nullifier') + await this.state.nullifiers.add([hexToNumberString(nullifier)]) + + this.setLastConfirmedTxBlock(blockNumber); + } + } else if (input.startsWith(transactV2Selector)) { + const calldata = Buffer.from(truncateHexPrefix(input), 'hex') + + const parser = new PoolCalldataV2Parser(calldata) + + outCommit = hexToNumberString(parser.getField('outCommit')) + + const txType = toTxType(parser.getField('txType')) + + const memoSize = hexToNumber(parser.getField('memoSize')) + const memoRaw = truncateHexPrefix(parser.getField('memo', memoSize)) + + memo = truncateMemoTxPrefixProverV2(memoRaw, txType) + + // Save nullifier and tx's block number in confirmed state + if (state !== 'optimistic') { + const nullifier = parser.getField('nullifier') + await this.state.nullifiers.add([hexToNumberString(nullifier)]) + + this.setLastConfirmedTxBlock(blockNumber); + } + } else { + throw new Error(`Unknown transaction type: ${input}`) + } + + const states = state === 'optimistic' ? 
[this.optimisticState] : [this.state, this.optimisticState] + const prefixedMemo = buildPrefixedMemo(outCommit, txHash, memo) + for (let state of states) { + state.updateState(prevCommitIndex, outCommit, prefixedMemo) + } + } + + propagateOptimisticState(index: number, blockNumber: number) { + index = Math.floor(index / OUTPLUSONE) + const opIndex = Math.floor(this.optimisticState.getNextIndex() / OUTPLUSONE) + const stateIndex = Math.floor(this.state.getNextIndex() / OUTPLUSONE) + if (index > opIndex) { + throw new Error('Index is greater than optimistic state index') + } + + for (let i = stateIndex; i < index; i++) { + const tx = this.optimisticState.getDbTx(i * OUTPLUSONE) + if (!tx) { + throw new Error(`Tx not found, index: ${i}`) + } + const outCommit = hexToNumberString('0x' + tx.slice(0, 64)) + this.state.updateState(i, outCommit, tx) + } + + this.setLastConfirmedTxBlock(blockNumber); + } + + verifyProof(proof: SnarkProof, inputs: Array) { + return Proof.verify(this.txVK, proof, inputs) + } + + async getContractIndex() { + const poolIndex = await this.network.pool.callRetry('pool_index') + return Number(poolIndex) + } + + async getContractMerkleRoot(index: string | number | undefined): Promise { + if (!index) { + index = await this.getContractIndex() + logger.info('CONTRACT INDEX %d', index) + } + const root = await this.network.pool.callRetry('roots', [index]) + return root.toString() + } + + async getLimitsFor(address: string): Promise { + const limits = await this.network.accounting.callRetry('getLimitsFor', [address]) + return { + tvlCap: toBN(limits.tvlCap), + tvl: toBN(limits.tvl), + dailyDepositCap: toBN(limits.dailyDepositCap), + dailyDepositCapUsage: toBN(limits.dailyDepositCapUsage), + dailyWithdrawalCap: toBN(limits.dailyWithdrawalCap), + dailyWithdrawalCapUsage: toBN(limits.dailyWithdrawalCapUsage), + dailyUserDepositCap: toBN(limits.dailyUserDepositCap), + dailyUserDepositCapUsage: toBN(limits.dailyUserDepositCapUsage), + depositCap: 
toBN(limits.depositCap), + tier: toBN(limits.tier), + dailyUserDirectDepositCap: toBN(limits.dailyUserDirectDepositCap), + dailyUserDirectDepositCapUsage: toBN(limits.dailyUserDirectDepositCapUsage), + directDepositCap: toBN(limits.directDepositCap), + } + } + + processLimits(limits: Limits): LimitsFetch { + const limitsFetch = { + deposit: { + singleOperation: limits.depositCap.toString(10), + dailyForAddress: { + total: limits.dailyUserDepositCap.toString(10), + available: limits.dailyUserDepositCap.sub(limits.dailyUserDepositCapUsage).toString(10), + }, + dailyForAll: { + total: limits.dailyDepositCap.toString(10), + available: limits.dailyDepositCap.sub(limits.dailyDepositCapUsage).toString(10), + }, + poolLimit: { + total: limits.tvlCap.toString(10), + available: limits.tvlCap.sub(limits.tvl).toString(10), + }, + }, + withdraw: { + dailyForAll: { + total: limits.dailyWithdrawalCap.toString(10), + available: limits.dailyWithdrawalCap.sub(limits.dailyWithdrawalCapUsage).toString(10), + }, + }, + dd: { + singleOperation: limits.directDepositCap.toString(10), + dailyForAddress: { + total: limits.dailyUserDirectDepositCap.toString(10), + available: limits.dailyUserDirectDepositCap.sub(limits.dailyUserDirectDepositCapUsage).toString(10), + }, + }, + tier: limits.tier.toString(10), + } + return limitsFetch + } + + + // The following key in Redis DB will use to restore sync from the last confirmed tx + private lastConfirmedTxBlockRedisKey = `${this.poolName}:LastConfirmedTxBlock`; + + protected async setLastConfirmedTxBlock(blockNumber: number) { + const curValue = await this.getLastConfirmedTxBlock(); + if (blockNumber > curValue) { + this.setLastConfirmedTxBlockForced(blockNumber); + } + } + + private async setLastConfirmedTxBlockForced(blockNumber: number) { + redis.set(this.lastConfirmedTxBlockRedisKey, blockNumber); + } + + protected async getLastConfirmedTxBlock(): Promise { + const result = await redis.get(this.lastConfirmedTxBlockRedisKey); + try{ + return 
Number(result); + } catch(_) {}; + + return 0; + } +} diff --git a/zp-relayer/pool/DefaultPool.ts b/zp-relayer/pool/DefaultPool.ts new file mode 100644 index 00000000..43b3c059 --- /dev/null +++ b/zp-relayer/pool/DefaultPool.ts @@ -0,0 +1,336 @@ +import config from '@/configs/relayerConfig' +import { logger } from '@/lib/appLogger' +import type { Circuit, IProver } from '@/prover' +import { PoolTx, WorkerTxType } from '@/queue/poolTxQueue' +import { + ENERGY_SIZE, + MOCK_CALLDATA, + OUTPLUSONE, + PERMIT2_CONTRACT, + TOKEN_SIZE, + TRANSFER_INDEX_SIZE, +} from '@/utils/constants' +import { + applyDenominator, + buildPrefixedMemo, + encodeProof, + flattenProof, + numToHex, + truncateHexPrefix, + truncateMemoTxPrefix, +} from '@/utils/helpers' +import { Permit2Recover, SaltedPermitRecover, TransferWithAuthorizationRecover } from '@/utils/permit' +import { PermitType, type PermitRecover } from '@/utils/permit/types' +import { getTxProofField, parseDelta } from '@/utils/proofInputs' +import { + checkAssertion, + checkCondition, + checkDeadline, + checkDepositEnoughBalance, + checkFee, + checkLimits, + checkMemoPrefix, + checkNativeAmount, + checkNonZeroWithdrawAddress, + checkNullifier, + checkPoolId, + checkProof, + checkRoot, + checkScreener, + checkTransferIndex, + getRecoveredAddress, + TxValidationError, +} from '@/validation/tx/common' +import { Proof } from 'libzkbob-rs-node' +import AbiCoder from 'web3-eth-abi' +import { bytesToHex, toBN } from 'web3-utils' +import { getTxData, TxData, TxType } from 'zp-memo-parser' +import { BasePool } from './BasePool' +import { OptionalChecks, ProcessResult } from './types' + +const ZERO = toBN(0) + +export class DefaultPool extends BasePool { + treeProver!: IProver + public permitRecover: PermitRecover | null = null + + protected poolName(): string { return 'default-pool'; } + + async init(startBlock: number | null = null, treeProver: IProver) { + if (this.isInitialized) return + + this.treeProver = treeProver + + 
this.denominator = toBN(await this.network.pool.call('denominator')) + this.poolId = toBN(await this.network.pool.call('pool_id')) + + if (config.RELAYER_PERMIT_TYPE === PermitType.SaltedPermit) { + this.permitRecover = new SaltedPermitRecover(this.network, config.RELAYER_TOKEN_ADDRESS) + } else if (config.RELAYER_PERMIT_TYPE === PermitType.Permit2) { + this.permitRecover = new Permit2Recover(this.network, PERMIT2_CONTRACT) + } else if (config.RELAYER_PERMIT_TYPE === PermitType.TransferWithAuthorization) { + this.permitRecover = new TransferWithAuthorizationRecover(this.network, config.RELAYER_TOKEN_ADDRESS) + } else if (config.RELAYER_PERMIT_TYPE === PermitType.None) { + this.permitRecover = null + } else { + throw new Error("Cannot infer pool's permit standard") + } + await this.permitRecover?.initializeDomain() + if (startBlock) { + const lastBlock = await this.getLastBlockToProcess() + await this.syncState(startBlock, lastBlock) + } + this.isInitialized = true + } + + onIncluded(r: ProcessResult, txHash: string): Promise { + throw new Error('Method not implemented.') + } + + async validateNormalTx( + { transaction: { memo, proof, txType, depositSignature } }: PoolTx, + optionalChecks: OptionalChecks = {}, + traceId?: string + ): Promise { + await checkAssertion(() => checkMemoPrefix(memo, txType)) + + const buf = Buffer.from(memo, 'hex') + const txData = getTxData(buf, txType) + + const root = getTxProofField(proof, 'root') + const nullifier = getTxProofField(proof, 'nullifier') + const delta = parseDelta(getTxProofField(proof, 'delta')) + const fee = toBN(txData.transactFee) + + logger.info( + 'Delta tokens: %s, Energy tokens: %s, Fee: %s', + delta.tokenAmount.toString(10), + delta.energyAmount.toString(10), + fee.toString(10) + ) + + await checkAssertion(() => checkPoolId(delta.poolId, this.poolId)) + await checkAssertion(() => checkRoot(delta.transferIndex, root, this.optimisticState)) + await checkAssertion(() => checkNullifier(nullifier, 
this.state.nullifiers)) + await checkAssertion(() => checkNullifier(nullifier, this.optimisticState.nullifiers)) + await checkAssertion(() => checkTransferIndex(toBN(this.optimisticState.getNextIndex()), delta.transferIndex)) + await checkAssertion(() => checkProof(proof, (p, i) => this.verifyProof(p, i))) + if (optionalChecks.treeProof) { + const { proof, vk } = optionalChecks.treeProof + await checkAssertion(() => checkProof(proof, (p, i) => Proof.verify(vk, p, i))) + } + + const tokenAmount = delta.tokenAmount + const tokenAmountWithFee = tokenAmount.add(fee) + const energyAmount = delta.energyAmount + + let nativeConvert = false + let userAddress: string + + if (txType === TxType.WITHDRAWAL) { + checkCondition(tokenAmountWithFee.lte(ZERO) && energyAmount.lte(ZERO), 'Incorrect withdraw amounts') + + const { nativeAmount, receiver } = txData as TxData + const nativeAmountBN = toBN(nativeAmount) + userAddress = bytesToHex(Array.from(receiver)) + logger.info('Withdraw address: %s', userAddress) + await checkAssertion(() => checkNonZeroWithdrawAddress(userAddress)) + await checkAssertion(() => + checkNativeAmount(nativeAmountBN, tokenAmountWithFee.neg(), config.RELAYER_MAX_NATIVE_AMOUNT) + ) + + if (!nativeAmountBN.isZero()) { + nativeConvert = true + } + } else if (txType === TxType.DEPOSIT || txType === TxType.PERMITTABLE_DEPOSIT) { + checkCondition(tokenAmount.gt(ZERO) && energyAmount.eq(ZERO), 'Incorrect deposit amounts') + checkCondition(depositSignature !== null, 'Deposit signature is required') + + const requiredTokenAmount = applyDenominator(tokenAmountWithFee, this.denominator) + userAddress = await getRecoveredAddress( + txType, + nullifier, + txData, + this.network, + requiredTokenAmount, + depositSignature as string, + this.permitRecover + ) + logger.info('Deposit address: %s', userAddress) + // TODO check for approve in case of deposit + await checkAssertion(() => checkDepositEnoughBalance(this.network, userAddress, requiredTokenAmount)) + } else if 
(txType === TxType.TRANSFER) { + userAddress = config.txManager.TX_ADDRESS + checkCondition(tokenAmountWithFee.eq(ZERO) && energyAmount.eq(ZERO), 'Incorrect transfer amounts') + } else { + throw new TxValidationError('Unsupported TxType') + } + + if (optionalChecks.fee) { + const { feeManager } = optionalChecks.fee + const requiredFee = await feeManager.estimateFee({ + txType, + nativeConvert, + txData: MOCK_CALLDATA + memo + (depositSignature || ''), + }) + const denominatedFee = requiredFee.denominate(this.denominator).getEstimate() + await checkAssertion(() => checkFee(fee, denominatedFee)) + } + + const limits = await this.getLimitsFor(userAddress) + await checkAssertion(() => checkLimits(limits, delta.tokenAmount)) + + if (txType === TxType.PERMITTABLE_DEPOSIT) { + const { deadline } = txData as TxData + logger.info('Deadline: %s', deadline) + await checkAssertion(() => checkDeadline(toBN(deadline), config.RELAYER_PERMIT_DEADLINE_THRESHOLD_INITIAL)) + } + + if (txType === TxType.DEPOSIT || txType === TxType.PERMITTABLE_DEPOSIT || txType === TxType.WITHDRAWAL) { + if (optionalChecks.screener) { + const { screenerUrl, screenerToken } = optionalChecks.screener + await checkAssertion(() => checkScreener(userAddress, screenerUrl, screenerToken, traceId)) + } + } + } + + async getTreeProof(outCommit: string) { + const { pub, sec, commitIndex } = this.optimisticState.getVirtualTreeProofInputs(outCommit) + + logger.debug(`Proving tree...`) + const treeProof = await this.treeProver.prove(pub, sec) + logger.debug(`Tree proved`) + return { treeProof, commitIndex } + } + + async buildNormalTx({ + transaction: { txType, proof, memo, depositSignature }, + }: PoolTx): Promise> { + const func = 'transact()' + + const nullifier = getTxProofField(proof, 'nullifier') + const outCommit = getTxProofField(proof, 'out_commit') + const delta = parseDelta(getTxProofField(proof, 'delta')) + + const { treeProof, commitIndex } = await this.getTreeProof(outCommit) + + const rootAfter = 
treeProof.inputs[1] + + const selector: string = AbiCoder.encodeFunctionSignature(func) + + let transferIndex = numToHex(delta.transferIndex, TRANSFER_INDEX_SIZE) + let energyAmount = numToHex(delta.energyAmount, ENERGY_SIZE) + let tokenAmount = numToHex(delta.tokenAmount, TOKEN_SIZE) + + const txFlatProof = encodeProof(proof.proof) + const treeFlatProof = encodeProof(treeProof.proof) + + const memoSize = numToHex(toBN(memo.length).divn(2), 4) + + const data = [ + selector, + numToHex(toBN(nullifier)), + numToHex(toBN(outCommit)), + transferIndex, + energyAmount, + tokenAmount, + txFlatProof, + numToHex(toBN(rootAfter)), + treeFlatProof, + txType, + memoSize, + memo, + ] + + if (depositSignature) { + const signature = truncateHexPrefix(depositSignature) + data.push(signature) + } + + let calldata = data.join('') + + const memoTruncated = truncateMemoTxPrefix(memo, txType) + + return { data: calldata, func, commitIndex, outCommit, nullifier, memo: memoTruncated, mpc: false, root: rootAfter } + } + + async buildDirectDepositTx({ + transaction: { outCommit, txProof, deposits, memo }, + }: PoolTx): Promise> { + logger.info('Received direct deposit', { number: deposits.length }) + + const func = 'appendDirectDeposits(uint256,uint256[],uint256,uint256[8],uint256[8])' + + const { treeProof, commitIndex } = await this.getTreeProof(outCommit) + + const rootAfter = treeProof.inputs[1] + const indices = deposits.map(d => d.nonce) + + const data = + AbiCoder.encodeFunctionSignature({} as any) + + AbiCoder.encodeParameters( + [], + [rootAfter, indices, outCommit, flattenProof(txProof.proof), flattenProof(treeProof.proof)] + ).slice(2) + + return { data, func, commitIndex, outCommit, memo, mpc: false, root: rootAfter } + } + + async validateDirectDepositTx( + tx: PoolTx, + _optionalChecks: OptionalChecks, + _traceId: string | undefined + ): Promise { + if (tx.transaction.deposits.length === 0) { + throw new Error('Empty direct deposit batch, skipping') + } + } + + async onSend({ 
outCommit, memo, commitIndex, nullifier }: ProcessResult, txHash: string) {
    // Record the new commitment optimistically as soon as the tx is sent.
    const prefixedMemo = buildPrefixedMemo(outCommit, txHash, memo)
    this.optimisticState.updateState(commitIndex, outCommit, prefixedMemo)

    if (nullifier) {
      logger.debug('Adding nullifier %s to OS', nullifier)
      await this.optimisticState.nullifiers.add([nullifier])
    }
  }

  async onConfirmed(
    { outCommit, memo, commitIndex, nullifier, root }: ProcessResult,
    txHash: string,
    callback?: () => Promise<void>
  ) {
    const prefixedMemo = buildPrefixedMemo(outCommit, txHash, memo)
    this.state.updateState(commitIndex, outCommit, prefixedMemo)
    // Update tx hash in optimistic state tx db
    this.optimisticState.addTx(commitIndex * OUTPLUSONE, Buffer.from(prefixedMemo, 'hex'))

    // Move the nullifier from the optimistic set into the confirmed set.
    if (nullifier) {
      logger.info('Adding nullifier %s to PS', nullifier)
      await this.state.nullifiers.add([nullifier])
      logger.info('Removing nullifier %s from OS', nullifier)
      await this.optimisticState.nullifiers.remove([nullifier])
    }

    const node1 = this.state.getCommitment(commitIndex)
    const node2 = this.optimisticState.getCommitment(commitIndex)
    logger.info('Assert commitments are equal: %s, %s', node1, node2)
    if (node1 !== node2) {
      logger.error('Commitments are not equal, state is corrupted')
    }

    const rootConfirmed = this.state.getMerkleRoot()
    logger.info('Assert roots are equal')
    if (rootConfirmed !== root) {
      // TODO: Should be impossible but in such case
      // we should recover from some checkpoint
      logger.error('Roots are not equal: %s should be %s', rootConfirmed, root)
    }

    if (callback) {
      await callback()
    }
  }
}

// ===== file: zp-relayer/pool/FinalizerPool.ts (new) =====
import { logger } from '@/lib/appLogger'
import { Circuit, IProver } from '@/prover'
import {
DirectDeposit, PoolTx, poolTxQueue, WorkerTxType } from '@/queue/poolTxQueue' +import { buildPrefixedMemo, flattenProof } from '@/utils/helpers' +import { DelegatedDepositsData } from 'libzkbob-rs-node' +import AbiCoder from 'web3-eth-abi' +import { toBN } from 'web3-utils' +import { BasePool } from './BasePool' +import { ProcessResult } from './types' + +export interface PendingCommitment { + commitment: string + privilegedProver: string + fee: string + timestamp: string + gracePeriodEnd: string +} + +export class FinalizerPool extends BasePool { + treeProver!: IProver + directDepositProver!: IProver + indexerUrl!: string + + protected poolName(): string { return 'finalizer-pool'; } + + async init( + treeProver: IProver, + directDepositProver: IProver, + indexerUrl: string + ) { + if (this.isInitialized) return + + this.treeProver = treeProver + this.directDepositProver = directDepositProver + this.indexerUrl = indexerUrl + + this.denominator = toBN(await this.network.pool.call('denominator')) + this.poolId = toBN(await this.network.pool.call('pool_id')) + + await this.syncState(undefined, undefined, indexerUrl) + + this.isInitialized = true + } + + async validateTx(): Promise {} + + async buildFinalizeTx({ + transaction: { outCommit }, + }: PoolTx): Promise> { + await this.syncState(undefined, undefined, this.indexerUrl) + + const func = 'proveTreeUpdate(uint256,uint256[8],uint256)' + + const { treeProof, commitIndex } = await this.getTreeProof(outCommit) + const rootAfter = treeProof.inputs[1] + + const treeFlatProof = flattenProof(treeProof.proof) + + const data = + AbiCoder.encodeFunctionSignature(func) + + AbiCoder.encodeParameters(['uint256', 'uint256[8]', 'uint256'], [outCommit, treeFlatProof, rootAfter]).slice(2) + + return { + data, + func, + commitIndex, + outCommit, + memo: '', + root: rootAfter, + mpc: false, + } + } + + async buildDirectDepositTx({ + transaction: { outCommit, deposits, txProof, memo }, + }: PoolTx): Promise> { + logger.info('Received 
direct deposit', { number: deposits.length }) + + const func = 'appendDirectDeposits(uint256[],uint256,uint256[8],address)' + + const { treeProof, commitIndex } = await this.getTreeProof(outCommit) + const rootAfter = treeProof.inputs[1] + const indices = deposits.map(d => d.nonce) + + const data = + AbiCoder.encodeFunctionSignature(func) + + AbiCoder.encodeParameters( + ['uint256[]', 'uint256', 'uint256[8]', 'address'], + [indices, outCommit, flattenProof(txProof.proof), '0x0000000000000000000000000000000000000000'] + ).slice(2) + + return { data, func, commitIndex, outCommit, memo, root: rootAfter, mpc: false } + } + + async getTreeProof(outCommit: string) { + const { pub, sec, commitIndex } = this.state.getVirtualTreeProofInputs(outCommit) + + logger.debug('Proving tree...') + const treeProof = await this.treeProver.prove(pub, sec) + logger.debug('Tree proved') + return { treeProof, commitIndex } + } + + async getDirectDepositProof(deposits: DirectDeposit[]) { + const { + public: pub, + secret: sec, + memo, + out_commitment_hash: outCommit, + } = await DelegatedDepositsData.create( + deposits.map(d => { + return { + id: d.nonce, + receiver_d: toBN(d.zkAddress.diversifier).toString(10), + receiver_p: toBN(d.zkAddress.pk).toString(10), + denominated_amount: d.deposit, + } + }) + ) + const proof = await this.directDepositProver.prove(pub, sec) + return { proof, memo, outCommit } + } + + async fetchCommitment() { + try { + const res = await this.network.pool.call('pendingCommitment') + return res as PendingCommitment + } catch (e) { + //this corresponds to ZkBobPool: queue is empty which is not considered as error + if ((e as any).data?.endsWith("195a6b426f62506f6f6c3a20717565756520697320656d70747900000000000000")) { + return + } + logger.error(e) + return + } + } + + async onSend(p: ProcessResult, txHash: string): Promise {} + + async onConfirmed( + { outCommit, memo, commitIndex, nullifier, root }: ProcessResult, + txHash: string, + callback?: (() => Promise) | 
undefined, + jobId?: string + ): Promise { + const prefixedMemo = buildPrefixedMemo(outCommit, txHash, memo) + this.optimisticState.updateState(commitIndex, outCommit, prefixedMemo) + + if (!jobId) { + logger.error('Pool job not found', { jobId }); + return; + } + + const poolJob = await poolTxQueue.getJob(jobId); + if (poolJob?.data.type === WorkerTxType.Finalize) { + this.state.updateState(commitIndex, outCommit, prefixedMemo) + + const rootConfirmed = this.state.getMerkleRoot() + logger.info('Assert roots are equal') + if (rootConfirmed !== root) { + // TODO: Should be impossible but in such case + // we should recover from some checkpoint + logger.error('Roots are not equal: %s should be %s', rootConfirmed, root) + } + } + + if (callback) { + await callback() + } + } +} diff --git a/zp-relayer/pool/IndexerPool.ts b/zp-relayer/pool/IndexerPool.ts new file mode 100644 index 00000000..074ce64c --- /dev/null +++ b/zp-relayer/pool/IndexerPool.ts @@ -0,0 +1,29 @@ +import { toBN } from 'web3-utils' +import { BasePool } from './BasePool' + +import { type PermitRecover } from '@/utils/permit/types' + +export class IndexerPool extends BasePool { + public permitRecover: PermitRecover | null = null + + protected poolName(): string { return 'indexer-pool'; } + + async init(startBlock: number | null = null, lastBlock: number | null = null) { + if (this.isInitialized) return + + this.denominator = toBN(await this.network.pool.call('denominator')) + this.poolId = toBN(await this.network.pool.call('pool_id')) + + if (startBlock && lastBlock) { + await this.syncState(startBlock, lastBlock) + } + this.isInitialized = true + } + + onSend(): Promise { + throw new Error('Indexer pool is read-only') + } + onConfirmed(): Promise { + throw new Error('Indexer pool is read-only') + } +} diff --git a/zp-relayer/pool/RelayPool.ts b/zp-relayer/pool/RelayPool.ts new file mode 100644 index 00000000..12875f90 --- /dev/null +++ b/zp-relayer/pool/RelayPool.ts @@ -0,0 +1,404 @@ +import config 
from '@/configs/relayerConfig' +import { logger } from '@/lib/appLogger' +import { Network } from '@/lib/network' +import { redis } from '@/lib/redisClient' +import { JobState, PoolTx, poolTxQueue, TxPayload, WorkerTxType } from '@/queue/poolTxQueue' +import { TxStore } from '@/state/TxStore' +import { ENERGY_SIZE, MOCK_CALLDATA, OUTPLUSONE, PERMIT2_CONTRACT, TOKEN_SIZE, TRANSFER_INDEX_SIZE } from '@/utils/constants' +import { + applyDenominator, + buildPrefixedMemo, + encodeProof, + fetchJson, + numToHex, + sleep, + truncateHexPrefix, + truncateMemoTxPrefixProverV2, +} from '@/utils/helpers' +import { Permit2Recover, SaltedPermitRecover, TransferWithAuthorizationRecover } from '@/utils/permit' +import { PermitType, type PermitRecover } from '@/utils/permit/types' +import { getTxProofField, parseDelta } from '@/utils/proofInputs' +import { + checkAddressEq, + checkAssertion, + checkCondition, + checkDeadline, + checkDepositEnoughBalance, + checkFee, + checkLimits, + checkMemoPrefixProverV2, + checkNativeAmount, + checkNonZeroWithdrawAddress, + checkNullifier, + checkNullifierContract, + checkPoolId, + checkProof, + checkRootIndexer, + checkScreener, + checkTransferIndex, + getRecoveredAddress, + TxValidationError, +} from '@/validation/tx/common' +import AbiCoder from 'web3-eth-abi' +import { bytesToHex, toBN } from 'web3-utils' +import { getTxDataProverV2, TxDataProverV2, TxType } from 'zp-memo-parser' +import { BasePool } from './BasePool' +import { OptionalChecks, PermitConfig, ProcessResult } from './types' +import BigNumber from 'bignumber.js' + +const ZERO = toBN(0) + +export class RelayPool extends BasePool { + public permitRecover: PermitRecover | null = null + private proxyAddress!: string + private indexerUrl!: string + private observePromise: Promise | undefined; + txStore!: TxStore + + protected poolName(): string { return 'relay-pool'; } + + async init(permitConfig: PermitConfig, proxyAddress: string, indexerUrl: string) { + if (this.isInitialized) 
return + + this.txStore = new TxStore('tmp-tx-store', redis) + + this.proxyAddress = proxyAddress + this.indexerUrl = indexerUrl + + this.denominator = toBN(await this.network.pool.call('denominator')) + this.poolId = toBN(await this.network.pool.call('pool_id')) + + if (permitConfig.permitType === PermitType.SaltedPermit) { + this.permitRecover = new SaltedPermitRecover(this.network, permitConfig.token) + } else if (permitConfig.permitType === PermitType.Permit2) { + this.permitRecover = new Permit2Recover(this.network, PERMIT2_CONTRACT) + } else if (permitConfig.permitType === PermitType.TransferWithAuthorization) { + this.permitRecover = new TransferWithAuthorizationRecover(this.network, permitConfig.token) + } else if (permitConfig.permitType === PermitType.None) { + this.permitRecover = null + } else { + throw new Error("Cannot infer pool's permit standard") + } + await this.permitRecover?.initializeDomain() + + this.isInitialized = true + + this.observePromise = undefined; + } + + async validateTx( + { transaction: { memo, proof, txType, depositSignature } }: PoolTx, + optionalChecks: OptionalChecks = {}, + traceId?: string + ): Promise { + // Additional checks for memo? 
+ await checkAssertion(() => checkMemoPrefixProverV2(memo, txType)) + + const buf = Buffer.from(memo, 'hex') + const txData = getTxDataProverV2(buf, txType) + + const root = getTxProofField(proof, 'root') + const nullifier = getTxProofField(proof, 'nullifier') + const delta = parseDelta(getTxProofField(proof, 'delta')) + const transactFee = toBN(txData.transactFee) + const treeUpdateFee = toBN(txData.treeUpdateFee) + const proxyAddress = bytesToHex(Array.from(txData.proxyAddress)) + const proverAddress = bytesToHex(Array.from(txData.proverAddress)) + + logger.info('TxData', { + deltaTokens: delta.tokenAmount.toString(10), + deltaEnergy: delta.energyAmount.toString(10), + transactFee: transactFee.toString(10), + treeUpdateFee: treeUpdateFee.toString(10), + proxyAddress, + proverAddress, + }) + + const indexerInfo = await this.getIndexerInfo() + + await checkAssertion(() => checkAddressEq(proxyAddress, this.proxyAddress)) + await checkAssertion(() => checkPoolId(delta.poolId, this.poolId)) + await checkAssertion(() => checkRootIndexer(delta.transferIndex, root, this.indexerUrl)) + await checkAssertion(() => checkNullifier(nullifier, this.optimisticState.nullifiers)) + await checkAssertion(() => checkNullifierContract(nullifier, this.network)) + await checkAssertion(() => checkTransferIndex(toBN(indexerInfo.optimisticDeltaIndex), delta.transferIndex)) + await checkAssertion(() => checkProof(proof, (p, i) => this.verifyProof(p, i))) + + const tokenAmount = delta.tokenAmount + const totalFee = transactFee.add(treeUpdateFee) + const tokenAmountWithFee = tokenAmount.add(totalFee) + const energyAmount = delta.energyAmount + + let nativeConvert = false + let userAddress: string + + if (txType === TxType.WITHDRAWAL) { + checkCondition(tokenAmountWithFee.lte(ZERO) && energyAmount.lte(ZERO), 'Incorrect withdraw amounts') + + const { nativeAmount, receiver } = txData as TxDataProverV2 + const nativeAmountBN = toBN(nativeAmount) + userAddress = bytesToHex(Array.from(receiver)) + 
logger.info('Withdraw address: %s', userAddress) + await checkAssertion(() => checkNonZeroWithdrawAddress(userAddress)) + await checkAssertion(() => + checkNativeAmount(nativeAmountBN, tokenAmountWithFee.neg(), config.RELAYER_MAX_NATIVE_AMOUNT) + ) + + if (!nativeAmountBN.isZero()) { + nativeConvert = true + } + } else if (txType === TxType.DEPOSIT || txType === TxType.PERMITTABLE_DEPOSIT) { + checkCondition(tokenAmount.gt(ZERO) && energyAmount.eq(ZERO), 'Incorrect deposit amounts') + checkCondition(depositSignature !== null, 'Deposit signature is required') + + const requiredTokenAmount = applyDenominator(tokenAmountWithFee, this.denominator) + userAddress = await getRecoveredAddress( + txType, + nullifier, + txData, + this.network, + requiredTokenAmount, + depositSignature as string, + this.permitRecover + ) + logger.info('Deposit address: %s', userAddress) + // TODO check for approve in case of deposit + await checkAssertion(() => checkDepositEnoughBalance(this.network, userAddress, requiredTokenAmount)) + } else if (txType === TxType.TRANSFER) { + userAddress = this.proxyAddress + checkCondition(tokenAmountWithFee.eq(ZERO) && energyAmount.eq(ZERO), 'Incorrect transfer amounts') + } else { + throw new TxValidationError('Unsupported TxType') + } + + if (optionalChecks.fee) { + const { feeManager } = optionalChecks.fee + const requiredFee = await feeManager.estimateFee({ + txType, + nativeConvert, + txData: MOCK_CALLDATA + memo + (depositSignature || ''), + }) + const denominatedFee = requiredFee.denominate(this.denominator).getEstimate() + await checkAssertion(() => checkFee(totalFee, denominatedFee)) + } + + const limits = await this.getLimitsFor(userAddress) + await checkAssertion(() => checkLimits(limits, delta.tokenAmount)) + + if (txType === TxType.PERMITTABLE_DEPOSIT) { + const { deadline } = txData as TxDataProverV2 + logger.info('Deadline: %s', deadline) + await checkAssertion(() => checkDeadline(toBN(deadline), 
config.RELAYER_PERMIT_DEADLINE_THRESHOLD_INITIAL)) + } + + if (txType === TxType.DEPOSIT || txType === TxType.PERMITTABLE_DEPOSIT || txType === TxType.WITHDRAWAL) { + if (optionalChecks.screener) { + const { screenerUrl, screenerToken } = optionalChecks.screener + await checkAssertion(() => checkScreener(userAddress, screenerUrl, screenerToken, traceId)) + } + } + } + + async buildNormalTx({ + transaction: { proof, memo, depositSignature, txType }, + }: PoolTx): Promise> { + const func = 'transactV2()' + const version = 2 + + const nullifier = getTxProofField(proof, 'nullifier') + const outCommit = getTxProofField(proof, 'out_commit') + const delta = parseDelta(getTxProofField(proof, 'delta')) + + const selector: string = AbiCoder.encodeFunctionSignature(func) + + let transferIndex = numToHex(delta.transferIndex, TRANSFER_INDEX_SIZE) + let energyAmount = numToHex(delta.energyAmount, ENERGY_SIZE) + let tokenAmount = numToHex(delta.tokenAmount, TOKEN_SIZE) + + const txFlatProof = encodeProof(proof.proof) + + const memoSize = numToHex(toBN(memo.length).divn(2), 4) + + const data = [ + selector, + numToHex(toBN(version), 2), + numToHex(toBN(nullifier)), + numToHex(toBN(outCommit)), + transferIndex, + energyAmount, + tokenAmount, + txFlatProof, + txType, + memoSize, + memo, + ] + + if (depositSignature) { + const signature = truncateHexPrefix(depositSignature) + data.push(signature) + } + + const calldata = data.join('') + + const memoTruncated = truncateMemoTxPrefixProverV2(memo, txType) + + // Commit index should be treated as an optimistic checkpoint + // It can increase after the transaction is included into the Merkle tree + const commitIndex = await this.assumeNextPendingTxIndex(); + + return { + data: calldata, + func, + outCommit, + nullifier, + memo: memoTruncated, + commitIndex, + } + } + + async onSend({ outCommit, nullifier, memo, commitIndex }: ProcessResult, txHash: string): Promise { + if (nullifier) { + logger.debug('Adding nullifier %s to OS', 
nullifier) + await this.optimisticState.nullifiers.add([nullifier]) + } + + // cache transaction locally + const indexerOptimisticIndex = Number((await this.getIndexerInfo()).deltaIndex); + await this.cacheTxLocally(outCommit, txHash, memo, Date.now()); + // start monitoring local cache against the indexer to cleanup already indexed txs + this.startLocalCacheObserver(indexerOptimisticIndex); + } + + async onConfirmed(res: ProcessResult, txHash: string, callback?: () => Promise, jobId?: string): Promise { + logger.debug("Updating pool job %s completed, txHash %s", jobId, txHash); + if (jobId) { + const poolJob = await poolTxQueue.getJob(jobId); + if (!poolJob) { + logger.error('Pool job not found', { jobId }); + } else { + poolJob.data.transaction.state = JobState.COMPLETED; + poolJob.data.transaction.txHash = txHash; + await poolJob.update(poolJob.data); + } + } + } + + async onFailed(txHash: string, jobId: string): Promise { + super.onFailed(txHash, jobId); + const poolJob = await poolTxQueue.getJob(jobId); + if (!poolJob) { + logger.error('Pool job not found', { jobId }); + } else { + poolJob.data.transaction.state = JobState.REVERTED; + poolJob.data.transaction.txHash = txHash; + + const txPayload = poolJob.data.transaction as TxPayload; + if (txPayload.proof.inputs.length > 2) { + const commit = txPayload.proof.inputs[2]; + this.txStore.remove(commit); + logger.info('Removing local cached transaction', {commit}); + } + await poolJob.update(poolJob.data); + } + } + + protected async cacheTxLocally(commit: string, txHash: string, memo: string, timestamp: number) { + // store or updating local tx store + // (we should keep sent transaction until the indexer grab them) + const prefixedMemo = buildPrefixedMemo( + commit, + txHash, + memo + ); + await this.txStore.add(commit, prefixedMemo, timestamp); + logger.info('Tx has been CACHED locally', { commit, timestamp }); + } + + private async getIndexerInfo() { + const info = await fetchJson(this.indexerUrl, '/info', 
[]) + return info + } + + // It's just an assumption needed for internal purposes. The final index may be changed + private async assumeNextPendingTxIndex() { + const [indexerInfo, localCache] = await Promise.all([this.getIndexerInfo(), this.txStore.getAll()]); + + return Number(indexerInfo.optimisticDeltaIndex + Object.keys(localCache).length * OUTPLUSONE); + } + + private async getIndexerTxs(offset: number, limit: number): Promise { + const url = new URL('/transactions/v2', config.base.COMMON_INDEXER_URL) + url.searchParams.set('limit', limit.toString()) + url.searchParams.set('offset', offset.toString()) + + const response = await fetch(url) + if (!response.ok) { + throw new Error(`Failed to fetch transactions from indexer. Status: ${response.status}`) + } + return response.json() as Promise; + } + + // observe the current local cache and indexer to remove local record + // after adding it to the indexer's optimistic/persistent state + // return when local cache is empty + protected async startLocalCacheObserver(fromIndex: number): Promise { + if (this.observePromise == undefined) { + this.observePromise = this.localCacheObserverWorker(fromIndex).finally(() => { + this.observePromise = undefined; + }); + } + + return this.observePromise; + } + + protected async localCacheObserverWorker(fromIndex: number): Promise { + // we start checking transactions slightly earlier than the current optimistic index + // to cover the case when the indexer was already updated before onSend was called + const OFFSET_MARGIN = 10 * OUTPLUSONE; + fromIndex = Math.max(fromIndex - OFFSET_MARGIN, 0); + logger.debug('Local cache observer worker was started', { fromIndex }) + const CACHE_OBSERVE_INTERVAL_MS = 1000; // waiting time between checks + const EXTEND_LIMIT_TO_FETCH = 10; // taking into account non-atomic nature of /info and /transactions/v2 requests + const EXPIRATION_MS = 1000 * 60 * 60 * 24; // we drop entries older than 24 hours, unlikely that they ever will be indexed + + 
while (true) { + const localEntries = Object.entries(await this.txStore.getAll()); + let localEntriesCnt = localEntries.length; + + if (localEntries.length == 0) { + break; + } + + // there are entries in the local cache + try { + const indexerOptimisticIndex = Number((await this.getIndexerInfo()).optimisticDeltaIndex); + const limit = (indexerOptimisticIndex - fromIndex) / OUTPLUSONE + localEntries.length + EXTEND_LIMIT_TO_FETCH; + const indexerCommitments = (await this.getIndexerTxs(fromIndex, limit)).map(tx => BigNumber(tx.slice(65, 129), 16).toString(10)); + + // find cached commitments in the indexer's response + for (const [commit, {memo, timestamp}] of localEntries) { + if (indexerCommitments.includes(commit)) { + logger.info('Deleting cached entry', { commit, timestamp }) + await this.txStore.remove(commit) + localEntriesCnt--; + } else { + if (Date.now() - timestamp > EXPIRATION_MS) { + logger.error('Cached transaction was not indexed for a long time, removing', { commit, timestamp }); + await this.txStore.remove(commit) + localEntriesCnt--; + } + //logger.info('Cached entry is still in the local cache', { commit, index }); + } + } + } catch(e) { + logger.error(`Cannot check local cache against indexer : ${(e as Error).message}`); + } + + if (localEntriesCnt > 0) { + await sleep(CACHE_OBSERVE_INTERVAL_MS); + } + } + + logger.debug('Local cache observer worker has finished', { fromIndex }) + } +} diff --git a/zp-relayer/pool/types.ts b/zp-relayer/pool/types.ts new file mode 100644 index 00000000..41c2ceb4 --- /dev/null +++ b/zp-relayer/pool/types.ts @@ -0,0 +1,96 @@ +import type BN from 'bn.js' +import type { Proof, VK } from 'libzkbob-rs-node' + +import type { FeeManager } from '@/lib/fee' +import type { PermitType } from '@/utils/permit/types' +import type { BasePool } from './BasePool' +import type { RelayPool } from './RelayPool' + +export interface Limits { + tvlCap: BN + tvl: BN + dailyDepositCap: BN + dailyDepositCapUsage: BN + dailyWithdrawalCap: BN 
+ dailyWithdrawalCapUsage: BN + dailyUserDepositCap: BN + dailyUserDepositCapUsage: BN + depositCap: BN + tier: BN + dailyUserDirectDepositCap: BN + dailyUserDirectDepositCapUsage: BN + directDepositCap: BN +} + +export interface LimitsFetch { + deposit: { + singleOperation: string + dailyForAddress: { + total: string + available: string + } + dailyForAll: { + total: string + available: string + } + poolLimit: { + total: string + available: string + } + } + withdraw: { + dailyForAll: { + total: string + available: string + } + } + dd: { + singleOperation: string + dailyForAddress: { + total: string + available: string + } + } + tier: string +} + +export interface OptionalChecks { + treeProof?: { + proof: Proof + vk: VK + } + fee?: { + feeManager: FeeManager + } + screener?: { + screenerUrl: string + screenerToken: string + } +} + +export interface BaseProcessResult { + data: string + func: string + nullifier?: string + outCommit: string + memo: string + commitIndex: number +} + +export interface DefaultPoolProcessResult extends BaseProcessResult { + root: string + mpc: boolean +} + +export type ProcessResult

= P extends RelayPool ? BaseProcessResult : DefaultPoolProcessResult + +export interface BasePoolConfig { + statePath: string + txVkPath: string + eventsBatchSize: number +} + +export interface PermitConfig { + permitType: PermitType + token: string +} diff --git a/zp-relayer/prover/IProver.ts b/zp-relayer/prover/IProver.ts index d11a876d..a284160f 100644 --- a/zp-relayer/prover/IProver.ts +++ b/zp-relayer/prover/IProver.ts @@ -1,11 +1,4 @@ -import type { - TreePub, - TreeSec, - Proof, - DelegatedDepositBatchPub, - DelegatedDepositBatchSec, - Params, -} from 'libzkbob-rs-node' +import type { DelegatedDepositBatchPub, DelegatedDepositBatchSec, Proof, TreePub, TreeSec } from 'libzkbob-rs-node' // TODO: add support for DD export enum Circuit { diff --git a/zp-relayer/prover/LocalProver.ts b/zp-relayer/prover/LocalProver.ts index 40522703..9f3ba96d 100644 --- a/zp-relayer/prover/LocalProver.ts +++ b/zp-relayer/prover/LocalProver.ts @@ -1,4 +1,4 @@ -import { Proof, Params } from 'libzkbob-rs-node' +import { Params, Proof } from 'libzkbob-rs-node' import { Circuit, IProver, PubInput, SecInput } from './IProver' type InternalProve = (p: Params, pub: PubInput, sec: SecInput) => Promise diff --git a/zp-relayer/queue/directDepositQueue.ts b/zp-relayer/queue/directDepositQueue.ts index 2966c635..83c07ff5 100644 --- a/zp-relayer/queue/directDepositQueue.ts +++ b/zp-relayer/queue/directDepositQueue.ts @@ -1,7 +1,7 @@ -import { Queue } from 'bullmq' +import { redis } from '@/lib/redisClient' import { DIRECT_DEPOSIT_QUEUE_NAME } from '@/utils/constants' +import { Queue } from 'bullmq' import { DirectDeposit } from './poolTxQueue' -import { redis } from '@/services/redisClient' export const directDepositQueue = new Queue(DIRECT_DEPOSIT_QUEUE_NAME, { connection: redis, diff --git a/zp-relayer/queue/poolTxQueue.ts b/zp-relayer/queue/poolTxQueue.ts index 9f64ce82..d35cf72d 100644 --- a/zp-relayer/queue/poolTxQueue.ts +++ b/zp-relayer/queue/poolTxQueue.ts @@ -1,14 +1,26 @@ -import { 
Queue } from 'bullmq' +import { redis } from '@/lib/redisClient' import { TX_QUEUE_NAME } from '@/utils/constants' +import { Queue } from 'bullmq' import type { Proof } from 'libzkbob-rs-node' import type { TxType } from 'zp-memo-parser' -import { redis } from '@/services/redisClient' -export interface TxPayload { - amount: string - txProof: Proof +export enum JobState { + WAITING = 'waiting', + SENT = 'sent', + COMPLETED = 'completed', + REVERTED = 'reverted', + FAILED = 'failed', +} + +export interface BasePayload { + txHash: string | null + state: JobState +} + +export interface BasePoolTx { + proof: Proof + memo: string txType: TxType - rawMemo: string depositSignature: string | null } @@ -25,37 +37,51 @@ export interface DirectDeposit { deposit: string } -export interface DirectDepositTxPayload { - deposits: DirectDeposit[] +export interface DirectDepositTx { txProof: Proof + deposits: DirectDeposit[] outCommit: string memo: string } +export interface FinalizeTx { + outCommit: string + privilegedProver: string + fee: string + timestamp: string + gracePeriodEnd: string +} + +export interface TxPayload extends BasePayload, BasePoolTx {} +export interface DirectDepositTxPayload extends BasePayload, DirectDepositTx {} +export interface FinalizeTxPayload extends BasePayload, FinalizeTx {} + export enum WorkerTxType { Normal = 'normal', DirectDeposit = 'dd', + Finalize = 'finalize', } export const WorkerTxTypePriority: Record = { [WorkerTxType.Normal]: 1, [WorkerTxType.DirectDeposit]: 2, + [WorkerTxType.Finalize]: 3, } export type WorkerTx = T extends WorkerTxType.Normal ? TxPayload : T extends WorkerTxType.DirectDeposit ? DirectDepositTxPayload + : T extends WorkerTxType.Finalize + ? FinalizeTxPayload : never -export interface BatchTx { +export interface PoolTx { type: T - transactions: M extends true ? 
WorkerTx[] : WorkerTx + transaction: WorkerTx traceId?: string } -export type PoolTxResult = [string, string] - -export const poolTxQueue = new Queue, PoolTxResult[]>(TX_QUEUE_NAME, { +export const poolTxQueue = new Queue>(TX_QUEUE_NAME, { connection: redis, }) diff --git a/zp-relayer/queue/sentTxQueue.ts b/zp-relayer/queue/sentTxQueue.ts index f69f6589..108e087c 100644 --- a/zp-relayer/queue/sentTxQueue.ts +++ b/zp-relayer/queue/sentTxQueue.ts @@ -1,21 +1,13 @@ -import { Queue } from 'bullmq' +import { SendAttempt } from '@/lib/network' +import { redis } from '@/lib/redisClient' +import { ProcessResult } from '@/pool/types' import { SENT_TX_QUEUE_NAME } from '@/utils/constants' -import { redis } from '@/services/redisClient' -import type { TransactionConfig } from 'web3-core' -import type { GasPriceValue } from '@/services/gas-price' -import type { BatchTx, WorkerTxType } from './poolTxQueue' +import { Queue } from 'bullmq' -export type SendAttempt = [string, GasPriceValue] export interface SentTxPayload { poolJobId: string - root: string - outCommit: string - commitIndex: number - truncatedMemo: string - txConfig: TransactionConfig - nullifier?: string - txPayload: BatchTx - prevAttempts: SendAttempt[] + processResult: ProcessResult + prevAttempts: SendAttempt[] } export enum SentTxState { @@ -24,8 +16,6 @@ export enum SentTxState { SKIPPED = 'SKIPPED', } -export type SentTxResult = [SentTxState, string, string[]] - -export const sentTxQueue = new Queue(SENT_TX_QUEUE_NAME, { +export const sentTxQueue = new Queue(SENT_TX_QUEUE_NAME, { connection: redis, }) diff --git a/zp-relayer/services/commitment-watcher/index.ts b/zp-relayer/services/commitment-watcher/index.ts new file mode 100644 index 00000000..5ce49903 --- /dev/null +++ b/zp-relayer/services/commitment-watcher/index.ts @@ -0,0 +1,13 @@ +import config from '@/configs/commitmentWatcherConfig' +import { logger } from '@/lib/appLogger' +import express from 'express' +import { init } from './init' +import { 
createRouter } from './router' + +init().then((pool) => { + const app = express() + + app.use(createRouter(pool)) + const PORT = config.COMMITMENT_WATCHER_PORT + app.listen(PORT, () => logger.info(`Started commitment-watcher on port ${PORT}`)) +}) diff --git a/zp-relayer/services/commitment-watcher/init.ts b/zp-relayer/services/commitment-watcher/init.ts new file mode 100644 index 00000000..c0968077 --- /dev/null +++ b/zp-relayer/services/commitment-watcher/init.ts @@ -0,0 +1,136 @@ +import { buildNetworkBackend, buildProver, buildTxManager } from '@/common/serviceUtils' +import config from '@/configs/commitmentWatcherConfig' +import { logger } from '@/lib/appLogger' +import { redis } from '@/lib/redisClient' +import { FinalizerPool, PendingCommitment } from '@/pool/FinalizerPool' +import { Circuit, ProverType } from '@/prover' +import { JobState, poolTxQueue, WorkerTxType } from '@/queue/poolTxQueue' +import { ZERO_ADDRESS } from '@/utils/constants' +import { createDirectDepositWorker } from '@/workers/directDepositWorker' +import { createPoolTxWorker } from '@/workers/poolTxWorker' +import { createSentTxWorker } from '@/workers/sentTxWorker' +import { IWorkerBaseConfig } from '@/workers/workerTypes' +import { Mutex } from 'async-mutex' +import BN from 'bn.js' +import { toBN } from 'web3-utils' + +async function processCommitment(pendingCommitment: PendingCommitment) { + const { timestamp, privilegedProver, fee, commitment, gracePeriodEnd } = pendingCommitment + + const currentTimestamp = new BN(Math.floor(Date.now() / 1000)) + const isEligible = + privilegedProver === ZERO_ADDRESS || + privilegedProver === config.txManager.TX_ADDRESS || + currentTimestamp.gte(toBN(gracePeriodEnd)) + if (!isEligible) { + logger.info('Not allowed to submit the proof yet, waiting...') + return + } + + const existingJob = await poolTxQueue.getJob(commitment); + if (existingJob) { + if (existingJob.data.transaction.state === JobState.FAILED) { + logger.info('Job failed, repeating...', 
{ commitment }) + await poolTxQueue.remove(commitment); + } else { + logger.info('Job already created, waiting...', { commitment }) + return + } + } + + const job = await poolTxQueue.add( + 'tx', + { + type: WorkerTxType.Finalize, + transaction: { + txHash: null, + state: JobState.WAITING, + outCommit: commitment, + privilegedProver, + fee, + timestamp, + gracePeriodEnd, + }, + }, + { + jobId: commitment, + } + ) + logger.debug(`Added poolTxWorker job: ${job.id}`) +} + +async function runWatcher(pool: FinalizerPool) { + try { + const pendingCommitment = await pool.fetchCommitment() + if (pendingCommitment) { + await processCommitment(pendingCommitment) + } + } catch (e) { + logger.error(e) + } + + setTimeout(() => { + runWatcher(pool) + }, config.COMMITMENT_WATCHER_FETCH_INTERVAL) +} + +export async function init() { + const networkBackend = buildNetworkBackend(config.base, config.network, config.COMMITMENT_WATCHER_TOKEN_ADDRESS) + const txManager = buildTxManager(redis, networkBackend, config.gasPrice, config.txManager) + + const pool = new FinalizerPool(networkBackend, { + statePath: config.COMMITMENT_WATCHER_STATE_DIR_PATH, + txVkPath: config.COMMITMENT_WATCHER_TX_VK_PATH, + eventsBatchSize: config.base.COMMON_EVENTS_PROCESSING_BATCH_SIZE, + }) + const treeProver = buildProver( + Circuit.Tree, + ProverType.Local, + config.COMMITMENT_WATCHER_TREE_UPDATE_PARAMS_PATH as string, + config.COMMITMENT_WATCHER_PRECOMPUTE_PARAMS + ) + + const directDepositProver = buildProver( + Circuit.DirectDeposit, + ProverType.Local, + config.COMMITMENT_WATCHER_DIRECT_DEPOSIT_PARAMS_PATH as string, + config.COMMITMENT_WATCHER_PRECOMPUTE_PARAMS + ) + + if (!config.base.COMMON_INDEXER_URL) { + throw new Error('COMMON_INDEXER_URL is not set') + } + + await pool.init(treeProver, directDepositProver, config.base.COMMON_INDEXER_URL) + await txManager.init() + + const workerBaseConfig: IWorkerBaseConfig = { + pool, + redis, + } + + const mutex = new Mutex() + + const workerPromises = [ + 
createPoolTxWorker({ + ...workerBaseConfig, + mutex, + txManager, + }), + createSentTxWorker({ + ...workerBaseConfig, + mutex, + txManager, + }), + createDirectDepositWorker({ + ...workerBaseConfig, + }), + ] + + const workers = await Promise.all(workerPromises) + workers.forEach(w => w.run()) + + runWatcher(pool) + + return pool +} diff --git a/zp-relayer/services/commitment-watcher/router.ts b/zp-relayer/services/commitment-watcher/router.ts new file mode 100644 index 00000000..0e5037b8 --- /dev/null +++ b/zp-relayer/services/commitment-watcher/router.ts @@ -0,0 +1,51 @@ +import config from '@/configs/commitmentWatcherConfig' +import { logger } from '@/lib/appLogger' +import { BasePool } from '@/pool/BasePool' +import { poolTxQueue, WorkerTx, WorkerTxType } from '@/queue/poolTxQueue' +import { applyDenominator } from '@/utils/helpers' +import { ValidationError } from '@/validation/api/validation' +import cors from 'cors' +import express, { NextFunction, Request, Response } from 'express' +import { toBN } from 'web3-utils' + +export function createRouter(pool: BasePool) { + const router = express.Router() + + router.use(cors()) + router.use(express.urlencoded({ extended: true })) + router.use(express.json()) + router.use(express.text()) + + router.use((err: any, req: Request, res: Response, next: NextFunction) => { + if (err) { + logger.error('Request error:', err) + return res.sendStatus(500) + } + next() + }) + + router.get('/address', (req, res) => { + res.json({ address: config.txManager.TX_ADDRESS }) + }) + + router.get('/fee', (req, res) => { + const dInverse = toBN(1).shln(255) + const fee = applyDenominator(config.COMMITMENT_WATCHER_FEE, pool.denominator.xor(dInverse)) + res.json({ fee: fee.toString(10) }) + }) + + + // Error handler middleware + router.use((error: any, req: Request, res: Response, next: NextFunction) => { + if (error instanceof ValidationError) { + const validationErrors = error.validationErrors + logger.warn('Validation errors', { 
errors: validationErrors, path: req.path }) + res.status(400).json(validationErrors) + } else { + logger.error('Internal error', { error, path: req.path }) + res.status(500).send('Internal server error') + } + }) + + return router +} diff --git a/zp-relayer/direct-deposit/BatchCache.ts b/zp-relayer/services/direct-deposit/BatchCache.ts similarity index 98% rename from zp-relayer/direct-deposit/BatchCache.ts rename to zp-relayer/services/direct-deposit/BatchCache.ts index 6e2c16b1..cdc33f88 100644 --- a/zp-relayer/direct-deposit/BatchCache.ts +++ b/zp-relayer/services/direct-deposit/BatchCache.ts @@ -1,11 +1,11 @@ -import type Redis from 'ioredis' -import { Mutex } from 'async-mutex' -import { logger } from '@/services/appLogger' +import { logger } from '@/lib/appLogger' import { DIRECT_DEPOSIT_REPROCESS_INTERVAL, DIRECT_DEPOSIT_REPROCESS_NAME, DIRECT_DEPOSIT_SET_NAME, } from '@/utils/constants' +import { Mutex } from 'async-mutex' +import type Redis from 'ioredis' export class BatchCache { private timer: NodeJS.Timeout | null = null diff --git a/zp-relayer/direct-deposit/utils.ts b/zp-relayer/services/direct-deposit/utils.ts similarity index 80% rename from zp-relayer/direct-deposit/utils.ts rename to zp-relayer/services/direct-deposit/utils.ts index 1d5d343f..52cc8a4f 100644 --- a/zp-relayer/direct-deposit/utils.ts +++ b/zp-relayer/services/direct-deposit/utils.ts @@ -1,12 +1,12 @@ -import { logger } from '@/services/appLogger' -import { redis } from '@/services/redisClient' -import config from '@/configs/baseConfig' +import { getBaseConfig } from '@/configs/baseConfig' +import { logger } from '@/lib/appLogger' +import { redis } from '@/lib/redisClient' import type { DirectDeposit } from '@/queue/poolTxQueue' const serviceKey = 'direct-deposit' const lastBlockRedisKey = `${serviceKey}:lastProcessedBlock` -export let lastProcessedBlock = Math.max(config.startBlock - 1, 0) +export let lastProcessedBlock = Math.max(getBaseConfig().COMMON_START_BLOCK - 1, 0) export 
async function getLastProcessedBlock() { const result = await redis.get(lastBlockRedisKey) diff --git a/zp-relayer/services/direct-deposit/watcher.ts b/zp-relayer/services/direct-deposit/watcher.ts new file mode 100644 index 00000000..0c4e5f7c --- /dev/null +++ b/zp-relayer/services/direct-deposit/watcher.ts @@ -0,0 +1,52 @@ +import DirectDepositQueueAbi from '@/abi/direct-deposit-queue-abi.json' +import { buildNetworkBackend } from '@/common/serviceUtils' +import config from '@/configs/watcherConfig' +import { logger } from '@/lib/appLogger' +import { redis } from '@/lib/redisClient' +import { directDepositQueue } from '@/queue/directDepositQueue' +import type { DirectDeposit } from '@/queue/poolTxQueue' +import { validateDirectDeposit } from '@/validation/tx/validateDirectDeposit' +import { Watcher } from '@/watcher/Watcher' +import { BatchCache } from './BatchCache' +import { parseDirectDepositEvent } from './utils' + +async function init() { + const networkBackend = buildNetworkBackend(config.base, config.network, config.RELAYER_TOKEN_ADDRESS) + + const queueAddress = await networkBackend.pool.call('direct_deposit_queue') + const DirectDepositQueueInstance = networkBackend.contract(DirectDepositQueueAbi, queueAddress) + + const batchCache = new BatchCache( + config.DIRECT_DEPOSIT_BATCH_SIZE, + config.DIRECT_DEPOSIT_BATCH_TTL, + ds => { + logger.info('Adding direct-deposit events to queue', { count: ds.length }) + directDepositQueue.add('', ds) + }, + dd => validateDirectDeposit(dd, DirectDepositQueueInstance), + redis + ) + await batchCache.init() + + const watcher = new Watcher(networkBackend, DirectDepositQueueInstance, 'direct-deposit', { + event: 'SubmitDirectDeposit', + blockConfirmations: config.WATCHER_BLOCK_CONFIRMATIONS, + startBlock: config.base.COMMON_START_BLOCK, + eventPollingInterval: config.WATCHER_EVENT_POLLING_INTERVAL, + batchSize: config.base.COMMON_EVENTS_PROCESSING_BATCH_SIZE, + processor: async (batch: any) => { + const directDeposits: 
[string, DirectDeposit][] = [] + for (let event of batch) { + const dd = parseDirectDepositEvent(event.values) + directDeposits.push([dd.nonce, dd]) + } + + await batchCache.add(directDeposits) + }, + }) + + await watcher.init() + watcher.run() +} + +init() diff --git a/zp-relayer/services/guard/guard.ts b/zp-relayer/services/guard/guard.ts new file mode 100644 index 00000000..ac19df18 --- /dev/null +++ b/zp-relayer/services/guard/guard.ts @@ -0,0 +1,13 @@ +import config from '@/configs/guardConfig' +import express from 'express' +import { logger } from '../../lib/appLogger' +import { init } from './init' +import { createRouter } from './router' + +const app = express() + +init().then(({ poolContract }) => { + app.use(createRouter({ poolContract })) + const PORT = config.GUARD_PORT + app.listen(PORT, () => logger.info(`Started guard on port ${PORT}`)) +}) diff --git a/zp-relayer/services/guard/init.ts b/zp-relayer/services/guard/init.ts new file mode 100644 index 00000000..35a1ba6d --- /dev/null +++ b/zp-relayer/services/guard/init.ts @@ -0,0 +1,30 @@ +import config from '@/configs/guardConfig' +import { Network, NetworkContract } from '@/lib/network' +import { EthereumContract } from '@/lib/network/evm/EvmContract' +import { TronContract } from '@/lib/network/tron/TronContract' +// @ts-ignore +import TronWeb from 'tronweb' +import Web3 from 'web3' +import PoolAbi from '../../abi/pool-abi.json' + +function getPoolContract(): NetworkContract { + if (config.GUARD_NETWORK === Network.Tron) { + const tronWeb = new TronWeb(config.COMMON_RPC_URL[0], config.COMMON_RPC_URL[0], config.COMMON_RPC_URL[0]) + + const address = tronWeb.address.fromPrivateKey(config.GUARD_ADDRESS_PRIVATE_KEY.slice(2)) + tronWeb.setAddress(address) + + return new TronContract(tronWeb, PoolAbi, config.COMMON_POOL_ADDRESS) + } else if (config.GUARD_NETWORK === Network.Ethereum) { + const web3 = new Web3(config.COMMON_RPC_URL[0]) + return new EthereumContract(web3, PoolAbi, 
config.COMMON_POOL_ADDRESS) + } else { + throw new Error('Unsupported network') + } +} + +export async function init() { + const poolContract = getPoolContract() + + return { poolContract } +} diff --git a/zp-relayer/services/guard/router.ts b/zp-relayer/services/guard/router.ts new file mode 100644 index 00000000..d605baa0 --- /dev/null +++ b/zp-relayer/services/guard/router.ts @@ -0,0 +1,110 @@ +import config from '@/configs/guardConfig' +import { logger } from '@/lib/appLogger' +import type { Network, NetworkContract } from '@/lib/network' +// @ts-ignore +import { buildTxData, TxData } from '@/txProcessor' +import { ENERGY_SIZE, TOKEN_SIZE, TRANSFER_INDEX_SIZE } from '@/utils/constants' +import { numToHex, packSignature } from '@/utils/helpers' +import { getTxProofField, parseDelta } from '@/utils/proofInputs' +import { checkSignMPCSchema, validateBatch } from '@/validation/api/validation' +import cors from 'cors' +import { getBytes, keccak256 } from 'ethers' +import express, { NextFunction, Request, Response } from 'express' +import { VK } from 'libzkbob-rs-node' +// @ts-ignore +import TronWeb from 'tronweb' +import { toBN } from 'web3-utils' +// @ts-ignore +import { TxDataMPC, validateTxMPC } from '@/validation/tx/validateTx' + +function wrapErr(f: (_req: Request, _res: Response, _next: NextFunction) => Promise | void) { + return async (req: Request, res: Response, next: NextFunction) => { + try { + await f(req, res, next) + } catch (e) { + next(e) + } + } +} + +interface RouterConfig { + poolContract: NetworkContract +} + +export function createRouter({ poolContract }: RouterConfig) { + const router = express.Router() + + router.use(cors()) + router.use(express.urlencoded({ extended: true })) + router.use(express.json()) + router.use(express.text()) + + router.use((err: any, _req: Request, res: Response, next: NextFunction) => { + if (err) { + logger.error('Request error:', err) + return res.sendStatus(500) + } + next() + }) + + router.post( + '/sign', + 
wrapErr(async (req: Request, res: Response) => { + validateBatch([[checkSignMPCSchema, req.body]]) + const message = req.body as TxDataMPC + + // Validate + const txVK: VK = require(config.GUARD_TX_VK_PATH) + const treeVK: VK = require(config.GUARD_TREE_VK_PATH) + + const poolId = toBN(await poolContract.call('pool_id')) + + try { + await validateTxMPC(message, poolId, treeVK, txVK) + } catch (e) { + logger.error('Validation error', e) + throw new Error('Invalid transaction') + } + + // Sign + const { txProof, treeProof } = message + const nullifier = getTxProofField(txProof, 'nullifier') + const outCommit = getTxProofField(txProof, 'out_commit') + const delta = parseDelta(getTxProofField(txProof, 'delta')) + + const rootAfter = treeProof.inputs[1] + + const txData: TxData = { + txProof: message.txProof.proof, + treeProof: message.treeProof.proof, + nullifier: numToHex(toBN(nullifier)), + outCommit: numToHex(toBN(outCommit)), + rootAfter: numToHex(toBN(rootAfter)), + delta: { + transferIndex: numToHex(delta.transferIndex, TRANSFER_INDEX_SIZE), + energyAmount: numToHex(delta.energyAmount, ENERGY_SIZE), + tokenAmount: numToHex(delta.tokenAmount, TOKEN_SIZE), + }, + txType: message.txType, + memo: message.memo, + depositSignature: message.depositSignature, + } + + const transferRoot = numToHex(toBN(getTxProofField(txProof, 'root'))) + const currentRoot = numToHex(toBN(treeProof.inputs[0])) + logger.debug(`Using transferRoot: ${transferRoot}; Current root: ${currentRoot}; PoolId ${poolId}`) + + let calldata = buildTxData(txData) + calldata += transferRoot + currentRoot + numToHex(poolId) + + logger.debug(`Signing ${calldata}`) + const digest = getBytes(keccak256(calldata)) + const signature = packSignature(await TronWeb.Trx.signMessageV2(digest, config.GUARD_ADDRESS_PRIVATE_KEY)) + + logger.info(`Signed ${signature}`) + res.json({ signature }) + }) + ) + + return router +} diff --git a/zp-relayer/services/indexer/index.ts b/zp-relayer/services/indexer/index.ts new
file mode 100644 index 00000000..e191daef --- /dev/null +++ b/zp-relayer/services/indexer/index.ts @@ -0,0 +1,21 @@ +import config from '@/configs/indexerConfig' +import { logger } from '@/lib/appLogger' +import { createConsoleLoggerMiddleware, createPersistentLoggerMiddleware } from '@/lib/loggerMiddleware' +import express from 'express' +import { init } from './init' +import { createRouter } from './router' + +init().then(({ pool }) => { + const app = express() + + if (config.INDEXER_EXPRESS_TRUST_PROXY) { + app.set('trust proxy', true) + } + + app.use(createPersistentLoggerMiddleware(config.INDEXER_REQUEST_LOG_PATH)) + app.use(createConsoleLoggerMiddleware()) + + app.use(createRouter({ pool })) + const PORT = config.INDEXER_PORT + app.listen(PORT, () => logger.info(`Started indexer on port ${PORT}`)) +}) diff --git a/zp-relayer/services/indexer/init.ts b/zp-relayer/services/indexer/init.ts new file mode 100644 index 00000000..2b8e710a --- /dev/null +++ b/zp-relayer/services/indexer/init.ts @@ -0,0 +1,42 @@ +import { buildNetworkBackend } from '@/common/serviceUtils' +import config from '@/configs/indexerConfig' +import { IndexerPool } from '@/pool/IndexerPool' +import { Watcher } from '@/watcher/Watcher' + +export async function init() { + const networkBackend = buildNetworkBackend(config.base, config.network, config.INDEXER_TOKEN_ADDRESS) + + const pool = new IndexerPool(networkBackend, { + statePath: config.INDEXER_STATE_DIR_PATH, + txVkPath: config.INDEXER_TX_VK_PATH, + eventsBatchSize: config.base.COMMON_EVENTS_PROCESSING_BATCH_SIZE, + }) + + const lastInitialSyncBlock = await pool.getLastBlockToProcess() + await Promise.all([networkBackend.init(), pool.init(config.base.COMMON_START_BLOCK, lastInitialSyncBlock)]) + + const startBlock = lastInitialSyncBlock + 1 + const watcher = new Watcher(networkBackend, networkBackend.pool, 'pool-indexer', { + event: 'allEvents', + blockConfirmations: config.INDEXER_BLOCK_CONFIRMATIONS, + startBlock, + 
eventPollingInterval: parseInt(process.env.WATCHER_EVENT_POLLING_INTERVAL || '10000'), + batchSize: config.base.COMMON_EVENTS_PROCESSING_BATCH_SIZE, + processor: async batch => { + for (let event of batch) { + if (event.values.message) { + // Message event + await pool.addTxToState(event.txHash, event.values.index, event.values.message, 'optimistic', event.blockNumber) + } else if (event.values.commitment) { + // RootUpdated event + pool.propagateOptimisticState(event.values.index, event.blockNumber) + } + } + }, + }) + + await watcher.init() + watcher.run() + + return { pool } +} diff --git a/zp-relayer/services/indexer/router.ts b/zp-relayer/services/indexer/router.ts new file mode 100644 index 00000000..5ded1ef7 --- /dev/null +++ b/zp-relayer/services/indexer/router.ts @@ -0,0 +1,89 @@ +import { logger } from '@/lib/appLogger' +import { BasePool } from '@/pool/BasePool' +import { inject, txToV2Format } from '@/utils/helpers' +import { checkGetRoot, checkGetTransactionsV2, validateBatch } from '@/validation/api/validation' +import cors from 'cors' +import express, { NextFunction, Request, Response } from 'express' + +function wrapErr(f: (_req: Request, _res: Response, _next: NextFunction) => Promise | void) { + return async (req: Request, res: Response, next: NextFunction) => { + try { + await f(req, res, next) + } catch (e) { + next(e) + } + } +} + +interface RouterConfig { + pool: BasePool +} + +export function createRouter({ pool }: RouterConfig) { + const router = express.Router() + + router.use(cors()) + router.use(express.urlencoded({ extended: true })) + router.use(express.json()) + router.use(express.text()) + + router.use((err: any, _req: Request, res: Response, next: NextFunction) => { + if (err) { + logger.error('Request error:', err) + return res.sendStatus(500) + } + next() + }) + + router.get('/transactions/v2', wrapErr(inject({ pool }, getTransactionsV2))) + router.get('/info', wrapErr(inject({ pool }, relayerInfo))) + router.get('/root', 
wrapErr(inject({ pool }, getRoot))) + + return router +} + +interface PoolInjection { + pool: BasePool +} + +async function getTransactionsV2(req: Request, res: Response, { pool }: PoolInjection) { + validateBatch([[checkGetTransactionsV2, req.query]]) + + // Types checked at validation stage + const limit = req.query.limit as unknown as number + const offset = req.query.offset as unknown as number + + const txs: string[] = [] + const { txs: poolTxs, nextOffset } = await pool.state.getTransactions(limit, offset) + txs.push(...poolTxs.map(tx => txToV2Format('1', tx))) + + if (txs.length < limit) { + const { txs: optimisticTxs } = await pool.optimisticState.getTransactions(limit - txs.length, nextOffset) + txs.push(...optimisticTxs.map(tx => txToV2Format('2', tx))) + } + + res.json(txs) +} + +function relayerInfo(req: Request, res: Response, { pool }: PoolInjection) { + const deltaIndex = pool.state.getNextIndex() + const optimisticDeltaIndex = pool.optimisticState.getNextIndex() + const root = pool.state.getMerkleRoot() + const optimisticRoot = pool.optimisticState.getMerkleRoot() + + res.json({ + root, + optimisticRoot, + deltaIndex, + optimisticDeltaIndex, + }) +} + +function getRoot(req: Request, res: Response, { pool }: PoolInjection) { + validateBatch([[checkGetRoot, req.query]]) + + const index = req.query.index as unknown as number + const root = pool.state.getMerkleRootAt(index) ?? 
pool.optimisticState.getMerkleRootAt(index) + + res.json({ root }) +} diff --git a/zp-relayer/services/redisClient.ts b/zp-relayer/services/redisClient.ts deleted file mode 100644 index 372ee7b1..00000000 --- a/zp-relayer/services/redisClient.ts +++ /dev/null @@ -1,6 +0,0 @@ -import Redis from 'ioredis' -import config from '@/configs/baseConfig' - -export const redis = new Redis(config.redisUrl, { - maxRetriesPerRequest: null, -}) diff --git a/zp-relayer/services/relayer/endpoints.ts b/zp-relayer/services/relayer/endpoints.ts new file mode 100644 index 00000000..d893c936 --- /dev/null +++ b/zp-relayer/services/relayer/endpoints.ts @@ -0,0 +1,298 @@ +import { logger } from '@/lib/appLogger' +import type { BasePool } from '@/pool/BasePool' +import { RelayPool } from '@/pool/RelayPool' +import { LimitsFetch } from '@/pool/types' +import { txToV2Format } from '@/utils/helpers' +import type { Queue } from 'bullmq' +import { Request, Response } from 'express' +import config from '../../configs/relayerConfig' +import type { FeeManager } from '../../lib/fee' +import { BasePoolTx, JobState, PoolTx as Tx, poolTxQueue, WorkerTxType } from '../../queue/poolTxQueue' +import { HEADER_TRACE_ID, OUTPLUSONE } from '../../utils/constants' +import { + checkGetLimits, + checkGetSiblings, + checkGetTransactionsV2, + checkMerkleRootErrors, + checkSendTransactionsErrors, + checkTraceId, + validateBatch, + validateCountryIP, + ValidationFunction, +} from '../../validation/api/validation' +import BigNumber from 'bignumber.js' + +interface PoolInjection { + pool: BasePool +} + +interface FeeManagerInjection { + feeManager: FeeManager +} + +interface HashInjection { + hash: string | null +} + +const checkTraceIdFromConfig: ValidationFunction = (() => { + if (config.RELAYER_REQUIRE_TRACE_ID) { + return checkTraceId + } + return () => null +})() + +async function sendTransactions(req: Request, res: Response, { pool }: PoolInjection) { + validateBatch([ + [checkTraceIdFromConfig, req.headers], 
+ [checkSendTransactionsErrors, req.body], + ]) + + await validateCountryIP(req.ip, config.RELAYER_BLOCKED_COUNTRIES) + + const rawTxs = req.body as BasePoolTx[] + const traceId = req.headers[HEADER_TRACE_ID] as string + + const txs = rawTxs.map(tx => { + const { proof, memo, txType, depositSignature } = tx + return { + proof, + memo, + txType, + depositSignature, + } + }) + if (txs.length !== 1) { + throw new Error('Batch transactions are not supported') + } + const jobId = await pool.transact(txs[0], traceId) + res.json({ jobId }) +} + +async function merkleRoot(req: Request, res: Response, { pool }: PoolInjection) { + validateBatch([ + [checkTraceIdFromConfig, req.headers], + [checkMerkleRootErrors, req.params], + ]) + + const index = req.params.index + const root = await pool.getContractMerkleRoot(index) + res.json(root) +} + +async function getTransactionsV2(req: Request, res: Response, { pool }: PoolInjection) { + validateBatch([ + [checkTraceIdFromConfig, req.headers], + [checkGetTransactionsV2, req.query], + ]) + + // Types checked in validation stage + const limit = req.query.limit as unknown as number + const offset = req.query.offset as unknown as number + const url = new URL('/transactions/v2', config.base.COMMON_INDEXER_URL) + url.searchParams.set('limit', limit.toString()) + url.searchParams.set('offset', offset.toString()) + + const response = await fetch(url) + if (!response.ok) { + throw new Error(`Failed to fetch transactions from indexer. 
Status: ${response.status}`) + } + const indexerTxs: string[] = await response.json() + + const txStore = (pool as RelayPool).txStore; + const localEntries = await txStore.getAll().then(entries => + Object.entries(entries) + .sort((a, b) => a[1].timestamp - b[1].timestamp) + ); + + const indexerCommitments = indexerTxs.map(tx => BigNumber(tx.slice(65, 129), 16).toString(10)); + const optimisticTxs: string[] = [] + for (const [commit, {memo, timestamp}] of localEntries) { + if (indexerTxs.length + optimisticTxs.length >= limit) { + break + } + + if (indexerCommitments.includes(commit)) { + // !!! we shouldn't modify local cache from here. Just filter entries to return correct response + //logger.info('Deleting index from optimistic state', { index }) + //await txStore.remove(commit) + } else { + optimisticTxs.push(txToV2Format('0', memo)) + } + } + + const txs: string[] = [...indexerTxs, ...optimisticTxs] + + res.json(txs) +} + +async function getJob(req: Request, res: Response, { pool }: PoolInjection) { + interface GetJobResponse { + resolvedJobId: string + createdOn: number + failedReason: null | string + finishedOn: null | number + state: JobState + txHash: null | string + } + + validateBatch([[checkTraceIdFromConfig, req.headers]]) + + const jobId = req.params.id + + async function getPoolJobState(requestedJobId: string): Promise { + const INCONSISTENCY_ERR = 'Internal job inconsistency' + + // Should be used in places where job is expected to exist + const safeGetJob = async (queue: Queue>, id: string) => { + const job = await queue.getJob(id) + if (!job) { + throw new Error(INCONSISTENCY_ERR) + } + return job + } + + const jobId = await pool.state.jobIdsMapping.get(requestedJobId) + + const poolJobState = await poolTxQueue.getJobState(jobId) + if (poolJobState === 'unknown') return null + + const job = await safeGetJob(poolTxQueue, jobId) + const { txHash, state } = job.data.transaction + + // Default result object + let result: GetJobResponse = { + 
resolvedJobId: jobId, + createdOn: job.timestamp, + failedReason: job.failedReason, + finishedOn: null, + state, + txHash, + } + + return result + } + + const jobState = await getPoolJobState(jobId) + if (jobState) { + res.json(jobState) + } else { + res.json(`Job ${jobId} not found`) + } +} + +async function relayerInfo(req: Request, res: Response, { pool }: PoolInjection) { + const url = new URL('/info', config.base.COMMON_INDEXER_URL) + + const response = await fetch(url) + if (!response.ok) { + throw new Error(`Failed to fetch info from indexer. Status: ${response.status}`) + } + const info = await response.json() + + const indexerMaxIdx = Math.max(parseInt(info.deltaIndex ?? '0'), parseInt(info.optimisticDeltaIndex ?? '0')) + + const txStore = (pool as RelayPool).txStore + const pendingCnt = await txStore.getAll().then(map => Object.keys(map).length) + // This number is not accurate since some txs might be already included in the indexer + // but still be available in the local cache + // This value should be used ONLY as some estimate of the total number of txs including pending ones + info.pendingDeltaIndex = indexerMaxIdx + pendingCnt * OUTPLUSONE; + res.json(info) +} + +async function getFee(req: Request, res: Response, { pool, feeManager }: PoolInjection & FeeManagerInjection) { + validateBatch([[checkTraceIdFromConfig, req.headers]]) + + const feeOptions = await feeManager.getFeeOptions() + const fees = feeOptions.denominate(pool.denominator).getObject() + + res.json(fees) +} + +async function getLimits(req: Request, res: Response, { pool }: PoolInjection) { + validateBatch([ + [checkTraceIdFromConfig, req.headers], + [checkGetLimits, req.query], + ]) + + const address = req.query.address as unknown as string + + let limitsFetch: LimitsFetch + try { + const limits = await pool.getLimitsFor(address) + limitsFetch = pool.processLimits(limits) + } catch (e) { + throw new Error(`Error while fetching limits for ${address}`) + } + + res.json(limitsFetch) +} +
+function getMaxNativeAmount(req: Request, res: Response) { + validateBatch([[checkTraceIdFromConfig, req.headers]]) + + res.json({ + maxNativeAmount: config.RELAYER_MAX_NATIVE_AMOUNT.toString(10), + }) +} + +function getSiblings(req: Request, res: Response, { pool }: PoolInjection) { + validateBatch([ + [checkTraceIdFromConfig, req.headers], + [checkGetSiblings, req.query], + ]) + + const index = req.query.index as unknown as number + + if (index >= pool.state.getNextIndex()) { + res.status(400).json({ errors: ['Index out of range'] }) + return + } + + const siblings = pool.state.getSiblings(index) + res.json(siblings) +} + +function getParamsHash(req: Request, res: Response, { hash }: HashInjection) { + res.json({ hash }) +} + +function relayerVersion(req: Request, res: Response) { + res.json({ + ref: config.RELAYER_REF, + commitHash: config.RELAYER_SHA, + }) +} + +async function getProverFee(req: Request, res: Response) { + const url = new URL('/fee', config.RELAYER_PROVER_URL) + const fee = await fetch(url.toString()).then(r => r.json()) + res.json(fee) +} + +async function getProverAddress(req: Request, res: Response) { + const url = new URL('/address', config.RELAYER_PROVER_URL) + const address = await fetch(url.toString()).then(r => r.json()) + res.json(address) +} + +function root(req: Request, res: Response) { + return res.sendStatus(200) +} + +export default { + sendTransactions, + merkleRoot, + getTransactionsV2, + getJob, + relayerInfo, + getFee, + getLimits, + getMaxNativeAmount, + getSiblings, + getParamsHash, + getProverFee, + getProverAddress, + relayerVersion, + root, +} diff --git a/zp-relayer/services/relayer/index.ts b/zp-relayer/services/relayer/index.ts new file mode 100644 index 00000000..5dc53b65 --- /dev/null +++ b/zp-relayer/services/relayer/index.ts @@ -0,0 +1,21 @@ +import express from 'express' +import config from '../../configs/relayerConfig' +import { logger } from '../../lib/appLogger' +import { createConsoleLoggerMiddleware, 
createPersistentLoggerMiddleware } from '../../lib/loggerMiddleware' +import { init } from './init' +import { createRouter } from './router' + +init().then(({ feeManager, pool }) => { + const app = express() + + if (config.RELAYER_EXPRESS_TRUST_PROXY) { + app.set('trust proxy', true) + } + + app.use(createPersistentLoggerMiddleware(config.RELAYER_REQUEST_LOG_PATH)) + app.use(createConsoleLoggerMiddleware(config.RELAYER_LOG_IGNORE_ROUTES, config.RELAYER_LOG_HEADER_BLACKLIST)) + + app.use(createRouter({ feeManager, pool })) + const PORT = config.RELAYER_PORT + app.listen(PORT, () => logger.info(`Started relayer on port ${PORT}`)) +}) diff --git a/zp-relayer/services/relayer/init.ts b/zp-relayer/services/relayer/init.ts new file mode 100644 index 00000000..d0f2136c --- /dev/null +++ b/zp-relayer/services/relayer/init.ts @@ -0,0 +1,88 @@ +import { buildNetworkBackend, buildPriceFeed, buildTxManager } from '@/common/serviceUtils' +import config from '@/configs/relayerConfig' +import { DynamicFeeManager, FeeManager, FeeManagerType, OptimismFeeManager, StaticFeeManager } from '@/lib/fee' +import { isEthereum } from '@/lib/network' +import { EvmTxManager } from '@/lib/network/evm/EvmTxManager' +import { redis } from '@/lib/redisClient' +import { RelayPool } from '@/pool/RelayPool' +import { createPoolTxWorker } from '@/workers/poolTxWorker' +import { createSentTxWorker } from '@/workers/sentTxWorker' +import type { IWorkerBaseConfig } from '@/workers/workerTypes' +import { Mutex } from 'async-mutex' + +export async function init() { + const networkBackend = buildNetworkBackend(config.base, config.network, config.RELAYER_TOKEN_ADDRESS) + const txManager = buildTxManager(redis, networkBackend, config.gasPrice, config.txManager) + const pool = new RelayPool(networkBackend, { + statePath: config.RELAYER_STATE_DIR_PATH, + txVkPath: config.RELAYER_TX_VK_PATH, + eventsBatchSize: config.base.COMMON_EVENTS_PROCESSING_BATCH_SIZE, + }) + + await Promise.all([ + txManager.init(), + 
networkBackend.init(), + pool.init( + { + permitType: config.RELAYER_PERMIT_TYPE, + token: config.RELAYER_TOKEN_ADDRESS, + }, + config.txManager.TX_ADDRESS, + config.base.COMMON_INDEXER_URL as string + ), + ]) + + const mutex = new Mutex() + + const workerBaseConfig: IWorkerBaseConfig = { + pool, + redis, + } + + const priceFeed = buildPriceFeed(networkBackend, config.priceFeed, config.RELAYER_TOKEN_ADDRESS) + await priceFeed.init() + + let feeManager: FeeManager + const managerConfig = { + priceFeed, + scaleFactor: config.RELAYER_FEE_SCALING_FACTOR, + marginFactor: config.RELAYER_FEE_MARGIN_FACTOR, + updateInterval: config.RELAYER_FEE_MANAGER_UPDATE_INTERVAL, + } + switch (config.RELAYER_FEE_MANAGER_TYPE) { + case FeeManagerType.Static: + feeManager = new StaticFeeManager(managerConfig, config.RELAYER_FEE) + break + case FeeManagerType.Dynamic: { + if (!isEthereum(networkBackend)) throw new Error('Dynamic fee manager is supported only for Ethereum') + feeManager = new DynamicFeeManager(managerConfig, (txManager as EvmTxManager).gasPrice) + break + } + case FeeManagerType.Optimism: { + if (!isEthereum(networkBackend)) throw new Error('Dynamic fee manager is supported only for Ethereum') + feeManager = new OptimismFeeManager(managerConfig, networkBackend) + break + } + default: + throw new Error('Unsupported fee manager') + } + await feeManager.start() + + const workerPromises = [ + createPoolTxWorker({ + ...workerBaseConfig, + mutex, + txManager, + }), + createSentTxWorker({ + ...workerBaseConfig, + mutex, + txManager, + }), + ] + + const workers = await Promise.all(workerPromises) + workers.forEach(w => w.run()) + + return { feeManager, pool } +} diff --git a/zp-relayer/router.ts b/zp-relayer/services/relayer/router.ts similarity index 54% rename from zp-relayer/router.ts rename to zp-relayer/services/relayer/router.ts index c0205413..f47aa6b4 100644 --- a/zp-relayer/router.ts +++ b/zp-relayer/services/relayer/router.ts @@ -1,15 +1,18 @@ -import express, { 
NextFunction, Request, Response } from 'express' +import config from '@/configs/relayerConfig' +import { logger } from '@/lib/appLogger' +import type { FeeManager } from '@/lib/fee' +import type { BasePool } from '@/pool/BasePool' +import { getFileHash, inject } from '@/utils/helpers' +import { ValidationError } from '@/validation/api/validation' import cors from 'cors' +import express, { NextFunction, Request, Response } from 'express' import semver from 'semver' +import { HEADER_LIBJS, HEADER_TRACE_ID, LIBJS_MIN_VERSION } from '../../utils/constants' import endpoints from './endpoints' -import { logger } from './services/appLogger' -import { ValidationError } from './validation/api/validation' -import config from './configs/relayerConfig' -import { HEADER_LIBJS, HEADER_TRACE_ID, LIBJS_MIN_VERSION } from './utils/constants' -import type { FeeManager } from './services/fee' interface IRouterConfig { feeManager: FeeManager + pool: BasePool } function wrapErr(f: (_req: Request, _res: Response, _next: NextFunction) => Promise | void) { @@ -22,9 +25,8 @@ function wrapErr(f: (_req: Request, _res: Response, _next: NextFunction) => Prom } } -export function createRouter({ feeManager }: IRouterConfig) { +export function createRouter({ feeManager, pool }: IRouterConfig) { const router = express.Router() - router.use(cors()) router.use(express.urlencoded({ extended: true })) router.use(express.json()) @@ -40,11 +42,11 @@ export function createRouter({ feeManager }: IRouterConfig) { router.use((req: Request, res: Response, next: NextFunction) => { const traceId = req.headers[HEADER_TRACE_ID] - if (config.requireTraceId && traceId) { + if (config.RELAYER_REQUIRE_TRACE_ID && traceId) { logger.info('TraceId', { traceId, path: req.path }) } - if (config.requireLibJsVersion) { + if (config.RELAYER_REQUIRE_LIBJS_VERSION) { const libJsVersion = req.headers[HEADER_LIBJS] as string let isValidVersion = false try { @@ -63,18 +65,32 @@ export function createRouter({ feeManager }: 
IRouterConfig) { router.get('/', endpoints.root) router.get('/version', endpoints.relayerVersion) - router.post('/sendTransactions', wrapErr(endpoints.sendTransactions)) - router.get('/transactions/v2', wrapErr(endpoints.getTransactionsV2)) - router.get('/merkle/root/:index?', wrapErr(endpoints.merkleRoot)) - router.get('/job/:id', wrapErr(endpoints.getJob)) - router.get('/info', wrapErr(endpoints.relayerInfo)) - router.get('/fee', wrapErr(endpoints.getFeeBuilder(feeManager))) - router.get('/limits', wrapErr(endpoints.getLimits)) + router.get( + '/address', + wrapErr((_, res) => { + res.json({ address: config.txManager.TX_ADDRESS }) + }) + ) + router.get('/proverFee', wrapErr(endpoints.getProverFee)) + router.get('/proverAddress', wrapErr(endpoints.getProverAddress)) + router.post('/sendTransactions', wrapErr(inject({ pool }, endpoints.sendTransactions))) + router.get('/transactions/v2', wrapErr(inject({ pool }, endpoints.getTransactionsV2))) + router.get('/merkle/root/:index?', wrapErr(inject({ pool }, endpoints.merkleRoot))) + router.get('/job/:id', wrapErr(inject({ pool }, endpoints.getJob))) + router.get('/info', wrapErr(inject({ pool }, endpoints.relayerInfo))) + router.get('/fee', wrapErr(inject({ pool, feeManager }, endpoints.getFee))) + router.get('/limits', wrapErr(inject({ pool }, endpoints.getLimits))) router.get('/maxNativeAmount', wrapErr(endpoints.getMaxNativeAmount)) - router.get('/siblings', wrapErr(endpoints.getSiblings)) - router.get('/params/hash/tree', wrapErr(endpoints.getParamsHashBuilder(config.treeUpdateParamsPath))) - router.get('/params/hash/tx', wrapErr(endpoints.getParamsHashBuilder(config.transferParamsPath))) - router.get('/params/hash/direct-deposit', wrapErr(endpoints.getParamsHashBuilder(config.directDepositParamsPath))) + router.get('/siblings', wrapErr(inject({ pool }, endpoints.getSiblings))) + router.get( + '/params/hash/tree', + wrapErr(inject({ hash: getFileHash(config.RELAYER_TREE_UPDATE_PARAMS_PATH) }, 
endpoints.getParamsHash)) + ) + router.get( + '/params/hash/tx', + wrapErr(inject({ hash: getFileHash(config.RELAYER_TRANSFER_PARAMS_PATH) }, endpoints.getParamsHash)) + ) + router.get('/params/hash/direct-deposit', wrapErr(inject({ hash: getFileHash(null) }, endpoints.getParamsHash))) // Error handler middleware router.use((error: any, req: Request, res: Response, next: NextFunction) => { diff --git a/zp-relayer/services/web3.ts b/zp-relayer/services/web3.ts deleted file mode 100644 index b5da1345..00000000 --- a/zp-relayer/services/web3.ts +++ /dev/null @@ -1,16 +0,0 @@ -import Web3 from 'web3' -import type { HttpProvider } from 'web3-core' -import { RETRY_CONFIG } from '@/utils/constants' -import HttpListProvider from './providers/HttpListProvider' -import { checkHTTPS } from '@/utils/helpers' -import { SafeEthLogsProvider } from './providers/SafeEthLogsProvider' -import config from '@/configs/baseConfig' - -const providerOptions = { - requestTimeout: config.rpcRequestTimeout, - retry: RETRY_CONFIG, -} -config.rpcUrls.forEach(checkHTTPS(config.requireHTTPS)) -const provider = new HttpListProvider(config.rpcUrls, providerOptions, config.jsonRpcErrorCodes) -provider.startSyncStateChecker(config.rpcSyncCheckInterval) -export const web3 = new Web3(SafeEthLogsProvider(provider as HttpProvider)) diff --git a/zp-relayer/services/web3Redundant.ts b/zp-relayer/services/web3Redundant.ts deleted file mode 100644 index e261dd97..00000000 --- a/zp-relayer/services/web3Redundant.ts +++ /dev/null @@ -1,19 +0,0 @@ -import Web3 from 'web3' -import RedundantHttpListProvider from './providers/RedundantHttpListProvider' -import config from '@/configs/relayerConfig' -import { web3 } from './web3' -import { RETRY_CONFIG } from '@/utils/constants' - -export let web3Redundant = web3 - -const providerOptions = { - requestTimeout: config.rpcRequestTimeout, - retry: RETRY_CONFIG, -} -if (config.relayerTxRedundancy && config.rpcUrls.length > 1) { - const redundantProvider = new 
RedundantHttpListProvider(config.rpcUrls, { - ...providerOptions, - name: 'redundant', - }) - web3Redundant = new Web3(redundantProvider) -} diff --git a/zp-relayer/state/PoolState.ts b/zp-relayer/state/PoolState.ts index 3f73505c..44f183b7 100644 --- a/zp-relayer/state/PoolState.ts +++ b/zp-relayer/state/PoolState.ts @@ -1,15 +1,17 @@ -import type { Redis } from 'ioredis' -import { logger } from '@/services/appLogger' +import { logger } from '@/lib/appLogger' import { OUTPLUSONE } from '@/utils/constants' -import { MerkleTree, TxStorage, MerkleProof, Constants, Helpers } from 'libzkbob-rs-node' -import { NullifierSet } from './nullifierSet' +import { Mutex } from 'async-mutex' +import type { Redis } from 'ioredis' +import { Constants, Helpers, MerkleProof, MerkleTree, TxStorage } from 'libzkbob-rs-node' import { JobIdsMapping } from './jobIdsMapping' +import { NullifierSet } from './nullifierSet' export class PoolState { private tree: MerkleTree private txs: TxStorage public nullifiers: NullifierSet public jobIdsMapping: JobIdsMapping + private mutex: Mutex = new Mutex() constructor(private name: string, redis: Redis, path: string) { this.tree = new MerkleTree(`${path}/${name}Tree.db`) @@ -64,6 +66,7 @@ export class PoolState { } addCommitment(index: number, commit: Buffer) { + logger.debug(`Updating ${this.name} state tree`) this.tree.addCommitment(index, commit) } @@ -75,13 +78,11 @@ export class PoolState { this.tree.addHash(i, hash) } - getDbTx(i: number): [string, string] | null { + getDbTx(i: number): string | null { const buf = this.txs.get(i) if (!buf) return null - const data = buf.toString() - const outCommit = data.slice(0, 64) - const memo = data.slice(64) - return [outCommit, memo] + const data = buf.toString('hex') + return data } getMerkleRootAt(index: number): string | null { @@ -111,6 +112,7 @@ export class PoolState { } addTx(i: number, tx: Buffer) { + logger.debug(`Adding tx to ${this.name} state storage`) this.txs.add(i, tx) } @@ -119,10 +121,8 
@@ export class PoolState { } updateState(commitIndex: number, outCommit: string, txData: string) { - logger.debug(`Updating ${this.name} state tree`) this.addCommitment(commitIndex, Helpers.strToNum(outCommit)) - logger.debug(`Adding tx to ${this.name} state storage`) this.addTx(commitIndex * OUTPLUSONE, Buffer.from(txData, 'hex')) } @@ -144,6 +144,16 @@ export class PoolState { } } + wipe() { + const stateNextIndex = this.tree.getNextIndex(); + this.tree.wipe(); + for (let i = 0; i < stateNextIndex; i += OUTPLUSONE) { + this.txs.delete(i) + } + this.jobIdsMapping.clear(); + this.nullifiers.clear(); + } + async getTransactions(limit: number, offset: number) { // Round offset to OUTPLUSONE offset = Math.floor(offset / OUTPLUSONE) * OUTPLUSONE diff --git a/zp-relayer/state/TxStore.ts b/zp-relayer/state/TxStore.ts new file mode 100644 index 00000000..05f802b4 --- /dev/null +++ b/zp-relayer/state/TxStore.ts @@ -0,0 +1,43 @@ +import { hexToNumber, numberToHexPadded } from '@/utils/helpers'; +import type { Redis } from 'ioredis' + +const TIMESTAMP_BYTES = 6; // enough for another ~8000 years + +export class TxStore { + constructor(public name: string, private redis: Redis) {} + + async add(commitment: string, memo: string, timestamp: number) { + await this.redis.hset(this.name, { [commitment]: `${numberToHexPadded(timestamp, TIMESTAMP_BYTES)}${memo}` }) + } + + async remove(commitment: string) { + await this.redis.hdel(this.name, commitment) + } + + async get(commitment: string): Promise<{memo: string, timestamp: number} | null> { + const data = await this.redis.hget(this.name, commitment); + + return data ? 
{ + memo: data.slice(TIMESTAMP_BYTES * 2), + timestamp: hexToNumber(data.slice(0, TIMESTAMP_BYTES * 2)), + } : null; + } + + async getAll(): Promise> { + return this.redis.hgetall(this.name).then(keys => Object.fromEntries( + Object.entries(keys) + .map(([commit, data]) => + [commit, + { + memo: data.slice(TIMESTAMP_BYTES * 2), + timestamp: hexToNumber(data.slice(0, TIMESTAMP_BYTES * 2)), + }] as [string, {memo: string, timestamp: number}] + ) + )); + } + + async removeAll() { + const allKeys = await this.getAll().then(res => Object.keys(res)) + await this.redis.hdel(this.name, ...allKeys) + } +} \ No newline at end of file diff --git a/zp-relayer/test/clear.ts b/zp-relayer/test/clear.ts index de7e69be..641255ad 100644 --- a/zp-relayer/test/clear.ts +++ b/zp-relayer/test/clear.ts @@ -1,5 +1,5 @@ -import path from 'path' import compose from 'docker-compose' +import path from 'path' export async function clear() { const cwd = path.join(__dirname) diff --git a/zp-relayer/test/deploy.ts b/zp-relayer/test/deploy.ts index 7e994f1d..6abcca6f 100644 --- a/zp-relayer/test/deploy.ts +++ b/zp-relayer/test/deploy.ts @@ -1,5 +1,5 @@ -import path from 'path' import compose from 'docker-compose' +import path from 'path' import Web3 from 'web3' const web3 = new Web3('http://127.0.0.1:8545') diff --git a/zp-relayer/test/unit-tests/GasPrice.test.ts b/zp-relayer/test/unit-tests/GasPrice.test.ts index 82f3dabe..890263c6 100644 --- a/zp-relayer/test/unit-tests/GasPrice.test.ts +++ b/zp-relayer/test/unit-tests/GasPrice.test.ts @@ -1,6 +1,6 @@ import { expect } from 'chai' import { toBN } from 'web3-utils' -import { EIP1559GasPriceWithinLimit, addExtraGasPrice } from '../../services/gas-price/GasPrice' +import { addExtraGasPrice, EIP1559GasPriceWithinLimit } from '../../services/gas-price/GasPrice' describe('GasPrice', () => { it('correctly calculates fee limit', () => { diff --git a/zp-relayer/test/worker-tests/poolWorker.test.ts b/zp-relayer/test/worker-tests/poolWorker.test.ts index 
0cb8ec07..c5badaa7 100644 --- a/zp-relayer/test/worker-tests/poolWorker.test.ts +++ b/zp-relayer/test/worker-tests/poolWorker.test.ts @@ -1,23 +1,25 @@ -import chai from 'chai' -import type BN from 'bn.js' -import { toBN } from 'web3-utils' -import { v4 } from 'uuid' import { Mutex } from 'async-mutex' -import chaiAsPromised from 'chai-as-promised' +import type BN from 'bn.js' import { Job, QueueEvents, Worker } from 'bullmq' +import chai from 'chai' +import chaiAsPromised from 'chai-as-promised' +import { v4 } from 'uuid' +import { toBN } from 'web3-utils' import { TxType } from 'zp-memo-parser' -import { web3 } from './web3' -import { pool } from '../../pool' -import config from '../../configs/relayerConfig' +import { FlowOutputItem } from '../../../test-flow-generator/src/types' import DirectDepositQueueAbi from '../../abi/direct-deposit-queue-abi.json' +import config from '../../configs/relayerConfig' +import { pool } from '../../pool' +import { Circuit, IProver, LocalProver } from '../../prover/' +import { BatchTx, DirectDeposit, poolTxQueue, PoolTxResult, WorkerTxType } from '../../queue/poolTxQueue' import { sentTxQueue, SentTxState } from '../../queue/sentTxQueue' -import { poolTxQueue, PoolTxResult, BatchTx, WorkerTxType, DirectDeposit } from '../../queue/poolTxQueue' -import { createPoolTxWorker } from '../../workers/poolTxWorker' -import { createSentTxWorker } from '../../workers/sentTxWorker' -import { PoolState } from '../../state/PoolState' import { EstimationType, GasPrice } from '../../services/gas-price' import { redis } from '../../services/redisClient' -import { FlowOutputItem } from '../../../test-flow-generator/src/types' +import { PoolState } from '../../state/PoolState' +import { TxManager } from '../../tx/TxManager' +import { validateTx } from '../../validation/tx/validateTx' +import { createPoolTxWorker } from '../../workers/poolTxWorker' +import { createSentTxWorker } from '../../workers/sentTxWorker' import { approveTokens, disableMining, 
@@ -28,17 +30,15 @@ import { newConnection, setBalance, } from './utils' -import { validateTx } from '../../validation/tx/validateTx' -import { TxManager } from '../../tx/TxManager' -import { Circuit, IProver, LocalProver } from '../../prover/' +import { web3 } from './web3' -import flow from '../flows/flow_independent_deposits_5.json' -import flowDependentDeposits from '../flows/flow_dependent_deposits_2.json' -import flowZeroAddressWithdraw from '../flows/flow_zero-address_withdraw_2.json' import { Params } from 'libzkbob-rs-node' import { directDepositQueue } from '../../queue/directDepositQueue' -import { createDirectDepositWorker } from '../../workers/directDepositWorker' import { DynamicFeeManager, FeeManager } from '../../services/fee' +import { createDirectDepositWorker } from '../../workers/directDepositWorker' +import flowDependentDeposits from '../flows/flow_dependent_deposits_2.json' +import flow from '../flows/flow_independent_deposits_5.json' +import flowZeroAddressWithdraw from '../flows/flow_zero-address_withdraw_2.json' chai.use(chaiAsPromised) const expect = chai.expect diff --git a/zp-relayer/test/worker-tests/utils.ts b/zp-relayer/test/worker-tests/utils.ts index dcfb1719..d3120780 100644 --- a/zp-relayer/test/worker-tests/utils.ts +++ b/zp-relayer/test/worker-tests/utils.ts @@ -1,6 +1,6 @@ -import type { HttpProvider } from 'web3-core' import type BN from 'bn.js' import Redis from 'ioredis' +import type { HttpProvider } from 'web3-core' import { toBN } from 'web3-utils' import { web3 } from './web3' diff --git a/zp-relayer/tx/TxManager.ts b/zp-relayer/tx/TxManager.ts deleted file mode 100644 index 31399e27..00000000 --- a/zp-relayer/tx/TxManager.ts +++ /dev/null @@ -1,115 +0,0 @@ -import Web3 from 'web3' -import type { TransactionConfig } from 'web3-core' -import { isSameTransactionError } from '@/utils/web3Errors' -import { - addExtraGasPrice, - chooseGasPriceOptions, - EstimationType, - GasPrice, - GasPriceValue, - getGasPriceValue, -} from 
'@/services/gas-price' -import { getChainId } from '@/utils/web3' -import config from '@/configs/relayerConfig' -import { Mutex } from 'async-mutex' -import { logger } from '@/services/appLogger' -import { readNonce, updateNonce } from '@/utils/redisFields' - -interface PrepareTxConfig { - isResend?: boolean - shouldUpdateGasPrice?: boolean -} - -export class TxManager { - nonce!: number - chainId!: number - mutex: Mutex - - constructor(private web3: Web3, private privateKey: string, private gasPrice: GasPrice) { - this.mutex = new Mutex() - } - - async init() { - this.nonce = await readNonce(true) - await updateNonce(this.nonce) - this.chainId = await getChainId(this.web3) - } - - async updateAndBumpGasPrice( - txConfig: TransactionConfig, - newGasPrice: GasPriceValue - ): Promise<[GasPriceValue | null, GasPriceValue]> { - const oldGasPrice = getGasPriceValue(txConfig) - if (oldGasPrice) { - const oldGasPriceWithExtra = addExtraGasPrice(oldGasPrice, config.minGasPriceBumpFactor, null) - return [oldGasPrice, chooseGasPriceOptions(oldGasPriceWithExtra, newGasPrice)] - } else { - return [null, newGasPrice] - } - } - - async prepareTx( - txConfig: TransactionConfig, - { isResend = false, shouldUpdateGasPrice = true }: PrepareTxConfig, - tLogger = logger - ) { - const release = await this.mutex.acquire() - try { - const gasPriceValue = shouldUpdateGasPrice ? 
await this.gasPrice.fetchOnce() : this.gasPrice.getPrice() - const newGasPriceWithExtra = addExtraGasPrice(gasPriceValue, config.gasPriceSurplus, config.maxFeeLimit) - - let updatedTxConfig: TransactionConfig = {} - let newGasPrice: GasPriceValue - - if (isResend) { - if (typeof txConfig.nonce === 'undefined') { - throw new Error('Nonce should be set for re-send') - } - const [oldGasPrice, updatedGasPrice] = await this.updateAndBumpGasPrice(txConfig, newGasPriceWithExtra) - newGasPrice = updatedGasPrice - tLogger.info('Updating tx gasPrice: %o -> %o', oldGasPrice, newGasPrice) - } else { - tLogger.info('Nonce', { nonce: this.nonce }) - newGasPrice = newGasPriceWithExtra - updatedTxConfig.nonce = this.nonce++ - updatedTxConfig.chainId = this.chainId - await updateNonce(this.nonce) - } - - updatedTxConfig = { - ...updatedTxConfig, - ...txConfig, - ...newGasPrice, - } - - const { transactionHash, rawTransaction } = await this.web3.eth.accounts.signTransaction( - updatedTxConfig, - this.privateKey - ) - return { - txHash: transactionHash as string, - rawTransaction: rawTransaction as string, - gasPrice: newGasPrice, - txConfig: updatedTxConfig, - } - } finally { - release() - } - } - - async sendTransaction(rawTransaction: string): Promise { - return new Promise((res, rej) => - // prettier-ignore - this.web3.eth.sendSignedTransaction(rawTransaction) - .once('transactionHash', () => res()) - .once('error', e => { - // Consider 'already known' errors as a successful send - if (isSameTransactionError(e)){ - res() - } else { - rej(e) - } - }) - ) - } -} diff --git a/zp-relayer/txProcessor.ts b/zp-relayer/txProcessor.ts deleted file mode 100644 index e6733136..00000000 --- a/zp-relayer/txProcessor.ts +++ /dev/null @@ -1,153 +0,0 @@ -import Contract from 'web3-eth-contract' -import { AbiItem, toBN } from 'web3-utils' -import type { TxType } from 'zp-memo-parser' -import { DelegatedDepositsData, SnarkProof } from 'libzkbob-rs-node' -import type { PoolState } from 
'./state/PoolState' -import PoolAbi from './abi/pool-abi.json' -import { logger } from './services/appLogger' -import { TRANSFER_INDEX_SIZE, ENERGY_SIZE, TOKEN_SIZE } from './utils/constants' -import { numToHex, flattenProof, truncateHexPrefix, encodeProof, truncateMemoTxPrefix } from './utils/helpers' -import { Delta, getTxProofField, parseDelta } from './utils/proofInputs' -import type { DirectDeposit, WorkerTx, WorkerTxType } from './queue/poolTxQueue' -import type { Circuit, IProver } from './prover/IProver' - -// @ts-ignore -// Used only to get `transact` method selector -const PoolInstance = new Contract(PoolAbi as AbiItem[]) - -interface TxData { - txProof: SnarkProof - treeProof: SnarkProof - nullifier: string - outCommit: string - rootAfter: string - delta: Delta - txType: TxType - memo: string - depositSignature: string | null -} - -function buildTxData(txData: TxData) { - const selector: string = PoolInstance.methods.transact().encodeABI() - - const transferIndex = numToHex(txData.delta.transferIndex, TRANSFER_INDEX_SIZE) - const energyAmount = numToHex(txData.delta.energyAmount, ENERGY_SIZE) - const tokenAmount = numToHex(txData.delta.tokenAmount, TOKEN_SIZE) - logger.debug(`DELTA ${transferIndex} ${energyAmount} ${tokenAmount}`) - - const txFlatProof = encodeProof(txData.txProof) - const treeFlatProof = encodeProof(txData.treeProof) - - const memoMessage = txData.memo - const memoSize = numToHex(toBN(memoMessage.length).divn(2), 4) - - const data = [ - selector, - txData.nullifier, - txData.outCommit, - transferIndex, - energyAmount, - tokenAmount, - txFlatProof, - txData.rootAfter, - treeFlatProof, - txData.txType, - memoSize, - memoMessage, - ] - - if (txData.depositSignature) { - const signature = truncateHexPrefix(txData.depositSignature) - data.push(signature) - } - - return data.join('') -} - -async function getTreeProof(state: PoolState, outCommit: string, prover: IProver) { - const { pub, sec, commitIndex } = 
state.getVirtualTreeProofInputs(outCommit) - - logger.debug(`Proving tree...`) - const treeProof = await prover.prove(pub, sec) - logger.debug(`Tree proved`) - return { treeProof, commitIndex } -} - -export async function getDirectDepositProof(deposits: DirectDeposit[], prover: IProver) { - const { - public: pub, - secret: sec, - memo, - out_commitment_hash: outCommit, - } = await DelegatedDepositsData.create( - deposits.map(d => { - return { - id: d.nonce, - receiver_d: toBN(d.zkAddress.diversifier).toString(10), - receiver_p: toBN(d.zkAddress.pk).toString(10), - denominated_amount: d.deposit, - } - }) - ) - const proof = await prover.prove(pub, sec) - return { proof, memo, outCommit } -} - -export interface ProcessResult { - data: string - commitIndex: number - outCommit: string - rootAfter: string - memo: string - nullifier?: string -} - -export async function buildTx( - tx: WorkerTx, - treeProver: IProver, - state: PoolState -): Promise { - const { txType, txProof, rawMemo, depositSignature } = tx - - const nullifier = getTxProofField(txProof, 'nullifier') - const outCommit = getTxProofField(txProof, 'out_commit') - const delta = parseDelta(getTxProofField(txProof, 'delta')) - - const { treeProof, commitIndex } = await getTreeProof(state, outCommit, treeProver) - - const rootAfter = treeProof.inputs[1] - const data = buildTxData({ - txProof: txProof.proof, - treeProof: treeProof.proof, - nullifier: numToHex(toBN(nullifier)), - outCommit: numToHex(toBN(outCommit)), - rootAfter: numToHex(toBN(rootAfter)), - delta, - txType, - memo: rawMemo, - depositSignature, - }) - - const memo = truncateMemoTxPrefix(rawMemo, txType) - - return { data, commitIndex, outCommit, rootAfter, nullifier, memo } -} - -export async function buildDirectDeposits( - tx: WorkerTx, - treeProver: IProver, - state: PoolState -): Promise { - const outCommit = tx.outCommit - - const { treeProof, commitIndex } = await getTreeProof(state, outCommit, treeProver) - - const rootAfter = 
treeProof.inputs[1] - const indices = tx.deposits.map(d => d.nonce) - - const data: string = PoolInstance.methods - .appendDirectDeposits(rootAfter, indices, outCommit, flattenProof(tx.txProof.proof), flattenProof(treeProof.proof)) - .encodeABI() - - return { data, commitIndex, outCommit, rootAfter, memo: tx.memo } -} diff --git a/zp-relayer/utils/PoolCalldataParser.ts b/zp-relayer/utils/PoolCalldataParser.ts index 8bb94988..83abbffd 100644 --- a/zp-relayer/utils/PoolCalldataParser.ts +++ b/zp-relayer/utils/PoolCalldataParser.ts @@ -1,22 +1,40 @@ -type Field = 'selector' | 'nullifier' | 'outCommit' | 'txType' | 'memoSize' | 'memo' - -type FieldMapping = { - [key in Field]: { start: number; size: number } +const FIELDS = { + selector: { start: 0, size: 4 }, + nullifier: { start: 4, size: 32 }, + outCommit: { start: 36, size: 32 }, + txType: { start: 640, size: 2 }, + memoSize: { start: 642, size: 2 }, + memo: { start: 644, size: 0 }, } +type Field = keyof typeof FIELDS + export class PoolCalldataParser { - private fields: FieldMapping = { - selector: { start: 0, size: 4 }, - nullifier: { start: 4, size: 32 }, - outCommit: { start: 36, size: 32 }, - txType: { start: 640, size: 2 }, - memoSize: { start: 642, size: 2 }, - memo: { start: 644, size: 0 }, - } constructor(private calldata: Buffer) {} getField(f: Field, defaultSize?: number) { - let { start, size } = this.fields[f] + let { start, size } = FIELDS[f] + size = defaultSize || size + return '0x' + this.calldata.subarray(start, start + size).toString('hex') + } +} + +const FIELDS_V2 = { + selector: { start: 0, size: 4 }, + nullifier: { start: 5, size: 32 }, + outCommit: { start: 37, size: 32 }, + txType: { start: 353, size: 2 }, + memoSize: { start: 355, size: 2 }, + memo: { start: 357, size: 0 }, +} + +type FieldV2 = keyof typeof FIELDS_V2 + +export class PoolCalldataV2Parser { + constructor(private calldata: Buffer) {} + + getField(f: FieldV2, defaultSize?: number) { + let { start, size } = FIELDS_V2[f] size = 
defaultSize || size return '0x' + this.calldata.subarray(start, start + size).toString('hex') } diff --git a/zp-relayer/utils/constants.ts b/zp-relayer/utils/constants.ts index 1fc2b0e7..424af839 100644 --- a/zp-relayer/utils/constants.ts +++ b/zp-relayer/utils/constants.ts @@ -30,6 +30,7 @@ const constants = { randomize: true, }, MESSAGE_PREFIX_COMMON_V1: '0000', + MESSAGE_PREFIX_COMMON_V2: '0002', HEADER_TRACE_ID: 'zkbob-support-id' as const, HEADER_LIBJS: 'zkbob-libjs-version' as const, LIBJS_MIN_VERSION: '2.0.0', diff --git a/zp-relayer/utils/helpers.ts b/zp-relayer/utils/helpers.ts index 4d2c52b3..d969c7c5 100644 --- a/zp-relayer/utils/helpers.ts +++ b/zp-relayer/utils/helpers.ts @@ -1,14 +1,15 @@ -import fs from 'fs' -import crypto from 'crypto' +import { logger } from '@/lib/appLogger' +import type { Mutex } from 'async-mutex' import type BN from 'bn.js' +import crypto from 'crypto' +import type { Request, Response } from 'express' +import fs from 'fs' +import type { SnarkProof } from 'libzkbob-rs-node' +import promiseRetry from 'promise-retry' import type Web3 from 'web3' -import type { Mutex } from 'async-mutex' import type { Contract } from 'web3-eth-contract' -import type { SnarkProof } from 'libzkbob-rs-node' +import { padLeft, toBN, numberToHex } from 'web3-utils' import { TxType } from 'zp-memo-parser' -import promiseRetry from 'promise-retry' -import { padLeft, toBN } from 'web3-utils' -import { logger } from '@/services/appLogger' import { isContractCallError } from './web3Errors' const S_MASK = toBN('0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff') @@ -36,6 +37,18 @@ export function truncateMemoTxPrefix(memo: string, txType: TxType) { return memo.slice(txSpecificPrefixLen) } +const txTypePrefixLenV2 = { + [TxType.DEPOSIT]: 116, + [TxType.TRANSFER]: 116, + [TxType.WITHDRAWAL]: 172, + [TxType.PERMITTABLE_DEPOSIT]: 172, +} + +export function truncateMemoTxPrefixProverV2(memo: string, txType: TxType) { + const txSpecificPrefixLen = 
txTypePrefixLenV2[txType] + return memo.slice(txSpecificPrefixLen) +} + export function truncateHexPrefix(data: string) { if (data.startsWith('0x')) { data = data.slice(2) @@ -55,6 +68,25 @@ export function numToHex(num: BN, pad = 64) { return padLeft(hex, pad) } +export function packSignature(signature: string): string { + signature = truncateHexPrefix(signature) + + if (signature.length > 128) { + let v = signature.slice(128, 130) + if (v == '1c') { + return `${signature.slice(0, 64)}${(parseInt(signature[64], 16) | 8).toString(16)}${signature.slice(65, 128)}` + } else if (v != '1b') { + throw 'invalid signature: v should be 27 or 28' + } + + return signature.slice(0, 128) + } else if (signature.length < 128) { + throw 'invalid signature: it should consist at least 64 bytes (128 chars)' + } + + return signature +} + export function unpackSignature(packedSign: string) { if (packedSign.length === 130) { return '0x' + packedSign @@ -101,7 +133,7 @@ export function encodeProof(p: SnarkProof): string { } export function buildPrefixedMemo(outCommit: string, txHash: string, truncatedMemo: string) { - return numToHex(toBN(outCommit)).concat(txHash.slice(2)).concat(truncatedMemo) + return numToHex(toBN(outCommit)).concat(truncateHexPrefix(txHash)).concat(truncatedMemo) } export async function setIntervalAndRun(f: () => Promise | void, interval: number) { @@ -142,7 +174,7 @@ export async function withErrorLog( } } -function sleep(ms: number) { +export function sleep(ms: number) { return new Promise(resolve => setTimeout(resolve, ms)) } @@ -242,7 +274,11 @@ export function contractCallRetry(contract: Contract, method: string, args: any[ ) } -export function getFileHash(path: string) { +export function getFileHash(path: string | null) { + if (!path) { + return null + } + const buffer = fs.readFileSync(path) const hash = crypto.createHash('sha256') hash.update(buffer) @@ -250,7 +286,41 @@ export function getFileHash(path: string) { } export function applyDenominator(n: BN, d: 
BN) { - return d.testn(255) - ? n.div(d.maskn(255)) - : n.mul(d) + return d.testn(255) ? n.div(d.maskn(255)) : n.mul(d) +} + +export function inject(values: T, f: (req: Request, res: Response, e: T) => void) { + return (req: Request, res: Response) => { + return f(req, res, values) + } } + +export function txToV2Format(prefix: string, tx: string) { + const outCommit = tx.slice(0, 64) + const txHash = tx.slice(64, 128) + const memo = tx.slice(128) + return prefix + txHash + outCommit + memo +} + +export async function fetchJson(serverUrl: string, path: string, query: [string, string][]) { + const url = new URL(path, serverUrl) + + for (const [key, value] of query) { + url.searchParams.set(key, value) + } + + const res = await fetch(url) + if (!res.ok) { + throw new Error(`Failed to fetch ${path} from ${serverUrl}. Status: ${res.status}`) + } + + return await res.json() +} + +export function numberToHexPadded(num: number, numBytes: number): string { + return padLeft(numberToHex(num).slice(2), numBytes * 2); +} + +export function hexToNumber(hex: string): number { + return parseInt(hex, 16); +} \ No newline at end of file diff --git a/zp-relayer/utils/permit/IPermitRecover.ts b/zp-relayer/utils/permit/IPermitRecover.ts index 29d60c28..be43db0f 100644 --- a/zp-relayer/utils/permit/IPermitRecover.ts +++ b/zp-relayer/utils/permit/IPermitRecover.ts @@ -1,7 +1,6 @@ -import type Web3 from 'web3' -import type { Contract } from 'web3-eth-contract' +import { NetworkBackend } from '@/lib/network/NetworkBackend' +import { Network, NetworkContract } from '@/lib/network/types' import { ethers } from 'ethers' -import { contractCallRetry } from '../helpers' export class PreconditionError extends Error { name = 'PreconditionError' @@ -14,7 +13,7 @@ export interface CommonMessageParams { owner: string deadline: string spender: string - tokenContract: Contract + tokenContract: NetworkContract amount: string nullifier: string } @@ -25,10 +24,10 @@ export abstract class IPermitRecover> { 
DOMAIN_SEPARATOR: string | null = null abstract TYPES: { [key: string]: TypedMessage> } - constructor(protected web3: Web3, protected verifyingContract: string) {} + constructor(protected network: NetworkBackend, protected verifyingContract: string) {} async initializeDomain() { - const contract = new this.web3.eth.Contract( + const contract = this.network.contract( [ { inputs: [], @@ -46,7 +45,7 @@ export abstract class IPermitRecover> { ], this.verifyingContract ) - this.DOMAIN_SEPARATOR = await contractCallRetry(contract, 'DOMAIN_SEPARATOR') + this.DOMAIN_SEPARATOR = await contract.callRetry('DOMAIN_SEPARATOR') } abstract precondition(params: CommonMessageParams): Promise diff --git a/zp-relayer/utils/permit/Permit2Recover.ts b/zp-relayer/utils/permit/Permit2Recover.ts index beb1dbda..2c2d1bf7 100644 --- a/zp-relayer/utils/permit/Permit2Recover.ts +++ b/zp-relayer/utils/permit/Permit2Recover.ts @@ -1,7 +1,6 @@ -import { toBN, AbiItem } from 'web3-utils' -import { CommonMessageParams, IPermitRecover, PreconditionError, TypedMessage } from './IPermitRecover' -import { contractCallRetry } from '../helpers' import Permit2Abi from '@/abi/permit2.json' +import { AbiItem, toBN } from 'web3-utils' +import { CommonMessageParams, IPermitRecover, PreconditionError, TypedMessage } from './IPermitRecover' export interface ITokenPermissions { token: string @@ -35,16 +34,16 @@ export class Permit2Recover extends IPermitRecover { async precondition({ nullifier, amount, owner, tokenContract }: CommonMessageParams) { // Make sure user approved tokens for Permit2 contract - const approved = await contractCallRetry(tokenContract, 'allowance', [owner, this.verifyingContract]) + const approved = await tokenContract.callRetry('allowance', [owner, this.verifyingContract]) if (toBN(approved).lt(toBN(amount))) return new PreconditionError('Permit2: Allowance is too low') - const permit2 = new this.web3.eth.Contract(Permit2Abi as AbiItem[], this.verifyingContract) + const permit2 = 
this.network.contract(Permit2Abi as AbiItem[], this.verifyingContract) const nonce = toBN(nullifier) const wordPos = nonce.shrn(8) const bitPos = nonce.maskn(8) - const pointer = await contractCallRetry(permit2, 'nonceBitmap', [owner, wordPos]) + const pointer = await permit2.callRetry('nonceBitmap', [owner, wordPos]) const isSet = toBN(pointer).testn(bitPos.toNumber()) if (isSet) return new PreconditionError('Permit2: Nonce already used') @@ -58,7 +57,7 @@ export class Permit2Recover extends IPermitRecover { amount, nullifier, }: CommonMessageParams): Promise { - const token = tokenContract.options.address + const token = tokenContract.address() const message: IPermitTransferFrom = { permitted: { diff --git a/zp-relayer/utils/permit/SaltedPermitRecover.ts b/zp-relayer/utils/permit/SaltedPermitRecover.ts index 335c3076..0040a349 100644 --- a/zp-relayer/utils/permit/SaltedPermitRecover.ts +++ b/zp-relayer/utils/permit/SaltedPermitRecover.ts @@ -1,5 +1,4 @@ import { CommonMessageParams, IPermitRecover, TypedMessage } from './IPermitRecover' -import { contractCallRetry } from '../helpers' type SaltedPermitMessage = { owner: string @@ -36,7 +35,7 @@ export class SaltedPermitRecover extends IPermitRecover { amount, nullifier, }: CommonMessageParams): Promise { - const nonce = await contractCallRetry(tokenContract, 'nonces', [owner]) + const nonce = await tokenContract.callRetry('nonces', [owner]) const message: SaltedPermitMessage = { owner, diff --git a/zp-relayer/utils/permit/TransferWithAuthorizationRecover.ts b/zp-relayer/utils/permit/TransferWithAuthorizationRecover.ts index 96730cc5..faacd3da 100644 --- a/zp-relayer/utils/permit/TransferWithAuthorizationRecover.ts +++ b/zp-relayer/utils/permit/TransferWithAuthorizationRecover.ts @@ -1,7 +1,6 @@ -import { toBN, numberToHex, AbiItem } from 'web3-utils' -import { CommonMessageParams, IPermitRecover, PreconditionError, TypedMessage } from './IPermitRecover' -import { contractCallRetry } from '../helpers' import 
Erc3009Abi from '@/abi/erc3009.json' +import { AbiItem, numberToHex, toBN } from 'web3-utils' +import { CommonMessageParams, IPermitRecover, PreconditionError, TypedMessage } from './IPermitRecover' export interface ITransferWithAuthorization { from: string @@ -27,8 +26,8 @@ export class TransferWithAuthorizationRecover extends IPermitRecover getNonce(web3, config.relayerAddress)) +export const readNonce = (redis: Redis, web3: Web3, address: string) => + readFieldBuilder(redis, RelayerKeys.NONCE, () => getNonce(web3, address)) -function readFieldBuilder(key: RelayerKeys, forceUpdateFunc?: Function) { +function readFieldBuilder(redis: Redis, key: RelayerKeys, forceUpdateFunc?: Function) { return async (forceUpdate?: boolean) => { const update = () => { if (!forceUpdateFunc) throw new Error('Force update function not provided') @@ -34,10 +34,10 @@ function readFieldBuilder(key: RelayerKeys, forceUpdateFunc?: Function) { } } -export function updateField(key: RelayerKeys, val: any) { +export function updateField(redis: Redis, key: RelayerKeys, val: any) { return redis.set(key, val) } -export function updateNonce(nonce: number) { - return updateField(RelayerKeys.NONCE, nonce) +export function updateNonce(redis: Redis, nonce: number) { + return updateField(redis, RelayerKeys.NONCE, nonce) } diff --git a/zp-relayer/utils/web3.ts b/zp-relayer/utils/web3.ts index 6ac45b76..768a98bc 100644 --- a/zp-relayer/utils/web3.ts +++ b/zp-relayer/utils/web3.ts @@ -1,6 +1,8 @@ +import { logger } from '@/lib/appLogger' +import { NetworkBackend } from '@/lib/network/NetworkBackend' +import { Network } from '@/lib/network/types' import type Web3 from 'web3' import type { Contract, PastEventOptions } from 'web3-eth-contract' -import { logger } from '@/services/appLogger' export async function getNonce(web3: Web3, address: string) { try { @@ -60,10 +62,10 @@ export async function getChainId(web3: Web3) { } } -export async function getBlockNumber(web3: Web3) { +export async function 
getBlockNumber(network: NetworkBackend) { try { - logger.debug('Getting block number') - const blockNumber = await web3.eth.getBlockNumber() + //logger.debug('Getting block number') + const blockNumber = await network.getBlockNumber() logger.debug('Block number obtained', { blockNumber }) return blockNumber } catch (e) { diff --git a/zp-relayer/validation/api/validation.ts b/zp-relayer/validation/api/validation.ts index c688a2a9..267edf5d 100644 --- a/zp-relayer/validation/api/validation.ts +++ b/zp-relayer/validation/api/validation.ts @@ -1,11 +1,11 @@ +// @ts-ignore +import { logger } from '@/lib/appLogger' +import { BasePoolTx } from '@/queue/poolTxQueue' +import { HEADER_TRACE_ID, ZERO_ADDRESS } from '@/utils/constants' import Ajv, { JSONSchemaType } from 'ajv' -import { isAddress } from 'web3-utils' import { Proof, SnarkProof } from 'libzkbob-rs-node' +import { isAddress } from 'web3-utils' import { TxType } from 'zp-memo-parser' -import type { PoolTx } from '@/pool' -import { HEADER_TRACE_ID, ZERO_ADDRESS } from '@/utils/constants' -import config from '@/configs/relayerConfig' -import { logger } from '@/services/appLogger' const ajv = new Ajv({ allErrors: true, coerceTypes: true, useDefaults: true }) @@ -30,7 +30,6 @@ const AjvNullableString: JSONSchemaType = { type: 'string', nullable: tr const AjvNullableAddress: JSONSchemaType = { type: 'string', - pattern: '^0x[a-fA-F0-9]{40}$', default: ZERO_ADDRESS, isAddress: true, } @@ -71,7 +70,7 @@ const AjvProofSchema: JSONSchemaType = { required: ['inputs', 'proof'], } -const AjvSendTransactionSchema: JSONSchemaType = { +const AjvSendTransactionSchema: JSONSchemaType = { type: 'object', properties: { proof: AjvProofSchema, @@ -85,7 +84,23 @@ const AjvSendTransactionSchema: JSONSchemaType = { required: ['proof', 'memo', 'txType'], } -const AjvSendTransactionsSchema: JSONSchemaType = { +// @ts-ignore +const AjvSignMPCSchema: JSONSchemaType = { + type: 'object', + properties: { + txProof: AjvProofSchema, + treeProof: 
AjvProofSchema, + txType: { + type: 'string', + enum: [TxType.DEPOSIT, TxType.PERMITTABLE_DEPOSIT, TxType.TRANSFER, TxType.WITHDRAWAL], + }, + memo: AjvString, + depositSignature: AjvNullableString, + }, + required: ['txProof', 'treeProof', 'txType', 'memo'], +} + +const AjvSendTransactionsSchema: JSONSchemaType = { type: 'array', items: AjvSendTransactionSchema, } @@ -149,7 +164,21 @@ const AjvGetSiblingsSchema: JSONSchemaType<{ const AjvTraceIdSchema: JSONSchemaType<{ [HEADER_TRACE_ID]: string }> = { type: 'object', properties: { [HEADER_TRACE_ID]: AjvNullableString }, - required: config.requireTraceId ? [HEADER_TRACE_ID] : [], + required: [HEADER_TRACE_ID], +} + +const AjvGetRootSchema: JSONSchemaType<{ + index: string | number +}> = { + type: 'object', + properties: { + index: { + type: 'integer', + minimum: 0, + isDivBy128: true, + }, + }, + required: ['index'], } function checkErrors(schema: JSONSchemaType) { @@ -165,7 +194,7 @@ function checkErrors(schema: JSONSchemaType) { } } -type ValidationFunction = ReturnType +export type ValidationFunction = ReturnType export class ValidationError extends Error { constructor(public validationErrors: ReturnType) { @@ -173,6 +202,11 @@ export class ValidationError extends Error { } } +export function validateBatchWithTrace(req: any, validationSet: [ValidationFunction, any][]) { + validationSet.push([checkTraceId, req.headers]) + return validateBatch(validationSet) +} + export function validateBatch(validationSet: [ValidationFunction, any][]) { for (const [validate, data] of validationSet) { const errors = validate(data) @@ -183,10 +217,12 @@ export function validateBatch(validationSet: [ValidationFunction, any][]) { export const checkMerkleRootErrors = checkErrors(AjvMerkleRootSchema) export const checkSendTransactionsErrors = checkErrors(AjvSendTransactionsSchema) +export const checkSignMPCSchema = checkErrors(AjvSignMPCSchema) export const checkGetTransactionsV2 = checkErrors(AjvGetTransactionsV2Schema) export const 
checkGetLimits = checkErrors(AjvGetLimitsSchema) export const checkGetSiblings = checkErrors(AjvGetSiblingsSchema) export const checkTraceId = checkErrors(AjvTraceIdSchema) +export const checkGetRoot = checkErrors(AjvGetRootSchema) async function fetchSafe(url: string) { const r = await fetch(url) @@ -196,8 +232,8 @@ async function fetchSafe(url: string) { return r } -export async function validateCountryIP(ip: string) { - if (config.blockedCountries.length === 0) return null +export async function validateCountryIP(ip: string, blockedCountries: string[]) { + if (blockedCountries.length === 0) return null const apis = [ fetchSafe(`https://ipapi.co/${ip}/country`).then(res => res.text()), @@ -216,7 +252,7 @@ export async function validateCountryIP(ip: string) { ]) }) - if (config.blockedCountries.includes(country)) { + if (blockedCountries.includes(country)) { logger.warn('Restricted country', { ip, country }) throw new ValidationError([ { diff --git a/zp-relayer/validation/tx/common.ts b/zp-relayer/validation/tx/common.ts index 956a9751..c3cc2ce7 100644 --- a/zp-relayer/validation/tx/common.ts +++ b/zp-relayer/validation/tx/common.ts @@ -1,6 +1,22 @@ -import config from '@/configs/baseConfig' -import { logger } from '@/services/appLogger' -import { HEADER_TRACE_ID } from '@/utils/constants' +import { logger } from '@/lib/appLogger' +import type { NetworkBackend } from '@/lib/network/NetworkBackend' +import type { Network } from '@/lib/network/types' +import { Limits } from '@/pool/types' +import type { NullifierSet } from '@/state/nullifierSet' +import type { PoolState } from '@/state/PoolState' +import { HEADER_TRACE_ID, MESSAGE_PREFIX_COMMON_V1, MESSAGE_PREFIX_COMMON_V2, ZERO_ADDRESS } from '@/utils/constants' +import { + fetchJson, + numToHex, + truncateMemoTxPrefix, + truncateMemoTxPrefixProverV2, + unpackSignature, +} from '@/utils/helpers' +import type { PermitRecover } from '@/utils/permit/types' +import BN from 'bn.js' +import { Proof, SnarkProof } from 
'libzkbob-rs-node' +import { bytesToHex, toBN, toChecksumAddress } from 'web3-utils' +import { TxData, TxType } from 'zp-memo-parser' export class TxValidationError extends Error { name = 'TxValidationError' @@ -27,22 +43,18 @@ export function checkSize(data: string, size: number) { return data.length === size } -export async function checkScreener(address: string, traceId?: string) { - if (config.screenerUrl === null || config.screenerToken === null) { - return null - } - +export async function checkScreener(address: string, screenerUrl: string, screenerToken: string, traceId?: string) { const ACC_VALIDATION_FAILED = 'Internal account validation failed' const headers: Record = { 'Content-type': 'application/json', - 'Authorization': `Bearer ${config.screenerToken}`, + 'Authorization': `Bearer ${screenerToken}`, } if (traceId) headers[HEADER_TRACE_ID] = traceId try { - const rawResponse = await fetch(config.screenerUrl, { + const rawResponse = await fetch(screenerUrl, { method: 'POST', headers, body: JSON.stringify({ address }), @@ -58,3 +70,216 @@ export async function checkScreener(address: string, traceId?: string) { return null } + +export function checkCommitment(treeProof: Proof, txProof: Proof) { + return treeProof.inputs[2] === txProof.inputs[2] +} + +export function checkProof(txProof: Proof, verify: (p: SnarkProof, i: Array) => boolean) { + const res = verify(txProof.proof, txProof.inputs) + if (!res) { + return new TxValidationError('Incorrect snark proof') + } + return null +} + +export async function checkNullifier(nullifier: string, nullifierSet: NullifierSet) { + const inSet = await nullifierSet.isInSet(nullifier) + if (inSet === 0) return null + return new TxValidationError(`Doublespend detected in ${nullifierSet.name}: ${nullifier}`) +} + +export async function checkNullifierContract(nullifier: string, network: NetworkBackend) { + const isSet = await network.pool.callRetry('nullifiers', [nullifier]) + if (!toBN(isSet).eqn(0)) { + return new 
TxValidationError(`Doublespend detected in contract ${nullifier}`) + } + return null +} + +export function checkTransferIndex(contractPoolIndex: BN, transferIndex: BN) { + if (transferIndex.lte(contractPoolIndex)) return null + return new TxValidationError(`Incorrect transfer index`) +} + +export function checkNativeAmount(nativeAmount: BN | null, withdrawalAmount: BN, maxNativeAmount: BN) { + logger.debug(`Native amount: ${nativeAmount}`) + if (nativeAmount === null) { + return null + } + if (nativeAmount.gt(maxNativeAmount) || nativeAmount.gt(withdrawalAmount)) { + return new TxValidationError('Native amount too high') + } + return null +} + +export function checkFee(userFee: BN, requiredFee: BN) { + logger.debug('Fee', { + userFee: userFee.toString(), + requiredFee: requiredFee.toString(), + }) + if (userFee.lt(requiredFee)) { + return new TxValidationError('Fee too low') + } + return null +} + +export function checkNonZeroWithdrawAddress(address: string) { + if (address === ZERO_ADDRESS) { + return new TxValidationError('Withdraw address cannot be zero') + } + return null +} + +/** + * @param signedDeadline deadline signed by user, in seconds + * @param threshold "window" added to current relayer time, in seconds + */ +export function checkDeadline(signedDeadline: BN, threshold: number) { + // Check native amount (relayer faucet) + const currentTimestamp = new BN(Math.floor(Date.now() / 1000)) + if (signedDeadline <= currentTimestamp.addn(threshold)) { + return new TxValidationError(`Deadline is expired`) + } + return null +} + +export function checkLimits(limits: Limits, amount: BN) { + if (amount.gt(toBN(0))) { + if (amount.gt(limits.depositCap)) { + return new TxValidationError('Single deposit cap exceeded') + } + if (limits.tvl.add(amount).gte(limits.tvlCap)) { + return new TxValidationError('Tvl cap exceeded') + } + if (limits.dailyUserDepositCapUsage.add(amount).gt(limits.dailyUserDepositCap)) { + return new TxValidationError('Daily user deposit cap 
exceeded') + } + if (limits.dailyDepositCapUsage.add(amount).gt(limits.dailyDepositCap)) { + return new TxValidationError('Daily deposit cap exceeded') + } + } else { + if (limits.dailyWithdrawalCapUsage.sub(amount).gt(limits.dailyWithdrawalCap)) { + return new TxValidationError('Daily withdrawal cap exceeded') + } + } + return null +} + +export async function checkDepositEnoughBalance( + network: NetworkBackend, + address: string, + requiredTokenAmount: BN +) { + if (requiredTokenAmount.lte(toBN(0))) { + throw new TxValidationError('Requested balance check for token amount <= 0') + } + const balance = await network.token.callRetry('balanceOf', [address]) + const res = toBN(balance).gte(requiredTokenAmount) + if (!res) { + return new TxValidationError('Not enough balance for deposit') + } + return null +} + +export async function getRecoveredAddress( + txType: T, + proofNullifier: string, + txData: TxData, + network: NetworkBackend, + tokenAmount: BN, + depositSignature: string, + permitRecover: PermitRecover +) { + // Signature without `0x` prefix, size is 64*2=128 + checkCondition(checkSize(depositSignature, 128), 'Invalid deposit signature size') + + const nullifier = '0x' + numToHex(toBN(proofNullifier)) + const sig = unpackSignature(depositSignature) + + let recoveredAddress: string + if (txType === TxType.DEPOSIT) { + recoveredAddress = await network.recover(nullifier, sig) + } else if (txType === TxType.PERMITTABLE_DEPOSIT) { + if (permitRecover === null) { + throw new TxValidationError('Permittable deposits are not enabled') + } + + const { holder, deadline } = txData as TxData + const spender = toChecksumAddress(network.pool.address()) + const owner = toChecksumAddress(bytesToHex(Array.from(holder))) + + const recoverParams = { + owner, + deadline, + spender, + tokenContract: network.token, + amount: tokenAmount.toString(10), + nullifier, + } + const preconditionRes = await permitRecover.precondition(recoverParams) + if (preconditionRes !== null) { + throw 
new TxValidationError(`Invalid permit precondition: ${preconditionRes.message}`) + } + recoveredAddress = await permitRecover.recoverPermitSignature(recoverParams, sig) + if (recoveredAddress.toLowerCase() !== owner.toLowerCase()) { + throw new TxValidationError(`Invalid deposit signer; Restored: ${recoveredAddress}; Expected: ${owner}`) + } + } else { + throw new TxValidationError('Unsupported TxType') + } + + return recoveredAddress +} + +export function checkRoot(proofIndex: BN, proofRoot: string, state: PoolState) { + const index = proofIndex.toNumber() + + const stateRoot = state.getMerkleRootAt(index) + if (stateRoot !== proofRoot) { + return new TxValidationError(`Incorrect root at index ${index}: given ${proofRoot}, expected ${stateRoot}`) + } + + return null +} + +export async function checkRootIndexer(proofIndex: BN, proofRoot: string, indexerUrl: string) { + const index = proofIndex.toNumber() + const { root } = await fetchJson(indexerUrl, '/root', [['index', index.toString()]]) + + if (root !== proofRoot) { + return new TxValidationError(`Incorrect root at index ${index}: given ${proofRoot}, expected ${root}`) + } + + return null +} + +export function checkPoolId(deltaPoolId: BN, contractPoolId: BN) { + if (deltaPoolId.eq(contractPoolId)) { + return null + } + return new TxValidationError(`Incorrect poolId: given ${deltaPoolId}, expected ${contractPoolId}`) +} + +export function checkMemoPrefix(memo: string, txType: TxType) { + const numItemsSuffix = truncateMemoTxPrefix(memo, txType).substring(4, 8) + if (numItemsSuffix === MESSAGE_PREFIX_COMMON_V1) { + return null + } + return new TxValidationError(`Memo prefix is incorrect: ${numItemsSuffix}`) +} + +export function checkMemoPrefixProverV2(memo: string, txType: TxType) { + const numItemsSuffix = truncateMemoTxPrefixProverV2(memo, txType).substring(4, 8) + if (numItemsSuffix === MESSAGE_PREFIX_COMMON_V2) { + return null + } + return new TxValidationError(`Memo prefix is incorrect: ${numItemsSuffix}`) 
+} + +export function checkAddressEq(address1: string, address2: string) { + if (address1.toLowerCase() === address2.toLowerCase()) { + return null + } + return new TxValidationError(`Addresses are not equal: ${address1} != ${address2}`) +} diff --git a/zp-relayer/validation/tx/validateDirectDeposit.ts b/zp-relayer/validation/tx/validateDirectDeposit.ts index 399024fe..037c90c0 100644 --- a/zp-relayer/validation/tx/validateDirectDeposit.ts +++ b/zp-relayer/validation/tx/validateDirectDeposit.ts @@ -1,8 +1,7 @@ -import { toBN } from 'web3-utils' -import type { Contract } from 'web3-eth-contract' +import { Network, NetworkContract } from '@/lib/network' import type { DirectDeposit } from '@/queue/poolTxQueue' import { Helpers } from 'libzkbob-rs-node' -import { contractCallRetry } from '@/utils/helpers' +import { toBN } from 'web3-utils' import { checkAssertion, checkScreener, TxValidationError } from './common' const SNARK_SCALAR_FIELD = toBN('21888242871839275222246405745257275088548364400416034343698204186575808495617') @@ -36,10 +35,8 @@ function checkDirectDepositPK(pk: string) { return null } -async function checkDirectDepositConsistency(dd: DirectDeposit, directDepositContract: Contract) { - const ddFromContract: DirectDepositStruct = await contractCallRetry(directDepositContract, 'getDirectDeposit', [ - dd.nonce, - ]) +async function checkDirectDepositConsistency(dd: DirectDeposit, directDepositContract: NetworkContract) { + const ddFromContract: DirectDepositStruct = await directDepositContract.callRetry('getDirectDeposit', [dd.nonce]) const errPrefix = `Direct deposit ${dd.nonce}` if (ddFromContract.status !== DirectDepositStatus.Pending) { @@ -62,9 +59,21 @@ async function checkDirectDepositConsistency(dd: DirectDeposit, directDepositCon return null } -export async function validateDirectDeposit(dd: DirectDeposit, directDepositContract: Contract) { +export interface TxScreener { + url: string + token: string +} + +export async function 
validateDirectDeposit( + dd: DirectDeposit, + directDepositContract: NetworkContract, + screener?: TxScreener +) { await checkAssertion(() => checkDirectDepositPK(dd.zkAddress.pk)) await checkAssertion(() => checkDirectDepositConsistency(dd, directDepositContract)) - await checkAssertion(() => checkScreener(dd.sender)) - await checkAssertion(() => checkScreener(dd.fallbackUser)) + + if (screener) { + await checkAssertion(() => checkScreener(dd.sender, screener.url, screener.token)) + await checkAssertion(() => checkScreener(dd.fallbackUser, screener.url, screener.token)) + } } diff --git a/zp-relayer/validation/tx/validateTx.ts b/zp-relayer/validation/tx/validateTx.ts deleted file mode 100644 index 6a0da120..00000000 --- a/zp-relayer/validation/tx/validateTx.ts +++ /dev/null @@ -1,291 +0,0 @@ -import BN from 'bn.js' -import { toBN } from 'web3-utils' -import type { Contract } from 'web3-eth-contract' -import { TxType, TxData, getTxData } from 'zp-memo-parser' -import { Proof, SnarkProof } from 'libzkbob-rs-node' -import { logger } from '@/services/appLogger' -import config from '@/configs/relayerConfig' -import type { Limits, Pool } from '@/pool' -import type { NullifierSet } from '@/state/nullifierSet' -import { web3 } from '@/services/web3' -import { applyDenominator, contractCallRetry, numToHex, truncateMemoTxPrefix, unpackSignature } from '@/utils/helpers' -import { ZERO_ADDRESS, MESSAGE_PREFIX_COMMON_V1, MOCK_CALLDATA } from '@/utils/constants' -import { getTxProofField, parseDelta } from '@/utils/proofInputs' -import type { TxPayload } from '@/queue/poolTxQueue' -import type { PoolState } from '@/state/PoolState' -import { checkAssertion, TxValidationError, checkSize, checkScreener, checkCondition } from './common' -import type { PermitRecover } from '@/utils/permit/types' -import type { FeeManager } from '@/services/fee' - -const ZERO = toBN(0) - -export async function checkBalance(token: Contract, address: string, minBalance: string) { - const balance = 
await contractCallRetry(token, 'balanceOf', [address]) - const res = toBN(balance).gte(toBN(minBalance)) - if (!res) { - return new TxValidationError('Not enough balance for deposit') - } - return null -} - -export function checkCommitment(treeProof: Proof, txProof: Proof) { - return treeProof.inputs[2] === txProof.inputs[2] -} - -export function checkProof(txProof: Proof, verify: (p: SnarkProof, i: Array) => boolean) { - const res = verify(txProof.proof, txProof.inputs) - if (!res) { - return new TxValidationError('Incorrect snark proof') - } - return null -} - -export async function checkNullifier(nullifier: string, nullifierSet: NullifierSet) { - const inSet = await nullifierSet.isInSet(nullifier) - if (inSet === 0) return null - return new TxValidationError(`Doublespend detected in ${nullifierSet.name}`) -} - -export function checkTransferIndex(contractPoolIndex: BN, transferIndex: BN) { - if (transferIndex.lte(contractPoolIndex)) return null - return new TxValidationError(`Incorrect transfer index`) -} - -export function checkNativeAmount(nativeAmount: BN | null, withdrawalAmount: BN) { - logger.debug(`Native amount: ${nativeAmount}`) - if (nativeAmount === null) { - return null - } - if (nativeAmount.gt(config.maxNativeAmount) || nativeAmount.gt(withdrawalAmount)) { - return new TxValidationError('Native amount too high') - } - return null -} - -export function checkFee(userFee: BN, requiredFee: BN) { - logger.debug('Fee', { - userFee: userFee.toString(), - requiredFee: requiredFee.toString(), - }) - if (userFee.lt(requiredFee)) { - return new TxValidationError('Fee too low') - } - return null -} - -export function checkNonZeroWithdrawAddress(address: string) { - if (address === ZERO_ADDRESS) { - return new TxValidationError('Withdraw address cannot be zero') - } - return null -} - -/** - * @param signedDeadline deadline signed by user, in seconds - * @param threshold "window" added to current relayer time, in seconds - */ -export function 
checkDeadline(signedDeadline: BN, threshold: number) { - // Check native amount (relayer faucet) - const currentTimestamp = new BN(Math.floor(Date.now() / 1000)) - if (signedDeadline <= currentTimestamp.addn(threshold)) { - return new TxValidationError(`Deadline is expired`) - } - return null -} - -export function checkLimits(limits: Limits, amount: BN) { - if (amount.gt(toBN(0))) { - if (amount.gt(limits.depositCap)) { - return new TxValidationError('Single deposit cap exceeded') - } - if (limits.tvl.add(amount).gte(limits.tvlCap)) { - return new TxValidationError('Tvl cap exceeded') - } - if (limits.dailyUserDepositCapUsage.add(amount).gt(limits.dailyUserDepositCap)) { - return new TxValidationError('Daily user deposit cap exceeded') - } - if (limits.dailyDepositCapUsage.add(amount).gt(limits.dailyDepositCap)) { - return new TxValidationError('Daily deposit cap exceeded') - } - } else { - if (limits.dailyWithdrawalCapUsage.sub(amount).gt(limits.dailyWithdrawalCap)) { - return new TxValidationError('Daily withdrawal cap exceeded') - } - } - return null -} - -async function checkDepositEnoughBalance(token: Contract, address: string, requiredTokenAmount: BN) { - if (requiredTokenAmount.lte(toBN(0))) { - throw new TxValidationError('Requested balance check for token amount <= 0') - } - - return checkBalance(token, address, requiredTokenAmount.toString(10)) -} - -async function getRecoveredAddress( - txType: T, - proofNullifier: string, - txData: TxData, - tokenContract: Contract, - tokenAmount: BN, - depositSignature: string, - permitRecover: PermitRecover -) { - // Signature without `0x` prefix, size is 64*2=128 - checkCondition(checkSize(depositSignature, 128), 'Invalid deposit signature size') - - const nullifier = '0x' + numToHex(toBN(proofNullifier)) - const sig = unpackSignature(depositSignature) - - let recoveredAddress: string - if (txType === TxType.DEPOSIT) { - recoveredAddress = web3.eth.accounts.recover(nullifier, sig) - } else if (txType === 
TxType.PERMITTABLE_DEPOSIT) { - const { holder, deadline } = txData as TxData - const spender = web3.utils.toChecksumAddress(config.poolAddress as string) - const owner = web3.utils.toChecksumAddress(web3.utils.bytesToHex(Array.from(holder))) - - const recoverParams = { - owner, - deadline, - spender, - tokenContract, - amount: tokenAmount.toString(10), - nullifier, - } - const preconditionRes = await permitRecover.precondition(recoverParams) - if (preconditionRes !== null) { - throw new TxValidationError(`Invalid permit precondition: ${preconditionRes.message}`) - } - recoveredAddress = await permitRecover.recoverPermitSignature(recoverParams, sig) - if (recoveredAddress.toLowerCase() !== owner.toLowerCase()) { - throw new TxValidationError(`Invalid deposit signer; Restored: ${recoveredAddress}; Expected: ${owner}`) - } - } else { - throw new TxValidationError('Unsupported TxType') - } - - return recoveredAddress -} - -function checkRoot(proofIndex: BN, proofRoot: string, state: PoolState) { - const index = proofIndex.toNumber() - - const stateRoot = state.getMerkleRootAt(index) - if (stateRoot !== proofRoot) { - return new TxValidationError(`Incorrect root at index ${index}: given ${proofRoot}, expected ${stateRoot}`) - } - - return null -} - -function checkPoolId(deltaPoolId: BN, contractPoolId: BN) { - if (deltaPoolId.eq(contractPoolId)) { - return null - } - return new TxValidationError(`Incorrect poolId: given ${deltaPoolId}, expected ${contractPoolId}`) -} - -function checkMemoPrefix(memo: string, txType: TxType) { - const numItemsSuffix = truncateMemoTxPrefix(memo, txType).substring(4, 8) - if (numItemsSuffix === MESSAGE_PREFIX_COMMON_V1) { - return null - } - return new TxValidationError(`Memo prefix is incorrect: ${numItemsSuffix}`) -} - -export async function validateTx( - { txType, rawMemo, txProof, depositSignature }: TxPayload, - pool: Pool, - feeManager: FeeManager, - traceId?: string -) { - await checkAssertion(() => checkMemoPrefix(rawMemo, 
txType)) - - const buf = Buffer.from(rawMemo, 'hex') - const txData = getTxData(buf, txType) - - const root = getTxProofField(txProof, 'root') - const nullifier = getTxProofField(txProof, 'nullifier') - const delta = parseDelta(getTxProofField(txProof, 'delta')) - const fee = toBN(txData.fee) - - logger.info( - 'Delta tokens: %s, Energy tokens: %s, Fee: %s', - delta.tokenAmount.toString(10), - delta.energyAmount.toString(10), - fee.toString(10) - ) - - await checkAssertion(() => checkPoolId(delta.poolId, pool.poolId)) - await checkAssertion(() => checkRoot(delta.transferIndex, root, pool.optimisticState)) - await checkAssertion(() => checkNullifier(nullifier, pool.state.nullifiers)) - await checkAssertion(() => checkNullifier(nullifier, pool.optimisticState.nullifiers)) - await checkAssertion(() => checkTransferIndex(toBN(pool.optimisticState.getNextIndex()), delta.transferIndex)) - await checkAssertion(() => checkProof(txProof, (p, i) => pool.verifyProof(p, i))) - - const tokenAmount = delta.tokenAmount - const tokenAmountWithFee = tokenAmount.add(fee) - const energyAmount = delta.energyAmount - - let nativeConvert = false - let userAddress: string - - if (txType === TxType.WITHDRAWAL) { - checkCondition(tokenAmountWithFee.lte(ZERO) && energyAmount.lte(ZERO), 'Incorrect withdraw amounts') - - const { nativeAmount, receiver } = txData as TxData - const nativeAmountBN = toBN(nativeAmount) - userAddress = web3.utils.bytesToHex(Array.from(receiver)) - logger.info('Withdraw address: %s', userAddress) - await checkAssertion(() => checkNonZeroWithdrawAddress(userAddress)) - await checkAssertion(() => checkNativeAmount(nativeAmountBN, tokenAmountWithFee.neg())) - - if (!nativeAmountBN.isZero()) { - nativeConvert = true - } - } else if (txType === TxType.DEPOSIT || txType === TxType.PERMITTABLE_DEPOSIT) { - checkCondition(tokenAmount.gt(ZERO) && energyAmount.eq(ZERO), 'Incorrect deposit amounts') - checkCondition(depositSignature !== null, 'Deposit signature is required') 
- - const requiredTokenAmount = applyDenominator(tokenAmountWithFee, pool.denominator) - userAddress = await getRecoveredAddress( - txType, - nullifier, - txData, - pool.TokenInstance, - requiredTokenAmount, - depositSignature as string, - pool.permitRecover - ) - logger.info('Deposit address: %s', userAddress) - await checkAssertion(() => checkDepositEnoughBalance(pool.TokenInstance, userAddress, requiredTokenAmount)) - } else if (txType === TxType.TRANSFER) { - userAddress = config.relayerAddress - checkCondition(tokenAmountWithFee.eq(ZERO) && energyAmount.eq(ZERO), 'Incorrect transfer amounts') - } else { - throw new TxValidationError('Unsupported TxType') - } - - const requiredFee = await feeManager.estimateFee({ - txType, - nativeConvert, - txData: MOCK_CALLDATA + rawMemo + (depositSignature || ''), - }) - const denominatedFee = requiredFee.denominate(pool.denominator).getEstimate() - await checkAssertion(() => checkFee(fee, denominatedFee)) - - const limits = await pool.getLimitsFor(userAddress) - await checkAssertion(() => checkLimits(limits, delta.tokenAmount)) - - if (txType === TxType.PERMITTABLE_DEPOSIT) { - const { deadline } = txData as TxData - logger.info('Deadline: %s', deadline) - await checkAssertion(() => checkDeadline(toBN(deadline), config.permitDeadlineThresholdInitial)) - } - - if (txType === TxType.DEPOSIT || txType === TxType.PERMITTABLE_DEPOSIT || txType === TxType.WITHDRAWAL) { - await checkAssertion(() => checkScreener(userAddress, traceId)) - } -} diff --git a/zp-relayer/watcher/Watcher.ts b/zp-relayer/watcher/Watcher.ts new file mode 100644 index 00000000..faa532cc --- /dev/null +++ b/zp-relayer/watcher/Watcher.ts @@ -0,0 +1,76 @@ +import { logger } from '@/lib/appLogger' +import { Event, Network, NetworkBackend, NetworkContract } from '@/lib/network' +import { redis } from '@/lib/redisClient' +import { getBlockNumber } from '@/utils/web3' + +interface WatcherConfig { + event: string + blockConfirmations: number + startBlock: number + 
eventPollingInterval: number + batchSize: number + processor: (batch: Event[]) => Promise +} + +export class Watcher { + private lastProcessedBlock: number + private lastBlockRedisKey: string + + constructor( + private network: NetworkBackend, + private contract: NetworkContract, + serviceKey: string, + private config: WatcherConfig + ) { + this.lastProcessedBlock = Math.max(config.startBlock - 1, 0) + this.lastBlockRedisKey = `${serviceKey}:lastProcessedBlock` + } + + async init() { + const result = await redis.get(this.lastBlockRedisKey) + logger.debug('Last Processed block obtained', { fromRedis: result, fromConfig: this.lastProcessedBlock }) + this.lastProcessedBlock = result ? parseInt(result, 10) : this.lastProcessedBlock + } + + private async watch() { + const lastBlockNumber = await getBlockNumber(this.network) + const lastBlockToProcess = lastBlockNumber - this.config.blockConfirmations + + const fromBlock = this.lastProcessedBlock + 1 +//TODO: remove + + const rangeEndBlock = fromBlock + this.config.batchSize + let toBlock = Math.min(lastBlockToProcess, rangeEndBlock) + + try { + for await (const batch of this.network.getEvents({ + startBlock: fromBlock, + lastBlock: toBlock, + event: this.config.event, + batchSize: this.config.batchSize, + contract: this.contract, + })) { + logger.info(`Found ${batch.events.length} ${this.config.event} events`) + await this.config.processor(batch.events) + logger.debug('Updating last processed block', { lastProcessedBlock: toBlock.toString() }) + + this.lastProcessedBlock = batch.toBlock + await redis.set(this.lastBlockRedisKey, this.lastProcessedBlock) + } + } catch (e) { + logger.error('Error processing events, continuing...', e) + } + } + + async run() { + try { + await this.watch() + } catch (e) { + logger.error(e) + } + + setTimeout(() => { + this.run() + }, this.config.eventPollingInterval) + } +} diff --git a/zp-relayer/workers/directDepositWorker.ts b/zp-relayer/workers/directDepositWorker.ts index 
942051ec..e711ceab 100644 --- a/zp-relayer/workers/directDepositWorker.ts +++ b/zp-relayer/workers/directDepositWorker.ts @@ -1,12 +1,12 @@ -import { Job, Worker } from 'bullmq' -import { logger } from '@/services/appLogger' -import { withErrorLog } from '@/utils/helpers' +import { logger } from '@/lib/appLogger' +import { FinalizerPool } from '@/pool/FinalizerPool' +import { DirectDeposit, JobState, poolTxQueue, WorkerTxType, WorkerTxTypePriority } from '@/queue/poolTxQueue' import { DIRECT_DEPOSIT_QUEUE_NAME } from '@/utils/constants' -import { DirectDeposit, poolTxQueue, WorkerTxType, WorkerTxTypePriority } from '@/queue/poolTxQueue' +import { withErrorLog } from '@/utils/helpers' +import { Job, Worker } from 'bullmq' import type { IDirectDepositWorkerConfig } from './workerTypes' -import { getDirectDepositProof } from '@/txProcessor' -export async function createDirectDepositWorker({ redis, directDepositProver }: IDirectDepositWorkerConfig) { +export async function createDirectDepositWorker({ redis, pool }: IDirectDepositWorkerConfig) { const workerLogger = logger.child({ worker: 'dd-prove' }) const WORKER_OPTIONS = { autorun: false, @@ -22,21 +22,21 @@ export async function createDirectDepositWorker({ redis, directDepositProver }: const directDeposits = job.data jobLogger.info('Building direct deposit proof', { count: directDeposits.length }) - const { proof, memo: rawMemo, outCommit } = await getDirectDepositProof(directDeposits, directDepositProver) + const { proof, memo: rawMemo, outCommit } = await (pool as FinalizerPool).getDirectDepositProof(directDeposits) const memo = rawMemo.toString('hex') const poolJob = await poolTxQueue.add( '', { type: WorkerTxType.DirectDeposit, - transactions: [ - { - deposits: directDeposits, - txProof: proof, - outCommit, - memo, - }, - ], + transaction: { + deposits: directDeposits, + txProof: proof, + outCommit, + memo, + txHash: null, + state: JobState.WAITING, + }, }, { priority: 
WorkerTxTypePriority[WorkerTxType.DirectDeposit], diff --git a/zp-relayer/workers/poolTxWorker.ts b/zp-relayer/workers/poolTxWorker.ts index d6be1fb7..577de3ca 100644 --- a/zp-relayer/workers/poolTxWorker.ts +++ b/zp-relayer/workers/poolTxWorker.ts @@ -1,37 +1,30 @@ -import type { Logger } from 'winston' -import { Job, Worker } from 'bullmq' -import { toBN } from 'web3-utils' -import { web3 } from '@/services/web3' -import { logger } from '@/services/appLogger' -import { poolTxQueue, BatchTx, PoolTxResult, WorkerTx, WorkerTxType } from '@/queue/poolTxQueue' -import { TX_QUEUE_NAME } from '@/utils/constants' -import { buildPrefixedMemo, waitForFunds, withErrorLog, withMutex } from '@/utils/helpers' -import { pool } from '@/pool' +import { logger } from '@/lib/appLogger' +import { JobState, PoolTx, WorkerTxType } from '@/queue/poolTxQueue' +import { poolTxQueue } from '@/queue/poolTxQueue' import { sentTxQueue } from '@/queue/sentTxQueue' -import { buildDirectDeposits, ProcessResult, buildTx } from '@/txProcessor' -import config from '@/configs/relayerConfig' -import { getMaxRequiredGasPrice } from '@/services/gas-price' -import { isInsufficientBalanceError } from '@/utils/web3Errors' +import { TX_QUEUE_NAME } from '@/utils/constants' +import { withErrorLog, withMutex } from '@/utils/helpers' import { TxValidationError } from '@/validation/tx/common' +import { Job, Worker } from 'bullmq' +import Redis from 'ioredis' import type { IPoolWorkerConfig } from './workerTypes' +import { isInsufficientBalanceError } from '@/utils/web3Errors' +import { toBN } from 'web3-utils' + +const REVERTED_SET = 'reverted' +const RECHECK_ERROR = 'Waiting for next check' -interface HandlerConfig { - type: T - tx: WorkerTx - processResult: ProcessResult - logger: Logger - traceId?: string - jobId: string +async function markFailed(redis: Redis, ids: string[]) { + if (ids.length === 0) return + await redis.sadd(REVERTED_SET, ids) } -export async function createPoolTxWorker({ - redis, - 
mutex, - txManager, - validateTx, - treeProver, - feeManager, -}: IPoolWorkerConfig) { +async function checkMarked(redis: Redis, id: string) { + const inSet = await redis.sismember(REVERTED_SET, id) + return Boolean(inSet) +} + +export async function createPoolTxWorker({ redis, mutex, pool, txManager }: IPoolWorkerConfig) { const workerLogger = logger.child({ worker: 'pool' }) const WORKER_OPTIONS = { autorun: false, @@ -39,133 +32,89 @@ export async function createPoolTxWorker({ concurrency: 1, } - async function handleTx({ - type, - tx, - processResult, - logger, - traceId, - jobId, - }: HandlerConfig): Promise<[string, string]> { - const { data, outCommit, commitIndex, memo, rootAfter, nullifier } = processResult - - const gas = config.relayerGasLimit - const { txHash, rawTransaction, gasPrice, txConfig } = await txManager.prepareTx( - { - data, - gas: gas.toString(), - to: config.poolAddress, - }, - // XXX: Assumed that gasPrice was updated during fee validation - { shouldUpdateGasPrice: false } - ) - logger.info('Sending tx', { txHash }) - try { - await txManager.sendTransaction(rawTransaction) - } catch (e) { - if (isInsufficientBalanceError(e as Error)) { - const minimumBalance = gas.mul(toBN(getMaxRequiredGasPrice(gasPrice))) - logger.error('Insufficient balance, waiting for funds', { minimumBalance: minimumBalance.toString(10) }) - await Promise.all([poolTxQueue.pause(), sentTxQueue.pause()]) - waitForFunds( - web3, - config.relayerAddress, - () => Promise.all([poolTxQueue.resume(), sentTxQueue.resume()]), - minimumBalance, - config.insufficientBalanceCheckTimeout - ) - } - logger.warn('Tx send failed; it will be re-sent later', { txHash, error: (e as Error).message }) - } - - const prefixedMemo = buildPrefixedMemo(outCommit, txHash, memo) - - pool.optimisticState.updateState(commitIndex, outCommit, prefixedMemo) - - if (nullifier) { - logger.debug('Adding nullifier %s to OS', nullifier) - await pool.optimisticState.nullifiers.add([nullifier]) - } - - 
const sentJob = await sentTxQueue.add( - txHash, - { - poolJobId: jobId, - root: rootAfter, - outCommit, - commitIndex, - truncatedMemo: memo, - nullifier, - txConfig, - txPayload: { transactions: tx, traceId, type }, - prevAttempts: [[txHash, gasPrice]], - }, - { - delay: config.sentTxDelay, - } - ) - logger.info(`Added sentTxWorker job: ${sentJob.id}`) - return [txHash, sentJob.id as string] - } - - const poolTxWorkerProcessor = async (job: Job, PoolTxResult[]>) => { - const sentTxNum = await sentTxQueue.count() - if (sentTxNum >= config.maxSentQueueSize) { - throw new Error('Optimistic state overflow') - } - - const { transactions: txs, traceId, type } = job.data + const poolTxWorkerProcessor = async (job: Job>) => { + // TODO: handle queue overflow + const { traceId } = job.data const jobLogger = workerLogger.child({ jobId: job.id, traceId }) jobLogger.info('Processing...') - jobLogger.info('Received %s txs', txs.length) - - const txHashes: [string, string][] = [] - const baseConfig = { - logger: jobLogger, - traceId, - type, - jobId: job.id as string, + let processResult; + try { + await pool.validateTx( + job.data, + { + // TODO: optional checks + }, + traceId + ) + processResult = await pool.buildTx(job.data) + } catch(e) { + job.data.transaction.state = JobState.FAILED; + job.failedReason = (e as Error).message; + await job.update(job.data); + throw e; } - let handlerConfig: HandlerConfig - for (const payload of txs) { - let processResult: ProcessResult - if (type === WorkerTxType.DirectDeposit) { - const tx = payload as WorkerTx - jobLogger.info('Received direct deposit', { number: txs.length }) + const { data, func } = processResult - if (tx.deposits.length === 0) { - logger.warn('Empty direct deposit batch, skipping') - continue + const gas = 2000000; + const preparedTx = await txManager.prepareTx({ + txDesc: { + to: pool.network.pool.address(), // TODO: mpc + value: 0, + data, + }, + options: { + func, + // Assumed that gasPrice was updated during fee 
validation + shouldUpdateGasPrice: false, + // TODO: fee limit + }, + extraData: { + // TODO: abstract gas for EVM + gas, + }, + }) + const sendAttempt = preparedTx[1] + try { + await txManager.sendPreparedTx(preparedTx) + } catch (e) { + if (isInsufficientBalanceError(e as Error)) { + if (sendAttempt.extraData.gas && sendAttempt.extraData.gasPrice) { + const minimumBalance = toBN(sendAttempt.extraData.gas).mul(toBN(sendAttempt.extraData.gasPrice)); + logger.error('Insufficient balance, waiting for funds', { minimumBalance: minimumBalance.toString(10) }) + + await Promise.all([poolTxQueue.pause(), sentTxQueue.pause()]) + txManager.waitingForFunds( + minimumBalance, + () => Promise.all([poolTxQueue.resume(), sentTxQueue.resume()]) + ) } - - processResult = await buildDirectDeposits(tx, treeProver, pool.optimisticState) - } else if (type === WorkerTxType.Normal) { - const tx = payload as WorkerTx - - await validateTx(tx, pool, feeManager, traceId) - - processResult = await buildTx(tx, treeProver, pool.optimisticState) - } else { - throw new Error(`Unknown tx type: ${type}`) } + + logger.warn('Tx send failed; it will be re-sent later', { + txHash: preparedTx[1].txHash, + error: (e as Error).message, + }) + } + const txHash = sendAttempt.txHash + logger.info('Tx sent', { txHash }) - handlerConfig = { - ...baseConfig, - tx: payload, - processResult, - } + await pool.onSend(processResult, txHash) - const res = await handleTx(handlerConfig) - txHashes.push(res) - } + job.data.transaction.state = JobState.SENT + job.data.transaction.txHash = txHash + await job.update(job.data) - return txHashes + await sentTxQueue.add(txHash, { + poolJobId: job.id as string, + processResult, + prevAttempts: [sendAttempt], + }) } - const poolTxWorker = new Worker, PoolTxResult[]>( + const poolTxWorker = new Worker>( TX_QUEUE_NAME, job => withErrorLog( diff --git a/zp-relayer/workers/sentTxWorker.ts b/zp-relayer/workers/sentTxWorker.ts index 34ff8734..18336b9e 100644 --- 
a/zp-relayer/workers/sentTxWorker.ts +++ b/zp-relayer/workers/sentTxWorker.ts @@ -1,199 +1,15 @@ -import type Redis from 'ioredis' -import { toBN } from 'web3-utils' -import type { TransactionReceipt, TransactionConfig } from 'web3-core' +import { logger } from '@/lib/appLogger' +import { SendError } from '@/lib/network' +import { JobState, poolTxQueue } from '@/queue/poolTxQueue' +import { SentTxPayload } from '@/queue/sentTxQueue' +import { SENT_TX_QUEUE_NAME } from '@/utils/constants' +import { withErrorLog, withLoop, withMutex } from '@/utils/helpers' import { Job, Worker } from 'bullmq' -import { DIRECT_DEPOSIT_REPROCESS_NAME } from '@/utils/constants' -import config from '@/configs/relayerConfig' -import { pool } from '@/pool' -import { web3 } from '@/services/web3' -import { logger } from '@/services/appLogger' -import { getMaxRequiredGasPrice } from '@/services/gas-price' -import { buildPrefixedMemo, withErrorLog, withLoop, withMutex } from '@/utils/helpers' -import { OUTPLUSONE, SENT_TX_QUEUE_NAME } from '@/utils/constants' -import { isGasPriceError, isInsufficientBalanceError, isNonceError, isSameTransactionError } from '@/utils/web3Errors' -import { SendAttempt, SentTxPayload, sentTxQueue, SentTxResult, SentTxState } from '@/queue/sentTxQueue' -import { DirectDepositTxPayload, poolTxQueue, WorkerTxType } from '@/queue/poolTxQueue' -import { getNonce } from '@/utils/web3' import type { ISentWorkerConfig } from './workerTypes' -import type { TxManager } from '@/tx/TxManager' -const REVERTED_SET = 'reverted' const RECHECK_ERROR = 'Waiting for next check' -async function markFailed(redis: Redis, ids: string[]) { - if (ids.length === 0) return - await redis.sadd(REVERTED_SET, ids) -} - -async function checkMarked(redis: Redis, id: string) { - const inSet = await redis.sismember(REVERTED_SET, id) - return Boolean(inSet) -} - -async function clearOptimisticState() { - logger.info('Rollback optimistic state...') - pool.optimisticState.rollbackTo(pool.state) - 
logger.info('Clearing optimistic nullifiers...') - await pool.optimisticState.nullifiers.clear() - - const root1 = pool.state.getMerkleRoot() - const root2 = pool.optimisticState.getMerkleRoot() - logger.info(`Assert roots are equal: ${root1}, ${root2}, ${root1 === root2}`) -} - -async function handleMined( - { transactionHash, blockNumber }: TransactionReceipt, - { outCommit, commitIndex, nullifier, truncatedMemo, root }: SentTxPayload, - jobLogger = logger -): Promise { - // Successful - jobLogger.info('Transaction was successfully mined', { transactionHash, blockNumber }) - - const prefixedMemo = buildPrefixedMemo(outCommit, transactionHash, truncatedMemo) - pool.state.updateState(commitIndex, outCommit, prefixedMemo) - // Update tx hash in optimistic state tx db - pool.optimisticState.addTx(commitIndex * OUTPLUSONE, Buffer.from(prefixedMemo, 'hex')) - - // Add nullifier to confirmed state and remove from optimistic one - if (nullifier) { - jobLogger.info('Adding nullifier %s to PS', nullifier) - await pool.state.nullifiers.add([nullifier]) - jobLogger.info('Removing nullifier %s from OS', nullifier) - await pool.optimisticState.nullifiers.remove([nullifier]) - } - - const node1 = pool.state.getCommitment(commitIndex) - const node2 = pool.optimisticState.getCommitment(commitIndex) - jobLogger.info('Assert commitments are equal: %s, %s', node1, node2) - if (node1 !== node2) { - jobLogger.error('Commitments are not equal') - } - - const rootConfirmed = pool.state.getMerkleRoot() - jobLogger.info('Assert roots are equal') - if (rootConfirmed !== root) { - // TODO: Should be impossible but in such case - // we should recover from some checkpoint - jobLogger.error('Roots are not equal: %s should be %s', rootConfirmed, root) - } - - return [SentTxState.MINED, transactionHash, []] as SentTxResult -} - -async function handleReverted( - { transactionHash: txHash, blockNumber }: TransactionReceipt, - jobId: string, - redis: Redis, - jobLogger = logger -): Promise { - 
jobLogger.error('Transaction reverted', { txHash, blockNumber }) - - // Means that rollback was done previously, no need to do it now - if (await checkMarked(redis, jobId)) { - jobLogger.info('Job marked as failed, skipping') - return [SentTxState.REVERT, txHash, []] as SentTxResult - } - - await clearOptimisticState() - - // Send all jobs to re-process - // Validation of these jobs will be done in `poolTxWorker` - const waitingJobIds = [] - const reschedulePromises = [] - const newPoolJobIdMapping: Record = {} - const waitingJobs = await sentTxQueue.getJobs(['delayed', 'waiting']) - for (let wj of waitingJobs) { - // One of the jobs can be undefined, so we need to check it - // https://github.com/taskforcesh/bullmq/blob/master/src/commands/addJob-8.lua#L142-L143 - if (!wj?.id) continue - waitingJobIds.push(wj.id) - - const { txPayload } = wj.data - let reschedulePromise: Promise - - reschedulePromise = poolTxQueue.add(txHash, { - type: txPayload.type, - transactions: [txPayload.transactions], - traceId: txPayload.traceId, - }) - - // To not mess up traceId we add each transaction separately - reschedulePromises.push( - reschedulePromise.then(j => { - const newPoolJobId = j.id as string - newPoolJobIdMapping[wj.data.poolJobId] = newPoolJobId - return newPoolJobId - }) - ) - } - jobLogger.info('Marking ids %j as failed', waitingJobIds) - await markFailed(redis, waitingJobIds) - jobLogger.info('Rescheduling %d jobs to process...', waitingJobs.length) - const rescheduledIds = await Promise.all(reschedulePromises) - jobLogger.info('Update pool job id mapping %j ...', newPoolJobIdMapping) - await pool.state.jobIdsMapping.add(newPoolJobIdMapping) - - return [SentTxState.REVERT, txHash, rescheduledIds] as SentTxResult -} - -async function handleResend( - txConfig: TransactionConfig, - txManager: TxManager, - job: Job, - jobLogger = logger -) { - const [lastHash, lastGasPrice] = job.data.prevAttempts.at(-1) as SendAttempt - jobLogger.warn('Tx %s is not mined, resending', 
lastHash) - - const { - txConfig: newTxConfig, - gasPrice, - txHash, - rawTransaction, - } = await txManager.prepareTx(txConfig, { isResend: true }, jobLogger) - - job.data.prevAttempts.push([txHash, gasPrice]) - jobLogger.info('Re-send tx', { txHash }) - try { - await txManager.sendTransaction(rawTransaction) - } catch (e) { - const err = e as Error - jobLogger.warn('Tx resend failed', { error: err.message, txHash }) - if (isGasPriceError(err) || isSameTransactionError(err)) { - // Tx wasn't sent successfully, but still update last attempt's - // gasPrice to be accounted in the next iteration - await job.update({ - ...job.data, - }) - } else if (isInsufficientBalanceError(err)) { - // We don't want to take into account last gasPrice increase - job.data.prevAttempts.at(-1)![1] = lastGasPrice - - const minimumBalance = toBN(txConfig.gas!).mul(toBN(getMaxRequiredGasPrice(gasPrice))) - jobLogger.error('Insufficient balance, waiting for funds', { minimumBalance: minimumBalance.toString(10) }) - } else if (isNonceError(err)) { - jobLogger.warn('Nonce error', { error: err.message, txHash }) - // Throw suppressed error to be treated as a warning - throw new Error(RECHECK_ERROR) - } - // Error should be caught by `withLoop` to re-run job - throw e - } - - // Overwrite old tx recorded in optimistic state db with new tx hash - const { truncatedMemo, outCommit, commitIndex } = job.data - const prefixedMemo = buildPrefixedMemo(outCommit, txHash, truncatedMemo) - pool.optimisticState.addTx(commitIndex * OUTPLUSONE, Buffer.from(prefixedMemo, 'hex')) - - // Update job - await job.update({ - ...job.data, - txConfig: newTxConfig, - }) - await job.updateProgress({ txHash, gasPrice }) -} - -export async function createSentTxWorker({ redis, mutex, txManager }: ISentWorkerConfig) { +export async function createSentTxWorker({ redis, mutex, pool, txManager }: ISentWorkerConfig) { const workerLogger = logger.child({ worker: 'sent-tx' }) const WORKER_OPTIONS = { autorun: false, @@ -201,44 
+17,16 @@ export async function createSentTxWorker({ redis, mutex, txManager }: ISentWorke concurrency: 1, } - async function checkMined( - prevAttempts: SendAttempt[], - txNonce: number - ): Promise<[TransactionReceipt | null, boolean]> { - // Transaction was not mined - const actualNonce = await getNonce(web3, config.relayerAddress) - logger.info('Nonce value from RPC: %d; tx nonce: %d', actualNonce, txNonce) - if (actualNonce <= txNonce) { - return [null, false] - } - - let tx = null - // Iterate in reverse order to check the latest hash first - for (let i = prevAttempts.length - 1; i >= 0; i--) { - const txHash = prevAttempts[i][0] - logger.info('Verifying tx', { txHash }) - try { - tx = await web3.eth.getTransactionReceipt(txHash) - } catch (e) { - logger.warn('Cannot get tx receipt; RPC response: %s', (e as Error).message, { txHash }) - // Exception should be caught by `withLoop` to re-run job - throw e - } - if (tx && tx.blockNumber) return [tx, false] - } - - // Transaction was not mined, but nonce was increased - return [null, true] - } - const sentTxWorkerProcessor = async (job: Job, resendNum: number = 1) => { - const jobLogger = workerLogger.child({ jobId: job.id, traceId: job.data.txPayload.traceId, resendNum }) - - jobLogger.info('Verifying job %s', job.data.poolJobId) - const { prevAttempts, txConfig, txPayload } = job.data + const jobId = job.id as string + const jobLogger = workerLogger.child({ jobId, resendNum }) + const poolJobId = job.data.poolJobId + jobLogger.info('Verifying job %s', poolJobId) + const { prevAttempts, processResult } = job.data // Any thrown web3 error will re-trigger re-send loop iteration - const [tx, shouldReprocess] = await checkMined(prevAttempts, txConfig.nonce as number) + + let [tx, shouldReprocess] = await txManager.confirmTx(prevAttempts.map(a => a.txHash)) if (shouldReprocess) { // TODO: handle this case later @@ -249,11 +37,32 @@ export async function createSentTxWorker({ redis, mutex, txManager }: ISentWorke if 
(!tx) { // Resend with updated gas price - if (resendNum > config.sentTxLogErrorThreshold) { + if (resendNum > 10) { jobLogger.error('Too many unsuccessful re-sends') } - await handleResend(txConfig, txManager, job, jobLogger) + jobLogger.debug('Trying to resend...') + const { attempt, error } = await txManager.resendTx(prevAttempts) + if (attempt) { + job.data.prevAttempts.push(attempt) + } + if (error) { + if (error === SendError.GAS_PRICE_ERROR) { + throw new Error(RECHECK_ERROR) + } else if (error === SendError.INSUFFICIENT_BALANCE) { + // We don't want to take into account last gasPrice increase + job.data.prevAttempts.pop() + + // TODO: wait for top-up + throw new Error(RECHECK_ERROR) + } else { + throw new Error(RECHECK_ERROR) + } + } + + await job.update(job.data) + + // TODO: add pool onResend logic // Tx re-send successful // Throw error to re-run job after delay and @@ -261,25 +70,34 @@ export async function createSentTxWorker({ redis, mutex, txManager }: ISentWorke throw new Error(RECHECK_ERROR) } - if (tx.status) { - return await handleMined(tx, job.data, jobLogger) - } else { - if (txPayload.type === WorkerTxType.DirectDeposit) { - const deposits = (txPayload.transactions as DirectDepositTxPayload).deposits - jobLogger.info('Adding reverted direct deposit to reprocess list', { count: deposits.length }) - await redis.lpush(DIRECT_DEPOSIT_REPROCESS_NAME, ...deposits.map(d => JSON.stringify(d))) + const txHash = tx.txHash + const updatePoolJobState = async () => { + const poolJob = await poolTxQueue.getJob(poolJobId) + if (!poolJob) { + jobLogger.error('Pool job not found', { poolJobId }) + } else { + poolJob.data.transaction.state = JobState.COMPLETED + poolJob.data.transaction.txHash = txHash + await poolJob.update(poolJob.data) } - return await handleReverted(tx, job.id as string, redis, jobLogger) + } + if (tx.success) { + // Successful + jobLogger.info('Transaction was successfully mined', { txHash, blockNumber: tx.blockNumber }) + + await 
pool.onConfirmed(processResult, txHash, updatePoolJobState, poolJobId) + } else { + await pool.onFailed(txHash, poolJobId); } } - const sentTxWorker = new Worker( + const sentTxWorker = new Worker( SENT_TX_QUEUE_NAME, job => withErrorLog( withLoop( withMutex(mutex, (i: number) => sentTxWorkerProcessor(job, i)), - config.sentTxDelay, + 5000, [RECHECK_ERROR] ) ), diff --git a/zp-relayer/workers/workerTypes.ts b/zp-relayer/workers/workerTypes.ts index 3bdb9b98..9d556139 100644 --- a/zp-relayer/workers/workerTypes.ts +++ b/zp-relayer/workers/workerTypes.ts @@ -1,28 +1,21 @@ -import type { Redis } from 'ioredis' +import { TransactionManager } from '@/lib/network' +import { BasePool } from '@/pool/BasePool' import type { Mutex } from 'async-mutex' -import type { TxManager } from '@/tx/TxManager' -import type { Pool } from '@/pool' -import type { TxPayload } from '@/queue/poolTxQueue' -import type { Circuit, IProver } from '@/prover' -import type { FeeManager } from '@/services/fee' +import type { Redis } from 'ioredis' export interface IWorkerBaseConfig { redis: Redis + pool: BasePool } export interface IPoolWorkerConfig extends IWorkerBaseConfig { - validateTx: (tx: TxPayload, pool: Pool, feeManager: FeeManager, traceId?: string) => Promise - treeProver: IProver mutex: Mutex - txManager: TxManager - feeManager: FeeManager + txManager: TransactionManager } export interface ISentWorkerConfig extends IWorkerBaseConfig { mutex: Mutex - txManager: TxManager + txManager: TransactionManager } -export interface IDirectDepositWorkerConfig extends IWorkerBaseConfig { - directDepositProver: IProver -} +export interface IDirectDepositWorkerConfig extends IWorkerBaseConfig {}