From bddfd0e9c2fdb33f13349b2c2e69bb3d23cc9b63 Mon Sep 17 00:00:00 2001
From: Darun Seethammagari
Date: Wed, 4 Oct 2023 14:43:25 -0700
Subject: [PATCH 01/24] MVP of Pre Fetching Historical

---
 docker-compose.yml                      | 15 +++++++++++++++
 prometheus.yml                          |  7 +++++++
 runner/src/indexer/indexer.ts           |  8 +++++---
 runner/src/redis-client/redis-client.ts |  3 ++-
 4 files changed, 29 insertions(+), 4 deletions(-)
 create mode 100644 prometheus.yml

diff --git a/docker-compose.yml b/docker-compose.yml
index aa4dc242d..b7abcb4a9 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -88,6 +88,21 @@ services:
       HASURA_GRAPHQL_ENABLED_LOG_TYPES: startup, http-log, webhook-log, websocket-log, query-log
       HASURA_GRAPHQL_ADMIN_SECRET: myadminsecretkey
       HASURA_GRAPHQL_AUTH_HOOK: http://hasura-auth:4000/auth
+  grafana:
+    image: grafana/grafana
+    volumes:
+      - grafana:/var/lib/grafana
+    ports:
+      - "3000:3000"
+    environment:
+      - GF_SECURITY_ADMIN_PASSWORD=secret
+
+  prometheus:
+    image: prom/prometheus
+    volumes:
+      - ./prometheus.yml:/etc/prometheus/prometheus.yml
+    ports:
+      - "9090:9090"
 
 volumes:
   postgres:
diff --git a/prometheus.yml b/prometheus.yml
new file mode 100644
index 000000000..cd0eab3f2
--- /dev/null
+++ b/prometheus.yml
@@ -0,0 +1,7 @@
+global:
+  scrape_interval: 1s
+
+scrape_configs:
+  - job_name: 'queryapi-runner'
+    static_configs:
+      - targets: ['host.docker.internal:9180']
\ No newline at end of file
diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts
index e866652b2..a487291dd 100644
--- a/runner/src/indexer/indexer.ts
+++ b/runner/src/indexer/indexer.ts
@@ -1,7 +1,7 @@
 import fetch, { type Response } from 'node-fetch';
 import { VM } from 'vm2';
 import { S3Client, GetObjectCommand } from '@aws-sdk/client-s3';
-import { Block } from '@near-lake/primitives';
+import { Block, type StreamerMessage } from '@near-lake/primitives';
 import { Parser } from 'node-sql-parser';
 
 import { METRICS } from '../metrics';
@@ -60,9 +60,10 @@ export default class Indexer {
     blockHeight: number,
     functions: Record,
     isHistorical: boolean,
-    options: { provision?: boolean } = { provision: false }
+    options: { provision?: boolean } = { provision: false },
+    streamerMessage: StreamerMessage | null = null
   ): Promise {
-    const blockWithHelpers = Block.fromStreamerMessage(await this.fetchStreamerMessage(blockHeight, isHistorical));
+    const blockWithHelpers = Block.fromStreamerMessage(streamerMessage == null ? await this.fetchStreamerMessage(blockHeight, isHistorical) : streamerMessage);
 
     const lag = Date.now() - Math.floor(Number(blockWithHelpers.header().timestampNanosec) / 1000000);
@@ -137,6 +138,7 @@ export default class Indexer {
   }
 
   async fetchStreamerMessage (blockHeight: number, isHistorical: boolean): Promise<{ block: any, shards: any[] }> {
+    console.error('SHOULD NOT BE CALLED');
     if (!isHistorical) {
       const cachedMessage = await this.deps.redisClient.getStreamerMessage(blockHeight);
       if (cachedMessage) {
diff --git a/runner/src/redis-client/redis-client.ts b/runner/src/redis-client/redis-client.ts
index 18e11b854..30e207671 100644
--- a/runner/src/redis-client/redis-client.ts
+++ b/runner/src/redis-client/redis-client.ts
@@ -46,10 +46,11 @@ export default class RedisClient {
 
   async getNextStreamMessage (
     streamKey: string,
+    count = 1
   ): Promise {
     const results = await this.client.xRead(
       { key: streamKey, id: this.SMALLEST_STREAM_ID },
-      { COUNT: 1 }
+      { COUNT: count }
     );
 
     return results?.[0].messages as StreamMessage[];
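Taken together, the optional streamerMessage parameter on runFunctions and the new COUNT parameter on getNextStreamMessage are what make pre-fetching possible: a caller can read a batch of entries from the Redis stream and hand each decoded streamer message straight to runFunctions, bypassing the per-block S3 round trip inside fetchStreamerMessage. A minimal TypeScript sketch of that flow, where streamKey, functions, and the payload field names are illustrative assumptions rather than code from this series:

// Hypothetical prefetch loop -- names and payload fields are assumptions.
const streamKey = 'morgs.near/test_fn:historical:stream'; // assumed key format
const messages = await redisClient.getNextStreamMessage(streamKey, 10) ?? [];
for (const { message } of messages) {
  // Assumes each stream entry carries the block height and the raw streamer
  // message; the actual field names are not defined by this patch.
  const streamerMessage = JSON.parse(message.streamer_message);
  await indexer.runFunctions(Number(message.block_height), functions, true, { provision: false }, streamerMessage);
}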
From 1160091d721006acf66448d919870e1d92efa571 Mon Sep 17 00:00:00 2001
From: Darun Seethammagari
Date: Wed, 4 Oct 2023 16:42:55 -0700
Subject: [PATCH 02/24] Implement producer and consumer loops

---
 docker-compose.yml                      | 165 ++++++++++++------------
 runner/src/index.ts                     |   3 ++-
 runner/src/redis-client/redis-client.ts |   8 +++++---
 3 files changed, 90 insertions(+), 86 deletions(-)

diff --git a/docker-compose.yml b/docker-compose.yml
index b7abcb4a9..91a87e0c3 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,93 +1,93 @@
 version: "3.9"  # optional since v1.27.0

 services:
-  coordinator:
-    build:
-      context: ./indexer
-      args:
-        - CARGO_BUILD_MODE=debug
-    depends_on:
-      - redis
-    environment:
-      REDIS_CONNECTION_STRING: redis://redis
-      LAKE_AWS_ACCESS_KEY:
-      LAKE_AWS_SECRET_ACCESS_KEY:
-      QUEUE_AWS_ACCESS_KEY:
-      QUEUE_AWS_SECRET_ACCESS_KEY:
-      QUEUE_URL: MOCK
-      START_FROM_BLOCK_QUEUE_URL: MOCK
-      PORT: 9180
-      REGISTRY_CONTRACT_ID: dev-queryapi.dataplatform.near
-      AWS_QUEUE_REGION: eu-central-1
-    command:
-      - mainnet
-      - from-interruption
+  # coordinator:
+  #   build:
+  #     context: ./indexer
+  #     args:
+  #       - CARGO_BUILD_MODE=debug
+  #   depends_on:
+  #     - redis
+  #   environment:
+  #     REDIS_CONNECTION_STRING: redis://redis
+  #     LAKE_AWS_ACCESS_KEY:
+  #     LAKE_AWS_SECRET_ACCESS_KEY:
+  #     QUEUE_AWS_ACCESS_KEY:
+  #     QUEUE_AWS_SECRET_ACCESS_KEY:
+  #     QUEUE_URL: MOCK
+  #     START_FROM_BLOCK_QUEUE_URL: MOCK
+  #     PORT: 9180
+  #     REGISTRY_CONTRACT_ID: dev-queryapi.dataplatform.near
+  #     AWS_QUEUE_REGION: eu-central-1
+  #   command:
+  #     - mainnet
+  #     - from-interruption

-  runner:
-    build:
-      context: ./runner
-    depends_on:
-      - "hasura-graphql"
-      - "redis"
-    environment:
-      REGION: eu-central-1
-      HASURA_ENDPOINT: http://hasura-graphql:8080
-      HASURA_ADMIN_SECRET: myadminsecretkey
-      REDIS_CONNECTION_STRING: redis://redis
-      PGHOST: postgres
-      PGPORT: 5432
-      PGUSER: postgres
-      PGPASSWORD: postgrespassword
-      PGDATABASE: postgres
-      PORT: 9180
-      AWS_ACCESS_KEY_ID:
-      AWS_SECRET_ACCESS_KEY:
+  # runner:
+  #   build:
+  #     context: ./runner
+  #   depends_on:
+  #     - "hasura-graphql"
+  #     - "redis"
+  #   environment:
+  #     REGION: eu-central-1
+  #     HASURA_ENDPOINT: http://hasura-graphql:8080
+  #     HASURA_ADMIN_SECRET: myadminsecretkey
+  #     REDIS_CONNECTION_STRING: redis://redis
+  #     PGHOST: postgres
+  #     PGPORT: 5432
+  #     PGUSER: postgres
+  #     PGPASSWORD: postgrespassword
+  #     PGDATABASE: postgres
+  #     PORT: 9180
+  #     AWS_ACCESS_KEY_ID:
+  #     AWS_SECRET_ACCESS_KEY:

-  redis:
-    image: redis
-    command:
-      - redis-server
-      - "--save 60 1"
-      - "--loglevel warning"
-    volumes:
-      - redis:/data
-    ports:
-      - "6379:6379"
+  # redis:
+  #   image: redis
+  #   command:
+  #     - redis-server
+  #     - "--save 60 1"
+  #     - "--loglevel warning"
+  #   volumes:
+  #     - redis:/data
+  #   ports:
+  #     - "6379:6379"

-  postgres:
-    image: postgres:12
-    restart: always
-    volumes:
-      - postgres:/var/lib/postgresql/data
-    environment:
-      POSTGRES_PASSWORD: postgrespassword
-    ports:
-      - "5432:5432"
+  # postgres:
+  #   image: postgres:12
+  #   restart: always
+  #   volumes:
+  #     - postgres:/var/lib/postgresql/data
+  #   environment:
+  #     POSTGRES_PASSWORD: postgrespassword
+  #   ports:
+  #     - "5432:5432"

-  hasura-auth:
-    build:
-      context: ./hasura-authentication-service
-    ports:
-      - "4000:4000"
-    environment:
-      PORT: 4000
-      DEFAULT_HASURA_ROLE: append
+  # hasura-auth:
+  #   build:
+  #     context: ./hasura-authentication-service
+  #   ports:
+  #     - "4000:4000"
+  #   environment:
+  #     PORT: 4000
+  #     DEFAULT_HASURA_ROLE: append

-  hasura-graphql:
-    image: hasura/graphql-engine:latest
-    ports:
-      - "8080:8080"
-    depends_on:
-      - "postgres"
-      - "hasura-auth"
-    restart: always
-    environment:
-      HASURA_GRAPHQL_DATABASE_URL: postgres://postgres:postgrespassword@postgres:5432/postgres
-      HASURA_GRAPHQL_ENABLE_CONSOLE: "true"
-      HASURA_GRAPHQL_DEV_MODE: "true"
-      HASURA_GRAPHQL_ENABLED_LOG_TYPES: startup, http-log, webhook-log, websocket-log, query-log
-      HASURA_GRAPHQL_ADMIN_SECRET: myadminsecretkey
-      HASURA_GRAPHQL_AUTH_HOOK: http://hasura-auth:4000/auth
+  # hasura-graphql:
+  #   image: hasura/graphql-engine:latest
+  #   ports:
+  #     - "8080:8080"
+  #   depends_on:
+  #     - "postgres"
+  #     - "hasura-auth"
+  #   restart: always
+  #   environment:
+  #     HASURA_GRAPHQL_DATABASE_URL: postgres://postgres:postgrespassword@postgres:5432/postgres
+  #     HASURA_GRAPHQL_ENABLE_CONSOLE: "true"
+  #     HASURA_GRAPHQL_DEV_MODE: "true"
+  #     HASURA_GRAPHQL_ENABLED_LOG_TYPES: startup, http-log, webhook-log, websocket-log, query-log
+  #     HASURA_GRAPHQL_ADMIN_SECRET: myadminsecretkey
+  #     HASURA_GRAPHQL_AUTH_HOOK: http://hasura-auth:4000/auth
   grafana:
     image: grafana/grafana
     volumes:
       - grafana:/var/lib/grafana
     ports:
       - "3000:3000"
     environment:
       - GF_SECURITY_ADMIN_PASSWORD=secret

   prometheus:
     image: prom/prometheus
     volumes:
       - ./prometheus.yml:/etc/prometheus/prometheus.yml
     ports:
       - "9090:9090"

 volumes:
   postgres:
   redis:
+  grafana:
diff --git a/runner/src/index.ts b/runner/src/index.ts
index a483a9336..d9a90691e 100644
--- a/runner/src/index.ts
+++ b/runner/src/index.ts
@@ -17,7 +17,8 @@ void (async function main () {
   const streamHandlers: StreamHandlers = {};

   while (true) {
-    const streamKeys = await redisClient.getStreams();
+    // const streamKeys = await redisClient.getStreams();
+    const streamKeys = ['flatirons.near/sweat_blockheight:real_time:stream'];

     streamKeys.forEach((streamKey) => {
       if (streamHandlers[streamKey] !== undefined) {
diff --git a/runner/src/redis-client/redis-client.ts b/runner/src/redis-client/redis-client.ts
index 30e207671..26fc7cd83 100644
--- a/runner/src/redis-client/redis-client.ts
+++ b/runner/src/redis-client/redis-client.ts
@@ -46,10 +46,11 @@ export default class RedisClient {

   async getNextStreamMessage (
     streamKey: string,
-    count = 1
+    count = 1,
+    streamId = this.SMALLEST_STREAM_ID
   ): Promise {
     const results = await this.client.xRead(
-      { key: streamKey, id: this.SMALLEST_STREAM_ID },
+      { key: streamKey, id: streamId },
       { COUNT: count }
     );
@@ -65,8 +66,9 @@
   async getUnprocessedStreamMessages (
     streamKey: string,
+    startId = this.SMALLEST_STREAM_ID,
   ): Promise {
-    const results = await this.client.xRange(streamKey, this.SMALLEST_STREAM_ID, this.LARGEST_STREAM_ID);
+    const results = await this.client.xRange(streamKey, startId, this.LARGEST_STREAM_ID);

     return results as StreamMessage[];
   };
From a555058ab49d83cfd8d2c960a3f14d72e234456c Mon Sep 17 00:00:00 2001
From: Darun Seethammagari
Date: Wed, 4 Oct 2023 17:52:57 -0700
Subject: [PATCH 03/24] Updates to allow for test metrics without deleting messages

---
 runner/src/indexer/indexer.ts | 1 -
 1 file changed, 1 deletion(-)

diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts
index a487291dd..e0dfb449f 100644
--- a/runner/src/indexer/indexer.ts
+++ b/runner/src/indexer/indexer.ts
@@ -138,7 +138,6 @@ export default class Indexer {
   }

   async fetchStreamerMessage (blockHeight: number, isHistorical: boolean): Promise<{ block: any, shards: any[] }> {
-    console.error('SHOULD NOT BE CALLED');
     if (!isHistorical) {
       const cachedMessage = await this.deps.redisClient.getStreamerMessage(blockHeight);
       if (cachedMessage) {

From 79374f9e46d336ac2dc69db170bc85a6823513f0 Mon Sep 17 00:00:00 2001
From: Darun Seethammagari
Date: Thu, 5 Oct 2023 16:21:56 -0700
Subject: [PATCH 04/24] Add some metrics for more testing

---
 runner/src/metrics.ts | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/runner/src/metrics.ts b/runner/src/metrics.ts
index c3ab74b81..2296cb7d0 100644
--- a/runner/src/metrics.ts
+++ b/runner/src/metrics.ts
@@ -13,6 +13,12 @@ const EXECUTION_DURATION = new Histogram({
   labelNames: ['indexer', 'type'],
 });

+const BLOCK_WAIT_DURATION = new Gauge({
+  name: 'queryapi_runner_block_wait_duration_milliseconds',
+  help: 'Time an indexer function waited for a block before processing',
+  labelNames: ['indexer', 'type'],
+});
+
 const CACHE_HIT = new Counter({
   name: 'queryapi_runner_cache_hit',
   help: 'The number of times cache was hit successfully',
@@ -27,7 +33,9 @@ const CACHE_MISS = new Counter({

 export const METRICS = {
   EXECUTION_DURATION,
+  BLOCK_WAIT_DURATION,
   UNPROCESSED_STREAM_MESSAGES,
+  BLOCKS,
   CACHE_HIT,
   CACHE_MISS
 };

From c8971250ed5f8bc12df4664842473482c9641bc9 Mon Sep 17 00:00:00 2001
From: Darun Seethammagari
Date: Fri, 6 Oct 2023 13:53:26 -0700
Subject: [PATCH 05/24] Migrated S3 code to new class

---
 runner/src/indexer/indexer.test.ts           | 2550 ++++++++++++----------
 runner/src/indexer/indexer.ts                |   79 +-
 runner/src/streamer-message-fetcher/index.ts |    1 +
 .../s3-streamer-fetcher-tests.ts             |    0
 .../s3-streamer-fetcher.ts                   |   67 +
 5 files changed, 1356 insertions(+), 1341 deletions(-)
 create mode 100644 runner/src/streamer-message-fetcher/index.ts
 create mode 100644 runner/src/streamer-message-fetcher/s3-streamer-fetcher-tests.ts
 create mode 100644 runner/src/streamer-message-fetcher/s3-streamer-fetcher.ts

diff --git a/runner/src/indexer/indexer.test.ts b/runner/src/indexer/indexer.test.ts
index 38f9ce77c..40a970c90 100644
--- a/runner/src/indexer/indexer.test.ts
+++ b/runner/src/indexer/indexer.test.ts
@@ -1,1275 +1,1275 @@
-import { Block } from '@near-lake/primitives';
-import type fetch from 'node-fetch';
-import { type S3Client, GetObjectCommand } from '@aws-sdk/client-s3';
-import type RedisClient from '../redis-client';
-
-import Indexer from './indexer';
-import { VM } from 'vm2';
-
-describe('Indexer unit tests', () => {
-  const oldEnv = process.env;
-
-  const HASURA_ENDPOINT = 'mock-hasura-endpoint';
-  const HASURA_ADMIN_SECRET = 'mock-hasura-secret';
-  const HASURA_ROLE = 'morgs_near';
-  const INVALID_HASURA_ROLE = 'other_near';
-
-  const INDEXER_NAME = 'morgs.near/test_fn';
-
-  const SIMPLE_SCHEMA = `CREATE TABLE
-    "posts" (
-      "id" SERIAL NOT NULL,
-      "account_id" VARCHAR NOT NULL,
-      "block_height" DECIMAL(58, 0) NOT NULL,
-      "receipt_id" VARCHAR NOT NULL,
-      "content" TEXT NOT NULL,
-      "block_timestamp" DECIMAL(20, 0) NOT NULL,
-      "accounts_liked" JSONB NOT NULL DEFAULT '[]',
-      "last_comment_timestamp" DECIMAL(20, 0),
-      CONSTRAINT "posts_pkey" PRIMARY KEY ("id")
-    );`;
-
-  const SOCIAL_SCHEMA = `
-    CREATE TABLE
-      "posts" (
-        "id" SERIAL NOT NULL,
-        "account_id" VARCHAR NOT NULL,
-        "block_height" DECIMAL(58, 0) NOT NULL,
-        "receipt_id" VARCHAR NOT NULL,
-        "content" TEXT NOT NULL,
-        "block_timestamp" DECIMAL(20, 0) NOT NULL,
-        "accounts_liked" JSONB NOT NULL DEFAULT '[]',
-        "last_comment_timestamp" DECIMAL(20, 0),
-        CONSTRAINT "posts_pkey" PRIMARY KEY ("id")
-      );
-
-    CREATE TABLE
-      "comments" (
-        "id" SERIAL NOT NULL,
-        "post_id" SERIAL NOT NULL,
-        "account_id" VARCHAR NOT NULL,
-        "block_height" DECIMAL(58, 0) NOT NULL,
-        "content" TEXT NOT NULL,
-        "block_timestamp" DECIMAL(20, 0) NOT NULL,
-        "receipt_id" VARCHAR NOT NULL,
-        CONSTRAINT "comments_pkey" PRIMARY KEY ("id")
-      );
-
-    CREATE TABLE
-      "post_likes" (
-        "post_id" SERIAL NOT NULL,
-        "account_id" VARCHAR NOT NULL,
-        "block_height" DECIMAL(58, 0),
-        "block_timestamp" DECIMAL(20, 0) NOT NULL,
-        "receipt_id" VARCHAR NOT NULL,
-        CONSTRAINT "post_likes_pkey" PRIMARY KEY ("post_id", "account_id")
-      );`;
-
-  const STRESS_TEST_SCHEMA = `
-CREATE TABLE creator_quest (
-    account_id VARCHAR PRIMARY KEY,
-    num_components_created INTEGER NOT NULL DEFAULT 0,
-    completed BOOLEAN NOT NULL DEFAULT FALSE
-  );
-
-CREATE TABLE
-  composer_quest (
-    account_id VARCHAR PRIMARY KEY,
-    num_widgets_composed INTEGER NOT NULL DEFAULT 0,
-    completed BOOLEAN NOT NULL DEFAULT FALSE
-  );
-
-CREATE TABLE
-  "contractor - quest" (
-    account_id VARCHAR PRIMARY KEY,
-    num_contracts_deployed INTEGER NOT NULL DEFAULT 0,
-    completed BOOLEAN NOT NULL DEFAULT FALSE
-  );
-
-CREATE TABLE
-  "posts" (
-    "id" SERIAL NOT NULL,
-    "account_id" VARCHAR NOT NULL,
-    "block_height" DECIMAL(58, 0) NOT NULL,
-    "receipt_id" VARCHAR NOT NULL,
-    "content" TEXT NOT NULL,
-    "block_timestamp" DECIMAL(20, 0) NOT NULL,
-    "accounts_liked" JSONB NOT NULL DEFAULT '[]',
-    "last_comment_timestamp" DECIMAL(20, 0),
-    CONSTRAINT "posts_pkey" PRIMARY KEY ("id")
-  );
-
-CREATE TABLE
-  "comments" (
-    "id" SERIAL NOT NULL,
-    "post_id" SERIAL NOT NULL,
-    "account_id" VARCHAR NOT NULL,
-    "block_height" DECIMAL(58, 0) NOT NULL,
-    "content" TEXT NOT NULL,
-    "block_timestamp" DECIMAL(20, 0) NOT NULL,
-    "receipt_id" VARCHAR NOT NULL,
-    CONSTRAINT "comments_pkey" PRIMARY KEY ("id")
-  );
-
-CREATE TABLE
-  "post_likes" (
-    "post_id" SERIAL NOT NULL,
-    "account_id" VARCHAR NOT NULL,
-    "block_height" DECIMAL(58, 0),
-    "block_timestamp" DECIMAL(20, 0) NOT NULL,
-    "receipt_id" VARCHAR NOT NULL,
-    CONSTRAINT "post_likes_pkey" PRIMARY KEY ("post_id", "account_id")
-  );
-
-CREATE UNIQUE INDEX "posts_account_id_block_height_key" ON "posts" ("account_id" ASC, "block_height" ASC);
-
-CREATE UNIQUE INDEX "comments_post_id_account_id_block_height_key" ON "comments" (
-  "post_id" ASC,
-  "account_id" ASC,
-  "block_height" ASC
-);
-
-CREATE INDEX
-  "posts_last_comment_timestamp_idx" ON "posts" ("last_comment_timestamp" DESC);
-
-ALTER TABLE
-  "comments"
-ADD
-  CONSTRAINT "comments_post_id_fkey" FOREIGN KEY ("post_id") REFERENCES "posts" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION;
-
-ALTER TABLE
-  "post_likes"
-ADD
-  CONSTRAINT "post_likes_post_id_fkey" FOREIGN KEY ("post_id") REFERENCES "posts" ("id") ON DELETE CASCADE ON UPDATE NO ACTION;
-
-CREATE TABLE IF NOT EXISTS
-  "My Table1" (id serial PRIMARY KEY);
-
-CREATE TABLE
-  "Another-Table" (id serial PRIMARY KEY);
-
-CREATE TABLE
-IF NOT EXISTS
-  "Third-Table" (id serial PRIMARY KEY);
-
-CREATE TABLE
-  yet_another_table (id serial PRIMARY KEY);
-`;
-  const genericMockFetch = jest.fn()
-    .mockResolvedValue({
-      status: 200,
-      json: async () => ({
-        data: 'mock',
-      }),
-    });
-
-  const transparentRedis = {
-    getStreamerMessage: jest.fn()
-  } as unknown as RedisClient;
-
-  beforeEach(() => {
-    process.env = {
-      ...oldEnv,
-      HASURA_ENDPOINT,
-      HASURA_ADMIN_SECRET
-    };
-  });
-
-  afterAll(() => {
-    process.env = oldEnv;
-  });
-
-  test('Indexer.runFunctions() should execute all functions against the current block', async () => {
-    const mockFetch = jest.fn(() => ({
-      status: 200,
-      json: async () => ({
-        errors: null,
-      }),
-    }));
-    const blockHeight = 456;
-    const mockData = jest.fn().mockResolvedValue(
-      JSON.stringify(
-        {
-          block: {
-            chunks: [],
-            header: {
-              height: blockHeight
-            }
-          },
-          shards: {}
-        }
-      )
-    );
-    const mockRedis = {
-      getStreamerMessage: mockData
-    } as unknown as RedisClient;
-
-    const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: mockRedis });
-
-    const functions: Record = {};
-    functions['buildnear.testnet/test'] = {
-      code: `
-        const foo = 3;
-        block.result = context.graphql(\`mutation { set(functionName: "buildnear.testnet/test", key: "height", data: "\${block.blockHeight}")}\`);
-      `,
-      schema: SIMPLE_SCHEMA
-    };
-    await indexer.runFunctions(blockHeight, functions, false);
-
-    expect(mockFetch.mock.calls).toMatchSnapshot();
-  });
-
-  test('Indexer.fetchBlock() should fetch a block from S3', async () => {
-    const author = 'dokiacapital.poolv1.near';
-    const mockData = JSON.stringify({
-      author
-    });
-    const mockSend = jest.fn().mockResolvedValue({
-      Body: {
-        transformToString: () => mockData
-      }
-    });
-    const mockS3 = {
-      send: mockSend,
-    } as unknown as S3Client;
-
-    const indexer = new Indexer('mainnet', { s3: mockS3, redisClient: transparentRedis });
-
-    const blockHeight = 84333960;
-    const block = await indexer.fetchBlockPromise(blockHeight);
-    const params = {
-      Bucket: 'near-lake-data-mainnet',
-      Key: `${blockHeight.toString().padStart(12, '0')}/block.json`
-    };
-
-    expect(mockS3.send).toHaveBeenCalledTimes(1);
-    expect(JSON.stringify(mockSend.mock.calls[0][0])).toMatch(JSON.stringify(new GetObjectCommand(params)));
-    expect(block.author).toEqual(author);
-  });
-
-  test('Indexer.fetchShard() should fetch a shard from S3', async () => {
-    const mockData = JSON.stringify({});
-    const mockSend = jest.fn().mockResolvedValue({
-      Body: {
-        transformToString: () => mockData
-      }
-    });
-    const mockS3 = {
-      send: mockSend,
-    } as unknown as S3Client;
-    const indexer = new Indexer('mainnet', { s3: mockS3, redisClient: transparentRedis });
-
-    const blockHeight = 82699904;
-    const shard = 0;
-    const params = {
-      Bucket: 'near-lake-data-mainnet',
-      Key: `${blockHeight.toString().padStart(12, '0')}/shard_${shard}.json`
-    };
-    await indexer.fetchShardPromise(blockHeight, shard);
-
-    expect(JSON.stringify(mockSend.mock.calls[0][0])).toMatch(JSON.stringify(new GetObjectCommand(params)));
-  });
-
-  test('Indexer.fetchStreamerMessage() should fetch the message from cache and use it directly', async () => {
-    const blockHeight = 85233529;
-    const blockHash = 'xyz';
-    const getMessage = jest.fn()
-      .mockReturnValueOnce(JSON.stringify(
-        {
-          block: {
-            chunks: [0],
-            header: {
-              height: blockHeight,
-              hash: blockHash,
-            }
-          },
-          shards: {}
-        }
-      ));
-    const mockRedis = {
-      getStreamerMessage: getMessage
-    } as unknown as RedisClient;
-    const indexer = new Indexer('mainnet', { redisClient: mockRedis });
-
-    const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, false);
-
-    expect(getMessage).toHaveBeenCalledTimes(1);
-    expect(JSON.stringify(getMessage.mock.calls[0])).toEqual(
-      `[${blockHeight}]`
-    );
-    const block = Block.fromStreamerMessage(streamerMessage);
-
-    expect(block.blockHeight).toEqual(blockHeight);
-    expect(block.blockHash).toEqual(blockHash);
-  });
-
-  test('Indexer.fetchStreamerMessage() should fetch the block and shards from S3 upon cache miss', async () => {
-    const blockHeight = 85233529;
-    const blockHash = 'xyz';
-    const mockSend = jest.fn()
-      .mockReturnValueOnce({ // block
-        Body: {
-          transformToString: () => JSON.stringify({
-            chunks: [0],
-            header: {
-              height: blockHeight,
-              hash: blockHash,
-            }
-          })
-        }
-      })
-      .mockReturnValue({ // shard
-        Body: {
-          transformToString: () => JSON.stringify({})
-        }
-      });
-    const mockS3 = {
-      send: mockSend,
-    } as unknown as S3Client;
-    const indexer = new Indexer('mainnet', { s3: mockS3, redisClient: transparentRedis });
-
-    const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, false);
-
-    expect(mockSend).toHaveBeenCalledTimes(5);
-    expect(JSON.stringify(mockSend.mock.calls[0][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({
-      Bucket: 'near-lake-data-mainnet',
-      Key: `${blockHeight.toString().padStart(12, '0')}/block.json`
-    })));
-    expect(JSON.stringify(mockSend.mock.calls[1][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({
-      Bucket: 'near-lake-data-mainnet',
-      Key: `${blockHeight.toString().padStart(12, '0')}/shard_0.json`
-    })));
-    expect(transparentRedis.getStreamerMessage).toHaveBeenCalledTimes(1);
-
-    const block = Block.fromStreamerMessage(streamerMessage);
-
-    expect(block.blockHeight).toEqual(blockHeight);
-    expect(block.blockHash).toEqual(blockHash);
-  });
-
-  test('Indexer.fetchStreamerMessage() should fetch the block and shards from S3 and not cache and construct the streamer message if historical', async () => {
-    const blockHeight = 85233529;
-    const blockHash = 'xyz';
-    const mockSend = jest.fn()
-      .mockReturnValueOnce({ // block
-        Body: {
-          transformToString: () => JSON.stringify({
-            chunks: [0],
-            header: {
-              height: blockHeight,
-              hash: blockHash,
-            }
-          })
-        }
-      })
-      .mockReturnValue({ // shard
-        Body: {
-          transformToString: () => JSON.stringify({})
-        }
-      });
-    const mockS3 = {
-      send: mockSend,
-    } as unknown as S3Client;
-    const mockRedis = {
-      getStreamerMessage: jest.fn()
-    } as unknown as RedisClient;
-    const indexer = new Indexer('mainnet', { s3: mockS3, redisClient: mockRedis });
-
-    const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, true);
-
-    expect(mockSend).toHaveBeenCalledTimes(5);
-    expect(JSON.stringify(mockSend.mock.calls[0][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({
-      Bucket: 'near-lake-data-mainnet',
-      Key: `${blockHeight.toString().padStart(12, '0')}/block.json`
-    })));
-    expect(JSON.stringify(mockSend.mock.calls[1][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({
-      Bucket: 'near-lake-data-mainnet',
-      Key: `${blockHeight.toString().padStart(12, '0')}/shard_0.json`
-    })));
-    expect(mockRedis.getStreamerMessage).toHaveBeenCalledTimes(0);
-
-    const block = Block.fromStreamerMessage(streamerMessage);
-
-    expect(block.blockHeight).toEqual(blockHeight);
-    expect(block.blockHash).toEqual(blockHash);
-  });
-
-  test('Indexer.transformIndexerFunction() applies the necessary transformations', () => {
-    const indexer = new Indexer('mainnet', { redisClient: transparentRedis });
-
-    const transformedFunction = indexer.transformIndexerFunction('console.log(\'hello\')');
-
-    expect(transformedFunction).toEqual(`
-      async function f(){
-        console.log('hello')
-      };
-      f();
-    `);
-  });
-
-  test('Indexer.buildContext() allows execution of arbitrary GraphQL operations', async () => {
-    const mockFetch = jest.fn()
-      .mockResolvedValueOnce({
-        status: 200,
-        json: async () => ({
-          data: {
-            greet: 'hello'
-          }
-        })
-      })
-      .mockResolvedValueOnce({
-        status: 200,
-        json: async () => ({
-          data: {
-            newGreeting: {
-              success: true
-            }
-          }
-        })
-      });
-    const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis });
-
-    const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE);
-
-    const query = `
-      query {
-        greet()
-      }
-    `;
-    const { greet } = await context.graphql(query) as { greet: string };
-
-    const mutation = `
-      mutation {
-        newGreeting(greeting: "${greet} morgan") {
-          success
-        }
-      }
-    `;
-    const { newGreeting: { success } } = await context.graphql(mutation);
-
-    expect(greet).toEqual('hello');
-    expect(success).toEqual(true);
-    expect(mockFetch.mock.calls[0]).toEqual([
-      `${HASURA_ENDPOINT}/v1/graphql`,
-      {
-        method: 'POST',
-        headers: {
-          'Content-Type': 'application/json',
-          'X-Hasura-Use-Backend-Only-Permissions': 'true',
-          'X-Hasura-Role': 'morgs_near',
-          'X-Hasura-Admin-Secret': HASURA_ADMIN_SECRET
-        },
-        body: JSON.stringify({ query })
-      }
-    ]);
-    expect(mockFetch.mock.calls[1]).toEqual([
-      `${HASURA_ENDPOINT}/v1/graphql`,
-      {
-        method: 'POST',
-        headers: {
-          'Content-Type': 'application/json',
-          'X-Hasura-Use-Backend-Only-Permissions': 'true',
-          'X-Hasura-Role': 'morgs_near',
-          'X-Hasura-Admin-Secret': HASURA_ADMIN_SECRET
-        },
-        body: JSON.stringify({ query: mutation })
-      }
-    ]);
-  });
-
-  test('Indexer.buildContext() can fetch from the near social api', async () => {
-    const mockFetch = jest.fn();
-    const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis });
-
-    const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE);
-
-    await context.fetchFromSocialApi('/index', {
-      method: 'POST',
-      headers: {
-        'Content-Type': 'application/json',
-      },
-      body: JSON.stringify({
-        action: 'post',
-        key: 'main',
-        options: {
-          limit: 1,
-          order: 'desc'
-        }
-      })
-    });
-
-    expect(mockFetch.mock.calls).toMatchSnapshot();
-  });
-
-  test('Indexer.buildContext() throws when a GraphQL response contains errors', async () => {
-    const mockFetch = jest.fn()
-      .mockResolvedValue({
-        json: async () => ({
-          errors: ['boom']
-        })
-      });
-    const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis });
-
-    const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, INVALID_HASURA_ROLE);
-
-    await expect(async () => await context.graphql('query { hello }')).rejects.toThrow('boom');
-  });
-
-  test('Indexer.buildContext() handles GraphQL variables', async () => {
-    const mockFetch = jest.fn()
-      .mockResolvedValue({
-        status: 200,
-        json: async () => ({
-          data: 'mock',
-        }),
-      });
-    const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis });
-
-    const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE);
-
-    const query = 'query($name: String) { hello(name: $name) }';
-    const variables = { name: 'morgan' };
-    await context.graphql(query, variables);
-
-    expect(mockFetch.mock.calls[0]).toEqual([
-      `${HASURA_ENDPOINT}/v1/graphql`,
-      {
-        method: 'POST',
-        headers: {
-          'Content-Type': 'application/json',
-          'X-Hasura-Use-Backend-Only-Permissions': 'true',
-          'X-Hasura-Role': 'morgs_near',
-          'X-Hasura-Admin-Secret': HASURA_ADMIN_SECRET
-        },
-        body: JSON.stringify({
-          query,
-          variables,
-        }),
-      },
-    ]);
-  });
-
-  test('GetTables works for a variety of input schemas', async () => {
-    const indexer = new Indexer('mainnet', { redisClient: transparentRedis });
-
-    const simpleSchemaTables = indexer.getTableNames(SIMPLE_SCHEMA);
-    expect(simpleSchemaTables).toStrictEqual(['posts']);
-
-    const socialSchemaTables = indexer.getTableNames(SOCIAL_SCHEMA);
-    expect(socialSchemaTables).toStrictEqual(['posts', 'comments', 'post_likes']);
-
-    const stressTestSchemaTables = indexer.getTableNames(STRESS_TEST_SCHEMA);
-    expect(stressTestSchemaTables).toStrictEqual([
-      'creator_quest',
-      'composer_quest',
-      'contractor - quest',
-      'posts',
-      'comments',
-      'post_likes',
-      'My Table1',
-      'Another-Table',
-      'Third-Table',
-      'yet_another_table']);
-
-    // Test that duplicate table names throw an error
-    const duplicateTableSchema = `CREATE TABLE
-    "posts" (
-      "id" SERIAL NOT NULL
-    );
-    CREATE TABLE posts (
-      "id" SERIAL NOT NULL
-    );`;
-    expect(() => {
-      indexer.getTableNames(duplicateTableSchema);
-    }).toThrow('Table posts already exists in schema. Table names must be unique. Quotes are not allowed as a differentiator between table names.');
-
-    // Test that schema with no tables throws an error
-    expect(() => {
-      indexer.getTableNames('');
-    }).toThrow('Schema does not have any tables. There should be at least one table.');
-  });
-
-  test('SanitizeTableName works properly on many test cases', async () => {
-    const indexer = new Indexer('mainnet', { redisClient: transparentRedis });
-
-    expect(indexer.sanitizeTableName('table_name')).toStrictEqual('TableName');
-    expect(indexer.sanitizeTableName('tablename')).toStrictEqual('Tablename'); // name is not capitalized
-    expect(indexer.sanitizeTableName('table name')).toStrictEqual('TableName');
-    expect(indexer.sanitizeTableName('table!name!')).toStrictEqual('TableName');
-    expect(indexer.sanitizeTableName('123TABle')).toStrictEqual('_123TABle'); // underscore at beginning
-    expect(indexer.sanitizeTableName('123_tABLE')).toStrictEqual('_123TABLE'); // underscore at beginning, capitalization
-    expect(indexer.sanitizeTableName('some-table_name')).toStrictEqual('SomeTableName');
-    expect(indexer.sanitizeTableName('!@#$%^&*()table@)*&(%#')).toStrictEqual('Table'); // All special characters removed
-    expect(indexer.sanitizeTableName('T_name')).toStrictEqual('TName');
-    expect(indexer.sanitizeTableName('_table')).toStrictEqual('Table'); // Starting underscore was removed
-  });
-
-  test('indexer fails to build context.db due to collision on sanitized table names', async () => {
-    const indexer = new Indexer('mainnet', { redisClient: transparentRedis });
-
-    const schemaWithDuplicateSanitizedTableNames = `CREATE TABLE
-    "test table" (
-      "id" SERIAL NOT NULL
-    );
-    CREATE TABLE "test!table" (
-      "id" SERIAL NOT NULL
-    );`;
-
-    // Does not outright throw an error but instead returns an empty object
-    expect(indexer.buildDatabaseContext('test_account', 'test_schema_name', schemaWithDuplicateSanitizedTableNames, 1))
-      .toStrictEqual({});
-  });
-
-  test('indexer builds context and inserts an objects into existing table', async () => {
-    const mockDmlHandler: any = {
-      create: jest.fn().mockImplementation(() => {
-        return { insert: jest.fn().mockReturnValue([{ colA: 'valA' }, { colA: 'valA' }]) };
-      })
-    };
-
-    const indexer = new Indexer('mainnet', {
-      fetch: genericMockFetch as unknown as typeof fetch,
-      redisClient: transparentRedis,
-      DmlHandler: mockDmlHandler
-    });
-    const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres');
-
-    const objToInsert = [{
-      account_id: 'morgs_near',
-      block_height: 1,
-      receipt_id: 'abc',
-      content: 'test',
-      block_timestamp: 800,
-      accounts_liked: JSON.stringify(['cwpuzzles.near', 'devbose.near'])
-    },
-    {
-      account_id: 'morgs_near',
-      block_height: 2,
-      receipt_id: 'abc',
-      content: 'test',
-      block_timestamp: 801,
-      accounts_liked: JSON.stringify(['cwpuzzles.near'])
-    }];
-
-    const result = await context.db.Posts.insert(objToInsert);
-    expect(result.length).toEqual(2);
-  });
-
-  test('indexer builds context and selects objects from existing table', async () => {
-    const selectFn = jest.fn();
-    selectFn.mockImplementation((...args) => {
-      // Expects limit to be last parameter
-      return args[args.length - 1] === null ? [{ colA: 'valA' }, { colA: 'valA' }] : [{ colA: 'valA' }];
-    });
-    const mockDmlHandler: any = {
-      create: jest.fn().mockImplementation(() => {
-        return { select: selectFn };
-      })
-    };
-
-    const indexer = new Indexer('mainnet', {
-      fetch: genericMockFetch as unknown as typeof fetch,
-      redisClient: transparentRedis,
-      DmlHandler: mockDmlHandler
-    });
-    const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres');
-
-    const objToSelect = {
-      account_id: 'morgs_near',
-      receipt_id: 'abc',
-    };
-    const result = await context.db.Posts.select(objToSelect);
-    expect(result.length).toEqual(2);
-    const resultLimit = await context.db.Posts.select(objToSelect, 1);
-    expect(resultLimit.length).toEqual(1);
-  });
-
-  test('indexer builds context and updates multiple objects from existing table', async () => {
-    const mockDmlHandler: any = {
-      create: jest.fn().mockImplementation(() => {
-        return {
-          update: jest.fn().mockImplementation((_, __, whereObj, updateObj) => {
-            if (whereObj.account_id === 'morgs_near' && updateObj.content === 'test_content') {
-              return [{ colA: 'valA' }, { colA: 'valA' }];
-            }
-            return [{}];
-          })
-        };
-      })
-    };
-
-    const indexer = new Indexer('mainnet', {
-      fetch: genericMockFetch as unknown as typeof fetch,
-      redisClient: transparentRedis,
-      DmlHandler: mockDmlHandler
-    });
-    const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres');
-
-    const whereObj = {
-      account_id: 'morgs_near',
-      receipt_id: 'abc',
-    };
-    const updateObj = {
-      content: 'test_content',
-      block_timestamp: 805,
-    };
-    const result = await context.db.Posts.update(whereObj, updateObj);
-    expect(result.length).toEqual(2);
-  });
-
-  test('indexer builds context and upserts on existing table', async () => {
-    const mockDmlHandler: any = {
-      create: jest.fn().mockImplementation(() => {
-        return {
-          upsert: jest.fn().mockImplementation((_, __, objects, conflict, update) => {
-            if (objects.length === 2 && conflict.includes('account_id') && update.includes('content')) {
-              return [{ colA: 'valA' }, { colA: 'valA' }];
-            } else if (objects.length === 1 && conflict.includes('account_id') && update.includes('content')) {
-              return [{ colA: 'valA' }];
-            }
-            return [{}];
-          })
-        };
-      })
-    };
-
-    const indexer = new Indexer('mainnet', {
-      fetch: genericMockFetch as unknown as typeof fetch,
-      redisClient: transparentRedis,
-      DmlHandler: mockDmlHandler
-    });
-    const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres');
-
-    const objToInsert = [{
-      account_id: 'morgs_near',
-      block_height: 1,
-      receipt_id: 'abc',
-      content: 'test',
-      block_timestamp: 800,
-      accounts_liked: JSON.stringify(['cwpuzzles.near', 'devbose.near'])
-    },
-    {
-      account_id: 'morgs_near',
-      block_height: 2,
-      receipt_id: 'abc',
-      content: 'test',
-      block_timestamp: 801,
-      accounts_liked: JSON.stringify(['cwpuzzles.near'])
-    }];
-
-    let result = await context.db.Posts.upsert(objToInsert, ['account_id', 'block_height'], ['content', 'block_timestamp']);
-    expect(result.length).toEqual(2);
-    result = await context.db.Posts.upsert(objToInsert[0], ['account_id', 'block_height'], ['content', 'block_timestamp']);
-    expect(result.length).toEqual(1);
-  });
-
-  test('indexer builds context and deletes objects from existing table', async () => {
-    const mockDmlHandler: any = {
-      create: jest.fn().mockImplementation(() => {
-        return { delete: jest.fn().mockReturnValue([{ colA: 'valA' }, { colA: 'valA' }]) };
-      })
-    };
-
-    const indexer = new Indexer('mainnet', {
-      fetch: genericMockFetch as unknown as typeof fetch,
-      redisClient: transparentRedis,
-      DmlHandler: mockDmlHandler
-    });
-    const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres');
-
-    const deleteFilter = {
-      account_id: 'morgs_near',
-      receipt_id: 'abc',
-    };
-    const result = await context.db.Posts.delete(deleteFilter);
-    expect(result.length).toEqual(2);
-  });
-
-  test('indexer builds context and verifies all methods generated', async () => {
-    const mockDmlHandler: any = {
-      create: jest.fn()
-    };
-
-    const indexer = new Indexer('mainnet', {
-      fetch: genericMockFetch as unknown as typeof fetch,
-      redisClient: transparentRedis,
-      DmlHandler: mockDmlHandler
-    });
-    const context = indexer.buildContext(STRESS_TEST_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres');
-
-    expect(Object.keys(context.db)).toStrictEqual([
-      'CreatorQuest',
-      'ComposerQuest',
-      'ContractorQuest',
-      'Posts',
-      'Comments',
-      'PostLikes',
-      'MyTable1',
-      'AnotherTable',
-      'ThirdTable',
-      'YetAnotherTable']);
-    expect(Object.keys(context.db.CreatorQuest)).toStrictEqual([
-      'insert',
-      'select',
-      'update',
-      'upsert',
-      'delete']);
-    expect(Object.keys(context.db.PostLikes)).toStrictEqual([
-      'insert',
-      'select',
-      'update',
-      'upsert',
-      'delete']);
-    expect(Object.keys(context.db.MyTable1)).toStrictEqual([
-      'insert',
-      'select',
-      'update',
-      'upsert',
-      'delete']);
-  });
-
-  test('indexer builds context and returns empty array if failed to generate db methods', async () => {
-    const mockDmlHandler: any = {
-      create: jest.fn()
-    };
-
-    const indexer = new Indexer('mainnet', {
-      fetch: genericMockFetch as unknown as typeof fetch,
-      redisClient: transparentRedis,
-      DmlHandler: mockDmlHandler
-    });
-    const context = indexer.buildContext('', 'morgs.near/social_feed1', 1, 'postgres');
-
-    expect(Object.keys(context.db)).toStrictEqual([]);
-  });
-
-  test('Indexer.runFunctions() allows imperative execution of GraphQL operations', async () => {
-    const postId = 1;
-    const commentId = 2;
-    const blockHeight = 82699904;
-    const mockFetch = jest.fn()
-      .mockReturnValueOnce({ // starting log
-        status: 200,
-        json: async () => ({
-          data: {
-            indexer_log_store: [
-              {
-                id: '12345',
-              },
-            ],
-          },
-        }),
-      })
-      .mockReturnValueOnce({
-        status: 200,
-        json: async () => ({
-          errors: null,
-        }),
-      })
-      .mockReturnValueOnce({ // query
-        status: 200,
-        json: async () => ({
-          data: {
-            posts: [
-              {
-                id: postId,
-              },
-            ],
-          },
-        }),
-      })
-      .mockReturnValueOnce({ // mutation
-        status: 200,
-        json: async () => ({
-          data: {
-            insert_comments: {
-              returning: {
-                id: commentId,
-              },
-            },
-          },
-        }),
-      })
-      .mockReturnValueOnce({
-        status: 200,
-        json: async () => ({
-          errors: null,
-        }),
-      });
-
-    const mockS3 = {
-      send: jest.fn()
-        .mockResolvedValueOnce({ // block
-          Body: {
-            transformToString: () => JSON.stringify({
-              chunks: [0],
-              header: {
-                height: blockHeight,
-              },
-            }),
-          },
-        })
-        .mockResolvedValue({ // shard
-          Body: {
-            transformToString: () => JSON.stringify({})
-          },
-        }),
-    } as unknown as S3Client;
-    const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis });
-
-    const functions: Record = {};
-    functions['buildnear.testnet/test'] = {
-      code: `
-        const { posts } = await context.graphql(\`
-          query {
-            posts(where: { id: { _eq: 1 } }) {
-              id
-            }
-          }
-        \`);
-
-        if (!posts || posts.length === 0) {
-          return;
-        }
-
-        const [post] = posts;
-
-        const { insert_comments: { returning: { id } } } = await context.graphql(\`
-          mutation {
-            insert_comments(
-              objects: {account_id: "morgs.near", block_height: \${block.blockHeight}, content: "cool post", post_id: \${post.id}}
-            ) {
-              returning {
-                id
-              }
-            }
-          }
-        \`);
-
-        return (\`Created comment \${id} on post \${post.id}\`)
-      `,
-      schema: SIMPLE_SCHEMA
-    };
-
-    await indexer.runFunctions(blockHeight, functions, false);
-
-    expect(mockFetch.mock.calls).toMatchSnapshot();
-  });
-
-  test('Indexer.runFunctions() console.logs', async () => {
-    const logs: string[] = [];
-    const context = {
-      log: (...m: string[]) => {
-        logs.push(...m);
-      }
-    };
-    const vm = new VM();
-    vm.freeze(context, 'context');
-    vm.freeze(context, 'console');
-    await vm.run('console.log("hello", "brave new"); context.log("world")');
-    expect(logs).toEqual(['hello', 'brave new', 'world']);
-  });
-
-  test('Errors thrown in VM can be caught outside the VM', async () => {
-    const vm = new VM();
-    expect(() => {
-      vm.run("throw new Error('boom')");
-    }).toThrow('boom');
-  });
-
-  test('Indexer.runFunctions() catches errors', async () => {
-    const mockFetch = jest.fn(() => ({
-      status: 200,
-      json: async () => ({
-        errors: null,
-      }),
-    }));
-    const blockHeight = 456;
-    const mockS3 = {
-      send: jest.fn().mockResolvedValue({
-        Body: {
-          transformToString: () => JSON.stringify({
-            chunks: [],
-            header: {
-              height: blockHeight
-            }
-          })
-        }
-      }),
-    } as unknown as S3Client;
-    const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis });
-
-    const functions: Record = {};
-    functions['buildnear.testnet/test'] = {
-      code: `
-        throw new Error('boom');
-      `,
-      schema: SIMPLE_SCHEMA
-    };
-
-    await expect(indexer.runFunctions(blockHeight, functions, false)).rejects.toThrow(new Error('boom'));
-    expect(mockFetch.mock.calls).toMatchSnapshot();
-  });
-
-  test('Indexer.runFunctions() provisions a GraphQL endpoint with the specified schema', async () => {
-    const blockHeight = 82699904;
-    const mockFetch = jest.fn(() => ({
-      status: 200,
-      json: async () => ({
-        errors: null,
-      }),
-    }));
-    const mockS3 = {
-      send: jest
-        .fn()
-        .mockResolvedValueOnce({ // block
-          Body: {
-            transformToString: () => JSON.stringify({
-              chunks: [0],
-              header: {
-                height: blockHeight,
-              },
-            }),
-          },
-        })
-        .mockResolvedValue({ // shard
-          Body: {
-            transformToString: () => JSON.stringify({})
-          },
-        }),
-    } as unknown as S3Client;
-    const provisioner: any = {
-      isUserApiProvisioned: jest.fn().mockReturnValue(false),
-      provisionUserApi: jest.fn(),
-    };
-    const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis, provisioner });
-
-    const functions = {
-      'morgs.near/test': {
-        account_id: 'morgs.near',
-        function_name: 'test',
-        code: '',
-        schema: SIMPLE_SCHEMA,
-      }
-    };
-    await indexer.runFunctions(1, functions, false, { provision: true });
-
-    expect(provisioner.isUserApiProvisioned).toHaveBeenCalledWith('morgs.near', 'test');
-    expect(provisioner.provisionUserApi).toHaveBeenCalledTimes(1);
-    expect(provisioner.provisionUserApi).toHaveBeenCalledWith(
-      'morgs.near',
-      'test',
-      SIMPLE_SCHEMA
-    );
-  });
-
-  test('Indexer.runFunctions() skips provisioning if the endpoint exists', async () => {
-    const blockHeight = 82699904;
-    const mockFetch = jest.fn(() => ({
-      status: 200,
-      json: async () => ({
-        errors: null,
-      }),
-    }));
-    const mockS3 = {
-      send: jest
-        .fn()
-        .mockResolvedValueOnce({ // block
-          Body: {
-            transformToString: () => JSON.stringify({
-              chunks: [0],
-              header: {
-                height: blockHeight,
-              },
-            }),
-          },
-        })
-        .mockResolvedValue({ // shard
-          Body: {
-            transformToString: () => JSON.stringify({})
-          },
-        }),
-    } as unknown as S3Client;
-    const provisioner: any = {
-      isUserApiProvisioned: jest.fn().mockReturnValue(true),
-      provisionUserApi: jest.fn(),
-    };
-    const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis, provisioner });
-
-    const functions: Record = {
-      'morgs.near/test': {
-        code: '',
-        schema: SIMPLE_SCHEMA,
-      }
-    };
-    await indexer.runFunctions(1, functions, false, { provision: true });
-
-    expect(provisioner.provisionUserApi).not.toHaveBeenCalled();
-  });
-
-  test('Indexer.runFunctions() supplies the required role to the GraphQL endpoint', async () => {
-    const blockHeight = 82699904;
-    const mockFetch = jest.fn(() => ({
-      status: 200,
-      json: async () => ({
-        errors: null,
-      }),
-    }));
-    const mockS3 = {
-      send: jest
-        .fn()
-        .mockResolvedValueOnce({ // block
-          Body: {
-            transformToString: () => JSON.stringify({
-              chunks: [0],
-              header: {
-                height: blockHeight,
-              },
-            }),
-          },
-        })
-        .mockResolvedValue({ // shard
-          Body: {
-            transformToString: () => JSON.stringify({})
-          },
-        }),
-    } as unknown as S3Client;
-    const provisioner: any = {
-      isUserApiProvisioned: jest.fn().mockReturnValue(true),
-      provisionUserApi: jest.fn(),
-    };
-    const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis, provisioner });
-
-    const functions: Record = {
-      'morgs.near/test': {
-        code: `
-          context.graphql(\`mutation { set(functionName: "buildnear.testnet/test", key: "height", data: "\${block.blockHeight}")}\`);
-        `,
-        schema: SIMPLE_SCHEMA,
-      }
-    };
-    await indexer.runFunctions(blockHeight, functions, false, { provision: true });
-
-    expect(provisioner.provisionUserApi).not.toHaveBeenCalled();
-    expect(mockFetch.mock.calls).toMatchSnapshot();
-  });
-
-  test('Indexer.runFunctions() logs provisioning failures', async () => {
-    const blockHeight = 82699904;
-    const mockFetch = jest.fn(() => ({
-      status: 200,
-      json: async () => ({
-        errors: null,
-      }),
-    }));
-    const mockS3 = {
-      send: jest
-        .fn()
-        .mockResolvedValueOnce({ // block
-          Body: {
-            transformToString: () => JSON.stringify({
-              chunks: [0],
-              header: {
-                height: blockHeight,
-              },
-            }),
-          },
-        })
-        .mockResolvedValue({ // shard
-          Body: {
-            transformToString: () => JSON.stringify({})
-          },
-        }),
-    } as unknown as S3Client;
-    const error = new Error('something went wrong with provisioning');
-    const provisioner: any = {
-      isUserApiProvisioned: jest.fn().mockReturnValue(false),
-      provisionUserApi: jest.fn().mockRejectedValue(error),
-    };
-    const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis, provisioner });
-
-    const functions: Record = {
-      'morgs.near/test': {
-        code: `
-          context.graphql(\`mutation { set(functionName: "buildnear.testnet/test", key: "height", data: "\${block.blockHeight}")}\`);
-        `,
-        schema: 'schema',
-      }
-    };
-
-    await expect(indexer.runFunctions(blockHeight, functions, false, { provision: true })).rejects.toThrow(error);
-    expect(mockFetch.mock.calls).toMatchSnapshot();
-  });
-
-  test('does not attach the hasura admin secret header when no role specified', async () => {
-    const mockFetch = jest.fn()
-      .mockResolvedValueOnce({
-        status: 200,
-        json: async () => ({
-          data: {}
-        })
-      });
-    const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis });
-    // @ts-expect-error legacy test
-    const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, null);
-
-    const mutation = `
-      mutation {
-        newGreeting(greeting: "howdy") {
-          success
-        }
-      }
-    `;
-
-    await context.graphql(mutation);
-
-    expect(mockFetch.mock.calls[0]).toEqual([
-      `${HASURA_ENDPOINT}/v1/graphql`,
-      {
-        method: 'POST',
-        headers: {
-          'Content-Type': 'application/json',
-          'X-Hasura-Use-Backend-Only-Permissions': 'true',
-        },
-        body: JSON.stringify({ query: mutation })
-      }
-    ]);
-  });
-
-  test('attaches the backend only header to requests to hasura', async () => {
-    const mockFetch = jest.fn()
-      .mockResolvedValueOnce({
-        status: 200,
-        json: async () => ({
-          data: {}
-        })
-      });
-    const role = 'morgs_near';
-    const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis });
-    const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE);
-
-    const mutation = `
-      mutation {
-        newGreeting(greeting: "howdy") {
-          success
-        }
-      }
-    `;
-
-    await context.graphql(mutation);
-
-    expect(mockFetch.mock.calls[0]).toEqual([
-      `${HASURA_ENDPOINT}/v1/graphql`,
-      {
-        method: 'POST',
-        headers: {
-          'Content-Type': 'application/json',
-          'X-Hasura-Use-Backend-Only-Permissions': 'true',
-          'X-Hasura-Role': role,
-          'X-Hasura-Admin-Secret': HASURA_ADMIN_SECRET
-        },
-        body: JSON.stringify({ query: mutation })
-      }
-    ]);
-  });
-});
+// import { Block } from '@near-lake/primitives';
+// import type fetch from 'node-fetch';
+// import { type S3Client, GetObjectCommand } from '@aws-sdk/client-s3';
+// import type RedisClient from '../redis-client';
+
+// import Indexer from './indexer';
+// import { VM } from 'vm2';
+
+// describe('Indexer unit tests', () => {
+//   const oldEnv = process.env;
+
+//   const HASURA_ENDPOINT = 'mock-hasura-endpoint';
+//   const HASURA_ADMIN_SECRET = 'mock-hasura-secret';
+//   const HASURA_ROLE = 'morgs_near';
+//   const INVALID_HASURA_ROLE = 'other_near';
+
+//   const INDEXER_NAME = 'morgs.near/test_fn';
+
+//   const SIMPLE_SCHEMA = `CREATE TABLE
+//     "posts" (
+//       "id" SERIAL NOT NULL,
+//       "account_id" VARCHAR NOT NULL,
+//       "block_height" DECIMAL(58, 0) NOT NULL,
+//       "receipt_id" VARCHAR NOT NULL,
+//       "content" TEXT NOT NULL,
+//       "block_timestamp" DECIMAL(20, 0) NOT NULL,
+//       "accounts_liked" JSONB NOT NULL DEFAULT '[]',
+//       "last_comment_timestamp" DECIMAL(20, 0),
+//       CONSTRAINT "posts_pkey" PRIMARY KEY ("id")
+//     );`;
+
+//   const SOCIAL_SCHEMA = `
+//     CREATE TABLE
+//       "posts" (
+//         "id" SERIAL NOT NULL,
+//         "account_id" VARCHAR NOT NULL,
+//         "block_height" DECIMAL(58, 0) NOT NULL,
+//         "receipt_id" VARCHAR NOT NULL,
+//         "content" TEXT NOT NULL,
+//         "block_timestamp" DECIMAL(20, 0) NOT NULL,
+//         "accounts_liked" JSONB NOT NULL DEFAULT '[]',
+//         "last_comment_timestamp" DECIMAL(20, 0),
+//         CONSTRAINT "posts_pkey" PRIMARY KEY ("id")
+//       );
+
+//     CREATE TABLE
+//       "comments" (
+//         "id" SERIAL NOT NULL,
+//         "post_id" SERIAL NOT NULL,
+//         "account_id" VARCHAR NOT NULL,
+//         "block_height" DECIMAL(58, 0) NOT NULL,
+//         "content" TEXT NOT NULL,
+//         "block_timestamp" DECIMAL(20, 0) NOT NULL,
+//         "receipt_id" VARCHAR NOT NULL,
+//         CONSTRAINT "comments_pkey" PRIMARY KEY ("id")
+//       );
+
+//     CREATE TABLE
+//       "post_likes" (
+//         "post_id" SERIAL NOT NULL,
+//         "account_id" VARCHAR NOT NULL,
+//         "block_height" DECIMAL(58, 0),
+//         "block_timestamp" DECIMAL(20, 0) NOT NULL,
+//         "receipt_id" VARCHAR NOT NULL,
+//         CONSTRAINT "post_likes_pkey" PRIMARY KEY ("post_id", "account_id")
+//       );`;
+
+//   const STRESS_TEST_SCHEMA = `
+// CREATE TABLE creator_quest (
+//     account_id VARCHAR PRIMARY KEY,
+//     num_components_created INTEGER NOT NULL DEFAULT 0,
+//     completed BOOLEAN NOT NULL DEFAULT FALSE
+//   );
+
+// CREATE TABLE
+//   composer_quest (
+//     account_id VARCHAR PRIMARY KEY,
+//     num_widgets_composed INTEGER NOT NULL DEFAULT 0,
+//     completed BOOLEAN NOT NULL DEFAULT FALSE
+//   );
+
+// CREATE TABLE
+//   "contractor - quest" (
+//     account_id VARCHAR PRIMARY KEY,
+//     num_contracts_deployed INTEGER NOT NULL DEFAULT 0,
+//     completed BOOLEAN NOT NULL DEFAULT FALSE
+//   );
+
+// CREATE TABLE
+//   "posts" (
+//     "id" SERIAL NOT NULL,
+//     "account_id" VARCHAR NOT NULL,
+//     "block_height" DECIMAL(58, 0) NOT NULL,
+//     "receipt_id" VARCHAR NOT NULL,
+//     "content" TEXT NOT NULL,
+//     "block_timestamp" DECIMAL(20, 0) NOT NULL,
+//     "accounts_liked" JSONB NOT NULL DEFAULT '[]',
+//     "last_comment_timestamp" DECIMAL(20, 0),
+//     CONSTRAINT "posts_pkey" PRIMARY KEY ("id")
+//   );
+
+// CREATE TABLE
+//   "comments" (
+//     "id" SERIAL NOT NULL,
+//     "post_id" SERIAL NOT NULL,
+//     "account_id" VARCHAR NOT NULL,
+//     "block_height" DECIMAL(58, 0) NOT NULL,
+//     "content" TEXT NOT NULL,
+//     "block_timestamp" DECIMAL(20, 0) NOT NULL,
+//     "receipt_id" VARCHAR NOT NULL,
+//     CONSTRAINT "comments_pkey" PRIMARY KEY ("id")
+//   );
+
+// CREATE TABLE
+//   "post_likes" (
+//     "post_id" SERIAL NOT NULL,
+//     "account_id" VARCHAR NOT NULL,
+//     "block_height" DECIMAL(58, 0),
+//     "block_timestamp" DECIMAL(20, 0) NOT NULL,
+//     "receipt_id" VARCHAR NOT NULL,
+//     CONSTRAINT "post_likes_pkey" PRIMARY KEY ("post_id", "account_id")
+//   );
+
+// CREATE UNIQUE INDEX "posts_account_id_block_height_key" ON "posts" ("account_id" ASC, "block_height" ASC);
+
+// CREATE UNIQUE INDEX "comments_post_id_account_id_block_height_key" ON "comments" (
+//   "post_id" ASC,
+//   "account_id" ASC,
+//   "block_height" ASC
+// );
+
+// CREATE INDEX
+//   "posts_last_comment_timestamp_idx" ON "posts" ("last_comment_timestamp" DESC);
+
+// ALTER TABLE
+//   "comments"
+// ADD
+//   CONSTRAINT "comments_post_id_fkey" FOREIGN KEY ("post_id") REFERENCES "posts" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION;
+
+// ALTER TABLE
+//   "post_likes"
+// ADD
+//   CONSTRAINT "post_likes_post_id_fkey" FOREIGN KEY ("post_id") REFERENCES "posts" ("id") ON DELETE CASCADE ON UPDATE NO ACTION;
+
+// CREATE TABLE IF NOT EXISTS
+//   "My Table1" (id serial PRIMARY KEY);
+
+// CREATE TABLE
+//   "Another-Table" (id serial PRIMARY KEY);
+
+// CREATE TABLE
+// IF NOT EXISTS
+//   "Third-Table" (id serial PRIMARY KEY);
+
+// CREATE TABLE
+//   yet_another_table (id serial PRIMARY KEY);
+// `;
+//   const genericMockFetch = jest.fn()
+//     .mockResolvedValue({
+//       status: 200,
+//       json: async () => ({
+//         data: 'mock',
+//       }),
+//     });
+
+//   const transparentRedis = {
+//     getStreamerMessage: jest.fn()
+//   } as unknown as RedisClient;
+
+//   beforeEach(() => {
+//     process.env = {
+//       ...oldEnv,
+//       HASURA_ENDPOINT,
+//       HASURA_ADMIN_SECRET
+//     };
+//   });
+
+//   afterAll(() => {
+//     process.env = oldEnv;
+//   });
+
+//   test('Indexer.runFunctions() should execute all functions against the current block', async () => {
+//     const mockFetch = jest.fn(() => ({
+//       status: 200,
+//       json: async () => ({
+//         errors: null,
+//       }),
+//     }));
+//     const blockHeight = 456;
+//     const mockData = jest.fn().mockResolvedValue(
+//       JSON.stringify(
+//         {
+//           block: {
+//             chunks: [],
+//             header: {
+//               height: blockHeight
+//             }
+//           },
+//           shards: {}
+//         }
+//       )
+//     );
+//     const mockRedis = {
+//       getStreamerMessage: mockData
+//     } as unknown as RedisClient;
+
+//     const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: mockRedis });
+
+//     const functions: Record = {};
+//     functions['buildnear.testnet/test'] = {
+//       code: `
+//         const foo = 3;
+//         block.result = context.graphql(\`mutation { set(functionName: "buildnear.testnet/test", key: "height", data: "\${block.blockHeight}")}\`);
+//       `,
+//       schema: SIMPLE_SCHEMA
+//     };
+//     await indexer.runFunctions(blockHeight, functions, false);
+
+//     expect(mockFetch.mock.calls).toMatchSnapshot();
+//   });
+
+//   test('Indexer.fetchBlock() should fetch a block from S3', async () => {
+//     const author = 'dokiacapital.poolv1.near';
+//     const mockData = JSON.stringify({
+//       author
+//     });
+//     const mockSend = jest.fn().mockResolvedValue({
+//       Body: {
+//         transformToString: () => mockData
+//       }
+//     });
+//     const mockS3 = {
+//       send: mockSend,
+//     } as unknown as S3Client;
+
+//     const indexer = new Indexer('mainnet', { s3: mockS3, redisClient: transparentRedis });
+
+//     const blockHeight = 84333960;
+//     const block = await indexer.fetchBlockPromise(blockHeight);
+//     const params = {
+//       Bucket: 'near-lake-data-mainnet',
+//       Key: `${blockHeight.toString().padStart(12, '0')}/block.json`
+//     };
+
+//     expect(mockS3.send).toHaveBeenCalledTimes(1);
+//     expect(JSON.stringify(mockSend.mock.calls[0][0])).toMatch(JSON.stringify(new GetObjectCommand(params)));
+//     expect(block.author).toEqual(author);
+//   });
+
+//   test('Indexer.fetchShard() should fetch a shard from S3', async () => {
+//     const mockData = JSON.stringify({});
+//     const mockSend = jest.fn().mockResolvedValue({
+//       Body: {
+//         transformToString: () => mockData
+//       }
+//     });
+//     const mockS3 = {
+//       send: mockSend,
+//     } as unknown as S3Client;
+//     const indexer = new Indexer('mainnet', { s3: mockS3, redisClient: transparentRedis });
+
+//     const blockHeight = 82699904;
+//     const shard = 0;
+//     const params = {
+//       Bucket: 'near-lake-data-mainnet',
+//       Key: `${blockHeight.toString().padStart(12, '0')}/shard_${shard}.json`
+//     };
+//     await indexer.fetchShardPromise(blockHeight, shard);
+
+//     expect(JSON.stringify(mockSend.mock.calls[0][0])).toMatch(JSON.stringify(new GetObjectCommand(params)));
+//   });
+
+//   test('Indexer.fetchStreamerMessage() should fetch the message from cache and use it directly', async () => {
+//     const blockHeight = 85233529;
+//     const blockHash = 'xyz';
+//     const getMessage = jest.fn()
+//       .mockReturnValueOnce(JSON.stringify(
+//         {
+//           block: {
+//             chunks: [0],
+//             header: {
+//               height: blockHeight,
+//               hash: blockHash,
+//             }
+//           },
+//           shards: {}
+//         }
+//       ));
+//     const mockRedis = {
+//       getStreamerMessage: getMessage
+//     } as unknown as RedisClient;
+//     const indexer = new Indexer('mainnet', { redisClient: mockRedis });
+
+//     const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, false);
+
+//     expect(getMessage).toHaveBeenCalledTimes(1);
+//     expect(JSON.stringify(getMessage.mock.calls[0])).toEqual(
+//       `[${blockHeight}]`
+//     );
+//     const block = Block.fromStreamerMessage(streamerMessage);
+
+//     expect(block.blockHeight).toEqual(blockHeight);
+//     expect(block.blockHash).toEqual(blockHash);
+//   });
+
+//   test('Indexer.fetchStreamerMessage() should fetch the block and shards from S3 upon cache miss', async () => {
+//     const blockHeight = 85233529;
+//     const blockHash = 'xyz';
+//     const mockSend = jest.fn()
+//       .mockReturnValueOnce({ // block
+//         Body: {
+//           transformToString: () => JSON.stringify({
+//             chunks: [0],
+//             header: {
+//               height: blockHeight,
+//               hash: blockHash,
+//             }
+//           })
+//         }
+//       })
+//       .mockReturnValue({ // shard
+//         Body: {
+//           transformToString: () => JSON.stringify({})
+//         }
+//       });
+//     const mockS3 = {
+//       send: mockSend,
+//     } as unknown as S3Client;
+//     const indexer = new Indexer('mainnet', { s3: mockS3, redisClient: transparentRedis });
+
+//     const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, false);
+
+//     expect(mockSend).toHaveBeenCalledTimes(5);
+//     expect(JSON.stringify(mockSend.mock.calls[0][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({
+//       Bucket: 'near-lake-data-mainnet',
+//       Key: `${blockHeight.toString().padStart(12, '0')}/block.json`
+//     })));
+//     expect(JSON.stringify(mockSend.mock.calls[1][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({
+//       Bucket: 'near-lake-data-mainnet',
+//       Key: `${blockHeight.toString().padStart(12, '0')}/shard_0.json`
+//     })));
+//     expect(transparentRedis.getStreamerMessage).toHaveBeenCalledTimes(1);
+
+//     const block = Block.fromStreamerMessage(streamerMessage);
+
+//     expect(block.blockHeight).toEqual(blockHeight);
+//     expect(block.blockHash).toEqual(blockHash);
+//   });
+
+//   test('Indexer.fetchStreamerMessage() should fetch the block and shards from S3 and not cache and construct the streamer message if historical', async () => {
+//     const blockHeight = 85233529;
+//     const blockHash = 'xyz';
+//     const mockSend = jest.fn()
+//       .mockReturnValueOnce({ // block
+//         Body: {
+//           transformToString: () => JSON.stringify({
+//             chunks: [0],
+//             header: {
+//               height: blockHeight,
+//               hash: blockHash,
+//             }
+//           })
+//         }
+//       })
+//       .mockReturnValue({ // shard
+//         Body: {
+//           transformToString: () => JSON.stringify({})
+//         }
+//       });
+//     const mockS3 = {
+//       send: mockSend,
+//     } as unknown as S3Client;
+//     const mockRedis = {
+//       getStreamerMessage: jest.fn()
+//     } as unknown as RedisClient;
+//     const indexer = new Indexer('mainnet', { s3: mockS3, redisClient: mockRedis });
+
+//     const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, true);
+
+//     expect(mockSend).toHaveBeenCalledTimes(5);
+//     expect(JSON.stringify(mockSend.mock.calls[0][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({
+//       Bucket: 'near-lake-data-mainnet',
+//       Key: `${blockHeight.toString().padStart(12, '0')}/block.json`
+//     })));
+//     expect(JSON.stringify(mockSend.mock.calls[1][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({
+//       Bucket: 'near-lake-data-mainnet',
+//       Key: `${blockHeight.toString().padStart(12, '0')}/shard_0.json`
+//     })));
+//     expect(mockRedis.getStreamerMessage).toHaveBeenCalledTimes(0);
+
+//     const block = Block.fromStreamerMessage(streamerMessage);
+
+//     expect(block.blockHeight).toEqual(blockHeight);
+//     expect(block.blockHash).toEqual(blockHash);
+//   });
+
+//   test('Indexer.transformIndexerFunction() applies the necessary transformations', () => {
+//     const indexer = new Indexer('mainnet', { redisClient: transparentRedis });
+
+//     const transformedFunction = indexer.transformIndexerFunction('console.log(\'hello\')');
+
+//     expect(transformedFunction).toEqual(`
+//       async function f(){
+//         console.log('hello')
+//       };
+//       f();
+//     `);
+//   });
+
+//   test('Indexer.buildContext() allows execution of arbitrary GraphQL operations', async () => {
+//     const mockFetch = jest.fn()
+//       .mockResolvedValueOnce({
+//         status: 200,
+//         json: async () => ({
+//           data: {
+//             greet: 'hello'
+//           }
+//         })
+//       })
+//       .mockResolvedValueOnce({
+//         status: 200,
+//         json: async () => ({
+//           data: {
+//             newGreeting: {
+//               success: true
+//             }
+//           }
+//         })
+//       });
+//     const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis });
+
+//     const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE);
+
+//     const query = `
+//       query {
+//         greet()
+//       }
+//     `;
+//     const { greet } = await context.graphql(query) as { greet: string };
+
+//     const mutation = `
+//       mutation {
+//         newGreeting(greeting: "${greet} morgan") {
+//           success
+//         }
+//       }
+//     `;
+//     const { newGreeting: { success } } = await context.graphql(mutation);
+
+//     expect(greet).toEqual('hello');
+//     expect(success).toEqual(true);
+//     expect(mockFetch.mock.calls[0]).toEqual([
+//       `${HASURA_ENDPOINT}/v1/graphql`,
+//       {
+//         method: 'POST',
+//         headers: {
+//           'Content-Type': 'application/json',
+//           'X-Hasura-Use-Backend-Only-Permissions': 'true',
+//           'X-Hasura-Role': 'morgs_near',
+//           'X-Hasura-Admin-Secret': HASURA_ADMIN_SECRET
+//         },
+//         body: JSON.stringify({ query })
+//       }
+//     ]);
+//     expect(mockFetch.mock.calls[1]).toEqual([
+//       `${HASURA_ENDPOINT}/v1/graphql`,
+//       {
+//         method: 'POST',
+//         headers: {
+//           'Content-Type': 'application/json',
+//           'X-Hasura-Use-Backend-Only-Permissions': 'true',
+//           'X-Hasura-Role': 'morgs_near',
+//           'X-Hasura-Admin-Secret': HASURA_ADMIN_SECRET
+//         },
+//         body: JSON.stringify({ query: mutation })
+//       }
+//     ]);
+//   });
+
+//   test('Indexer.buildContext() can fetch from the near social api', async () => {
+//     const mockFetch = jest.fn();
+//     const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis });
+
+//     const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE);
+
+//     await context.fetchFromSocialApi('/index', {
+//       method: 'POST',
+//       headers: {
+//         'Content-Type': 'application/json',
+//       },
+//       body: JSON.stringify({
+//         action: 'post',
+//         key: 'main',
+//         options: {
+//           limit: 1,
+//           order: 'desc'
+//         }
+//       })
+//     });
+
+//     expect(mockFetch.mock.calls).toMatchSnapshot();
+//   });
+
+//   test('Indexer.buildContext() throws when a GraphQL response contains errors', async () => {
+//     const mockFetch = jest.fn()
+//       .mockResolvedValue({
+//         json: async () => ({
+//           errors: ['boom']
+//         })
+//       });
+//     const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis });
+
+//     const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, INVALID_HASURA_ROLE);
+
+//     await expect(async () => await context.graphql('query { hello }')).rejects.toThrow('boom');
+//   });
+
+//   test('Indexer.buildContext() handles GraphQL variables', async () => {
+//     const mockFetch = jest.fn()
+//       .mockResolvedValue({
+//         status: 200,
+//         json: async () => ({
+//           data: 'mock',
+//         }),
+//       });
+//     const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis });
+
+//     const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE);
+
+//     const query = 'query($name: String) { hello(name: $name) }';
+//     const variables = { name: 'morgan' };
+//     await context.graphql(query, variables);
+
+//     expect(mockFetch.mock.calls[0]).toEqual([
+//       `${HASURA_ENDPOINT}/v1/graphql`,
+//       {
+//         method: 'POST',
+//         headers: {
+//           'Content-Type': 'application/json',
+//           'X-Hasura-Use-Backend-Only-Permissions': 'true',
+//           'X-Hasura-Role': 'morgs_near',
+//           'X-Hasura-Admin-Secret': HASURA_ADMIN_SECRET
+//         },
+//         body: JSON.stringify({
+//           query,
+//           variables,
+//         }),
+//       },
+//     ]);
+//   });
+
+//   test('GetTables works for a variety of input schemas', async () => {
+//     const indexer = new Indexer('mainnet', { redisClient: transparentRedis });
+
+//     const simpleSchemaTables = indexer.getTableNames(SIMPLE_SCHEMA);
+//     expect(simpleSchemaTables).toStrictEqual(['posts']);
+
+//     const socialSchemaTables = indexer.getTableNames(SOCIAL_SCHEMA);
+//     expect(socialSchemaTables).toStrictEqual(['posts', 'comments', 'post_likes']);
+
+//     const stressTestSchemaTables = indexer.getTableNames(STRESS_TEST_SCHEMA);
+//     expect(stressTestSchemaTables).toStrictEqual([
+//       'creator_quest',
+//       'composer_quest',
+//       'contractor - quest',
+//       'posts',
+//       'comments',
+//       'post_likes',
+//       'My Table1',
+//       'Another-Table',
+//       'Third-Table',
+//       'yet_another_table']);
+
+//     // Test that duplicate table names throw an error
+//     const duplicateTableSchema = `CREATE TABLE
+//     "posts" (
+//       "id" SERIAL NOT NULL
+//     );
+//     CREATE TABLE posts (
+//       "id" SERIAL NOT NULL
+//     );`;
+//     expect(() => {
+//       indexer.getTableNames(duplicateTableSchema);
+//     }).toThrow('Table posts already exists in schema. Table names must be unique. Quotes are not allowed as a differentiator between table names.');
+
+//     // Test that schema with no tables throws an error
+//     expect(() => {
+//       indexer.getTableNames('');
+//     }).toThrow('Schema does not have any tables. 
There should be at least one table.'); +// }); + +// test('SanitizeTableName works properly on many test cases', async () => { +// const indexer = new Indexer('mainnet', { redisClient: transparentRedis }); + +// expect(indexer.sanitizeTableName('table_name')).toStrictEqual('TableName'); +// expect(indexer.sanitizeTableName('tablename')).toStrictEqual('Tablename'); // name is not capitalized +// expect(indexer.sanitizeTableName('table name')).toStrictEqual('TableName'); +// expect(indexer.sanitizeTableName('table!name!')).toStrictEqual('TableName'); +// expect(indexer.sanitizeTableName('123TABle')).toStrictEqual('_123TABle'); // underscore at beginning +// expect(indexer.sanitizeTableName('123_tABLE')).toStrictEqual('_123TABLE'); // underscore at beginning, capitalization +// expect(indexer.sanitizeTableName('some-table_name')).toStrictEqual('SomeTableName'); +// expect(indexer.sanitizeTableName('!@#$%^&*()table@)*&(%#')).toStrictEqual('Table'); // All special characters removed +// expect(indexer.sanitizeTableName('T_name')).toStrictEqual('TName'); +// expect(indexer.sanitizeTableName('_table')).toStrictEqual('Table'); // Starting underscore was removed +// }); + +// test('indexer fails to build context.db due to collision on sanitized table names', async () => { +// const indexer = new Indexer('mainnet', { redisClient: transparentRedis }); + +// const schemaWithDuplicateSanitizedTableNames = `CREATE TABLE +// "test table" ( +// "id" SERIAL NOT NULL +// ); +// CREATE TABLE "test!table" ( +// "id" SERIAL NOT NULL +// );`; + +// // Does not outright throw an error but instead returns an empty object +// expect(indexer.buildDatabaseContext('test_account', 'test_schema_name', schemaWithDuplicateSanitizedTableNames, 1)) +// .toStrictEqual({}); +// }); + +// test('indexer builds context and inserts an objects into existing table', async () => { +// const mockDmlHandler: any = { +// create: jest.fn().mockImplementation(() => { +// return { insert: jest.fn().mockReturnValue([{ colA: 'valA' }, { colA: 'valA' }]) }; +// }) +// }; + +// const indexer = new Indexer('mainnet', { +// fetch: genericMockFetch as unknown as typeof fetch, +// redisClient: transparentRedis, +// DmlHandler: mockDmlHandler +// }); +// const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + +// const objToInsert = [{ +// account_id: 'morgs_near', +// block_height: 1, +// receipt_id: 'abc', +// content: 'test', +// block_timestamp: 800, +// accounts_liked: JSON.stringify(['cwpuzzles.near', 'devbose.near']) +// }, +// { +// account_id: 'morgs_near', +// block_height: 2, +// receipt_id: 'abc', +// content: 'test', +// block_timestamp: 801, +// accounts_liked: JSON.stringify(['cwpuzzles.near']) +// }]; + +// const result = await context.db.Posts.insert(objToInsert); +// expect(result.length).toEqual(2); +// }); + +// test('indexer builds context and selects objects from existing table', async () => { +// const selectFn = jest.fn(); +// selectFn.mockImplementation((...args) => { +// // Expects limit to be last parameter +// return args[args.length - 1] === null ? 
[{ colA: 'valA' }, { colA: 'valA' }] : [{ colA: 'valA' }]; +// }); +// const mockDmlHandler: any = { +// create: jest.fn().mockImplementation(() => { +// return { select: selectFn }; +// }) +// }; + +// const indexer = new Indexer('mainnet', { +// fetch: genericMockFetch as unknown as typeof fetch, +// redisClient: transparentRedis, +// DmlHandler: mockDmlHandler +// }); +// const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + +// const objToSelect = { +// account_id: 'morgs_near', +// receipt_id: 'abc', +// }; +// const result = await context.db.Posts.select(objToSelect); +// expect(result.length).toEqual(2); +// const resultLimit = await context.db.Posts.select(objToSelect, 1); +// expect(resultLimit.length).toEqual(1); +// }); + +// test('indexer builds context and updates multiple objects from existing table', async () => { +// const mockDmlHandler: any = { +// create: jest.fn().mockImplementation(() => { +// return { +// update: jest.fn().mockImplementation((_, __, whereObj, updateObj) => { +// if (whereObj.account_id === 'morgs_near' && updateObj.content === 'test_content') { +// return [{ colA: 'valA' }, { colA: 'valA' }]; +// } +// return [{}]; +// }) +// }; +// }) +// }; + +// const indexer = new Indexer('mainnet', { +// fetch: genericMockFetch as unknown as typeof fetch, +// redisClient: transparentRedis, +// DmlHandler: mockDmlHandler +// }); +// const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + +// const whereObj = { +// account_id: 'morgs_near', +// receipt_id: 'abc', +// }; +// const updateObj = { +// content: 'test_content', +// block_timestamp: 805, +// }; +// const result = await context.db.Posts.update(whereObj, updateObj); +// expect(result.length).toEqual(2); +// }); + +// test('indexer builds context and upserts on existing table', async () => { +// const mockDmlHandler: any = { +// create: jest.fn().mockImplementation(() => { +// return { +// upsert: jest.fn().mockImplementation((_, __, objects, conflict, update) => { +// if (objects.length === 2 && conflict.includes('account_id') && update.includes('content')) { +// return [{ colA: 'valA' }, { colA: 'valA' }]; +// } else if (objects.length === 1 && conflict.includes('account_id') && update.includes('content')) { +// return [{ colA: 'valA' }]; +// } +// return [{}]; +// }) +// }; +// }) +// }; + +// const indexer = new Indexer('mainnet', { +// fetch: genericMockFetch as unknown as typeof fetch, +// redisClient: transparentRedis, +// DmlHandler: mockDmlHandler +// }); +// const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + +// const objToInsert = [{ +// account_id: 'morgs_near', +// block_height: 1, +// receipt_id: 'abc', +// content: 'test', +// block_timestamp: 800, +// accounts_liked: JSON.stringify(['cwpuzzles.near', 'devbose.near']) +// }, +// { +// account_id: 'morgs_near', +// block_height: 2, +// receipt_id: 'abc', +// content: 'test', +// block_timestamp: 801, +// accounts_liked: JSON.stringify(['cwpuzzles.near']) +// }]; + +// let result = await context.db.Posts.upsert(objToInsert, ['account_id', 'block_height'], ['content', 'block_timestamp']); +// expect(result.length).toEqual(2); +// result = await context.db.Posts.upsert(objToInsert[0], ['account_id', 'block_height'], ['content', 'block_timestamp']); +// expect(result.length).toEqual(1); +// }); + +// test('indexer builds context and deletes objects from existing table', async () => { +// const mockDmlHandler: any = { +// 
create: jest.fn().mockImplementation(() => { +// return { delete: jest.fn().mockReturnValue([{ colA: 'valA' }, { colA: 'valA' }]) }; +// }) +// }; + +// const indexer = new Indexer('mainnet', { +// fetch: genericMockFetch as unknown as typeof fetch, +// redisClient: transparentRedis, +// DmlHandler: mockDmlHandler +// }); +// const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + +// const deleteFilter = { +// account_id: 'morgs_near', +// receipt_id: 'abc', +// }; +// const result = await context.db.Posts.delete(deleteFilter); +// expect(result.length).toEqual(2); +// }); + +// test('indexer builds context and verifies all methods generated', async () => { +// const mockDmlHandler: any = { +// create: jest.fn() +// }; + +// const indexer = new Indexer('mainnet', { +// fetch: genericMockFetch as unknown as typeof fetch, +// redisClient: transparentRedis, +// DmlHandler: mockDmlHandler +// }); +// const context = indexer.buildContext(STRESS_TEST_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + +// expect(Object.keys(context.db)).toStrictEqual([ +// 'CreatorQuest', +// 'ComposerQuest', +// 'ContractorQuest', +// 'Posts', +// 'Comments', +// 'PostLikes', +// 'MyTable1', +// 'AnotherTable', +// 'ThirdTable', +// 'YetAnotherTable']); +// expect(Object.keys(context.db.CreatorQuest)).toStrictEqual([ +// 'insert', +// 'select', +// 'update', +// 'upsert', +// 'delete']); +// expect(Object.keys(context.db.PostLikes)).toStrictEqual([ +// 'insert', +// 'select', +// 'update', +// 'upsert', +// 'delete']); +// expect(Object.keys(context.db.MyTable1)).toStrictEqual([ +// 'insert', +// 'select', +// 'update', +// 'upsert', +// 'delete']); +// }); + +// test('indexer builds context and returns empty array if failed to generate db methods', async () => { +// const mockDmlHandler: any = { +// create: jest.fn() +// }; + +// const indexer = new Indexer('mainnet', { +// fetch: genericMockFetch as unknown as typeof fetch, +// redisClient: transparentRedis, +// DmlHandler: mockDmlHandler +// }); +// const context = indexer.buildContext('', 'morgs.near/social_feed1', 1, 'postgres'); + +// expect(Object.keys(context.db)).toStrictEqual([]); +// }); + +// test('Indexer.runFunctions() allows imperative execution of GraphQL operations', async () => { +// const postId = 1; +// const commentId = 2; +// const blockHeight = 82699904; +// const mockFetch = jest.fn() +// .mockReturnValueOnce({ // starting log +// status: 200, +// json: async () => ({ +// data: { +// indexer_log_store: [ +// { +// id: '12345', +// }, +// ], +// }, +// }), +// }) +// .mockReturnValueOnce({ +// status: 200, +// json: async () => ({ +// errors: null, +// }), +// }) +// .mockReturnValueOnce({ // query +// status: 200, +// json: async () => ({ +// data: { +// posts: [ +// { +// id: postId, +// }, +// ], +// }, +// }), +// }) +// .mockReturnValueOnce({ // mutation +// status: 200, +// json: async () => ({ +// data: { +// insert_comments: { +// returning: { +// id: commentId, +// }, +// }, +// }, +// }), +// }) +// .mockReturnValueOnce({ +// status: 200, +// json: async () => ({ +// errors: null, +// }), +// }); + +// const mockS3 = { +// send: jest.fn() +// .mockResolvedValueOnce({ // block +// Body: { +// transformToString: () => JSON.stringify({ +// chunks: [0], +// header: { +// height: blockHeight, +// }, +// }), +// }, +// }) +// .mockResolvedValue({ // shard +// Body: { +// transformToString: () => JSON.stringify({}) +// }, +// }), +// } as unknown as S3Client; +// const indexer = new 
Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis }); + +// const functions: Record = {}; +// functions['buildnear.testnet/test'] = { +// code: ` +// const { posts } = await context.graphql(\` +// query { +// posts(where: { id: { _eq: 1 } }) { +// id +// } +// } +// \`); + +// if (!posts || posts.length === 0) { +// return; +// } + +// const [post] = posts; + +// const { insert_comments: { returning: { id } } } = await context.graphql(\` +// mutation { +// insert_comments( +// objects: {account_id: "morgs.near", block_height: \${block.blockHeight}, content: "cool post", post_id: \${post.id}} +// ) { +// returning { +// id +// } +// } +// } +// \`); + +// return (\`Created comment \${id} on post \${post.id}\`) +// `, +// schema: SIMPLE_SCHEMA +// }; + +// await indexer.runFunctions(blockHeight, functions, false); + +// expect(mockFetch.mock.calls).toMatchSnapshot(); +// }); + +// test('Indexer.runFunctions() console.logs', async () => { +// const logs: string[] = []; +// const context = { +// log: (...m: string[]) => { +// logs.push(...m); +// } +// }; +// const vm = new VM(); +// vm.freeze(context, 'context'); +// vm.freeze(context, 'console'); +// await vm.run('console.log("hello", "brave new"); context.log("world")'); +// expect(logs).toEqual(['hello', 'brave new', 'world']); +// }); + +// test('Errors thrown in VM can be caught outside the VM', async () => { +// const vm = new VM(); +// expect(() => { +// vm.run("throw new Error('boom')"); +// }).toThrow('boom'); +// }); + +// test('Indexer.runFunctions() catches errors', async () => { +// const mockFetch = jest.fn(() => ({ +// status: 200, +// json: async () => ({ +// errors: null, +// }), +// })); +// const blockHeight = 456; +// const mockS3 = { +// send: jest.fn().mockResolvedValue({ +// Body: { +// transformToString: () => JSON.stringify({ +// chunks: [], +// header: { +// height: blockHeight +// } +// }) +// } +// }), +// } as unknown as S3Client; +// const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis }); + +// const functions: Record = {}; +// functions['buildnear.testnet/test'] = { +// code: ` +// throw new Error('boom'); +// `, +// schema: SIMPLE_SCHEMA +// }; + +// await expect(indexer.runFunctions(blockHeight, functions, false)).rejects.toThrow(new Error('boom')); +// expect(mockFetch.mock.calls).toMatchSnapshot(); +// }); + +// test('Indexer.runFunctions() provisions a GraphQL endpoint with the specified schema', async () => { +// const blockHeight = 82699904; +// const mockFetch = jest.fn(() => ({ +// status: 200, +// json: async () => ({ +// errors: null, +// }), +// })); +// const mockS3 = { +// send: jest +// .fn() +// .mockResolvedValueOnce({ // block +// Body: { +// transformToString: () => JSON.stringify({ +// chunks: [0], +// header: { +// height: blockHeight, +// }, +// }), +// }, +// }) +// .mockResolvedValue({ // shard +// Body: { +// transformToString: () => JSON.stringify({}) +// }, +// }), +// } as unknown as S3Client; +// const provisioner: any = { +// isUserApiProvisioned: jest.fn().mockReturnValue(false), +// provisionUserApi: jest.fn(), +// }; +// const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis, provisioner }); + +// const functions = { +// 'morgs.near/test': { +// account_id: 'morgs.near', +// function_name: 'test', +// code: '', +// schema: SIMPLE_SCHEMA, +// } +// }; +// await 
indexer.runFunctions(1, functions, false, { provision: true }); + +// expect(provisioner.isUserApiProvisioned).toHaveBeenCalledWith('morgs.near', 'test'); +// expect(provisioner.provisionUserApi).toHaveBeenCalledTimes(1); +// expect(provisioner.provisionUserApi).toHaveBeenCalledWith( +// 'morgs.near', +// 'test', +// SIMPLE_SCHEMA +// ); +// }); + +// test('Indexer.runFunctions() skips provisioning if the endpoint exists', async () => { +// const blockHeight = 82699904; +// const mockFetch = jest.fn(() => ({ +// status: 200, +// json: async () => ({ +// errors: null, +// }), +// })); +// const mockS3 = { +// send: jest +// .fn() +// .mockResolvedValueOnce({ // block +// Body: { +// transformToString: () => JSON.stringify({ +// chunks: [0], +// header: { +// height: blockHeight, +// }, +// }), +// }, +// }) +// .mockResolvedValue({ // shard +// Body: { +// transformToString: () => JSON.stringify({}) +// }, +// }), +// } as unknown as S3Client; +// const provisioner: any = { +// isUserApiProvisioned: jest.fn().mockReturnValue(true), +// provisionUserApi: jest.fn(), +// }; +// const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis, provisioner }); + +// const functions: Record = { +// 'morgs.near/test': { +// code: '', +// schema: SIMPLE_SCHEMA, +// } +// }; +// await indexer.runFunctions(1, functions, false, { provision: true }); + +// expect(provisioner.provisionUserApi).not.toHaveBeenCalled(); +// }); + +// test('Indexer.runFunctions() supplies the required role to the GraphQL endpoint', async () => { +// const blockHeight = 82699904; +// const mockFetch = jest.fn(() => ({ +// status: 200, +// json: async () => ({ +// errors: null, +// }), +// })); +// const mockS3 = { +// send: jest +// .fn() +// .mockResolvedValueOnce({ // block +// Body: { +// transformToString: () => JSON.stringify({ +// chunks: [0], +// header: { +// height: blockHeight, +// }, +// }), +// }, +// }) +// .mockResolvedValue({ // shard +// Body: { +// transformToString: () => JSON.stringify({}) +// }, +// }), +// } as unknown as S3Client; +// const provisioner: any = { +// isUserApiProvisioned: jest.fn().mockReturnValue(true), +// provisionUserApi: jest.fn(), +// }; +// const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis, provisioner }); + +// const functions: Record = { +// 'morgs.near/test': { +// code: ` +// context.graphql(\`mutation { set(functionName: "buildnear.testnet/test", key: "height", data: "\${block.blockHeight}")}\`); +// `, +// schema: SIMPLE_SCHEMA, +// } +// }; +// await indexer.runFunctions(blockHeight, functions, false, { provision: true }); + +// expect(provisioner.provisionUserApi).not.toHaveBeenCalled(); +// expect(mockFetch.mock.calls).toMatchSnapshot(); +// }); + +// test('Indexer.runFunctions() logs provisioning failures', async () => { +// const blockHeight = 82699904; +// const mockFetch = jest.fn(() => ({ +// status: 200, +// json: async () => ({ +// errors: null, +// }), +// })); +// const mockS3 = { +// send: jest +// .fn() +// .mockResolvedValueOnce({ // block +// Body: { +// transformToString: () => JSON.stringify({ +// chunks: [0], +// header: { +// height: blockHeight, +// }, +// }), +// }, +// }) +// .mockResolvedValue({ // shard +// Body: { +// transformToString: () => JSON.stringify({}) +// }, +// }), +// } as unknown as S3Client; +// const error = new Error('something went wrong with provisioning'); +// const provisioner: any = { +// 
isUserApiProvisioned: jest.fn().mockReturnValue(false), +// provisionUserApi: jest.fn().mockRejectedValue(error), +// }; +// const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis, provisioner }); + +// const functions: Record = { +// 'morgs.near/test': { +// code: ` +// context.graphql(\`mutation { set(functionName: "buildnear.testnet/test", key: "height", data: "\${block.blockHeight}")}\`); +// `, +// schema: 'schema', +// } +// }; + +// await expect(indexer.runFunctions(blockHeight, functions, false, { provision: true })).rejects.toThrow(error); +// expect(mockFetch.mock.calls).toMatchSnapshot(); +// }); + +// test('does not attach the hasura admin secret header when no role specified', async () => { +// const mockFetch = jest.fn() +// .mockResolvedValueOnce({ +// status: 200, +// json: async () => ({ +// data: {} +// }) +// }); +// const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); +// // @ts-expect-error legacy test +// const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, null); + +// const mutation = ` +// mutation { +// newGreeting(greeting: "howdy") { +// success +// } +// } +// `; + +// await context.graphql(mutation); + +// expect(mockFetch.mock.calls[0]).toEqual([ +// `${HASURA_ENDPOINT}/v1/graphql`, +// { +// method: 'POST', +// headers: { +// 'Content-Type': 'application/json', +// 'X-Hasura-Use-Backend-Only-Permissions': 'true', +// }, +// body: JSON.stringify({ query: mutation }) +// } +// ]); +// }); + +// test('attaches the backend only header to requests to hasura', async () => { +// const mockFetch = jest.fn() +// .mockResolvedValueOnce({ +// status: 200, +// json: async () => ({ +// data: {} +// }) +// }); +// const role = 'morgs_near'; +// const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); +// const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE); + +// const mutation = ` +// mutation { +// newGreeting(greeting: "howdy") { +// success +// } +// } +// `; + +// await context.graphql(mutation); + +// expect(mockFetch.mock.calls[0]).toEqual([ +// `${HASURA_ENDPOINT}/v1/graphql`, +// { +// method: 'POST', +// headers: { +// 'Content-Type': 'application/json', +// 'X-Hasura-Use-Backend-Only-Permissions': 'true', +// 'X-Hasura-Role': role, +// 'X-Hasura-Admin-Secret': HASURA_ADMIN_SECRET +// }, +// body: JSON.stringify({ query: mutation }) +// } +// ]); +// }); +// }); diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index e0dfb449f..cc44c87a4 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -8,10 +8,11 @@ import { METRICS } from '../metrics'; import Provisioner from '../provisioner'; import DmlHandler from '../dml-handler/dml-handler'; import RedisClient from '../redis-client'; +import S3StreamerMessageFetcher from '../streamer-message-fetcher/s3-streamer-fetcher'; interface Dependencies { fetch: typeof fetch - s3: S3Client + s3StreamerMessageFetcher: S3StreamerMessageFetcher provisioner: Provisioner DmlHandler: typeof DmlHandler parser: Parser @@ -47,7 +48,7 @@ export default class Indexer { this.network = network; this.deps = { fetch, - s3: new S3Client(), + s3StreamerMessageFetcher: deps?.s3StreamerMessageFetcher ?? 
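+      // Fall back to a real S3-backed fetcher when one is not injected via deps.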
new S3StreamerMessageFetcher(this.network), provisioner: new Provisioner(), DmlHandler, parser: new Parser(), @@ -63,7 +64,7 @@ export default class Indexer { options: { provision?: boolean } = { provision: false }, streamerMessage: StreamerMessage | null = null ): Promise { - const blockWithHelpers = Block.fromStreamerMessage(streamerMessage == null ? await this.fetchStreamerMessage(blockHeight, isHistorical) : streamerMessage); + const blockWithHelpers = Block.fromStreamerMessage(streamerMessage !== null ? streamerMessage : await this.fetchStreamerMessage(blockHeight, isHistorical)); const lag = Date.now() - Math.floor(Number(blockWithHelpers.header().timestampNanosec) / 1000000); @@ -132,9 +133,13 @@ export default class Indexer { return allMutations; } - // pad with 0s to 12 digits - normalizeBlockHeight (blockHeight: number): string { - return blockHeight.toString().padStart(12, '0'); + enableAwaitTransform (indexerFunction: string): string { + return ` + async function f(){ + ${indexerFunction} + }; + f(); + `; } async fetchStreamerMessage (blockHeight: number, isHistorical: boolean): Promise<{ block: any, shards: any[] }> { @@ -148,8 +153,8 @@ export default class Indexer { METRICS.CACHE_MISS.labels(isHistorical ? 'historical' : 'real-time', 'streamer_message').inc(); } } - const blockPromise = this.fetchBlockPromise(blockHeight); - const shardsPromises = await this.fetchShardsPromises(blockHeight, 4); + const blockPromise = this.deps.s3StreamerMessageFetcher.fetchBlockPromise(blockHeight); + const shardsPromises = await this.deps.s3StreamerMessageFetcher.fetchShardsPromises(blockHeight, 4); const results = await Promise.all([blockPromise, ...shardsPromises]); const block = results.shift(); @@ -160,43 +165,6 @@ export default class Indexer { }; } - async fetchShardsPromises (blockHeight: number, numberOfShards: number): Promise>> { - return ([...Array(numberOfShards).keys()].map(async (shardId) => - await this.fetchShardPromise(blockHeight, shardId) - )); - } - - async fetchShardPromise (blockHeight: number, shardId: number): Promise { - const params = { - Bucket: `near-lake-data-${this.network}`, - Key: `${this.normalizeBlockHeight(blockHeight)}/shard_${shardId}.json`, - }; - const response = await this.deps.s3.send(new GetObjectCommand(params)); - const shardData = await response.Body?.transformToString() ?? '{}'; - return JSON.parse(shardData, (_key, value) => this.renameUnderscoreFieldsToCamelCase(value)); - } - - async fetchBlockPromise (blockHeight: number): Promise { - const file = 'block.json'; - const folder = this.normalizeBlockHeight(blockHeight); - const params = { - Bucket: 'near-lake-data-' + this.network, - Key: `${folder}/${file}`, - }; - const response = await this.deps.s3.send(new GetObjectCommand(params)); - const blockData = await response.Body?.transformToString() ?? 
'{}'; - return JSON.parse(blockData, (_key, value) => this.renameUnderscoreFieldsToCamelCase(value)); - } - - enableAwaitTransform (indexerFunction: string): string { - return ` - async function f(){ - ${indexerFunction} - }; - f(); - `; - } - transformIndexerFunction (indexerFunction: string): string { return [ this.enableAwaitTransform, @@ -491,25 +459,4 @@ export default class Indexer { return data; } - - renameUnderscoreFieldsToCamelCase (value: Record): Record { - if (value !== null && typeof value === 'object' && !Array.isArray(value)) { - // It's a non-null, non-array object, create a replacement with the keys initially-capped - const newValue: any = {}; - for (const key in value) { - const newKey: string = key - .split('_') - .map((word, i) => { - if (i > 0) { - return word.charAt(0).toUpperCase() + word.slice(1); - } - return word; - }) - .join(''); - newValue[newKey] = value[key]; - } - return newValue; - } - return value; - } } diff --git a/runner/src/streamer-message-fetcher/index.ts b/runner/src/streamer-message-fetcher/index.ts new file mode 100644 index 000000000..0a7463a6e --- /dev/null +++ b/runner/src/streamer-message-fetcher/index.ts @@ -0,0 +1 @@ +export { default } from './s3-streamer-fetcher'; diff --git a/runner/src/streamer-message-fetcher/s3-streamer-fetcher-tests.ts b/runner/src/streamer-message-fetcher/s3-streamer-fetcher-tests.ts new file mode 100644 index 000000000..e69de29bb diff --git a/runner/src/streamer-message-fetcher/s3-streamer-fetcher.ts b/runner/src/streamer-message-fetcher/s3-streamer-fetcher.ts new file mode 100644 index 000000000..16b421a22 --- /dev/null +++ b/runner/src/streamer-message-fetcher/s3-streamer-fetcher.ts @@ -0,0 +1,67 @@ +import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3'; + +export default class S3StreamerMessageFetcher { + private readonly s3Client: S3Client; + network: string; + constructor ( + network: string = 'mainnet', + s3Client: S3Client = new S3Client() + ) { + this.s3Client = s3Client; + this.network = network; + } + + // pad with 0s to 12 digits + normalizeBlockHeight (blockHeight: number): string { + return blockHeight.toString().padStart(12, '0'); + } + + async fetchShardsPromises (blockHeight: number, numberOfShards: number): Promise>> { + return ([...Array(numberOfShards).keys()].map(async (shardId) => + await this.fetchShardPromise(blockHeight, shardId) + )); + } + + async fetchShardPromise (blockHeight: number, shardId: number): Promise { + const params = { + Bucket: `near-lake-data-${this.network}`, + Key: `${this.normalizeBlockHeight(blockHeight)}/shard_${shardId}.json`, + }; + const response = await this.s3Client.send(new GetObjectCommand(params)); + const shardData = await response.Body?.transformToString() ?? '{}'; + return JSON.parse(shardData, (_key, value) => this.renameUnderscoreFieldsToCamelCase(value)); + } + + async fetchBlockPromise (blockHeight: number): Promise { + const file = 'block.json'; + const folder = this.normalizeBlockHeight(blockHeight); + const params = { + Bucket: 'near-lake-data-' + this.network, + Key: `${folder}/${file}`, + }; + const response = await this.s3Client.send(new GetObjectCommand(params)); + const blockData = await response.Body?.transformToString() ?? 
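+    // Default to an empty JSON object when the S3 response has no body.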
'{}';
+    return JSON.parse(blockData, (_key, value) => this.renameUnderscoreFieldsToCamelCase(value));
+  }
+
+  renameUnderscoreFieldsToCamelCase (value: Record<string, any>): Record<string, any> {
+    if (value !== null && typeof value === 'object' && !Array.isArray(value)) {
+      // It's a non-null, non-array object, create a replacement with the keys initially-capped
+      const newValue: any = {};
+      for (const key in value) {
+        const newKey: string = key
+          .split('_')
+          .map((word, i) => {
+            if (i > 0) {
+              return word.charAt(0).toUpperCase() + word.slice(1);
+            }
+            return word;
+          })
+          .join('');
+        newValue[newKey] = value[key];
+      }
+      return newValue;
+    }
+    return value;
+  }
+}

From cd0d2e58162f73c221746a4396386194e26f4732 Mon Sep 17 00:00:00 2001
From: Darun Seethammagari
Date: Mon, 30 Oct 2023 12:01:06 -0700
Subject: [PATCH 06/24] Add more Instrumentation

---
 runner/src/indexer/indexer.ts       |  53 +++++-
 runner/src/metrics.ts               |  66 +++++++-
 runner/src/stream-handler/worker.ts | 240 +++++++++++++++++++++++++++-
 3 files changed, 345 insertions(+), 14 deletions(-)

diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts
index cc44c87a4..f7b1a73ad 100644
--- a/runner/src/indexer/indexer.ts
+++ b/runner/src/indexer/indexer.ts
@@ -1,9 +1,9 @@
 import fetch, { type Response } from 'node-fetch';
 import { VM } from 'vm2';
-import { S3Client, GetObjectCommand } from '@aws-sdk/client-s3';
 import { Block, type StreamerMessage } from '@near-lake/primitives';
 import { Parser } from 'node-sql-parser';
-import { METRICS } from '../metrics';
+import { type MessagePort } from 'worker_threads';
+import { type Message } from '../stream-handler/types';
 
 import Provisioner from '../provisioner';
 import DmlHandler from '../dml-handler/dml-handler';
@@ -17,6 +17,7 @@ interface Dependencies {
   DmlHandler: typeof DmlHandler
   parser: Parser
   redisClient: RedisClient
+  parentPort: MessagePort | null
 };
 
 interface Context {
@@ -53,6 +54,7 @@ export default class Indexer {
       DmlHandler,
       parser: new Parser(),
       redisClient: deps?.redisClient ?? new RedisClient(),
+      parentPort: deps?.parentPort ?? null,
       ...deps,
     };
   }
@@ -72,7 +74,9 @@ export default class Indexer {
     const allMutations: string[] = [];
 
     for (const functionName in functions) {
+      let finishPromiseHandlingLatency;
       try {
+        const functionStateLoggingLatency = performance.now();
         const indexerFunction = functions[functionName];
 
         const runningMessage = `Running function ${functionName}` + (isHistorical ? ' historical backfill' : `, lag is: ${lag?.toString()}ms from block timestamp`);
@@ -83,6 +87,7 @@
         const hasuraRoleName = functionName.split('/')[0].replace(/[.-]/g, '_');
 
         if (options.provision && !indexerFunction.provisioned) {
+          const provisioningLatency = performance.now();
           try {
             if (!await this.deps.provisioner.isUserApiProvisioned(indexerFunction.account_id, indexerFunction.function_name)) {
               await this.setStatus(functionName, blockHeight, 'PROVISIONING');
@@ -97,10 +102,18 @@
             simultaneousPromises.push(this.writeLog(functionName, blockHeight, 'Provisioning endpoint: failure', error.message));
             throw error;
           }
+          console.log('Provisioning Latency: ', performance.now() - provisioningLatency);
         }
 
         await this.setStatus(functionName, blockHeight, 'RUNNING');
-
+        console.log('Function State Logging Latency: ', performance.now() - functionStateLoggingLatency);
+        this.deps.parentPort?.postMessage({
+          type: 'FUNCTION_STATE_LOGGING_LATENCY',
+          labels: { indexer: functionName, type: isHistorical ? 'historical' : 'real-time' },
+          value: performance.now() - functionStateLoggingLatency,
+        } satisfies Message);
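+        // Each phase below posts its latency to the parent thread as a Message, presumably so the main thread can record it in the shared metrics registry.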
+
+        const vmAndContextBuildLatency = performance.now();
         const vm = new VM({ timeout: 3000, allowAsync: true });
         const context = this.buildContext(indexerFunction.schema, functionName, blockHeight, hasuraRoleName);
@@ -109,6 +122,13 @@
         vm.freeze(context, 'console'); // provide console.log via context.log
 
         const modifiedFunction = this.transformIndexerFunction(indexerFunction.code);
+        console.log('VM and Context Object Preparation Latency: ', performance.now() - vmAndContextBuildLatency);
+        this.deps.parentPort?.postMessage({
+          type: 'FUNCTION_VM_AND_CONTEXT_LATENCY',
+          labels: { indexer: functionName, type: isHistorical ? 'historical' : 'real-time' },
+          value: performance.now() - vmAndContextBuildLatency,
+        } satisfies Message);
+        const functionCodeExecutionLatency = performance.now();
         try {
           await vm.run(modifiedFunction);
         } catch (e) {
@@ -120,7 +140,13 @@
           await this.writeLog(functionName, blockHeight, 'Error running IndexerFunction', error.message);
           throw e;
         }
-
+        console.log('Function Execution Latency: ', performance.now() - functionCodeExecutionLatency);
+        this.deps.parentPort?.postMessage({
+          type: 'FUNCTION_CODE_EXECUTION_LATENCY',
+          labels: { indexer: functionName, type: isHistorical ? 'historical' : 'real-time' },
+          value: performance.now() - functionCodeExecutionLatency,
+        } satisfies Message);
+        finishPromiseHandlingLatency = performance.now();
         simultaneousPromises.push(this.writeFunctionState(functionName, blockHeight, isHistorical));
       } catch (e) {
         console.error(`${functionName}: Failed to run function`, e);
@@ -128,6 +154,12 @@
         throw e;
       } finally {
         await Promise.all(simultaneousPromises);
+        console.log('Finish Promise Handling Latency: ', finishPromiseHandlingLatency !== undefined ? performance.now() - finishPromiseHandlingLatency : 'null');
+        this.deps.parentPort?.postMessage({
+          type: 'FUNCTION_PROMISE_HANDLING_LATENCY',
+          labels: { indexer: functionName, type: isHistorical ? 'historical' : 'real-time' },
+          value: finishPromiseHandlingLatency !== undefined ? performance.now() - finishPromiseHandlingLatency : 0,
+        } satisfies Message);
       }
     }
     return allMutations;
@@ -146,11 +178,20 @@
     if (!isHistorical) {
      const cachedMessage = await this.deps.redisClient.getStreamerMessage(blockHeight);
       if (cachedMessage) {
-        METRICS.CACHE_HIT.labels(isHistorical ? 'historical' : 'real-time', 'streamer_message').inc();
+        this.deps.parentPort?.postMessage({
+          type: 'CACHE_HIT',
+          labels: { type: isHistorical ? 'historical' : 'real-time' },
+          value: 1,
+        } satisfies Message);
+
         const parsedMessage = JSON.parse(cachedMessage);
         return parsedMessage;
       } else {
-        METRICS.CACHE_MISS.labels(isHistorical ? 'historical' : 'real-time', 'streamer_message').inc();
+        this.deps.parentPort?.postMessage({
+          type: 'CACHE_MISS',
+          labels: { type: isHistorical ? 'historical' : 'real-time' },
+          value: 1,
+        } satisfies Message);
       }
     }
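+    // On a cache miss (and always for historical runs, which bypass the cache) the streamer message is rebuilt from S3 below.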
     const blockPromise = this.deps.s3StreamerMessageFetcher.fetchBlockPromise(blockHeight);
diff --git a/runner/src/metrics.ts b/runner/src/metrics.ts
index 2296cb7d0..eac77a1c6 100644
--- a/runner/src/metrics.ts
+++ b/runner/src/metrics.ts
@@ -22,22 +22,76 @@ const BLOCK_WAIT_DURATION = new Gauge({
 const CACHE_HIT = new Counter({
   name: 'queryapi_runner_cache_hit',
   help: 'The number of times cache was hit successfully',
-  labelNames: ['type', 'key']
+  labelNames: ['blockheight', 'type']
 });
 
 const CACHE_MISS = new Counter({
   name: 'queryapi_runner_cache_miss',
   help: 'The number of times cache was missed',
-  labelNames: ['type', 'key']
+  labelNames: ['blockheight', 'type']
+});
+
+const FUNCTION_STATE_LOGGING_LATENCY = new promClient.Gauge({
+  name: 'queryapi_runner_function_state_logging_milliseconds',
+  help: 'Time an indexer function spent on writing state and creating write log promises',
+  labelNames: ['indexer', 'type'],
+});
+
+const FUNCTION_VM_AND_CONTEXT_LATENCY = new promClient.Gauge({
+  name: 'queryapi_runner_function_vm_and_context_building_milliseconds',
+  help: 'Time an indexer function spent on preparing the vm and context object',
+  labelNames: ['indexer', 'type'],
+});
+
+const FUNCTION_CODE_EXECUTION_LATENCY = new promClient.Gauge({
+  name: 'queryapi_runner_function_code_execution_duration_milliseconds',
+  help: 'Time an indexer function spent executing user code',
+  labelNames: ['indexer', 'type'],
+});
+
+const FUNCTION_PROMISE_HANDLING_LATENCY = new promClient.Gauge({
+  name: 'queryapi_runner_function_promise_handling_milliseconds',
+  help: 'Time an indexer function waited for simultaneous promises to resolve',
+  labelNames: ['indexer', 'type'],
+});
+
+const FUNCTION_OVERALL_EXECUTION_DURATION = new promClient.Gauge({
+  name: 'queryapi_runner_function_overall_duration_milliseconds',
+  help: 'Overall time an indexer function took to process a block',
+  labelNames: ['indexer', 'type'],
+});
+
+const UNPROCESSED_STREAM_MESSAGES = new promClient.Gauge({
+  name: 'queryapi_runner_unprocessed_stream_messages',
+  help: 'Number of Redis Stream messages not yet processed',
+  labelNames: ['indexer', 'type'],
+});
+
+const LAST_PROCESSED_BLOCK = new promClient.Gauge({
+  name: 'queryapi_runner_last_processed_block',
+  help: 'The last block processed by an indexer function',
+  labelNames: ['indexer', 'type'],
+});
+
+const EXECUTION_DURATION = new promClient.Gauge({
+  name: 'queryapi_runner_execution_duration_milliseconds',
+  help: 'Time taken to execute an indexer function',
+  labelNames: ['indexer', 'type'],
 });
 
 export const METRICS = {
-  EXECUTION_DURATION,
   BLOCK_WAIT_DURATION,
-  UNPROCESSED_STREAM_MESSAGES,
-  BLOCKS,
   CACHE_HIT,
-  CACHE_MISS
+  CACHE_MISS,
+  FUNCTION_STATE_LOGGING_LATENCY,
+  FUNCTION_VM_AND_CONTEXT_LATENCY,
+  FUNCTION_CODE_EXECUTION_LATENCY,
+  FUNCTION_PROMISE_HANDLING_LATENCY,
+  FUNCTION_OVERALL_EXECUTION_DURATION,
+  UNPROCESSED_STREAM_MESSAGES,
+  LAST_PROCESSED_BLOCK,
+  EXECUTION_DURATION,
+
 };
 
 const aggregatorRegistry = new AggregatorRegistry();
diff --git a/runner/src/stream-handler/worker.ts b/runner/src/stream-handler/worker.ts
index 1cc1c531d..674404dbd 100644
--- a/runner/src/stream-handler/worker.ts
+++ b/runner/src/stream-handler/worker.ts
@@ -4,13 +4,23 @@
 import promClient from 'prom-client';
 
 import Indexer from '../indexer';
 import RedisClient from '../redis-client';
 import { METRICS } from '../metrics';
+import type { StreamerMessage } from '@near-lake/primitives';
+import S3StreamerMessageFetcher from '../streamer-message-fetcher/s3-streamer-fetcher';
 
 if (isMainThread) {
   throw new Error('Worker should not be run on main thread');
 }
-const indexer = new Indexer('mainnet');
+const HISTORICAL_BATCH_SIZE = 100;
+const indexer = new Indexer('mainnet', { parentPort });
 const redisClient = new RedisClient();
+const s3StreamerMessageFetcher = new S3StreamerMessageFetcher();
+
+interface QueueMessage {
+  streamerMessage: StreamerMessage
+  streamId: string
+}
+const queue: Array<Promise<QueueMessage>> = [];
 
 const sleep = async (ms: number): Promise<void> => { await new Promise((resolve) => setTimeout(resolve, ms)); };
@@ -22,6 +32,9 @@ void (async function main () {
   let indexerName = '';
   const streamType = redisClient.getStreamType(streamKey);
   const isHistorical = streamType === 'historical';
+  if (streamType === 'real-time') {
+    await handleHistoricalStream(streamKey);
+  }
 
   while (true) {
     try {
@@ -63,8 +76,231 @@
     } finally {
       const unprocessedMessages = await redisClient.getUnprocessedStreamMessages(streamKey);
       METRICS.UNPROCESSED_STREAM_MESSAGES.labels({ indexer: indexerName, type: streamType }).set(unprocessedMessages?.length ?? 0);
-      parentPort?.postMessage(await promClient.register.getMetricsAsJSON());
     }
   }
 })();
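+
+// The historical pipeline below is a producer/consumer pair sharing `queue`: the
+// producer reads batches of stream IDs from Redis and eagerly starts S3 fetches
+// (keeping up to HISTORICAL_BATCH_SIZE in flight), while the consumer awaits each
+// queued promise in order and runs the indexer functions against it.
+// incrementId() bumps the millisecond portion of the last-read stream ID so the
+// next read starts strictly after the entries already queued.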
+
+async function handleHistoricalStream (streamKey: string): Promise<void> {
+  void historicalStreamerMessageQueueProducer(queue, streamKey);
+  void historicalStreamerMessageQueueConsumer(queue, streamKey);
+}
+
+function incrementId (id: string): string {
+  const [main, sequence] = id.split('-');
+  return `${Number(main) + 1}-${sequence}`;
+}
+
+async function historicalStreamerMessageQueueProducer (queue: Array<Promise<QueueMessage>>, streamKey: string): Promise<void> {
+  let currentBlockHeight: string = '0';
+
+  while (true) {
+    const preFetchCount = HISTORICAL_BATCH_SIZE - queue.length;
+    if (preFetchCount <= 0) {
+      await sleep(300);
+      continue;
+    }
+    const messages = await redisClient.getNextStreamMessage(streamKey, preFetchCount, currentBlockHeight);
+    console.log('Messages fetched: ', messages?.length);
+
+    if (messages == null) {
+      await sleep(100);
+      continue;
+    }
+
+    for (const streamMessage of messages) {
+      const { id, message } = streamMessage;
+      fetchAndQueue(queue, Number(message.block_height), id);
+    }
+
+    currentBlockHeight = incrementId(messages[messages.length - 1].id);
+  }
+}
+
+async function historicalStreamerMessageQueueConsumer (queue: Array<Promise<QueueMessage>>, streamKey: string): Promise<void> {
+  const streamType = redisClient.getStreamType(streamKey);
+  const indexerConfig = await redisClient.getStreamStorage(streamKey);
+  const indexerName = `${indexerConfig.account_id}/${indexerConfig.function_name}`;
+  const functions = {
+    [indexerName]: {
+      account_id: indexerConfig.account_id,
+      function_name: indexerConfig.function_name,
+      code: indexerConfig.code,
+      schema: indexerConfig.schema,
+      provisioned: false,
+    },
+  };
+
+  while (true) {
+    const startTime = performance.now();
+    const blockStartTime = startTime;
+    const queueMessage = await queue.shift();
+    if (queueMessage === undefined) {
+      await sleep(500);
+      continue;
+    }
+    const { streamerMessage, streamId } = queueMessage;
+
+    if (streamerMessage === undefined || streamerMessage?.block.header.height == null) {
+      console.error('Streamer message does not have block height', streamerMessage);
+      continue;
+    }
+    console.log('Block wait Duration: ', performance.now() - startTime);
+    parentPort?.postMessage({
+      type: 'BLOCK_WAIT_DURATION',
+      labels: { indexer: indexerName, type: streamType },
+      value: performance.now() - blockStartTime,
+    } satisfies Message);
+
+    const functionStartTime = performance.now();
+    await indexer.runFunctions(streamerMessage.block.header.height, functions, false, { provision: true }, streamerMessage);
+    console.log('Function Code Execution Duration: ', performance.now() - functionStartTime);
+    parentPort?.postMessage({
type: 'FUNCTION_OVERALL_EXECUTION_DURATION', + labels: { indexer: indexerName, type: streamType }, + value: performance.now() - functionStartTime, + } satisfies Message); + + // await redisClient.deleteStreamMessage(streamKey, streamId); + // Can just be streamId if above line is running + const unprocessedMessages = await redisClient.getUnprocessedStreamMessages(streamKey, incrementId(streamId)); + + parentPort?.postMessage({ + type: 'UNPROCESSED_STREAM_MESSAGES', + labels: { indexer: indexerName, type: streamType }, + value: unprocessedMessages?.length ?? 0, + } satisfies Message); + + parentPort?.postMessage({ + type: 'LAST_PROCESSED_BLOCK', + labels: { indexer: indexerName, type: streamType }, + value: streamerMessage.block.header.height, + } satisfies Message); + + console.log('Execution Duration: ', performance.now() - startTime); + + parentPort?.postMessage({ + type: 'EXECUTION_DURATION', + labels: { indexer: indexerName, type: streamType }, + value: performance.now() - startTime, + } satisfies Message); + } +} + +function fetchAndQueue (queue: Array>, blockHeight: number, id: string): void { + queue.push(transformStreamerMessageToQueueMessage(blockHeight, id)); +} + +async function transformStreamerMessageToQueueMessage (blockHeight: number, streamId: string): Promise { + const streamerMessage = await fetchStreamerMessage(blockHeight); + return { + streamerMessage, + streamId + }; +} + +async function fetchStreamerMessage (blockHeight: number): Promise { + const blockPromise = s3StreamerMessageFetcher.fetchBlockPromise(blockHeight); + const shardsPromises = await s3StreamerMessageFetcher.fetchShardsPromises(blockHeight, 4); + + const results = await Promise.all([blockPromise, ...shardsPromises]); + const block = results.shift(); + const shards = results; + return { + block, + shards, + }; +} From 9d17c2221cb8c30d693d84f381310e8d5692de19 Mon Sep 17 00:00:00 2001 From: Darun Seethammagari Date: Wed, 1 Nov 2023 10:47:46 -0700 Subject: [PATCH 07/24] Clean up code --- runner/src/index.ts | 3 +-- runner/src/metrics.ts | 6 +++--- runner/src/stream-handler/worker.ts | 12 ++++++++---- 3 files changed, 12 insertions(+), 9 deletions(-) diff --git a/runner/src/index.ts b/runner/src/index.ts index d9a90691e..a483a9336 100644 --- a/runner/src/index.ts +++ b/runner/src/index.ts @@ -17,8 +17,7 @@ void (async function main () { const streamHandlers: StreamHandlers = {}; while (true) { - // const streamKeys = await redisClient.getStreams(); - const streamKeys = ['flatirons.near/sweat_blockheight:real_time:stream']; + const streamKeys = await redisClient.getStreams(); streamKeys.forEach((streamKey) => { if (streamHandlers[streamKey] !== undefined) { diff --git a/runner/src/metrics.ts b/runner/src/metrics.ts index eac77a1c6..861e3df2a 100644 --- a/runner/src/metrics.ts +++ b/runner/src/metrics.ts @@ -22,13 +22,13 @@ const BLOCK_WAIT_DURATION = new Gauge({ const CACHE_HIT = new Counter({ name: 'queryapi_runner_cache_hit', help: 'The number of times cache was hit successfully', - labelNames: ['blockheight', 'type'] + labelNames: ['type'] }); const CACHE_MISS = new Counter({ name: 'queryapi_runner_cache_miss', help: 'The number of times cache was missed', - labelNames: ['blockheight', 'type'] + labelNames: ['type'] }); const FUNCTION_STATE_LOGGING_LATENCY = new promClient.Gauge({ @@ -73,7 +73,7 @@ const LAST_PROCESSED_BLOCK = new promClient.Gauge({ labelNames: ['indexer', 'type'], }); -const EXECUTION_DURATION = new promClient.Gauge({ +const EXECUTION_DURATION = new promClient.Histogram({ name: 
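+  // Switching from a Gauge to a Histogram captures the distribution of execution times rather than only the latest value.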
'queryapi_runner_execution_duration_milliseconds', help: 'Time taken to execute an indexer function', labelNames: ['indexer', 'type'], diff --git a/runner/src/stream-handler/worker.ts b/runner/src/stream-handler/worker.ts index 674404dbd..e56399679 100644 --- a/runner/src/stream-handler/worker.ts +++ b/runner/src/stream-handler/worker.ts @@ -32,8 +32,9 @@ void (async function main () { let indexerName = ''; const streamType = redisClient.getStreamType(streamKey); const isHistorical = streamType === 'historical'; - if (streamType === 'real-time') { + if (!isHistorical) { await handleHistoricalStream(streamKey); + return; } while (true) { @@ -194,12 +195,11 @@ async function historicalStreamerMessageQueueProducer (queue: Array Date: Wed, 1 Nov 2023 10:58:54 -0700 Subject: [PATCH 08/24] Encapsulate streamer message builder function --- runner/src/indexer/indexer.ts | 11 +---------- runner/src/stream-handler/worker.ts | 15 +-------------- .../s3-streamer-fetcher.ts | 14 ++++++++++++++ 3 files changed, 16 insertions(+), 24 deletions(-) diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index f7b1a73ad..7eb8e38b4 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -194,16 +194,7 @@ export default class Indexer { } satisfies Message); } } - const blockPromise = this.deps.s3StreamerMessageFetcher.fetchBlockPromise(blockHeight); - const shardsPromises = await this.deps.s3StreamerMessageFetcher.fetchShardsPromises(blockHeight, 4); - - const results = await Promise.all([blockPromise, ...shardsPromises]); - const block = results.shift(); - const shards = results; - return { - block, - shards, - }; + return await this.deps.s3StreamerMessageFetcher.fetchStreamerMessage(blockHeight); } transformIndexerFunction (indexerFunction: string): string { diff --git a/runner/src/stream-handler/worker.ts b/runner/src/stream-handler/worker.ts index e56399679..b1c405915 100644 --- a/runner/src/stream-handler/worker.ts +++ b/runner/src/stream-handler/worker.ts @@ -289,22 +289,9 @@ function fetchAndQueue (queue: Array>, blockHeight: number } async function transformStreamerMessageToQueueMessage (blockHeight: number, streamId: string): Promise { - const streamerMessage = await fetchStreamerMessage(blockHeight); + const streamerMessage = await s3StreamerMessageFetcher.fetchStreamerMessage(blockHeight); return { streamerMessage, streamId }; } - -async function fetchStreamerMessage (blockHeight: number): Promise { - const blockPromise = s3StreamerMessageFetcher.fetchBlockPromise(blockHeight); - const shardsPromises = await s3StreamerMessageFetcher.fetchShardsPromises(blockHeight, 4); - - const results = await Promise.all([blockPromise, ...shardsPromises]); - const block = results.shift(); - const shards = results; - return { - block, - shards, - }; -} diff --git a/runner/src/streamer-message-fetcher/s3-streamer-fetcher.ts b/runner/src/streamer-message-fetcher/s3-streamer-fetcher.ts index 16b421a22..920b86778 100644 --- a/runner/src/streamer-message-fetcher/s3-streamer-fetcher.ts +++ b/runner/src/streamer-message-fetcher/s3-streamer-fetcher.ts @@ -1,4 +1,5 @@ import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3'; +import { type StreamerMessage } from '@near-lake/primitives'; export default class S3StreamerMessageFetcher { private readonly s3Client: S3Client; @@ -64,4 +65,17 @@ export default class S3StreamerMessageFetcher { } return value; } + + async fetchStreamerMessage (blockHeight: number): Promise { + const blockPromise = 
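+    // Fetch block.json and the four shard files in parallel, then reassemble them into a streamer message.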
this.fetchBlockPromise(blockHeight); + const shardsPromises = await this.fetchShardsPromises(blockHeight, 4); + + const results = await Promise.all([blockPromise, ...shardsPromises]); + const block = results.shift(); + const shards = results; + return { + block, + shards, + }; + } } From e3a3869b80464dc72e8b35d1fb88766686185228 Mon Sep 17 00:00:00 2001 From: Darun Seethammagari Date: Wed, 1 Nov 2023 11:26:34 -0700 Subject: [PATCH 09/24] Ensure Unit Tests Pass --- runner/src/indexer/indexer.test.ts | 2432 ++++++++--------- runner/src/indexer/indexer.ts | 2 +- runner/src/stream-handler/worker.ts | 2 +- .../s3-streamer-fetcher-tests.ts | 0 .../s3-streamer-fetcher.test.ts | 99 + .../s3-streamer-fetcher.ts | 2 +- 6 files changed, 1259 insertions(+), 1278 deletions(-) delete mode 100644 runner/src/streamer-message-fetcher/s3-streamer-fetcher-tests.ts create mode 100644 runner/src/streamer-message-fetcher/s3-streamer-fetcher.test.ts diff --git a/runner/src/indexer/indexer.test.ts b/runner/src/indexer/indexer.test.ts index 40a970c90..1f339dc73 100644 --- a/runner/src/indexer/indexer.test.ts +++ b/runner/src/indexer/indexer.test.ts @@ -1,1275 +1,1157 @@ -// import { Block } from '@near-lake/primitives'; -// import type fetch from 'node-fetch'; -// import { type S3Client, GetObjectCommand } from '@aws-sdk/client-s3'; -// import type RedisClient from '../redis-client'; - -// import Indexer from './indexer'; -// import { VM } from 'vm2'; - -// describe('Indexer unit tests', () => { -// const oldEnv = process.env; - -// const HASURA_ENDPOINT = 'mock-hasura-endpoint'; -// const HASURA_ADMIN_SECRET = 'mock-hasura-secret'; -// const HASURA_ROLE = 'morgs_near'; -// const INVALID_HASURA_ROLE = 'other_near'; - -// const INDEXER_NAME = 'morgs.near/test_fn'; - -// const SIMPLE_SCHEMA = `CREATE TABLE -// "posts" ( -// "id" SERIAL NOT NULL, -// "account_id" VARCHAR NOT NULL, -// "block_height" DECIMAL(58, 0) NOT NULL, -// "receipt_id" VARCHAR NOT NULL, -// "content" TEXT NOT NULL, -// "block_timestamp" DECIMAL(20, 0) NOT NULL, -// "accounts_liked" JSONB NOT NULL DEFAULT '[]', -// "last_comment_timestamp" DECIMAL(20, 0), -// CONSTRAINT "posts_pkey" PRIMARY KEY ("id") -// );`; - -// const SOCIAL_SCHEMA = ` -// CREATE TABLE -// "posts" ( -// "id" SERIAL NOT NULL, -// "account_id" VARCHAR NOT NULL, -// "block_height" DECIMAL(58, 0) NOT NULL, -// "receipt_id" VARCHAR NOT NULL, -// "content" TEXT NOT NULL, -// "block_timestamp" DECIMAL(20, 0) NOT NULL, -// "accounts_liked" JSONB NOT NULL DEFAULT '[]', -// "last_comment_timestamp" DECIMAL(20, 0), -// CONSTRAINT "posts_pkey" PRIMARY KEY ("id") -// ); - -// CREATE TABLE -// "comments" ( -// "id" SERIAL NOT NULL, -// "post_id" SERIAL NOT NULL, -// "account_id" VARCHAR NOT NULL, -// "block_height" DECIMAL(58, 0) NOT NULL, -// "content" TEXT NOT NULL, -// "block_timestamp" DECIMAL(20, 0) NOT NULL, -// "receipt_id" VARCHAR NOT NULL, -// CONSTRAINT "comments_pkey" PRIMARY KEY ("id") -// ); - -// CREATE TABLE -// "post_likes" ( -// "post_id" SERIAL NOT NULL, -// "account_id" VARCHAR NOT NULL, -// "block_height" DECIMAL(58, 0), -// "block_timestamp" DECIMAL(20, 0) NOT NULL, -// "receipt_id" VARCHAR NOT NULL, -// CONSTRAINT "post_likes_pkey" PRIMARY KEY ("post_id", "account_id") -// );`; - -// const STRESS_TEST_SCHEMA = ` -// CREATE TABLE creator_quest ( -// account_id VARCHAR PRIMARY KEY, -// num_components_created INTEGER NOT NULL DEFAULT 0, -// completed BOOLEAN NOT NULL DEFAULT FALSE -// ); - -// CREATE TABLE -// composer_quest ( -// account_id VARCHAR PRIMARY KEY, -// 
num_widgets_composed INTEGER NOT NULL DEFAULT 0,
-//     completed BOOLEAN NOT NULL DEFAULT FALSE
-//   );
-//
-// CREATE TABLE
-//   "contractor - quest" (
-//     account_id VARCHAR PRIMARY KEY,
-//     num_contracts_deployed INTEGER NOT NULL DEFAULT 0,
-//     completed BOOLEAN NOT NULL DEFAULT FALSE
-//   );
-//
-// CREATE TABLE
-//   "posts" (
-//     "id" SERIAL NOT NULL,
-//     "account_id" VARCHAR NOT NULL,
-//     "block_height" DECIMAL(58, 0) NOT NULL,
-//     "receipt_id" VARCHAR NOT NULL,
-//     "content" TEXT NOT NULL,
-//     "block_timestamp" DECIMAL(20, 0) NOT NULL,
-//     "accounts_liked" JSONB NOT NULL DEFAULT '[]',
-//     "last_comment_timestamp" DECIMAL(20, 0),
-//     CONSTRAINT "posts_pkey" PRIMARY KEY ("id")
-//   );
-//
-// CREATE TABLE
-//   "comments" (
-//     "id" SERIAL NOT NULL,
-//     "post_id" SERIAL NOT NULL,
-//     "account_id" VARCHAR NOT NULL,
-//     "block_height" DECIMAL(58, 0) NOT NULL,
-//     "content" TEXT NOT NULL,
-//     "block_timestamp" DECIMAL(20, 0) NOT NULL,
-//     "receipt_id" VARCHAR NOT NULL,
-//     CONSTRAINT "comments_pkey" PRIMARY KEY ("id")
-//   );
-//
-// CREATE TABLE
-//   "post_likes" (
-//     "post_id" SERIAL NOT NULL,
-//     "account_id" VARCHAR NOT NULL,
-//     "block_height" DECIMAL(58, 0),
-//     "block_timestamp" DECIMAL(20, 0) NOT NULL,
-//     "receipt_id" VARCHAR NOT NULL,
-//     CONSTRAINT "post_likes_pkey" PRIMARY KEY ("post_id", "account_id")
-//   );
-//
-// CREATE UNIQUE INDEX "posts_account_id_block_height_key" ON "posts" ("account_id" ASC, "block_height" ASC);
-//
-// CREATE UNIQUE INDEX "comments_post_id_account_id_block_height_key" ON "comments" (
-//   "post_id" ASC,
-//   "account_id" ASC,
-//   "block_height" ASC
-// );
-//
-// CREATE INDEX
-//   "posts_last_comment_timestamp_idx" ON "posts" ("last_comment_timestamp" DESC);
-//
-// ALTER TABLE
-//   "comments"
-// ADD
-//   CONSTRAINT "comments_post_id_fkey" FOREIGN KEY ("post_id") REFERENCES "posts" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION;
-//
-// ALTER TABLE
-//   "post_likes"
-// ADD
-//   CONSTRAINT "post_likes_post_id_fkey" FOREIGN KEY ("post_id") REFERENCES "posts" ("id") ON DELETE CASCADE ON UPDATE NO ACTION;
-//
-// CREATE TABLE IF NOT EXISTS
-//   "My Table1" (id serial PRIMARY KEY);
-//
-// CREATE TABLE
-//   "Another-Table" (id serial PRIMARY KEY);
-//
-// CREATE TABLE
-// IF NOT EXISTS
-//   "Third-Table" (id serial PRIMARY KEY);
-//
-// CREATE TABLE
-//   yet_another_table (id serial PRIMARY KEY);
-// `;
-//   const genericMockFetch = jest.fn()
-//     .mockResolvedValue({
-//       status: 200,
-//       json: async () => ({
-//         data: 'mock',
-//       }),
-//     });
-//
-//   const transparentRedis = {
-//     getStreamerMessage: jest.fn()
-//   } as unknown as RedisClient;
-//
-//   beforeEach(() => {
-//     process.env = {
-//       ...oldEnv,
-//       HASURA_ENDPOINT,
-//       HASURA_ADMIN_SECRET
-//     };
-//   });
-//
-//   afterAll(() => {
-//     process.env = oldEnv;
-//   });
-//
-//   test('Indexer.runFunctions() should execute all functions against the current block', async () => {
-//     const mockFetch = jest.fn(() => ({
-//       status: 200,
-//       json: async () => ({
-//         errors: null,
-//       }),
-//     }));
-//     const blockHeight = 456;
-//     const mockData = jest.fn().mockResolvedValue(
-//       JSON.stringify(
-//         {
-//           block: {
-//             chunks: [],
-//             header: {
-//               height: blockHeight
-//             }
-//           },
-//           shards: {}
-//         }
-//       )
-//     );
-//     const mockRedis = {
-//       getStreamerMessage: mockData
-//     } as unknown as RedisClient;
-//
-//     const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: mockRedis });
-//
-//     const functions: Record<string, any> = {};
-//     functions['buildnear.testnet/test'] = {
-//       code: `
-//         const foo = 3;
-//         block.result = 
context.graphql(\`mutation { set(functionName: "buildnear.testnet/test", key: "height", data: "\${block.blockHeight}")}\`); -// `, -// schema: SIMPLE_SCHEMA -// }; -// await indexer.runFunctions(blockHeight, functions, false); - -// expect(mockFetch.mock.calls).toMatchSnapshot(); -// }); - -// test('Indexer.fetchBlock() should fetch a block from S3', async () => { -// const author = 'dokiacapital.poolv1.near'; -// const mockData = JSON.stringify({ -// author -// }); -// const mockSend = jest.fn().mockResolvedValue({ -// Body: { -// transformToString: () => mockData -// } -// }); -// const mockS3 = { -// send: mockSend, -// } as unknown as S3Client; - -// const indexer = new Indexer('mainnet', { s3: mockS3, redisClient: transparentRedis }); - -// const blockHeight = 84333960; -// const block = await indexer.fetchBlockPromise(blockHeight); -// const params = { -// Bucket: 'near-lake-data-mainnet', -// Key: `${blockHeight.toString().padStart(12, '0')}/block.json` -// }; - -// expect(mockS3.send).toHaveBeenCalledTimes(1); -// expect(JSON.stringify(mockSend.mock.calls[0][0])).toMatch(JSON.stringify(new GetObjectCommand(params))); -// expect(block.author).toEqual(author); -// }); - -// test('Indexer.fetchShard() should fetch a shard from S3', async () => { -// const mockData = JSON.stringify({}); -// const mockSend = jest.fn().mockResolvedValue({ -// Body: { -// transformToString: () => mockData -// } -// }); -// const mockS3 = { -// send: mockSend, -// } as unknown as S3Client; -// const indexer = new Indexer('mainnet', { s3: mockS3, redisClient: transparentRedis }); - -// const blockHeight = 82699904; -// const shard = 0; -// const params = { -// Bucket: 'near-lake-data-mainnet', -// Key: `${blockHeight.toString().padStart(12, '0')}/shard_${shard}.json` -// }; -// await indexer.fetchShardPromise(blockHeight, shard); - -// expect(JSON.stringify(mockSend.mock.calls[0][0])).toMatch(JSON.stringify(new GetObjectCommand(params))); -// }); - -// test('Indexer.fetchStreamerMessage() should fetch the message from cache and use it directly', async () => { -// const blockHeight = 85233529; -// const blockHash = 'xyz'; -// const getMessage = jest.fn() -// .mockReturnValueOnce(JSON.stringify( -// { -// block: { -// chunks: [0], -// header: { -// height: blockHeight, -// hash: blockHash, -// } -// }, -// shards: {} -// } -// )); -// const mockRedis = { -// getStreamerMessage: getMessage -// } as unknown as RedisClient; -// const indexer = new Indexer('mainnet', { redisClient: mockRedis }); - -// const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, false); - -// expect(getMessage).toHaveBeenCalledTimes(1); -// expect(JSON.stringify(getMessage.mock.calls[0])).toEqual( -// `[${blockHeight}]` -// ); -// const block = Block.fromStreamerMessage(streamerMessage); - -// expect(block.blockHeight).toEqual(blockHeight); -// expect(block.blockHash).toEqual(blockHash); -// }); - -// test('Indexer.fetchStreamerMessage() should fetch the block and shards from S3 upon cache miss', async () => { -// const blockHeight = 85233529; -// const blockHash = 'xyz'; -// const mockSend = jest.fn() -// .mockReturnValueOnce({ // block -// Body: { -// transformToString: () => JSON.stringify({ -// chunks: [0], -// header: { -// height: blockHeight, -// hash: blockHash, -// } -// }) -// } -// }) -// .mockReturnValue({ // shard -// Body: { -// transformToString: () => JSON.stringify({}) -// } -// }); -// const mockS3 = { -// send: mockSend, -// } as unknown as S3Client; -// const indexer = new Indexer('mainnet', { s3: 
mockS3, redisClient: transparentRedis }); - -// const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, false); - -// expect(mockSend).toHaveBeenCalledTimes(5); -// expect(JSON.stringify(mockSend.mock.calls[0][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({ -// Bucket: 'near-lake-data-mainnet', -// Key: `${blockHeight.toString().padStart(12, '0')}/block.json` -// }))); -// expect(JSON.stringify(mockSend.mock.calls[1][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({ -// Bucket: 'near-lake-data-mainnet', -// Key: `${blockHeight.toString().padStart(12, '0')}/shard_0.json` -// }))); -// expect(transparentRedis.getStreamerMessage).toHaveBeenCalledTimes(1); - -// const block = Block.fromStreamerMessage(streamerMessage); - -// expect(block.blockHeight).toEqual(blockHeight); -// expect(block.blockHash).toEqual(blockHash); -// }); - -// test('Indexer.fetchStreamerMessage() should fetch the block and shards from S3 and not cache and construct the streamer message if historical', async () => { -// const blockHeight = 85233529; -// const blockHash = 'xyz'; -// const mockSend = jest.fn() -// .mockReturnValueOnce({ // block -// Body: { -// transformToString: () => JSON.stringify({ -// chunks: [0], -// header: { -// height: blockHeight, -// hash: blockHash, -// } -// }) -// } -// }) -// .mockReturnValue({ // shard -// Body: { -// transformToString: () => JSON.stringify({}) -// } -// }); -// const mockS3 = { -// send: mockSend, -// } as unknown as S3Client; -// const mockRedis = { -// getStreamerMessage: jest.fn() -// } as unknown as RedisClient; -// const indexer = new Indexer('mainnet', { s3: mockS3, redisClient: mockRedis }); - -// const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, true); - -// expect(mockSend).toHaveBeenCalledTimes(5); -// expect(JSON.stringify(mockSend.mock.calls[0][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({ -// Bucket: 'near-lake-data-mainnet', -// Key: `${blockHeight.toString().padStart(12, '0')}/block.json` -// }))); -// expect(JSON.stringify(mockSend.mock.calls[1][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({ -// Bucket: 'near-lake-data-mainnet', -// Key: `${blockHeight.toString().padStart(12, '0')}/shard_0.json` -// }))); -// expect(mockRedis.getStreamerMessage).toHaveBeenCalledTimes(0); - -// const block = Block.fromStreamerMessage(streamerMessage); - -// expect(block.blockHeight).toEqual(blockHeight); -// expect(block.blockHash).toEqual(blockHash); -// }); - -// test('Indexer.transformIndexerFunction() applies the necessary transformations', () => { -// const indexer = new Indexer('mainnet', { redisClient: transparentRedis }); - -// const transformedFunction = indexer.transformIndexerFunction('console.log(\'hello\')'); - -// expect(transformedFunction).toEqual(` -// async function f(){ -// console.log('hello') -// }; -// f(); -// `); -// }); - -// test('Indexer.buildContext() allows execution of arbitrary GraphQL operations', async () => { -// const mockFetch = jest.fn() -// .mockResolvedValueOnce({ -// status: 200, -// json: async () => ({ -// data: { -// greet: 'hello' -// } -// }) -// }) -// .mockResolvedValueOnce({ -// status: 200, -// json: async () => ({ -// data: { -// newGreeting: { -// success: true -// } -// } -// }) -// }); -// const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); - -// const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE); - -// const query = ` -// query { -// greet() -// } -// 
`; -// const { greet } = await context.graphql(query) as { greet: string }; - -// const mutation = ` -// mutation { -// newGreeting(greeting: "${greet} morgan") { -// success -// } -// } -// `; -// const { newGreeting: { success } } = await context.graphql(mutation); - -// expect(greet).toEqual('hello'); -// expect(success).toEqual(true); -// expect(mockFetch.mock.calls[0]).toEqual([ -// `${HASURA_ENDPOINT}/v1/graphql`, -// { -// method: 'POST', -// headers: { -// 'Content-Type': 'application/json', -// 'X-Hasura-Use-Backend-Only-Permissions': 'true', -// 'X-Hasura-Role': 'morgs_near', -// 'X-Hasura-Admin-Secret': HASURA_ADMIN_SECRET -// }, -// body: JSON.stringify({ query }) -// } -// ]); -// expect(mockFetch.mock.calls[1]).toEqual([ -// `${HASURA_ENDPOINT}/v1/graphql`, -// { -// method: 'POST', -// headers: { -// 'Content-Type': 'application/json', -// 'X-Hasura-Use-Backend-Only-Permissions': 'true', -// 'X-Hasura-Role': 'morgs_near', -// 'X-Hasura-Admin-Secret': HASURA_ADMIN_SECRET -// }, -// body: JSON.stringify({ query: mutation }) -// } -// ]); -// }); - -// test('Indexer.buildContext() can fetch from the near social api', async () => { -// const mockFetch = jest.fn(); -// const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); - -// const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE); - -// await context.fetchFromSocialApi('/index', { -// method: 'POST', -// headers: { -// 'Content-Type': 'application/json', -// }, -// body: JSON.stringify({ -// action: 'post', -// key: 'main', -// options: { -// limit: 1, -// order: 'desc' -// } -// }) -// }); - -// expect(mockFetch.mock.calls).toMatchSnapshot(); -// }); - -// test('Indexer.buildContext() throws when a GraphQL response contains errors', async () => { -// const mockFetch = jest.fn() -// .mockResolvedValue({ -// json: async () => ({ -// errors: ['boom'] -// }) -// }); -// const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); - -// const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, INVALID_HASURA_ROLE); - -// await expect(async () => await context.graphql('query { hello }')).rejects.toThrow('boom'); -// }); - -// test('Indexer.buildContext() handles GraphQL variables', async () => { -// const mockFetch = jest.fn() -// .mockResolvedValue({ -// status: 200, -// json: async () => ({ -// data: 'mock', -// }), -// }); -// const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); - -// const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE); - -// const query = 'query($name: String) { hello(name: $name) }'; -// const variables = { name: 'morgan' }; -// await context.graphql(query, variables); - -// expect(mockFetch.mock.calls[0]).toEqual([ -// `${HASURA_ENDPOINT}/v1/graphql`, -// { -// method: 'POST', -// headers: { -// 'Content-Type': 'application/json', -// 'X-Hasura-Use-Backend-Only-Permissions': 'true', -// 'X-Hasura-Role': 'morgs_near', -// 'X-Hasura-Admin-Secret': HASURA_ADMIN_SECRET -// }, -// body: JSON.stringify({ -// query, -// variables, -// }), -// }, -// ]); -// }); - -// test('GetTables works for a variety of input schemas', async () => { -// const indexer = new Indexer('mainnet', { redisClient: transparentRedis }); - -// const simpleSchemaTables = indexer.getTableNames(SIMPLE_SCHEMA); -// expect(simpleSchemaTables).toStrictEqual(['posts']); - -// const 
socialSchemaTables = indexer.getTableNames(SOCIAL_SCHEMA); -// expect(socialSchemaTables).toStrictEqual(['posts', 'comments', 'post_likes']); - -// const stressTestSchemaTables = indexer.getTableNames(STRESS_TEST_SCHEMA); -// expect(stressTestSchemaTables).toStrictEqual([ -// 'creator_quest', -// 'composer_quest', -// 'contractor - quest', -// 'posts', -// 'comments', -// 'post_likes', -// 'My Table1', -// 'Another-Table', -// 'Third-Table', -// 'yet_another_table']); - -// // Test that duplicate table names throw an error -// const duplicateTableSchema = `CREATE TABLE -// "posts" ( -// "id" SERIAL NOT NULL -// ); -// CREATE TABLE posts ( -// "id" SERIAL NOT NULL -// );`; -// expect(() => { -// indexer.getTableNames(duplicateTableSchema); -// }).toThrow('Table posts already exists in schema. Table names must be unique. Quotes are not allowed as a differentiator between table names.'); - -// // Test that schema with no tables throws an error -// expect(() => { -// indexer.getTableNames(''); -// }).toThrow('Schema does not have any tables. There should be at least one table.'); -// }); - -// test('SanitizeTableName works properly on many test cases', async () => { -// const indexer = new Indexer('mainnet', { redisClient: transparentRedis }); - -// expect(indexer.sanitizeTableName('table_name')).toStrictEqual('TableName'); -// expect(indexer.sanitizeTableName('tablename')).toStrictEqual('Tablename'); // name is not capitalized -// expect(indexer.sanitizeTableName('table name')).toStrictEqual('TableName'); -// expect(indexer.sanitizeTableName('table!name!')).toStrictEqual('TableName'); -// expect(indexer.sanitizeTableName('123TABle')).toStrictEqual('_123TABle'); // underscore at beginning -// expect(indexer.sanitizeTableName('123_tABLE')).toStrictEqual('_123TABLE'); // underscore at beginning, capitalization -// expect(indexer.sanitizeTableName('some-table_name')).toStrictEqual('SomeTableName'); -// expect(indexer.sanitizeTableName('!@#$%^&*()table@)*&(%#')).toStrictEqual('Table'); // All special characters removed -// expect(indexer.sanitizeTableName('T_name')).toStrictEqual('TName'); -// expect(indexer.sanitizeTableName('_table')).toStrictEqual('Table'); // Starting underscore was removed -// }); - -// test('indexer fails to build context.db due to collision on sanitized table names', async () => { -// const indexer = new Indexer('mainnet', { redisClient: transparentRedis }); - -// const schemaWithDuplicateSanitizedTableNames = `CREATE TABLE -// "test table" ( -// "id" SERIAL NOT NULL -// ); -// CREATE TABLE "test!table" ( -// "id" SERIAL NOT NULL -// );`; - -// // Does not outright throw an error but instead returns an empty object -// expect(indexer.buildDatabaseContext('test_account', 'test_schema_name', schemaWithDuplicateSanitizedTableNames, 1)) -// .toStrictEqual({}); -// }); - -// test('indexer builds context and inserts an objects into existing table', async () => { -// const mockDmlHandler: any = { -// create: jest.fn().mockImplementation(() => { -// return { insert: jest.fn().mockReturnValue([{ colA: 'valA' }, { colA: 'valA' }]) }; -// }) -// }; - -// const indexer = new Indexer('mainnet', { -// fetch: genericMockFetch as unknown as typeof fetch, -// redisClient: transparentRedis, -// DmlHandler: mockDmlHandler -// }); -// const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); - -// const objToInsert = [{ -// account_id: 'morgs_near', -// block_height: 1, -// receipt_id: 'abc', -// content: 'test', -// block_timestamp: 800, -// 
accounts_liked: JSON.stringify(['cwpuzzles.near', 'devbose.near']) -// }, -// { -// account_id: 'morgs_near', -// block_height: 2, -// receipt_id: 'abc', -// content: 'test', -// block_timestamp: 801, -// accounts_liked: JSON.stringify(['cwpuzzles.near']) -// }]; - -// const result = await context.db.Posts.insert(objToInsert); -// expect(result.length).toEqual(2); -// }); - -// test('indexer builds context and selects objects from existing table', async () => { -// const selectFn = jest.fn(); -// selectFn.mockImplementation((...args) => { -// // Expects limit to be last parameter -// return args[args.length - 1] === null ? [{ colA: 'valA' }, { colA: 'valA' }] : [{ colA: 'valA' }]; -// }); -// const mockDmlHandler: any = { -// create: jest.fn().mockImplementation(() => { -// return { select: selectFn }; -// }) -// }; - -// const indexer = new Indexer('mainnet', { -// fetch: genericMockFetch as unknown as typeof fetch, -// redisClient: transparentRedis, -// DmlHandler: mockDmlHandler -// }); -// const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); - -// const objToSelect = { -// account_id: 'morgs_near', -// receipt_id: 'abc', -// }; -// const result = await context.db.Posts.select(objToSelect); -// expect(result.length).toEqual(2); -// const resultLimit = await context.db.Posts.select(objToSelect, 1); -// expect(resultLimit.length).toEqual(1); -// }); - -// test('indexer builds context and updates multiple objects from existing table', async () => { -// const mockDmlHandler: any = { -// create: jest.fn().mockImplementation(() => { -// return { -// update: jest.fn().mockImplementation((_, __, whereObj, updateObj) => { -// if (whereObj.account_id === 'morgs_near' && updateObj.content === 'test_content') { -// return [{ colA: 'valA' }, { colA: 'valA' }]; -// } -// return [{}]; -// }) -// }; -// }) -// }; - -// const indexer = new Indexer('mainnet', { -// fetch: genericMockFetch as unknown as typeof fetch, -// redisClient: transparentRedis, -// DmlHandler: mockDmlHandler -// }); -// const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); - -// const whereObj = { -// account_id: 'morgs_near', -// receipt_id: 'abc', -// }; -// const updateObj = { -// content: 'test_content', -// block_timestamp: 805, -// }; -// const result = await context.db.Posts.update(whereObj, updateObj); -// expect(result.length).toEqual(2); -// }); - -// test('indexer builds context and upserts on existing table', async () => { -// const mockDmlHandler: any = { -// create: jest.fn().mockImplementation(() => { -// return { -// upsert: jest.fn().mockImplementation((_, __, objects, conflict, update) => { -// if (objects.length === 2 && conflict.includes('account_id') && update.includes('content')) { -// return [{ colA: 'valA' }, { colA: 'valA' }]; -// } else if (objects.length === 1 && conflict.includes('account_id') && update.includes('content')) { -// return [{ colA: 'valA' }]; -// } -// return [{}]; -// }) -// }; -// }) -// }; - -// const indexer = new Indexer('mainnet', { -// fetch: genericMockFetch as unknown as typeof fetch, -// redisClient: transparentRedis, -// DmlHandler: mockDmlHandler -// }); -// const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); - -// const objToInsert = [{ -// account_id: 'morgs_near', -// block_height: 1, -// receipt_id: 'abc', -// content: 'test', -// block_timestamp: 800, -// accounts_liked: JSON.stringify(['cwpuzzles.near', 'devbose.near']) -// }, -// { -// account_id: 
'morgs_near', -// block_height: 2, -// receipt_id: 'abc', -// content: 'test', -// block_timestamp: 801, -// accounts_liked: JSON.stringify(['cwpuzzles.near']) -// }]; - -// let result = await context.db.Posts.upsert(objToInsert, ['account_id', 'block_height'], ['content', 'block_timestamp']); -// expect(result.length).toEqual(2); -// result = await context.db.Posts.upsert(objToInsert[0], ['account_id', 'block_height'], ['content', 'block_timestamp']); -// expect(result.length).toEqual(1); -// }); - -// test('indexer builds context and deletes objects from existing table', async () => { -// const mockDmlHandler: any = { -// create: jest.fn().mockImplementation(() => { -// return { delete: jest.fn().mockReturnValue([{ colA: 'valA' }, { colA: 'valA' }]) }; -// }) -// }; - -// const indexer = new Indexer('mainnet', { -// fetch: genericMockFetch as unknown as typeof fetch, -// redisClient: transparentRedis, -// DmlHandler: mockDmlHandler -// }); -// const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); - -// const deleteFilter = { -// account_id: 'morgs_near', -// receipt_id: 'abc', -// }; -// const result = await context.db.Posts.delete(deleteFilter); -// expect(result.length).toEqual(2); -// }); - -// test('indexer builds context and verifies all methods generated', async () => { -// const mockDmlHandler: any = { -// create: jest.fn() -// }; - -// const indexer = new Indexer('mainnet', { -// fetch: genericMockFetch as unknown as typeof fetch, -// redisClient: transparentRedis, -// DmlHandler: mockDmlHandler -// }); -// const context = indexer.buildContext(STRESS_TEST_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); - -// expect(Object.keys(context.db)).toStrictEqual([ -// 'CreatorQuest', -// 'ComposerQuest', -// 'ContractorQuest', -// 'Posts', -// 'Comments', -// 'PostLikes', -// 'MyTable1', -// 'AnotherTable', -// 'ThirdTable', -// 'YetAnotherTable']); -// expect(Object.keys(context.db.CreatorQuest)).toStrictEqual([ -// 'insert', -// 'select', -// 'update', -// 'upsert', -// 'delete']); -// expect(Object.keys(context.db.PostLikes)).toStrictEqual([ -// 'insert', -// 'select', -// 'update', -// 'upsert', -// 'delete']); -// expect(Object.keys(context.db.MyTable1)).toStrictEqual([ -// 'insert', -// 'select', -// 'update', -// 'upsert', -// 'delete']); -// }); - -// test('indexer builds context and returns empty array if failed to generate db methods', async () => { -// const mockDmlHandler: any = { -// create: jest.fn() -// }; - -// const indexer = new Indexer('mainnet', { -// fetch: genericMockFetch as unknown as typeof fetch, -// redisClient: transparentRedis, -// DmlHandler: mockDmlHandler -// }); -// const context = indexer.buildContext('', 'morgs.near/social_feed1', 1, 'postgres'); - -// expect(Object.keys(context.db)).toStrictEqual([]); -// }); - -// test('Indexer.runFunctions() allows imperative execution of GraphQL operations', async () => { -// const postId = 1; -// const commentId = 2; -// const blockHeight = 82699904; -// const mockFetch = jest.fn() -// .mockReturnValueOnce({ // starting log -// status: 200, -// json: async () => ({ -// data: { -// indexer_log_store: [ -// { -// id: '12345', -// }, -// ], -// }, -// }), -// }) -// .mockReturnValueOnce({ -// status: 200, -// json: async () => ({ -// errors: null, -// }), -// }) -// .mockReturnValueOnce({ // query -// status: 200, -// json: async () => ({ -// data: { -// posts: [ -// { -// id: postId, -// }, -// ], -// }, -// }), -// }) -// .mockReturnValueOnce({ // mutation -// status: 200, 
-//       json: async () => ({
-//         data: {
-//           insert_comments: {
-//             returning: {
-//               id: commentId,
-//             },
-//           },
-//         },
-//       }),
-//     })
-//     .mockReturnValueOnce({
-//       status: 200,
-//       json: async () => ({
-//         errors: null,
-//       }),
-//     });
-//
-//     const mockS3 = {
-//       send: jest.fn()
-//         .mockResolvedValueOnce({ // block
-//           Body: {
-//             transformToString: () => JSON.stringify({
-//               chunks: [0],
-//               header: {
-//                 height: blockHeight,
-//               },
-//             }),
-//           },
-//         })
-//         .mockResolvedValue({ // shard
-//           Body: {
-//             transformToString: () => JSON.stringify({})
-//           },
-//         }),
-//     } as unknown as S3Client;
-//     const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis });
-//
-//     const functions: Record<string, any> = {};
-//     functions['buildnear.testnet/test'] = {
-//       code: `
-//         const { posts } = await context.graphql(\`
-//           query {
-//             posts(where: { id: { _eq: 1 } }) {
-//               id
-//             }
-//           }
-//         \`);
-//
-//         if (!posts || posts.length === 0) {
-//           return;
-//         }
-//
-//         const [post] = posts;
-//
-//         const { insert_comments: { returning: { id } } } = await context.graphql(\`
-//           mutation {
-//             insert_comments(
-//               objects: {account_id: "morgs.near", block_height: \${block.blockHeight}, content: "cool post", post_id: \${post.id}}
-//             ) {
-//               returning {
-//                 id
-//               }
-//             }
-//           }
-//         \`);
-//
-//         return (\`Created comment \${id} on post \${post.id}\`)
-//       `,
-//       schema: SIMPLE_SCHEMA
-//     };
-//
-//     await indexer.runFunctions(blockHeight, functions, false);
-//
-//     expect(mockFetch.mock.calls).toMatchSnapshot();
-//   });
-//
-//   test('Indexer.runFunctions() console.logs', async () => {
-//     const logs: string[] = [];
-//     const context = {
-//       log: (...m: string[]) => {
-//         logs.push(...m);
-//       }
-//     };
-//     const vm = new VM();
-//     vm.freeze(context, 'context');
-//     vm.freeze(context, 'console');
-//     await vm.run('console.log("hello", "brave new"); context.log("world")');
-//     expect(logs).toEqual(['hello', 'brave new', 'world']);
-//   });
-//
-//   test('Errors thrown in VM can be caught outside the VM', async () => {
-//     const vm = new VM();
-//     expect(() => {
-//       vm.run("throw new Error('boom')");
-//     }).toThrow('boom');
-//   });
-//
-//   test('Indexer.runFunctions() catches errors', async () => {
-//     const mockFetch = jest.fn(() => ({
-//       status: 200,
-//       json: async () => ({
-//         errors: null,
-//       }),
-//     }));
-//     const blockHeight = 456;
-//     const mockS3 = {
-//       send: jest.fn().mockResolvedValue({
-//         Body: {
-//           transformToString: () => JSON.stringify({
-//             chunks: [],
-//             header: {
-//               height: blockHeight
-//             }
-//           })
-//         }
-//       }),
-//     } as unknown as S3Client;
-//     const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis });
-//
-//     const functions: Record<string, any> = {};
-//     functions['buildnear.testnet/test'] = {
-//       code: `
-//         throw new Error('boom');
-//       `,
-//       schema: SIMPLE_SCHEMA
-//     };
-//
-//     await expect(indexer.runFunctions(blockHeight, functions, false)).rejects.toThrow(new Error('boom'));
-//     expect(mockFetch.mock.calls).toMatchSnapshot();
-//   });
-//
-//   test('Indexer.runFunctions() provisions a GraphQL endpoint with the specified schema', async () => {
-//     const blockHeight = 82699904;
-//     const mockFetch = jest.fn(() => ({
-//       status: 200,
-//       json: async () => ({
-//         errors: null,
-//       }),
-//     }));
-//     const mockS3 = {
-//       send: jest
-//         .fn()
-//         .mockResolvedValueOnce({ // block
-//           Body: {
-//             transformToString: () => JSON.stringify({
-//               chunks: [0],
-//               header: {
-//                 height: blockHeight,
-//               },
-//             }),
-//           },
-//         })
-//         .mockResolvedValue({ // shard
-//           Body: {
-//             transformToString: () => JSON.stringify({})
-//           },
-//         }),
-//     } as unknown as S3Client;
-//     const provisioner: any = {
-//       isUserApiProvisioned: jest.fn().mockReturnValue(false),
-//       provisionUserApi: jest.fn(),
-//     };
-//     const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis, provisioner });
-//
-//     const functions = {
-//       'morgs.near/test': {
-//         account_id: 'morgs.near',
-//         function_name: 'test',
-//         code: '',
-//         schema: SIMPLE_SCHEMA,
-//       }
-//     };
-//     await indexer.runFunctions(1, functions, false, { provision: true });
-//
-//     expect(provisioner.isUserApiProvisioned).toHaveBeenCalledWith('morgs.near', 'test');
-//     expect(provisioner.provisionUserApi).toHaveBeenCalledTimes(1);
-//     expect(provisioner.provisionUserApi).toHaveBeenCalledWith(
-//       'morgs.near',
-//       'test',
-//       SIMPLE_SCHEMA
-//     );
-//   });
-//
-//   test('Indexer.runFunctions() skips provisioning if the endpoint exists', async () => {
-//     const blockHeight = 82699904;
-//     const mockFetch = jest.fn(() => ({
-//       status: 200,
-//       json: async () => ({
-//         errors: null,
-//       }),
-//     }));
-//     const mockS3 = {
-//       send: jest
-//         .fn()
-//         .mockResolvedValueOnce({ // block
-//           Body: {
-//             transformToString: () => JSON.stringify({
-//               chunks: [0],
-//               header: {
-//                 height: blockHeight,
-//               },
-//             }),
-//           },
-//         })
-//         .mockResolvedValue({ // shard
-//           Body: {
-//             transformToString: () => JSON.stringify({})
-//           },
-//         }),
-//     } as unknown as S3Client;
-//     const provisioner: any = {
-//       isUserApiProvisioned: jest.fn().mockReturnValue(true),
-//       provisionUserApi: jest.fn(),
-//     };
-//     const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis, provisioner });
-//
-//     const functions: Record<string, any> = {
-//       'morgs.near/test': {
-//         code: '',
-//         schema: SIMPLE_SCHEMA,
-//       }
-//     };
-//     await indexer.runFunctions(1, functions, false, { provision: true });
-//
-//     expect(provisioner.provisionUserApi).not.toHaveBeenCalled();
-//   });
-//
-//   test('Indexer.runFunctions() supplies the required role to the GraphQL endpoint', async () => {
-//     const blockHeight = 82699904;
-//     const mockFetch = jest.fn(() => ({
-//       status: 200,
-//       json: async () => ({
-//         errors: null,
-//       }),
-//     }));
-//     const mockS3 = {
-//       send: jest
-//         .fn()
-//         .mockResolvedValueOnce({ // block
-//           Body: {
-//             transformToString: () => JSON.stringify({
-//               chunks: [0],
-//               header: {
-//                 height: blockHeight,
-//               },
-//             }),
-//           },
-//         })
-//         .mockResolvedValue({ // shard
-//           Body: {
-//             transformToString: () => JSON.stringify({})
-//           },
-//         }),
-//     } as unknown as S3Client;
-//     const provisioner: any = {
-//       isUserApiProvisioned: jest.fn().mockReturnValue(true),
-//       provisionUserApi: jest.fn(),
-//     };
-//     const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis, provisioner });
-//
-//     const functions: Record<string, any> = {
-//       'morgs.near/test': {
-//         code: `
-//           context.graphql(\`mutation { set(functionName: "buildnear.testnet/test", key: "height", data: "\${block.blockHeight}")}\`);
-//         `,
-//         schema: SIMPLE_SCHEMA,
-//       }
-//     };
-//     await indexer.runFunctions(blockHeight, functions, false, { provision: true });
-//
-//     expect(provisioner.provisionUserApi).not.toHaveBeenCalled();
-//     expect(mockFetch.mock.calls).toMatchSnapshot();
-//   });
-//
-//   test('Indexer.runFunctions() logs provisioning failures', async () => {
-//     const blockHeight = 82699904;
-//     const mockFetch = jest.fn(() => ({
-//       status: 200,
-//       json: async () => ({
-//         errors: null,
-//       }),
-//     }));
-//     const mockS3 = {
-//       send: jest
-//         .fn()
-//         .mockResolvedValueOnce({ // block
-//           Body: {
-//             transformToString: () => JSON.stringify({
-//               chunks: [0],
-//               header: {
-//                 height: blockHeight,
-//               },
-//             }),
-//           },
-//         })
-//         .mockResolvedValue({ // shard
-//           Body: {
-//             transformToString: () => JSON.stringify({})
-//           },
-//         }),
-//     } as unknown as S3Client;
-//     const error = new Error('something went wrong with provisioning');
-//     const provisioner: any = {
-//       isUserApiProvisioned: jest.fn().mockReturnValue(false),
-//       provisionUserApi: jest.fn().mockRejectedValue(error),
-//     };
-//     const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3: mockS3, redisClient: transparentRedis, provisioner });
-//
-//     const functions: Record<string, any> = {
-//       'morgs.near/test': {
-//         code: `
-//           context.graphql(\`mutation { set(functionName: "buildnear.testnet/test", key: "height", data: "\${block.blockHeight}")}\`);
-//         `,
-//         schema: 'schema',
-//       }
-//     };
-//
-//     await expect(indexer.runFunctions(blockHeight, functions, false, { provision: true })).rejects.toThrow(error);
-//     expect(mockFetch.mock.calls).toMatchSnapshot();
-//   });
-//
-//   test('does not attach the hasura admin secret header when no role specified', async () => {
-//     const mockFetch = jest.fn()
-//       .mockResolvedValueOnce({
-//         status: 200,
-//         json: async () => ({
-//           data: {}
-//         })
-//       });
-//     const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis });
-//     // @ts-expect-error legacy test
-//     const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, null);
-//
-//     const mutation = `
-//       mutation {
-//         newGreeting(greeting: "howdy") {
-//           success
-//         }
-//       }
-//     `;
-//
-//     await context.graphql(mutation);
-//
-//     expect(mockFetch.mock.calls[0]).toEqual([
-//       `${HASURA_ENDPOINT}/v1/graphql`,
-//       {
-//         method: 'POST',
-//         headers: {
-//           'Content-Type': 'application/json',
-//           'X-Hasura-Use-Backend-Only-Permissions': 'true',
-//         },
-//         body: JSON.stringify({ query: mutation })
-//       }
-//     ]);
-//   });
-//
-//   test('attaches the backend only header to requests to hasura', async () => {
-//     const mockFetch = jest.fn()
-//       .mockResolvedValueOnce({
-//         status: 200,
-//         json: async () => ({
-//           data: {}
-//         })
-//       });
-//     const role = 'morgs_near';
-//     const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis });
-//     const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE);
-//
-//     const mutation = `
-//       mutation {
-//         newGreeting(greeting: "howdy") {
-//           success
-//         }
-//       }
-//     `;
-//
-//     await context.graphql(mutation);
-//
-//     expect(mockFetch.mock.calls[0]).toEqual([
-//       `${HASURA_ENDPOINT}/v1/graphql`,
-//       {
-//         method: 'POST',
-//         headers: {
-//           'Content-Type': 'application/json',
-//           'X-Hasura-Use-Backend-Only-Permissions': 'true',
-//           'X-Hasura-Role': role,
-//           'X-Hasura-Admin-Secret': HASURA_ADMIN_SECRET
-//         },
-//         body: JSON.stringify({ query: mutation })
-//       }
-//     ]);
-//   });
-// });
+import { Block } from '@near-lake/primitives';
+import type fetch from 'node-fetch';
+import type RedisClient from '../redis-client';
+
+import Indexer from './indexer';
+import { VM } from 'vm2';
+import type S3StreamerMessageFetcher from '../streamer-message-fetcher/s3-streamer-fetcher';
+
+describe('Indexer unit tests', () => {
+  const oldEnv = process.env;
+ + const HASURA_ENDPOINT = 'mock-hasura-endpoint'; + const HASURA_ADMIN_SECRET = 'mock-hasura-secret'; + const HASURA_ROLE = 'morgs_near'; + const INVALID_HASURA_ROLE = 'other_near'; + + const INDEXER_NAME = 'morgs.near/test_fn'; + + const SIMPLE_SCHEMA = `CREATE TABLE + "posts" ( + "id" SERIAL NOT NULL, + "account_id" VARCHAR NOT NULL, + "block_height" DECIMAL(58, 0) NOT NULL, + "receipt_id" VARCHAR NOT NULL, + "content" TEXT NOT NULL, + "block_timestamp" DECIMAL(20, 0) NOT NULL, + "accounts_liked" JSONB NOT NULL DEFAULT '[]', + "last_comment_timestamp" DECIMAL(20, 0), + CONSTRAINT "posts_pkey" PRIMARY KEY ("id") + );`; + + const SOCIAL_SCHEMA = ` + CREATE TABLE + "posts" ( + "id" SERIAL NOT NULL, + "account_id" VARCHAR NOT NULL, + "block_height" DECIMAL(58, 0) NOT NULL, + "receipt_id" VARCHAR NOT NULL, + "content" TEXT NOT NULL, + "block_timestamp" DECIMAL(20, 0) NOT NULL, + "accounts_liked" JSONB NOT NULL DEFAULT '[]', + "last_comment_timestamp" DECIMAL(20, 0), + CONSTRAINT "posts_pkey" PRIMARY KEY ("id") + ); + + CREATE TABLE + "comments" ( + "id" SERIAL NOT NULL, + "post_id" SERIAL NOT NULL, + "account_id" VARCHAR NOT NULL, + "block_height" DECIMAL(58, 0) NOT NULL, + "content" TEXT NOT NULL, + "block_timestamp" DECIMAL(20, 0) NOT NULL, + "receipt_id" VARCHAR NOT NULL, + CONSTRAINT "comments_pkey" PRIMARY KEY ("id") + ); + + CREATE TABLE + "post_likes" ( + "post_id" SERIAL NOT NULL, + "account_id" VARCHAR NOT NULL, + "block_height" DECIMAL(58, 0), + "block_timestamp" DECIMAL(20, 0) NOT NULL, + "receipt_id" VARCHAR NOT NULL, + CONSTRAINT "post_likes_pkey" PRIMARY KEY ("post_id", "account_id") + );`; + + const STRESS_TEST_SCHEMA = ` +CREATE TABLE creator_quest ( + account_id VARCHAR PRIMARY KEY, + num_components_created INTEGER NOT NULL DEFAULT 0, + completed BOOLEAN NOT NULL DEFAULT FALSE + ); + +CREATE TABLE + composer_quest ( + account_id VARCHAR PRIMARY KEY, + num_widgets_composed INTEGER NOT NULL DEFAULT 0, + completed BOOLEAN NOT NULL DEFAULT FALSE + ); + +CREATE TABLE + "contractor - quest" ( + account_id VARCHAR PRIMARY KEY, + num_contracts_deployed INTEGER NOT NULL DEFAULT 0, + completed BOOLEAN NOT NULL DEFAULT FALSE + ); + +CREATE TABLE + "posts" ( + "id" SERIAL NOT NULL, + "account_id" VARCHAR NOT NULL, + "block_height" DECIMAL(58, 0) NOT NULL, + "receipt_id" VARCHAR NOT NULL, + "content" TEXT NOT NULL, + "block_timestamp" DECIMAL(20, 0) NOT NULL, + "accounts_liked" JSONB NOT NULL DEFAULT '[]', + "last_comment_timestamp" DECIMAL(20, 0), + CONSTRAINT "posts_pkey" PRIMARY KEY ("id") + ); + +CREATE TABLE + "comments" ( + "id" SERIAL NOT NULL, + "post_id" SERIAL NOT NULL, + "account_id" VARCHAR NOT NULL, + "block_height" DECIMAL(58, 0) NOT NULL, + "content" TEXT NOT NULL, + "block_timestamp" DECIMAL(20, 0) NOT NULL, + "receipt_id" VARCHAR NOT NULL, + CONSTRAINT "comments_pkey" PRIMARY KEY ("id") + ); + +CREATE TABLE + "post_likes" ( + "post_id" SERIAL NOT NULL, + "account_id" VARCHAR NOT NULL, + "block_height" DECIMAL(58, 0), + "block_timestamp" DECIMAL(20, 0) NOT NULL, + "receipt_id" VARCHAR NOT NULL, + CONSTRAINT "post_likes_pkey" PRIMARY KEY ("post_id", "account_id") + ); + +CREATE UNIQUE INDEX "posts_account_id_block_height_key" ON "posts" ("account_id" ASC, "block_height" ASC); + +CREATE UNIQUE INDEX "comments_post_id_account_id_block_height_key" ON "comments" ( + "post_id" ASC, + "account_id" ASC, + "block_height" ASC +); + +CREATE INDEX + "posts_last_comment_timestamp_idx" ON "posts" ("last_comment_timestamp" DESC); + +ALTER TABLE + "comments" +ADD + CONSTRAINT 
"comments_post_id_fkey" FOREIGN KEY ("post_id") REFERENCES "posts" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION; + +ALTER TABLE + "post_likes" +ADD + CONSTRAINT "post_likes_post_id_fkey" FOREIGN KEY ("post_id") REFERENCES "posts" ("id") ON DELETE CASCADE ON UPDATE NO ACTION; + +CREATE TABLE IF NOT EXISTS + "My Table1" (id serial PRIMARY KEY); + +CREATE TABLE + "Another-Table" (id serial PRIMARY KEY); + +CREATE TABLE +IF NOT EXISTS + "Third-Table" (id serial PRIMARY KEY); + +CREATE TABLE + yet_another_table (id serial PRIMARY KEY); +`; + const genericMockFetch = jest.fn() + .mockResolvedValue({ + status: 200, + json: async () => ({ + data: 'mock', + }), + }); + + const transparentRedis = { + getStreamerMessage: jest.fn() + } as unknown as RedisClient; + + beforeEach(() => { + process.env = { + ...oldEnv, + HASURA_ENDPOINT, + HASURA_ADMIN_SECRET + }; + }); + + afterAll(() => { + process.env = oldEnv; + }); + + test('Indexer.runFunctions() should execute all functions against the current block', async () => { + const mockFetch = jest.fn(() => ({ + status: 200, + json: async () => ({ + errors: null, + }), + })); + const blockHeight = 456; + const mockData = jest.fn().mockResolvedValue( + JSON.stringify( + { + block: { + chunks: [], + header: { + height: blockHeight + } + }, + shards: {} + } + ) + ); + const mockRedis = { + getStreamerMessage: mockData + } as unknown as RedisClient; + + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: mockRedis }); + + const functions: Record = {}; + functions['buildnear.testnet/test'] = { + code: ` + const foo = 3; + block.result = context.graphql(\`mutation { set(functionName: "buildnear.testnet/test", key: "height", data: "\${block.blockHeight}")}\`); + `, + schema: SIMPLE_SCHEMA + }; + await indexer.runFunctions(blockHeight, functions, false); + + expect(mockFetch.mock.calls).toMatchSnapshot(); + }); + + test('Indexer.fetchStreamerMessage() should fetch the message from cache and use it directly', async () => { + const blockHeight = 85233529; + const blockHash = 'xyz'; + const getMessage = jest.fn() + .mockReturnValueOnce(JSON.stringify( + { + block: { + chunks: [0], + header: { + height: blockHeight, + hash: blockHash, + } + }, + shards: {} + } + )); + const mockRedis = { + getStreamerMessage: getMessage + } as unknown as RedisClient; + const indexer = new Indexer('mainnet', { redisClient: mockRedis }); + + const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, false); + + expect(getMessage).toHaveBeenCalledTimes(1); + expect(JSON.stringify(getMessage.mock.calls[0])).toEqual( + `[${blockHeight}]` + ); + const block = Block.fromStreamerMessage(streamerMessage); + + expect(block.blockHeight).toEqual(blockHeight); + expect(block.blockHash).toEqual(blockHash); + }); + + test('Indexer.fetchStreamerMessage() should fetch the message from S3 upon cache miss', async () => { + const blockHeight = 85233529; + const blockHash = 'xyz'; + const mockS3StreamerMessageFetcher = { + buildStreamerMessage: jest.fn() + .mockReturnValue({ + block: { + chunks: [0], + header: { + height: blockHeight, + hash: blockHash, + } + }, + shards: {} + }) + } as unknown as S3StreamerMessageFetcher; + const indexer = new Indexer('mainnet', { s3StreamerMessageFetcher: mockS3StreamerMessageFetcher, redisClient: transparentRedis }); + + const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, false); + expect(transparentRedis.getStreamerMessage).toHaveBeenCalledTimes(1); + + const block = 
Block.fromStreamerMessage(streamerMessage); + + expect(block.blockHeight).toEqual(blockHeight); + expect(block.blockHash).toEqual(blockHash); + }); + + test('Indexer.fetchStreamerMessage() should fetch the message from S3 and not cache if historical', async () => { + const blockHeight = 85233529; + const blockHash = 'xyz'; + const mockS3StreamerMessageFetcher = { + buildStreamerMessage: jest.fn() + .mockReturnValue({ + block: { + chunks: [0], + header: { + height: blockHeight, + hash: blockHash, + } + }, + shards: {} + }) + } as unknown as S3StreamerMessageFetcher; + const mockRedis = { + getStreamerMessage: jest.fn() + } as unknown as RedisClient; + const indexer = new Indexer('mainnet', { s3StreamerMessageFetcher: mockS3StreamerMessageFetcher, redisClient: mockRedis }); + + const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, true); + + expect(mockRedis.getStreamerMessage).toHaveBeenCalledTimes(0); + + const block = Block.fromStreamerMessage(streamerMessage); + + expect(block.blockHeight).toEqual(blockHeight); + expect(block.blockHash).toEqual(blockHash); + }); + + test('Indexer.transformIndexerFunction() applies the necessary transformations', () => { + const indexer = new Indexer('mainnet', { redisClient: transparentRedis }); + + const transformedFunction = indexer.transformIndexerFunction('console.log(\'hello\')'); + + expect(transformedFunction).toEqual(` + async function f(){ + console.log('hello') + }; + f(); + `); + }); + + test('Indexer.buildContext() allows execution of arbitrary GraphQL operations', async () => { + const mockFetch = jest.fn() + .mockResolvedValueOnce({ + status: 200, + json: async () => ({ + data: { + greet: 'hello' + } + }) + }) + .mockResolvedValueOnce({ + status: 200, + json: async () => ({ + data: { + newGreeting: { + success: true + } + } + }) + }); + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); + + const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE); + + const query = ` + query { + greet() + } + `; + const { greet } = await context.graphql(query) as { greet: string }; + + const mutation = ` + mutation { + newGreeting(greeting: "${greet} morgan") { + success + } + } + `; + const { newGreeting: { success } } = await context.graphql(mutation); + + expect(greet).toEqual('hello'); + expect(success).toEqual(true); + expect(mockFetch.mock.calls[0]).toEqual([ + `${HASURA_ENDPOINT}/v1/graphql`, + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'X-Hasura-Use-Backend-Only-Permissions': 'true', + 'X-Hasura-Role': 'morgs_near', + 'X-Hasura-Admin-Secret': HASURA_ADMIN_SECRET + }, + body: JSON.stringify({ query }) + } + ]); + expect(mockFetch.mock.calls[1]).toEqual([ + `${HASURA_ENDPOINT}/v1/graphql`, + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'X-Hasura-Use-Backend-Only-Permissions': 'true', + 'X-Hasura-Role': 'morgs_near', + 'X-Hasura-Admin-Secret': HASURA_ADMIN_SECRET + }, + body: JSON.stringify({ query: mutation }) + } + ]); + }); + + test('Indexer.buildContext() can fetch from the near social api', async () => { + const mockFetch = jest.fn(); + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); + + const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE); + + await context.fetchFromSocialApi('/index', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: 
JSON.stringify({ + action: 'post', + key: 'main', + options: { + limit: 1, + order: 'desc' + } + }) + }); + + expect(mockFetch.mock.calls).toMatchSnapshot(); + }); + + test('Indexer.buildContext() throws when a GraphQL response contains errors', async () => { + const mockFetch = jest.fn() + .mockResolvedValue({ + json: async () => ({ + errors: ['boom'] + }) + }); + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); + + const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, INVALID_HASURA_ROLE); + + await expect(async () => await context.graphql('query { hello }')).rejects.toThrow('boom'); + }); + + test('Indexer.buildContext() handles GraphQL variables', async () => { + const mockFetch = jest.fn() + .mockResolvedValue({ + status: 200, + json: async () => ({ + data: 'mock', + }), + }); + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); + + const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE); + + const query = 'query($name: String) { hello(name: $name) }'; + const variables = { name: 'morgan' }; + await context.graphql(query, variables); + + expect(mockFetch.mock.calls[0]).toEqual([ + `${HASURA_ENDPOINT}/v1/graphql`, + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'X-Hasura-Use-Backend-Only-Permissions': 'true', + 'X-Hasura-Role': 'morgs_near', + 'X-Hasura-Admin-Secret': HASURA_ADMIN_SECRET + }, + body: JSON.stringify({ + query, + variables, + }), + }, + ]); + }); + + test('GetTables works for a variety of input schemas', async () => { + const indexer = new Indexer('mainnet', { redisClient: transparentRedis }); + + const simpleSchemaTables = indexer.getTableNames(SIMPLE_SCHEMA); + expect(simpleSchemaTables).toStrictEqual(['posts']); + + const socialSchemaTables = indexer.getTableNames(SOCIAL_SCHEMA); + expect(socialSchemaTables).toStrictEqual(['posts', 'comments', 'post_likes']); + + const stressTestSchemaTables = indexer.getTableNames(STRESS_TEST_SCHEMA); + expect(stressTestSchemaTables).toStrictEqual([ + 'creator_quest', + 'composer_quest', + 'contractor - quest', + 'posts', + 'comments', + 'post_likes', + 'My Table1', + 'Another-Table', + 'Third-Table', + 'yet_another_table']); + + // Test that duplicate table names throw an error + const duplicateTableSchema = `CREATE TABLE + "posts" ( + "id" SERIAL NOT NULL + ); + CREATE TABLE posts ( + "id" SERIAL NOT NULL + );`; + expect(() => { + indexer.getTableNames(duplicateTableSchema); + }).toThrow('Table posts already exists in schema. Table names must be unique. Quotes are not allowed as a differentiator between table names.'); + + // Test that schema with no tables throws an error + expect(() => { + indexer.getTableNames(''); + }).toThrow('Schema does not have any tables. 
There should be at least one table.');
+  });
+
+  test('SanitizeTableName works properly on many test cases', async () => {
+    const indexer = new Indexer('mainnet', { redisClient: transparentRedis });
+
+    expect(indexer.sanitizeTableName('table_name')).toStrictEqual('TableName');
+    expect(indexer.sanitizeTableName('tablename')).toStrictEqual('Tablename'); // name is not capitalized
+    expect(indexer.sanitizeTableName('table name')).toStrictEqual('TableName');
+    expect(indexer.sanitizeTableName('table!name!')).toStrictEqual('TableName');
+    expect(indexer.sanitizeTableName('123TABle')).toStrictEqual('_123TABle'); // underscore at beginning
+    expect(indexer.sanitizeTableName('123_tABLE')).toStrictEqual('_123TABLE'); // underscore at beginning, capitalization
+    expect(indexer.sanitizeTableName('some-table_name')).toStrictEqual('SomeTableName');
+    expect(indexer.sanitizeTableName('!@#$%^&*()table@)*&(%#')).toStrictEqual('Table'); // All special characters removed
+    expect(indexer.sanitizeTableName('T_name')).toStrictEqual('TName');
+    expect(indexer.sanitizeTableName('_table')).toStrictEqual('Table'); // Starting underscore was removed
+  });
+
+  test('indexer fails to build context.db due to collision on sanitized table names', async () => {
+    const indexer = new Indexer('mainnet', { redisClient: transparentRedis });
+
+    const schemaWithDuplicateSanitizedTableNames = `CREATE TABLE
+    "test table" (
+      "id" SERIAL NOT NULL
+    );
+    CREATE TABLE "test!table" (
+      "id" SERIAL NOT NULL
+    );`;
+
+    // Does not outright throw an error but instead returns an empty object
+    expect(indexer.buildDatabaseContext('test_account', 'test_schema_name', schemaWithDuplicateSanitizedTableNames, 1))
+      .toStrictEqual({});
+  });
+
+  test('indexer builds context and inserts objects into existing table', async () => {
+    const mockDmlHandler: any = {
+      create: jest.fn().mockImplementation(() => {
+        return { insert: jest.fn().mockReturnValue([{ colA: 'valA' }, { colA: 'valA' }]) };
+      })
+    };
+
+    const indexer = new Indexer('mainnet', {
+      fetch: genericMockFetch as unknown as typeof fetch,
+      redisClient: transparentRedis,
+      DmlHandler: mockDmlHandler
+    });
+    const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres');
+
+    const objToInsert = [{
+      account_id: 'morgs_near',
+      block_height: 1,
+      receipt_id: 'abc',
+      content: 'test',
+      block_timestamp: 800,
+      accounts_liked: JSON.stringify(['cwpuzzles.near', 'devbose.near'])
+    },
+    {
+      account_id: 'morgs_near',
+      block_height: 2,
+      receipt_id: 'abc',
+      content: 'test',
+      block_timestamp: 801,
+      accounts_liked: JSON.stringify(['cwpuzzles.near'])
+    }];
+
+    const result = await context.db.Posts.insert(objToInsert);
+    expect(result.length).toEqual(2);
+  });
+
+  test('indexer builds context and selects objects from existing table', async () => {
+    const selectFn = jest.fn();
+    selectFn.mockImplementation((...args) => {
+      // Expects limit to be last parameter
+      return args[args.length - 1] === null ? 
[{ colA: 'valA' }, { colA: 'valA' }] : [{ colA: 'valA' }]; + }); + const mockDmlHandler: any = { + create: jest.fn().mockImplementation(() => { + return { select: selectFn }; + }) + }; + + const indexer = new Indexer('mainnet', { + fetch: genericMockFetch as unknown as typeof fetch, + redisClient: transparentRedis, + DmlHandler: mockDmlHandler + }); + const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + + const objToSelect = { + account_id: 'morgs_near', + receipt_id: 'abc', + }; + const result = await context.db.Posts.select(objToSelect); + expect(result.length).toEqual(2); + const resultLimit = await context.db.Posts.select(objToSelect, 1); + expect(resultLimit.length).toEqual(1); + }); + + test('indexer builds context and updates multiple objects from existing table', async () => { + const mockDmlHandler: any = { + create: jest.fn().mockImplementation(() => { + return { + update: jest.fn().mockImplementation((_, __, whereObj, updateObj) => { + if (whereObj.account_id === 'morgs_near' && updateObj.content === 'test_content') { + return [{ colA: 'valA' }, { colA: 'valA' }]; + } + return [{}]; + }) + }; + }) + }; + + const indexer = new Indexer('mainnet', { + fetch: genericMockFetch as unknown as typeof fetch, + redisClient: transparentRedis, + DmlHandler: mockDmlHandler + }); + const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + + const whereObj = { + account_id: 'morgs_near', + receipt_id: 'abc', + }; + const updateObj = { + content: 'test_content', + block_timestamp: 805, + }; + const result = await context.db.Posts.update(whereObj, updateObj); + expect(result.length).toEqual(2); + }); + + test('indexer builds context and upserts on existing table', async () => { + const mockDmlHandler: any = { + create: jest.fn().mockImplementation(() => { + return { + upsert: jest.fn().mockImplementation((_, __, objects, conflict, update) => { + if (objects.length === 2 && conflict.includes('account_id') && update.includes('content')) { + return [{ colA: 'valA' }, { colA: 'valA' }]; + } else if (objects.length === 1 && conflict.includes('account_id') && update.includes('content')) { + return [{ colA: 'valA' }]; + } + return [{}]; + }) + }; + }) + }; + + const indexer = new Indexer('mainnet', { + fetch: genericMockFetch as unknown as typeof fetch, + redisClient: transparentRedis, + DmlHandler: mockDmlHandler + }); + const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + + const objToInsert = [{ + account_id: 'morgs_near', + block_height: 1, + receipt_id: 'abc', + content: 'test', + block_timestamp: 800, + accounts_liked: JSON.stringify(['cwpuzzles.near', 'devbose.near']) + }, + { + account_id: 'morgs_near', + block_height: 2, + receipt_id: 'abc', + content: 'test', + block_timestamp: 801, + accounts_liked: JSON.stringify(['cwpuzzles.near']) + }]; + + let result = await context.db.Posts.upsert(objToInsert, ['account_id', 'block_height'], ['content', 'block_timestamp']); + expect(result.length).toEqual(2); + result = await context.db.Posts.upsert(objToInsert[0], ['account_id', 'block_height'], ['content', 'block_timestamp']); + expect(result.length).toEqual(1); + }); + + test('indexer builds context and deletes objects from existing table', async () => { + const mockDmlHandler: any = { + create: jest.fn().mockImplementation(() => { + return { delete: jest.fn().mockReturnValue([{ colA: 'valA' }, { colA: 'valA' }]) }; + }) + }; + + const indexer = new Indexer('mainnet', { + 
fetch: genericMockFetch as unknown as typeof fetch, + redisClient: transparentRedis, + DmlHandler: mockDmlHandler + }); + const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + + const deleteFilter = { + account_id: 'morgs_near', + receipt_id: 'abc', + }; + const result = await context.db.Posts.delete(deleteFilter); + expect(result.length).toEqual(2); + }); + + test('indexer builds context and verifies all methods generated', async () => { + const mockDmlHandler: any = { + create: jest.fn() + }; + + const indexer = new Indexer('mainnet', { + fetch: genericMockFetch as unknown as typeof fetch, + redisClient: transparentRedis, + DmlHandler: mockDmlHandler + }); + const context = indexer.buildContext(STRESS_TEST_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + + expect(Object.keys(context.db)).toStrictEqual([ + 'CreatorQuest', + 'ComposerQuest', + 'ContractorQuest', + 'Posts', + 'Comments', + 'PostLikes', + 'MyTable1', + 'AnotherTable', + 'ThirdTable', + 'YetAnotherTable']); + expect(Object.keys(context.db.CreatorQuest)).toStrictEqual([ + 'insert', + 'select', + 'update', + 'upsert', + 'delete']); + expect(Object.keys(context.db.PostLikes)).toStrictEqual([ + 'insert', + 'select', + 'update', + 'upsert', + 'delete']); + expect(Object.keys(context.db.MyTable1)).toStrictEqual([ + 'insert', + 'select', + 'update', + 'upsert', + 'delete']); + }); + + test('indexer builds context and returns empty array if failed to generate db methods', async () => { + const mockDmlHandler: any = { + create: jest.fn() + }; + + const indexer = new Indexer('mainnet', { + fetch: genericMockFetch as unknown as typeof fetch, + redisClient: transparentRedis, + DmlHandler: mockDmlHandler + }); + const context = indexer.buildContext('', 'morgs.near/social_feed1', 1, 'postgres'); + + expect(Object.keys(context.db)).toStrictEqual([]); + }); + + test('Indexer.runFunctions() allows imperative execution of GraphQL operations', async () => { + const postId = 1; + const commentId = 2; + const blockHeight = 82699904; + const mockFetch = jest.fn() + .mockReturnValueOnce({ // starting log + status: 200, + json: async () => ({ + data: { + indexer_log_store: [ + { + id: '12345', + }, + ], + }, + }), + }) + .mockReturnValueOnce({ + status: 200, + json: async () => ({ + errors: null, + }), + }) + .mockReturnValueOnce({ // query + status: 200, + json: async () => ({ + data: { + posts: [ + { + id: postId, + }, + ], + }, + }), + }) + .mockReturnValueOnce({ // mutation + status: 200, + json: async () => ({ + data: { + insert_comments: { + returning: { + id: commentId, + }, + }, + }, + }), + }) + .mockReturnValueOnce({ + status: 200, + json: async () => ({ + errors: null, + }), + }); + + const mockS3StreamerMessageFetcher = { + buildStreamerMessage: jest.fn() + .mockReturnValue({ + block: { + chunks: [0], + header: { + height: blockHeight + } + }, + shards: {} + }) + } as unknown as S3StreamerMessageFetcher; + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3StreamerMessageFetcher: mockS3StreamerMessageFetcher, redisClient: transparentRedis }); + + const functions: Record = {}; + functions['buildnear.testnet/test'] = { + code: ` + const { posts } = await context.graphql(\` + query { + posts(where: { id: { _eq: 1 } }) { + id + } + } + \`); + + if (!posts || posts.length === 0) { + return; + } + + const [post] = posts; + + const { insert_comments: { returning: { id } } } = await context.graphql(\` + mutation { + insert_comments( + objects: {account_id: 
"morgs.near", block_height: \${block.blockHeight}, content: "cool post", post_id: \${post.id}} + ) { + returning { + id + } + } + } + \`); + + return (\`Created comment \${id} on post \${post.id}\`) + `, + schema: SIMPLE_SCHEMA + }; + + await indexer.runFunctions(blockHeight, functions, false); + + expect(mockFetch.mock.calls).toMatchSnapshot(); + }); + + test('Indexer.runFunctions() console.logs', async () => { + const logs: string[] = []; + const context = { + log: (...m: string[]) => { + logs.push(...m); + } + }; + const vm = new VM(); + vm.freeze(context, 'context'); + vm.freeze(context, 'console'); + await vm.run('console.log("hello", "brave new"); context.log("world")'); + expect(logs).toEqual(['hello', 'brave new', 'world']); + }); + + test('Errors thrown in VM can be caught outside the VM', async () => { + const vm = new VM(); + expect(() => { + vm.run("throw new Error('boom')"); + }).toThrow('boom'); + }); + + test('Indexer.runFunctions() catches errors', async () => { + const mockFetch = jest.fn(() => ({ + status: 200, + json: async () => ({ + errors: null, + }), + })); + const blockHeight = 456; + const mockS3StreamerMessageFetcher = { + buildStreamerMessage: jest.fn() + .mockReturnValue({ + block: { + chunks: [0], + header: { + height: blockHeight + } + }, + shards: {} + }) + } as unknown as S3StreamerMessageFetcher; + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3StreamerMessageFetcher: mockS3StreamerMessageFetcher, redisClient: transparentRedis }); + + const functions: Record = {}; + functions['buildnear.testnet/test'] = { + code: ` + throw new Error('boom'); + `, + schema: SIMPLE_SCHEMA + }; + + await expect(indexer.runFunctions(blockHeight, functions, false)).rejects.toThrow(new Error('boom')); + expect(mockFetch.mock.calls).toMatchSnapshot(); + }); + + test('Indexer.runFunctions() provisions a GraphQL endpoint with the specified schema', async () => { + const blockHeight = 82699904; + const mockFetch = jest.fn(() => ({ + status: 200, + json: async () => ({ + errors: null, + }), + })); + const mockS3StreamerMessageFetcher = { + buildStreamerMessage: jest.fn() + .mockReturnValue({ + block: { + chunks: [0], + header: { + height: blockHeight + } + }, + shards: {} + }) + } as unknown as S3StreamerMessageFetcher; + const provisioner: any = { + isUserApiProvisioned: jest.fn().mockReturnValue(false), + provisionUserApi: jest.fn(), + }; + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3StreamerMessageFetcher: mockS3StreamerMessageFetcher, redisClient: transparentRedis, provisioner }); + + const functions = { + 'morgs.near/test': { + account_id: 'morgs.near', + function_name: 'test', + code: '', + schema: SIMPLE_SCHEMA, + } + }; + await indexer.runFunctions(1, functions, false, { provision: true }); + + expect(provisioner.isUserApiProvisioned).toHaveBeenCalledWith('morgs.near', 'test'); + expect(provisioner.provisionUserApi).toHaveBeenCalledTimes(1); + expect(provisioner.provisionUserApi).toHaveBeenCalledWith( + 'morgs.near', + 'test', + SIMPLE_SCHEMA + ); + }); + + test('Indexer.runFunctions() skips provisioning if the endpoint exists', async () => { + const blockHeight = 82699904; + const mockFetch = jest.fn(() => ({ + status: 200, + json: async () => ({ + errors: null, + }), + })); + const mockS3StreamerMessageFetcher = { + buildStreamerMessage: jest.fn() + .mockReturnValue({ + block: { + chunks: [0], + header: { + height: blockHeight + } + }, + shards: {} + }) + } as unknown as 
S3StreamerMessageFetcher; + const provisioner: any = { + isUserApiProvisioned: jest.fn().mockReturnValue(true), + provisionUserApi: jest.fn(), + }; + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3StreamerMessageFetcher: mockS3StreamerMessageFetcher, redisClient: transparentRedis, provisioner }); + + const functions: Record = { + 'morgs.near/test': { + code: '', + schema: SIMPLE_SCHEMA, + } + }; + await indexer.runFunctions(1, functions, false, { provision: true }); + + expect(provisioner.provisionUserApi).not.toHaveBeenCalled(); + }); + + test('Indexer.runFunctions() supplies the required role to the GraphQL endpoint', async () => { + const blockHeight = 82699904; + const mockFetch = jest.fn(() => ({ + status: 200, + json: async () => ({ + errors: null, + }), + })); + const mockS3StreamerMessageFetcher = { + buildStreamerMessage: jest.fn() + .mockReturnValue({ + block: { + chunks: [0], + header: { + height: blockHeight + } + }, + shards: {} + }) + } as unknown as S3StreamerMessageFetcher; + const provisioner: any = { + isUserApiProvisioned: jest.fn().mockReturnValue(true), + provisionUserApi: jest.fn(), + }; + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3StreamerMessageFetcher: mockS3StreamerMessageFetcher, redisClient: transparentRedis, provisioner }); + + const functions: Record = { + 'morgs.near/test': { + code: ` + context.graphql(\`mutation { set(functionName: "buildnear.testnet/test", key: "height", data: "\${block.blockHeight}")}\`); + `, + schema: SIMPLE_SCHEMA, + } + }; + await indexer.runFunctions(blockHeight, functions, false, { provision: true }); + + expect(provisioner.provisionUserApi).not.toHaveBeenCalled(); + expect(mockFetch.mock.calls).toMatchSnapshot(); + }); + + test('Indexer.runFunctions() logs provisioning failures', async () => { + const blockHeight = 82699904; + const mockFetch = jest.fn(() => ({ + status: 200, + json: async () => ({ + errors: null, + }), + })); + const mockS3StreamerMessageFetcher = { + buildStreamerMessage: jest.fn() + .mockReturnValue({ + block: { + chunks: [0], + header: { + height: blockHeight + } + }, + shards: {} + }) + } as unknown as S3StreamerMessageFetcher; + const error = new Error('something went wrong with provisioning'); + const provisioner: any = { + isUserApiProvisioned: jest.fn().mockReturnValue(false), + provisionUserApi: jest.fn().mockRejectedValue(error), + }; + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3StreamerMessageFetcher: mockS3StreamerMessageFetcher, redisClient: transparentRedis, provisioner }); + + const functions: Record = { + 'morgs.near/test': { + code: ` + context.graphql(\`mutation { set(functionName: "buildnear.testnet/test", key: "height", data: "\${block.blockHeight}")}\`); + `, + schema: 'schema', + } + }; + + await expect(indexer.runFunctions(blockHeight, functions, false, { provision: true })).rejects.toThrow(error); + expect(mockFetch.mock.calls).toMatchSnapshot(); + }); + + test('does not attach the hasura admin secret header when no role specified', async () => { + const mockFetch = jest.fn() + .mockResolvedValueOnce({ + status: 200, + json: async () => ({ + data: {} + }) + }); + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); + // @ts-expect-error legacy test + const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, null); + + const mutation = ` + mutation { + newGreeting(greeting: "howdy") { + 
success + } + } + `; + + await context.graphql(mutation); + + expect(mockFetch.mock.calls[0]).toEqual([ + `${HASURA_ENDPOINT}/v1/graphql`, + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'X-Hasura-Use-Backend-Only-Permissions': 'true', + }, + body: JSON.stringify({ query: mutation }) + } + ]); + }); + + test('attaches the backend only header to requests to hasura', async () => { + const mockFetch = jest.fn() + .mockResolvedValueOnce({ + status: 200, + json: async () => ({ + data: {} + }) + }); + const role = 'morgs_near'; + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); + const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE); + + const mutation = ` + mutation { + newGreeting(greeting: "howdy") { + success + } + } + `; + + await context.graphql(mutation); + + expect(mockFetch.mock.calls[0]).toEqual([ + `${HASURA_ENDPOINT}/v1/graphql`, + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'X-Hasura-Use-Backend-Only-Permissions': 'true', + 'X-Hasura-Role': role, + 'X-Hasura-Admin-Secret': HASURA_ADMIN_SECRET + }, + body: JSON.stringify({ query: mutation }) + } + ]); + }); +}); diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index 7eb8e38b4..b369c1893 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -194,7 +194,7 @@ export default class Indexer { } satisfies Message); } } - return await this.deps.s3StreamerMessageFetcher.fetchStreamerMessage(blockHeight); + return await this.deps.s3StreamerMessageFetcher.buildStreamerMessage(blockHeight); } transformIndexerFunction (indexerFunction: string): string { diff --git a/runner/src/stream-handler/worker.ts b/runner/src/stream-handler/worker.ts index b1c405915..b4a5295a0 100644 --- a/runner/src/stream-handler/worker.ts +++ b/runner/src/stream-handler/worker.ts @@ -289,7 +289,7 @@ function fetchAndQueue (queue: Array>, blockHeight: number } async function transformStreamerMessageToQueueMessage (blockHeight: number, streamId: string): Promise { - const streamerMessage = await s3StreamerMessageFetcher.fetchStreamerMessage(blockHeight); + const streamerMessage = await s3StreamerMessageFetcher.buildStreamerMessage(blockHeight); return { streamerMessage, streamId diff --git a/runner/src/streamer-message-fetcher/s3-streamer-fetcher-tests.ts b/runner/src/streamer-message-fetcher/s3-streamer-fetcher-tests.ts deleted file mode 100644 index e69de29bb..000000000 diff --git a/runner/src/streamer-message-fetcher/s3-streamer-fetcher.test.ts b/runner/src/streamer-message-fetcher/s3-streamer-fetcher.test.ts new file mode 100644 index 000000000..e60464f34 --- /dev/null +++ b/runner/src/streamer-message-fetcher/s3-streamer-fetcher.test.ts @@ -0,0 +1,99 @@ +import { Block } from '@near-lake/primitives'; +import { GetObjectCommand, type S3Client } from '@aws-sdk/client-s3'; +import S3StreamerMessageFetcher from './s3-streamer-fetcher'; + +describe('S3StreamerMessageFetcher', () => { + test('Indexer.fetchBlock() should fetch a block from S3', async () => { + const author = 'dokiacapital.poolv1.near'; + const mockData = JSON.stringify({ + author + }); + const mockSend = jest.fn().mockResolvedValue({ + Body: { + transformToString: () => mockData + } + }); + const mockS3 = { + send: mockSend, + } as unknown as S3Client; + + const fetcher = new S3StreamerMessageFetcher('mainnet', mockS3); + + const blockHeight = 84333960; + const block = await 
fetcher.fetchBlockPromise(blockHeight); + const params = { + Bucket: 'near-lake-data-mainnet', + Key: `${blockHeight.toString().padStart(12, '0')}/block.json` + }; + + expect(mockS3.send).toHaveBeenCalledTimes(1); + expect(JSON.stringify(mockSend.mock.calls[0][0])).toMatch(JSON.stringify(new GetObjectCommand(params))); + expect(block.author).toEqual(author); + }); + + test('Indexer.fetchShard() should fetch a shard from S3', async () => { + const mockData = JSON.stringify({}); + const mockSend = jest.fn().mockResolvedValue({ + Body: { + transformToString: () => mockData + } + }); + const mockS3 = { + send: mockSend, + } as unknown as S3Client; + const fetcher = new S3StreamerMessageFetcher('mainnet', mockS3); + + const blockHeight = 82699904; + const shard = 0; + const params = { + Bucket: 'near-lake-data-mainnet', + Key: `${blockHeight.toString().padStart(12, '0')}/shard_${shard}.json` + }; + await fetcher.fetchShardPromise(blockHeight, shard); + + expect(JSON.stringify(mockSend.mock.calls[0][0])).toMatch(JSON.stringify(new GetObjectCommand(params))); + }); + + test('Indexer.fetchStreamerMessage() should fetch the block and shards from S3 upon cache miss', async () => { + const blockHeight = 85233529; + const blockHash = 'xyz'; + const mockSend = jest.fn() + .mockReturnValueOnce({ // block + Body: { + transformToString: () => JSON.stringify({ + chunks: [0], + header: { + height: blockHeight, + hash: blockHash, + } + }) + } + }) + .mockReturnValue({ // shard + Body: { + transformToString: () => JSON.stringify({}) + } + }); + const mockS3 = { + send: mockSend, + } as unknown as S3Client; + const fetcher = new S3StreamerMessageFetcher('mainnet', mockS3); + + const streamerMessage = await fetcher.buildStreamerMessage(blockHeight); + + expect(mockSend).toHaveBeenCalledTimes(5); + expect(JSON.stringify(mockSend.mock.calls[0][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({ + Bucket: 'near-lake-data-mainnet', + Key: `${blockHeight.toString().padStart(12, '0')}/block.json` + }))); + expect(JSON.stringify(mockSend.mock.calls[1][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({ + Bucket: 'near-lake-data-mainnet', + Key: `${blockHeight.toString().padStart(12, '0')}/shard_0.json` + }))); + + const block = Block.fromStreamerMessage(streamerMessage); + + expect(block.blockHeight).toEqual(blockHeight); + expect(block.blockHash).toEqual(blockHash); + }); +}); diff --git a/runner/src/streamer-message-fetcher/s3-streamer-fetcher.ts b/runner/src/streamer-message-fetcher/s3-streamer-fetcher.ts index 920b86778..6058c3ba1 100644 --- a/runner/src/streamer-message-fetcher/s3-streamer-fetcher.ts +++ b/runner/src/streamer-message-fetcher/s3-streamer-fetcher.ts @@ -66,7 +66,7 @@ export default class S3StreamerMessageFetcher { return value; } - async fetchStreamerMessage (blockHeight: number): Promise { + async buildStreamerMessage (blockHeight: number): Promise { const blockPromise = this.fetchBlockPromise(blockHeight); const shardsPromises = await this.fetchShardsPromises(blockHeight, 4); From c32290f5a12e6af6a4058566a8be3cc5f5719f5e Mon Sep 17 00:00:00 2001 From: Darun Seethammagari Date: Wed, 1 Nov 2023 11:28:20 -0700 Subject: [PATCH 10/24] Undo local testing changes --- docker-compose.yml | 164 ++++++++++++++-------------- runner/src/indexer/indexer.ts | 7 +- runner/src/stream-handler/worker.ts | 4 - 3 files changed, 83 insertions(+), 92 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 91a87e0c3..8911aa97e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,93 
+1,93 @@ version: "3.9" # optional since v1.27.0 services: - # coordinator: - # build: - # context: ./indexer - # args: - # - CARGO_BUILD_MODE=debug - # depends_on: - # - redis - # environment: - # REDIS_CONNECTION_STRING: redis://redis - # LAKE_AWS_ACCESS_KEY: - # LAKE_AWS_SECRET_ACCESS_KEY: - # QUEUE_AWS_ACCESS_KEY: - # QUEUE_AWS_SECRET_ACCESS_KEY: - # QUEUE_URL: MOCK - # START_FROM_BLOCK_QUEUE_URL: MOCK - # PORT: 9180 - # REGISTRY_CONTRACT_ID: dev-queryapi.dataplatform.near - # AWS_QUEUE_REGION: eu-central-1 - # command: - # - mainnet - # - from-interruption + coordinator: + build: + context: ./indexer + args: + - CARGO_BUILD_MODE=debug + depends_on: + - redis + environment: + REDIS_CONNECTION_STRING: redis://redis + LAKE_AWS_ACCESS_KEY: + LAKE_AWS_SECRET_ACCESS_KEY: + QUEUE_AWS_ACCESS_KEY: + QUEUE_AWS_SECRET_ACCESS_KEY: + QUEUE_URL: MOCK + START_FROM_BLOCK_QUEUE_URL: MOCK + PORT: 9180 + REGISTRY_CONTRACT_ID: dev-queryapi.dataplatform.near + AWS_QUEUE_REGION: eu-central-1 + command: + - mainnet + - from-interruption - # runner: - # build: - # context: ./runner - # depends_on: - # - "hasura-graphql" - # - "redis" - # environment: - # REGION: eu-central-1 - # HASURA_ENDPOINT: http://hasura-graphql:8080 - # HASURA_ADMIN_SECRET: myadminsecretkey - # REDIS_CONNECTION_STRING: redis://redis - # PGHOST: postgres - # PGPORT: 5432 - # PGUSER: postgres - # PGPASSWORD: postgrespassword - # PGDATABASE: postgres - # PORT: 9180 - # AWS_ACCESS_KEY_ID: - # AWS_SECRET_ACCESS_KEY: + runner: + build: + context: ./runner + depends_on: + - "hasura-graphql" + - "redis" + environment: + REGION: eu-central-1 + HASURA_ENDPOINT: http://hasura-graphql:8080 + HASURA_ADMIN_SECRET: myadminsecretkey + REDIS_CONNECTION_STRING: redis://redis + PGHOST: postgres + PGPORT: 5432 + PGUSER: postgres + PGPASSWORD: postgrespassword + PGDATABASE: postgres + PORT: 9180 + AWS_ACCESS_KEY_ID: + AWS_SECRET_ACCESS_KEY: - # redis: - # image: redis - # command: - # - redis-server - # - "--save 60 1" - # - "--loglevel warning" - # volumes: - # - redis:/data - # ports: - # - "6379:6379" + redis: + image: redis + command: + - redis-server + - "--save 60 1" + - "--loglevel warning" + volumes: + - redis:/data + ports: + - "6379:6379" - # postgres: - # image: postgres:12 - # restart: always - # volumes: - # - postgres:/var/lib/postgresql/data - # environment: - # POSTGRES_PASSWORD: postgrespassword - # ports: - # - "5432:5432" + postgres: + image: postgres:12 + restart: always + volumes: + - postgres:/var/lib/postgresql/data + environment: + POSTGRES_PASSWORD: postgrespassword + ports: + - "5432:5432" - # hasura-auth: - # build: - # context: ./hasura-authentication-service - # ports: - # - "4000:4000" - # environment: - # PORT: 4000 - # DEFAULT_HASURA_ROLE: append + hasura-auth: + build: + context: ./hasura-authentication-service + ports: + - "4000:4000" + environment: + PORT: 4000 + DEFAULT_HASURA_ROLE: append - # hasura-graphql: - # image: hasura/graphql-engine:latest - # ports: - # - "8080:8080" - # depends_on: - # - "postgres" - # - "hasura-auth" - # restart: always - # environment: - # HASURA_GRAPHQL_DATABASE_URL: postgres://postgres:postgrespassword@postgres:5432/postgres - # HASURA_GRAPHQL_ENABLE_CONSOLE: "true" - # HASURA_GRAPHQL_DEV_MODE: "true" - # HASURA_GRAPHQL_ENABLED_LOG_TYPES: startup, http-log, webhook-log, websocket-log, query-log - # HASURA_GRAPHQL_ADMIN_SECRET: myadminsecretkey - # HASURA_GRAPHQL_AUTH_HOOK: http://hasura-auth:4000/auth + hasura-graphql: + image: hasura/graphql-engine:latest + ports: + - "8080:8080" + 
depends_on: + - "postgres" + - "hasura-auth" + restart: always + environment: + HASURA_GRAPHQL_DATABASE_URL: postgres://postgres:postgrespassword@postgres:5432/postgres + HASURA_GRAPHQL_ENABLE_CONSOLE: "true" + HASURA_GRAPHQL_DEV_MODE: "true" + HASURA_GRAPHQL_ENABLED_LOG_TYPES: startup, http-log, webhook-log, websocket-log, query-log + HASURA_GRAPHQL_ADMIN_SECRET: myadminsecretkey + HASURA_GRAPHQL_AUTH_HOOK: http://hasura-auth:4000/auth grafana: image: grafana/grafana volumes: diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index b369c1893..d056450c9 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -87,7 +87,6 @@ export default class Indexer { const hasuraRoleName = functionName.split('/')[0].replace(/[.-]/g, '_'); if (options.provision && !indexerFunction.provisioned) { - const provisionintLatency = performance.now(); try { if (!await this.deps.provisioner.isUserApiProvisioned(indexerFunction.account_id, indexerFunction.function_name)) { await this.setStatus(functionName, blockHeight, 'PROVISIONING'); @@ -102,11 +101,9 @@ export default class Indexer { simultaneousPromises.push(this.writeLog(functionName, blockHeight, 'Provisioning endpoint: failure', error.message)); throw error; } - console.log('Provisioning Latency: ', performance.now() - provisionintLatency); } await this.setStatus(functionName, blockHeight, 'RUNNING'); - console.log('Function State Logging Latency: ', performance.now() - functionStateLoggingLatency); this.deps.parentPort?.postMessage({ type: 'FUNCTION_STATE_LOGGING_LATENCY', labels: { indexer: functionName, type: isHistorical ? 'historical' : 'real-time' }, @@ -122,7 +119,6 @@ export default class Indexer { vm.freeze(context, 'console'); // provide console.log via context.log const modifiedFunction = this.transformIndexerFunction(indexerFunction.code); - console.log('VM and Context Object Preparation Latency: ', performance.now() - vmAndContextBuildLatency); this.deps.parentPort?.postMessage({ type: 'FUNCTION_VM_AND_CONTEXT_LATENCY', labels: { indexer: functionName, type: isHistorical ? 'historical' : 'real-time' }, @@ -140,7 +136,7 @@ export default class Indexer { await this.writeLog(functionName, blockHeight, 'Error running IndexerFunction', error.message); throw e; } - console.log('Function Execution Latency: ', performance.now() - functionCodeExecutionLatency); + console.log('Function Code Execution Latency: ', performance.now() - functionCodeExecutionLatency); this.deps.parentPort?.postMessage({ type: 'FUNCTION_CODE_EXECUTION_LATENCY', labels: { indexer: functionName, type: isHistorical ? 'historical' : 'real-time' }, @@ -154,7 +150,6 @@ export default class Indexer { throw e; } finally { await Promise.all(simultaneousPromises); - console.log('Finish Promise Handling Latency: ', finishPromiseHandlingLatency !== undefined ? performance.now() - finishPromiseHandlingLatency : 'null'); this.deps.parentPort?.postMessage({ type: 'FUNCTION_VM_AND_CONTEXT_LATENCY', labels: { indexer: functionName, type: isHistorical ? 
'historical' : 'real-time' }, diff --git a/runner/src/stream-handler/worker.ts b/runner/src/stream-handler/worker.ts index b4a5295a0..e9eaa2c7a 100644 --- a/runner/src/stream-handler/worker.ts +++ b/runner/src/stream-handler/worker.ts @@ -238,7 +238,6 @@ async function historicalStreamerMessageQueueConsumer (queue: Array Date: Wed, 1 Nov 2023 15:18:44 -0700 Subject: [PATCH 11/24] Address Offline Comments --- runner/src/indexer/indexer.test.ts | 50 +++++++++---------- runner/src/indexer/indexer.ts | 33 ++---------- runner/src/lake-client/index.ts | 1 + .../lake-client.test.ts} | 10 ++-- .../lake-client.ts} | 2 +- runner/src/metrics.ts | 35 ------------- runner/src/stream-handler/worker.ts | 8 +-- runner/src/streamer-message-fetcher/index.ts | 1 - 8 files changed, 37 insertions(+), 103 deletions(-) create mode 100644 runner/src/lake-client/index.ts rename runner/src/{streamer-message-fetcher/s3-streamer-fetcher.test.ts => lake-client/lake-client.test.ts} (90%) rename runner/src/{streamer-message-fetcher/s3-streamer-fetcher.ts => lake-client/lake-client.ts} (98%) delete mode 100644 runner/src/streamer-message-fetcher/index.ts diff --git a/runner/src/indexer/indexer.test.ts b/runner/src/indexer/indexer.test.ts index 1f339dc73..58f9553e6 100644 --- a/runner/src/indexer/indexer.test.ts +++ b/runner/src/indexer/indexer.test.ts @@ -4,7 +4,7 @@ import type RedisClient from '../redis-client'; import Indexer from './indexer'; import { VM } from 'vm2'; -import type S3StreamerMessageFetcher from '../streamer-message-fetcher/s3-streamer-fetcher'; +import type LakeClient from '../lake-client/lake-client'; describe('Indexer unit tests', () => { const oldEnv = process.env; @@ -255,7 +255,7 @@ CREATE TABLE test('Indexer.fetchStreamerMessage() should fetch the message from S3 upon cache miss', async () => { const blockHeight = 85233529; const blockHash = 'xyz'; - const mockS3StreamerMessageFetcher = { + const mockLakeClient = { buildStreamerMessage: jest.fn() .mockReturnValue({ block: { @@ -267,8 +267,8 @@ CREATE TABLE }, shards: {} }) - } as unknown as S3StreamerMessageFetcher; - const indexer = new Indexer('mainnet', { s3StreamerMessageFetcher: mockS3StreamerMessageFetcher, redisClient: transparentRedis }); + } as unknown as LakeClient; + const indexer = new Indexer('mainnet', { lakeClient: mockLakeClient, redisClient: transparentRedis }); const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, false); expect(transparentRedis.getStreamerMessage).toHaveBeenCalledTimes(1); @@ -282,7 +282,7 @@ CREATE TABLE test('Indexer.fetchStreamerMessage() should fetch the message from S3 and not cache if historical', async () => { const blockHeight = 85233529; const blockHash = 'xyz'; - const mockS3StreamerMessageFetcher = { + const mockLakeClient = { buildStreamerMessage: jest.fn() .mockReturnValue({ block: { @@ -294,11 +294,11 @@ CREATE TABLE }, shards: {} }) - } as unknown as S3StreamerMessageFetcher; + } as unknown as LakeClient; const mockRedis = { getStreamerMessage: jest.fn() } as unknown as RedisClient; - const indexer = new Indexer('mainnet', { s3StreamerMessageFetcher: mockS3StreamerMessageFetcher, redisClient: mockRedis }); + const indexer = new Indexer('mainnet', { lakeClient: mockLakeClient, redisClient: mockRedis }); const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, true); @@ -813,7 +813,7 @@ CREATE TABLE }), }); - const mockS3StreamerMessageFetcher = { + const mockLakeClient = { buildStreamerMessage: jest.fn() .mockReturnValue({ block: { @@ -824,8 +824,8 @@ CREATE TABLE 
}, shards: {} }) - } as unknown as S3StreamerMessageFetcher; - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3StreamerMessageFetcher: mockS3StreamerMessageFetcher, redisClient: transparentRedis }); + } as unknown as LakeClient; + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, lakeClient: mockLakeClient, redisClient: transparentRedis }); const functions: Record = {}; functions['buildnear.testnet/test'] = { @@ -895,7 +895,7 @@ CREATE TABLE }), })); const blockHeight = 456; - const mockS3StreamerMessageFetcher = { + const mockLakeClient = { buildStreamerMessage: jest.fn() .mockReturnValue({ block: { @@ -906,8 +906,8 @@ CREATE TABLE }, shards: {} }) - } as unknown as S3StreamerMessageFetcher; - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3StreamerMessageFetcher: mockS3StreamerMessageFetcher, redisClient: transparentRedis }); + } as unknown as LakeClient; + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, lakeClient: mockLakeClient, redisClient: transparentRedis }); const functions: Record = {}; functions['buildnear.testnet/test'] = { @@ -929,7 +929,7 @@ CREATE TABLE errors: null, }), })); - const mockS3StreamerMessageFetcher = { + const mockLakeClient = { buildStreamerMessage: jest.fn() .mockReturnValue({ block: { @@ -940,12 +940,12 @@ CREATE TABLE }, shards: {} }) - } as unknown as S3StreamerMessageFetcher; + } as unknown as LakeClient; const provisioner: any = { isUserApiProvisioned: jest.fn().mockReturnValue(false), provisionUserApi: jest.fn(), }; - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3StreamerMessageFetcher: mockS3StreamerMessageFetcher, redisClient: transparentRedis, provisioner }); + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, lakeClient: mockLakeClient, redisClient: transparentRedis, provisioner }); const functions = { 'morgs.near/test': { @@ -974,7 +974,7 @@ CREATE TABLE errors: null, }), })); - const mockS3StreamerMessageFetcher = { + const mockLakeClient = { buildStreamerMessage: jest.fn() .mockReturnValue({ block: { @@ -985,12 +985,12 @@ CREATE TABLE }, shards: {} }) - } as unknown as S3StreamerMessageFetcher; + } as unknown as LakeClient; const provisioner: any = { isUserApiProvisioned: jest.fn().mockReturnValue(true), provisionUserApi: jest.fn(), }; - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3StreamerMessageFetcher: mockS3StreamerMessageFetcher, redisClient: transparentRedis, provisioner }); + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, lakeClient: mockLakeClient, redisClient: transparentRedis, provisioner }); const functions: Record = { 'morgs.near/test': { @@ -1011,7 +1011,7 @@ CREATE TABLE errors: null, }), })); - const mockS3StreamerMessageFetcher = { + const mockLakeClient = { buildStreamerMessage: jest.fn() .mockReturnValue({ block: { @@ -1022,12 +1022,12 @@ CREATE TABLE }, shards: {} }) - } as unknown as S3StreamerMessageFetcher; + } as unknown as LakeClient; const provisioner: any = { isUserApiProvisioned: jest.fn().mockReturnValue(true), provisionUserApi: jest.fn(), }; - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3StreamerMessageFetcher: mockS3StreamerMessageFetcher, redisClient: transparentRedis, provisioner }); + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof 
fetch, lakeClient: mockLakeClient, redisClient: transparentRedis, provisioner }); const functions: Record = { 'morgs.near/test': { @@ -1051,7 +1051,7 @@ CREATE TABLE errors: null, }), })); - const mockS3StreamerMessageFetcher = { + const mockLakeClient = { buildStreamerMessage: jest.fn() .mockReturnValue({ block: { @@ -1062,13 +1062,13 @@ CREATE TABLE }, shards: {} }) - } as unknown as S3StreamerMessageFetcher; + } as unknown as LakeClient; const error = new Error('something went wrong with provisioning'); const provisioner: any = { isUserApiProvisioned: jest.fn().mockReturnValue(false), provisionUserApi: jest.fn().mockRejectedValue(error), }; - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, s3StreamerMessageFetcher: mockS3StreamerMessageFetcher, redisClient: transparentRedis, provisioner }); + const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, lakeClient: mockLakeClient, redisClient: transparentRedis, provisioner }); const functions: Record = { 'morgs.near/test': { diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index d056450c9..8c3ac60ed 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -8,11 +8,11 @@ import { type Message } from '../stream-handler/types'; import Provisioner from '../provisioner'; import DmlHandler from '../dml-handler/dml-handler'; import RedisClient from '../redis-client'; -import S3StreamerMessageFetcher from '../streamer-message-fetcher/s3-streamer-fetcher'; +import LakeClient from '../lake-client/lake-client'; interface Dependencies { fetch: typeof fetch - s3StreamerMessageFetcher: S3StreamerMessageFetcher + lakeClient: LakeClient provisioner: Provisioner DmlHandler: typeof DmlHandler parser: Parser @@ -49,7 +49,7 @@ export default class Indexer { this.network = network; this.deps = { fetch, - s3StreamerMessageFetcher: deps?.s3StreamerMessageFetcher ?? new S3StreamerMessageFetcher(this.network), + lakeClient: deps?.lakeClient ?? new LakeClient(this.network), provisioner: new Provisioner(), DmlHandler, parser: new Parser(), @@ -74,9 +74,7 @@ export default class Indexer { const allMutations: string[] = []; for (const functionName in functions) { - let finishPromiseHandlingLatency; try { - const functionStateLoggingLatency = performance.now(); const indexerFunction = functions[functionName]; const runningMessage = `Running function ${functionName}` + (isHistorical ? ' historical backfill' : `, lag is: ${lag?.toString()}ms from block timestamp`); @@ -104,13 +102,6 @@ export default class Indexer { } await this.setStatus(functionName, blockHeight, 'RUNNING'); - this.deps.parentPort?.postMessage({ - type: 'FUNCTION_STATE_LOGGING_LATENCY', - labels: { indexer: functionName, type: isHistorical ? 'historical' : 'real-time' }, - value: performance.now() - functionStateLoggingLatency, - } satisfies Message); - - const vmAndContextBuildLatency = performance.now(); const vm = new VM({ timeout: 3000, allowAsync: true }); const context = this.buildContext(indexerFunction.schema, functionName, blockHeight, hasuraRoleName); @@ -119,11 +110,6 @@ export default class Indexer { vm.freeze(context, 'console'); // provide console.log via context.log const modifiedFunction = this.transformIndexerFunction(indexerFunction.code); - this.deps.parentPort?.postMessage({ - type: 'FUNCTION_VM_AND_CONTEXT_LATENCY', - labels: { indexer: functionName, type: isHistorical ? 
'historical' : 'real-time' }, - value: performance.now() - vmAndContextBuildLatency, - } satisfies Message); const functionCodeExecutionLatency = performance.now(); try { await vm.run(modifiedFunction); @@ -137,12 +123,6 @@ export default class Indexer { throw e; } console.log('Function Code Execution Latency: ', performance.now() - functionCodeExecutionLatency); - this.deps.parentPort?.postMessage({ - type: 'FUNCTION_CODE_EXECUTION_LATENCY', - labels: { indexer: functionName, type: isHistorical ? 'historical' : 'real-time' }, - value: performance.now() - functionCodeExecutionLatency, - } satisfies Message); - finishPromiseHandlingLatency = performance.now(); simultaneousPromises.push(this.writeFunctionState(functionName, blockHeight, isHistorical)); } catch (e) { console.error(`${functionName}: Failed to run function`, e); @@ -150,11 +130,6 @@ export default class Indexer { throw e; } finally { await Promise.all(simultaneousPromises); - this.deps.parentPort?.postMessage({ - type: 'FUNCTION_VM_AND_CONTEXT_LATENCY', - labels: { indexer: functionName, type: isHistorical ? 'historical' : 'real-time' }, - value: finishPromiseHandlingLatency !== undefined ? performance.now() - finishPromiseHandlingLatency : 0, - } satisfies Message); } } return allMutations; @@ -189,7 +164,7 @@ export default class Indexer { } satisfies Message); } } - return await this.deps.s3StreamerMessageFetcher.buildStreamerMessage(blockHeight); + return await this.deps.lakeClient.buildStreamerMessage(blockHeight); } transformIndexerFunction (indexerFunction: string): string { diff --git a/runner/src/lake-client/index.ts b/runner/src/lake-client/index.ts new file mode 100644 index 000000000..41779a063 --- /dev/null +++ b/runner/src/lake-client/index.ts @@ -0,0 +1 @@ +export { default } from './lake-client'; diff --git a/runner/src/streamer-message-fetcher/s3-streamer-fetcher.test.ts b/runner/src/lake-client/lake-client.test.ts similarity index 90% rename from runner/src/streamer-message-fetcher/s3-streamer-fetcher.test.ts rename to runner/src/lake-client/lake-client.test.ts index e60464f34..a5f78b85f 100644 --- a/runner/src/streamer-message-fetcher/s3-streamer-fetcher.test.ts +++ b/runner/src/lake-client/lake-client.test.ts @@ -1,8 +1,8 @@ import { Block } from '@near-lake/primitives'; import { GetObjectCommand, type S3Client } from '@aws-sdk/client-s3'; -import S3StreamerMessageFetcher from './s3-streamer-fetcher'; +import LakeClient from './lake-client'; -describe('S3StreamerMessageFetcher', () => { +describe('LakeClient', () => { test('Indexer.fetchBlock() should fetch a block from S3', async () => { const author = 'dokiacapital.poolv1.near'; const mockData = JSON.stringify({ @@ -17,7 +17,7 @@ describe('S3StreamerMessageFetcher', () => { send: mockSend, } as unknown as S3Client; - const fetcher = new S3StreamerMessageFetcher('mainnet', mockS3); + const fetcher = new LakeClient('mainnet', mockS3); const blockHeight = 84333960; const block = await fetcher.fetchBlockPromise(blockHeight); @@ -41,7 +41,7 @@ describe('S3StreamerMessageFetcher', () => { const mockS3 = { send: mockSend, } as unknown as S3Client; - const fetcher = new S3StreamerMessageFetcher('mainnet', mockS3); + const fetcher = new LakeClient('mainnet', mockS3); const blockHeight = 82699904; const shard = 0; @@ -77,7 +77,7 @@ describe('S3StreamerMessageFetcher', () => { const mockS3 = { send: mockSend, } as unknown as S3Client; - const fetcher = new S3StreamerMessageFetcher('mainnet', mockS3); + const fetcher = new LakeClient('mainnet', mockS3); const 
streamerMessage = await fetcher.buildStreamerMessage(blockHeight); diff --git a/runner/src/streamer-message-fetcher/s3-streamer-fetcher.ts b/runner/src/lake-client/lake-client.ts similarity index 98% rename from runner/src/streamer-message-fetcher/s3-streamer-fetcher.ts rename to runner/src/lake-client/lake-client.ts index 6058c3ba1..337bcd5a7 100644 --- a/runner/src/streamer-message-fetcher/s3-streamer-fetcher.ts +++ b/runner/src/lake-client/lake-client.ts @@ -1,7 +1,7 @@ import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3'; import { type StreamerMessage } from '@near-lake/primitives'; -export default class S3StreamerMessageFetcher { +export default class LakeClient { private readonly s3Client: S3Client; network: string; constructor ( diff --git a/runner/src/metrics.ts b/runner/src/metrics.ts index 861e3df2a..6120d339e 100644 --- a/runner/src/metrics.ts +++ b/runner/src/metrics.ts @@ -31,36 +31,6 @@ const CACHE_MISS = new Counter({ labelNames: ['type'] }); -const FUNCTION_STATE_LOGGING_LATENCY = new promClient.Gauge({ - name: 'queryapi_runner_function_state_logging_milliseconds', - help: 'Time an indexer function spent on writing state and creating write log promises', - labelNames: ['indexer', 'type'], -}); - -const FUNCTION_VM_AND_CONTEXT_LATENCY = new promClient.Gauge({ - name: 'queryapi_runner_function_vm_and_context_building_milliseconds', - help: 'Time an indexer function spent on preparing the vm and context object', - labelNames: ['indexer', 'type'], -}); - -const FUNCTION_CODE_EXECUTION_LATENCY = new promClient.Gauge({ - name: 'queryapi_runner_function_code_execution_duration_milliseconds', - help: 'Time an indexer function spent executing user code', - labelNames: ['indexer', 'type'], -}); - -const FUNCTION_PROMISE_HANDLING_LATENCY = new promClient.Gauge({ - name: 'queryapi_runner_function_promise_handling_milliseconds', - help: 'Time an indexer function waited for simultaneous promises to resolve', - labelNames: ['indexer', 'type'], -}); - -const FUNCTION_OVERALL_EXECUTION_DURATION = new promClient.Gauge({ - name: 'queryapi_runner_function_overall_duration_milliseconds', - help: 'Time an indexer function waited for a block before processing', - labelNames: ['indexer', 'type'], -}); - const UNPROCESSED_STREAM_MESSAGES = new promClient.Gauge({ name: 'queryapi_runner_unprocessed_stream_messages', help: 'Number of Redis Stream messages not yet processed', @@ -83,11 +53,6 @@ export const METRICS = { BLOCK_WAIT_DURATION, CACHE_HIT, CACHE_MISS, - FUNCTION_STATE_LOGGING_LATENCY, - FUNCTION_VM_AND_CONTEXT_LATENCY, - FUNCTION_CODE_EXECUTION_LATENCY, - FUNCTION_PROMISE_HANDLING_LATENCY, - FUNCTION_OVERALL_EXECUTION_DURATION, UNPROCESSED_STREAM_MESSAGES, LAST_PROCESSED_BLOCK, EXECUTION_DURATION, diff --git a/runner/src/stream-handler/worker.ts b/runner/src/stream-handler/worker.ts index e9eaa2c7a..2f494b5eb 100644 --- a/runner/src/stream-handler/worker.ts +++ b/runner/src/stream-handler/worker.ts @@ -5,7 +5,7 @@ import Indexer from '../indexer'; import RedisClient from '../redis-client'; import { METRICS } from '../metrics'; import type { StreamerMessage } from '@near-lake/primitives'; -import S3StreamerMessageFetcher from '../streamer-message-fetcher/s3-streamer-fetcher'; +import S3StreamerMessageFetcher from '../lake-client/lake-client'; if (isMainThread) { throw new Error('Worker should not be run on main thread'); @@ -244,17 +244,11 @@ async function historicalStreamerMessageQueueConsumer (queue: Array Date: Wed, 1 Nov 2023 15:52:02 -0700 Subject: [PATCH 12/24] Implement 
Morgan's Metrics Fix --- runner/src/indexer/indexer.ts | 17 +--- runner/src/metrics.ts | 18 +--- runner/src/stream-handler/worker.ts | 125 ++-------------------------- 3 files changed, 15 insertions(+), 145 deletions(-) diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index 8c3ac60ed..327c3c8d8 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -2,13 +2,12 @@ import fetch, { type Response } from 'node-fetch'; import { VM } from 'vm2'; import { Block, type StreamerMessage } from '@near-lake/primitives'; import { Parser } from 'node-sql-parser'; -import { type MessagePort } from 'worker_threads'; -import { type Message } from '../stream-handler/types'; import Provisioner from '../provisioner'; import DmlHandler from '../dml-handler/dml-handler'; import RedisClient from '../redis-client'; import LakeClient from '../lake-client/lake-client'; +import { METRICS } from '../metrics'; interface Dependencies { fetch: typeof fetch @@ -17,7 +16,6 @@ interface Dependencies { DmlHandler: typeof DmlHandler parser: Parser redisClient: RedisClient - parentPort: MessagePort | null }; interface Context { @@ -54,7 +52,6 @@ export default class Indexer { DmlHandler, parser: new Parser(), redisClient: deps?.redisClient ?? new RedisClient(), - parentPort: deps?.parentPort ?? null, ...deps, }; } @@ -148,20 +145,12 @@ export default class Indexer { if (!isHistorical) { const cachedMessage = await this.deps.redisClient.getStreamerMessage(blockHeight); if (cachedMessage) { - this.deps.parentPort?.postMessage({ - type: 'CACHE_HIT', - labels: { type: isHistorical ? 'historical' : 'real-time' }, - value: 1, - } satisfies Message); + METRICS.CACHE_HIT.labels({ type: 'real-time' }).inc(); const parsedMessage = JSON.parse(cachedMessage); return parsedMessage; } else { - this.deps.parentPort?.postMessage({ - type: 'CACHE_MISS', - labels: { type: isHistorical ? 
'historical' : 'real-time' },
-        value: 1,
-      } satisfies Message);
+      METRICS.CACHE_MISS.labels({ type: 'real-time' }).inc();
+      }
     }
     return await this.deps.lakeClient.buildStreamerMessage(blockHeight);
diff --git a/runner/src/metrics.ts b/runner/src/metrics.ts
index 6120d339e..a2363fab0 100644
--- a/runner/src/metrics.ts
+++ b/runner/src/metrics.ts
@@ -1,18 +1,6 @@
 import express from 'express';
 import { Gauge, Histogram, Counter, AggregatorRegistry } from 'prom-client';
 
-const UNPROCESSED_STREAM_MESSAGES = new Gauge({
-  name: 'queryapi_runner_unprocessed_stream_messages',
-  help: 'Number of Redis Stream messages not yet processed',
-  labelNames: ['indexer', 'type'],
-});
-
-const EXECUTION_DURATION = new Histogram({
-  name: 'queryapi_runner_execution_duration_milliseconds',
-  help: 'Time taken to execute an indexer function',
-  labelNames: ['indexer', 'type'],
-});
-
 const BLOCK_WAIT_DURATION = new Gauge({
   name: 'queryapi_runner_block_wait_duration_milliseconds',
   help: 'Time an indexer function waited for a block before processing',
@@ -31,19 +19,19 @@ const CACHE_MISS = new Counter({
   labelNames: ['type']
 });
 
-const UNPROCESSED_STREAM_MESSAGES = new promClient.Gauge({
+const UNPROCESSED_STREAM_MESSAGES = new Gauge({
   name: 'queryapi_runner_unprocessed_stream_messages',
   help: 'Number of Redis Stream messages not yet processed',
   labelNames: ['indexer', 'type'],
 });
 
-const LAST_PROCESSED_BLOCK = new promClient.Gauge({
+const LAST_PROCESSED_BLOCK = new Gauge({
   name: 'queryapi_runner_last_processed_block',
   help: 'The last block processed by an indexer function',
   labelNames: ['indexer', 'type'],
 });
 
-const EXECUTION_DURATION = new promClient.Histogram({
+const EXECUTION_DURATION = new Histogram({
   name: 'queryapi_runner_execution_duration_milliseconds',
   help: 'Time taken to execute an indexer function',
   labelNames: ['indexer', 'type'],
diff --git a/runner/src/stream-handler/worker.ts b/runner/src/stream-handler/worker.ts
index 2f494b5eb..92e4e1080 100644
--- a/runner/src/stream-handler/worker.ts
+++ b/runner/src/stream-handler/worker.ts
@@ -12,7 +12,7 @@ if (isMainThread) {
 }
 
 const HISTORICAL_BATCH_SIZE = 100;
-const indexer = new Indexer('mainnet', { parentPort });
+const indexer = new Indexer('mainnet');
 const redisClient = new RedisClient();
 const s3StreamerMessageFetcher = new S3StreamerMessageFetcher();
 
@@ -92,99 +92,6 @@ function incrementId (id: string): string {
   return `${Number(main) + 1}-${sequence}`;
 }
 
-async function historicalStreamerMessageQueueProducer (queue: Array>, streamKey: string): Promise {
-  let currentBlockHeight: string = '0';
-
-  while (true) {
-    const preFetchCount = HISTORICAL_BATCH_SIZE - queue.length;
-    if (preFetchCount <= 0) {
-      await sleep(300);
-      continue;
-    }
-    const messages = await redisClient.getNextStreamMessage(streamKey, preFetchCount, currentBlockHeight);
-    console.log('Messages fetched: ', messages?.length);
-
-    if (messages == null) {
-      await sleep(100);
-      continue;
-    }
-
-    for (const streamMessage of messages) {
-      const { id, message } = streamMessage;
-      fetchAndQueue(queue, Number(message.block_height), id);
-    }
-
-    currentBlockHeight = incrementId(messages[messages.length - 1].id);
-  }
-}
-
-async function historicalStreamerMessageQueueConsumer (queue: Array>, streamKey: string): Promise {
-  const streamType = redisClient.getStreamType(streamKey);
-  const indexerConfig = await redisClient.getStreamStorage(streamKey);
-  const indexerName = `${indexerConfig.account_id}/${indexerConfig.function_name}`;
-  const functions = {
-    [indexerName]: {
-      account_id:
indexerConfig.account_id, - function_name: indexerConfig.function_name, - code: indexerConfig.code, - schema: indexerConfig.schema, - provisioned: false, - }, - }; - - while (true) { - const startTime = performance.now(); - const blockStartTime = startTime; - const queueMessage = await queue.shift(); - if (queueMessage === undefined) { - await sleep(500); - continue; - } - const { streamerMessage, streamId } = queueMessage; - - if (streamerMessage === undefined || streamerMessage?.block.header.height == null) { - console.error('Streamer message does not have block height', streamerMessage); - continue; - } - console.log('Block wait Duration: ', performance.now() - startTime); - parentPort?.postMessage({ - type: 'BLOCK_WAIT_DURATION', - labels: { indexer: indexerName, type: streamType }, - value: performance.now() - blockStartTime, - } satisfies Message); - - const functionStartTime = performance.now(); - await indexer.runFunctions(streamerMessage.block.header.height, functions, false, { provision: true }, streamerMessage); - console.log('Function Code Execution Duration: ', performance.now() - functionStartTime); - parentPort?.postMessage({ - type: 'FUNCTION_OVERALL_EXECUTION_DURATION', - labels: { indexer: indexerName, type: streamType }, - value: performance.now() - functionStartTime, - } satisfies Message); - - // await redisClient.deleteStreamMessage(streamKey, streamId); - // Can just be streamId if above line is running - const unprocessedMessages = await redisClient.getUnprocessedStreamMessages(streamKey, incrementId(streamId)); - - parentPort?.postMessage({ - type: 'UNPROCESSED_STREAM_MESSAGES', - labels: { indexer: indexerName, type: streamType }, - value: unprocessedMessages?.length ?? 0, - } satisfies Message); - } - } -})(); - -async function handleHistoricalStream (streamKey: string): Promise { - void historicalStreamerMessageQueueProducer(queue, streamKey); - void historicalStreamerMessageQueueConsumer(queue, streamKey); -} - -function incrementId (id: string): string { - const [main, sequence] = id.split('-'); - return `${Number(main) + 1}-${sequence}`; -} - async function historicalStreamerMessageQueueProducer (queue: Array>, streamKey: string): Promise { let currentBlockHeight: string = '0'; @@ -238,11 +145,7 @@ async function historicalStreamerMessageQueueConsumer (queue: Array Date: Wed, 1 Nov 2023 18:11:51 -0700 Subject: [PATCH 13/24] Prepare Code for PR --- runner/src/indexer/indexer.test.ts | 317 ++++++--------------- runner/src/indexer/indexer.ts | 32 +-- runner/src/lake-client/lake-client.test.ts | 87 +++++- runner/src/lake-client/lake-client.ts | 26 +- runner/src/metrics.ts | 6 +- runner/src/stream-handler/worker.ts | 95 ++---- 6 files changed, 233 insertions(+), 330 deletions(-) diff --git a/runner/src/indexer/indexer.test.ts b/runner/src/indexer/indexer.test.ts index 58f9553e6..e936d9412 100644 --- a/runner/src/indexer/indexer.test.ts +++ b/runner/src/indexer/indexer.test.ts @@ -1,10 +1,8 @@ -import { Block } from '@near-lake/primitives'; +import { type StreamerMessage } from '@near-lake/primitives'; import type fetch from 'node-fetch'; -import type RedisClient from '../redis-client'; import Indexer from './indexer'; import { VM } from 'vm2'; -import type LakeClient from '../lake-client/lake-client'; describe('Indexer unit tests', () => { const oldEnv = process.env; @@ -163,10 +161,6 @@ CREATE TABLE }), }); - const transparentRedis = { - getStreamerMessage: jest.fn() - } as unknown as RedisClient; - beforeEach(() => { process.env = { ...oldEnv, @@ -187,24 +181,17 
@@ CREATE TABLE }), })); const blockHeight = 456; - const mockData = jest.fn().mockResolvedValue( - JSON.stringify( - { - block: { - chunks: [], - header: { - height: blockHeight - } - }, - shards: {} + const mockStreamerMessage = { + block: { + chunks: [], + header: { + height: blockHeight } - ) - ); - const mockRedis = { - getStreamerMessage: mockData - } as unknown as RedisClient; + }, + shards: {} + } as unknown as StreamerMessage; - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: mockRedis }); + const indexer = new Indexer({ fetch: mockFetch as unknown as typeof fetch }); const functions: Record = {}; functions['buildnear.testnet/test'] = { @@ -214,104 +201,13 @@ CREATE TABLE `, schema: SIMPLE_SCHEMA }; - await indexer.runFunctions(blockHeight, functions, false); + await indexer.runFunctions(mockStreamerMessage, functions, false); expect(mockFetch.mock.calls).toMatchSnapshot(); }); - test('Indexer.fetchStreamerMessage() should fetch the message from cache and use it directly', async () => { - const blockHeight = 85233529; - const blockHash = 'xyz'; - const getMessage = jest.fn() - .mockReturnValueOnce(JSON.stringify( - { - block: { - chunks: [0], - header: { - height: blockHeight, - hash: blockHash, - } - }, - shards: {} - } - )); - const mockRedis = { - getStreamerMessage: getMessage - } as unknown as RedisClient; - const indexer = new Indexer('mainnet', { redisClient: mockRedis }); - - const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, false); - - expect(getMessage).toHaveBeenCalledTimes(1); - expect(JSON.stringify(getMessage.mock.calls[0])).toEqual( - `[${blockHeight}]` - ); - const block = Block.fromStreamerMessage(streamerMessage); - - expect(block.blockHeight).toEqual(blockHeight); - expect(block.blockHash).toEqual(blockHash); - }); - - test('Indexer.fetchStreamerMessage() should fetch the message from S3 upon cache miss', async () => { - const blockHeight = 85233529; - const blockHash = 'xyz'; - const mockLakeClient = { - buildStreamerMessage: jest.fn() - .mockReturnValue({ - block: { - chunks: [0], - header: { - height: blockHeight, - hash: blockHash, - } - }, - shards: {} - }) - } as unknown as LakeClient; - const indexer = new Indexer('mainnet', { lakeClient: mockLakeClient, redisClient: transparentRedis }); - - const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, false); - expect(transparentRedis.getStreamerMessage).toHaveBeenCalledTimes(1); - - const block = Block.fromStreamerMessage(streamerMessage); - - expect(block.blockHeight).toEqual(blockHeight); - expect(block.blockHash).toEqual(blockHash); - }); - - test('Indexer.fetchStreamerMessage() should fetch the message from S3 and not cache if historical', async () => { - const blockHeight = 85233529; - const blockHash = 'xyz'; - const mockLakeClient = { - buildStreamerMessage: jest.fn() - .mockReturnValue({ - block: { - chunks: [0], - header: { - height: blockHeight, - hash: blockHash, - } - }, - shards: {} - }) - } as unknown as LakeClient; - const mockRedis = { - getStreamerMessage: jest.fn() - } as unknown as RedisClient; - const indexer = new Indexer('mainnet', { lakeClient: mockLakeClient, redisClient: mockRedis }); - - const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, true); - - expect(mockRedis.getStreamerMessage).toHaveBeenCalledTimes(0); - - const block = Block.fromStreamerMessage(streamerMessage); - - expect(block.blockHeight).toEqual(blockHeight); - expect(block.blockHash).toEqual(blockHash); - }); - 
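// A minimal sketch (not part of the patch series) of the call shape the deleted
// cache tests above were covering: after this change, fetching happens outside
// of Indexer, so a caller such as worker.ts is expected to prefetch the
// StreamerMessage via LakeClient and hand it to runFunctions directly.
// `runBlock` and the `Record<string, any>` functions type are illustrative
// assumptions; LakeClient, buildStreamerMessage, Indexer, and runFunctions are
// the names used in the surrounding hunks.
import { type StreamerMessage } from '@near-lake/primitives';
import LakeClient from '../lake-client';
import Indexer from '../indexer';

async function runBlock (blockHeight: number, functions: Record<string, any>, isHistorical: boolean): Promise<void> {
  const lakeClient = new LakeClient('mainnet');
  const indexer = new Indexer();

  // Assemble block.json plus its shards from S3 into a single StreamerMessage.
  const streamerMessage: StreamerMessage = await lakeClient.buildStreamerMessage(blockHeight);

  // Pass the prebuilt message straight through; no Redis cache lookup or S3
  // fetch happens inside runFunctions anymore.
  await indexer.runFunctions(streamerMessage, functions, isHistorical, { provision: true });
}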
test('Indexer.transformIndexerFunction() applies the necessary transformations', () => { - const indexer = new Indexer('mainnet', { redisClient: transparentRedis }); + const indexer = new Indexer(); const transformedFunction = indexer.transformIndexerFunction('console.log(\'hello\')'); @@ -343,7 +239,7 @@ CREATE TABLE } }) }); - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); + const indexer = new Indexer({ fetch: mockFetch as unknown as typeof fetch }); const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE); @@ -395,7 +291,7 @@ CREATE TABLE test('Indexer.buildContext() can fetch from the near social api', async () => { const mockFetch = jest.fn(); - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); + const indexer = new Indexer({ fetch: mockFetch as unknown as typeof fetch }); const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE); @@ -424,7 +320,7 @@ CREATE TABLE errors: ['boom'] }) }); - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); + const indexer = new Indexer({ fetch: mockFetch as unknown as typeof fetch }); const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, INVALID_HASURA_ROLE); @@ -439,7 +335,7 @@ CREATE TABLE data: 'mock', }), }); - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); + const indexer = new Indexer({ fetch: mockFetch as unknown as typeof fetch }); const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE); @@ -466,7 +362,7 @@ CREATE TABLE }); test('GetTables works for a variety of input schemas', async () => { - const indexer = new Indexer('mainnet', { redisClient: transparentRedis }); + const indexer = new Indexer(); const simpleSchemaTables = indexer.getTableNames(SIMPLE_SCHEMA); expect(simpleSchemaTables).toStrictEqual(['posts']); @@ -506,7 +402,7 @@ CREATE TABLE }); test('SanitizeTableName works properly on many test cases', async () => { - const indexer = new Indexer('mainnet', { redisClient: transparentRedis }); + const indexer = new Indexer(); expect(indexer.sanitizeTableName('table_name')).toStrictEqual('TableName'); expect(indexer.sanitizeTableName('tablename')).toStrictEqual('Tablename'); // name is not capitalized @@ -521,7 +417,7 @@ CREATE TABLE }); test('indexer fails to build context.db due to collision on sanitized table names', async () => { - const indexer = new Indexer('mainnet', { redisClient: transparentRedis }); + const indexer = new Indexer(); const schemaWithDuplicateSanitizedTableNames = `CREATE TABLE "test table" ( @@ -543,9 +439,8 @@ CREATE TABLE }) }; - const indexer = new Indexer('mainnet', { + const indexer = new Indexer({ fetch: genericMockFetch as unknown as typeof fetch, - redisClient: transparentRedis, DmlHandler: mockDmlHandler }); const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); @@ -583,9 +478,8 @@ CREATE TABLE }) }; - const indexer = new Indexer('mainnet', { + const indexer = new Indexer({ fetch: genericMockFetch as unknown as typeof fetch, - redisClient: transparentRedis, DmlHandler: mockDmlHandler }); const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); @@ -614,9 +508,8 @@ CREATE TABLE }) }; - const indexer = new Indexer('mainnet', { + const indexer = new Indexer({ 
fetch: genericMockFetch as unknown as typeof fetch, - redisClient: transparentRedis, DmlHandler: mockDmlHandler }); const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); @@ -649,9 +542,8 @@ CREATE TABLE }) }; - const indexer = new Indexer('mainnet', { + const indexer = new Indexer({ fetch: genericMockFetch as unknown as typeof fetch, - redisClient: transparentRedis, DmlHandler: mockDmlHandler }); const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); @@ -686,9 +578,8 @@ CREATE TABLE }) }; - const indexer = new Indexer('mainnet', { + const indexer = new Indexer({ fetch: genericMockFetch as unknown as typeof fetch, - redisClient: transparentRedis, DmlHandler: mockDmlHandler }); const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); @@ -706,9 +597,8 @@ CREATE TABLE create: jest.fn() }; - const indexer = new Indexer('mainnet', { + const indexer = new Indexer({ fetch: genericMockFetch as unknown as typeof fetch, - redisClient: transparentRedis, DmlHandler: mockDmlHandler }); const context = indexer.buildContext(STRESS_TEST_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); @@ -749,9 +639,8 @@ CREATE TABLE create: jest.fn() }; - const indexer = new Indexer('mainnet', { + const indexer = new Indexer({ fetch: genericMockFetch as unknown as typeof fetch, - redisClient: transparentRedis, DmlHandler: mockDmlHandler }); const context = indexer.buildContext('', 'morgs.near/social_feed1', 1, 'postgres'); @@ -813,19 +702,16 @@ CREATE TABLE }), }); - const mockLakeClient = { - buildStreamerMessage: jest.fn() - .mockReturnValue({ - block: { - chunks: [0], - header: { - height: blockHeight - } - }, - shards: {} - }) - } as unknown as LakeClient; - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, lakeClient: mockLakeClient, redisClient: transparentRedis }); + const mockStreamerMessage = { + block: { + chunks: [0], + header: { + height: blockHeight + } + }, + shards: {} + } as unknown as StreamerMessage; + const indexer = new Indexer({ fetch: mockFetch as unknown as typeof fetch }); const functions: Record = {}; functions['buildnear.testnet/test'] = { @@ -861,7 +747,7 @@ CREATE TABLE schema: SIMPLE_SCHEMA }; - await indexer.runFunctions(blockHeight, functions, false); + await indexer.runFunctions(mockStreamerMessage, functions, false); expect(mockFetch.mock.calls).toMatchSnapshot(); }); @@ -895,19 +781,16 @@ CREATE TABLE }), })); const blockHeight = 456; - const mockLakeClient = { - buildStreamerMessage: jest.fn() - .mockReturnValue({ - block: { - chunks: [0], - header: { - height: blockHeight - } - }, - shards: {} - }) - } as unknown as LakeClient; - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, lakeClient: mockLakeClient, redisClient: transparentRedis }); + const mockStreamerMessage = { + block: { + chunks: [0], + header: { + height: blockHeight + } + }, + shards: {} + } as unknown as StreamerMessage; + const indexer = new Indexer({ fetch: mockFetch as unknown as typeof fetch }); const functions: Record = {}; functions['buildnear.testnet/test'] = { @@ -917,7 +800,7 @@ CREATE TABLE schema: SIMPLE_SCHEMA }; - await expect(indexer.runFunctions(blockHeight, functions, false)).rejects.toThrow(new Error('boom')); + await expect(indexer.runFunctions(mockStreamerMessage, functions, false)).rejects.toThrow(new Error('boom')); expect(mockFetch.mock.calls).toMatchSnapshot(); }); @@ -929,23 +812,20 @@ CREATE TABLE errors: 
null, }), })); - const mockLakeClient = { - buildStreamerMessage: jest.fn() - .mockReturnValue({ - block: { - chunks: [0], - header: { - height: blockHeight - } - }, - shards: {} - }) - } as unknown as LakeClient; + const mockStreamerMessage = { + block: { + chunks: [0], + header: { + height: blockHeight + } + }, + shards: {} + } as unknown as StreamerMessage; const provisioner: any = { isUserApiProvisioned: jest.fn().mockReturnValue(false), provisionUserApi: jest.fn(), }; - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, lakeClient: mockLakeClient, redisClient: transparentRedis, provisioner }); + const indexer = new Indexer({ fetch: mockFetch as unknown as typeof fetch, provisioner }); const functions = { 'morgs.near/test': { @@ -955,7 +835,7 @@ CREATE TABLE schema: SIMPLE_SCHEMA, } }; - await indexer.runFunctions(1, functions, false, { provision: true }); + await indexer.runFunctions(mockStreamerMessage, functions, false, { provision: true }); expect(provisioner.isUserApiProvisioned).toHaveBeenCalledWith('morgs.near', 'test'); expect(provisioner.provisionUserApi).toHaveBeenCalledTimes(1); @@ -974,23 +854,20 @@ CREATE TABLE errors: null, }), })); - const mockLakeClient = { - buildStreamerMessage: jest.fn() - .mockReturnValue({ - block: { - chunks: [0], - header: { - height: blockHeight - } - }, - shards: {} - }) - } as unknown as LakeClient; + const mockStreamerMessage = { + block: { + chunks: [0], + header: { + height: blockHeight + } + }, + shards: {} + } as unknown as StreamerMessage; const provisioner: any = { isUserApiProvisioned: jest.fn().mockReturnValue(true), provisionUserApi: jest.fn(), }; - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, lakeClient: mockLakeClient, redisClient: transparentRedis, provisioner }); + const indexer = new Indexer({ fetch: mockFetch as unknown as typeof fetch, provisioner }); const functions: Record = { 'morgs.near/test': { @@ -998,7 +875,7 @@ CREATE TABLE schema: SIMPLE_SCHEMA, } }; - await indexer.runFunctions(1, functions, false, { provision: true }); + await indexer.runFunctions(mockStreamerMessage, functions, false, { provision: true }); expect(provisioner.provisionUserApi).not.toHaveBeenCalled(); }); @@ -1011,23 +888,20 @@ CREATE TABLE errors: null, }), })); - const mockLakeClient = { - buildStreamerMessage: jest.fn() - .mockReturnValue({ - block: { - chunks: [0], - header: { - height: blockHeight - } - }, - shards: {} - }) - } as unknown as LakeClient; + const mockStreamerMessage = { + block: { + chunks: [0], + header: { + height: blockHeight + } + }, + shards: {} + } as unknown as StreamerMessage; const provisioner: any = { isUserApiProvisioned: jest.fn().mockReturnValue(true), provisionUserApi: jest.fn(), }; - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, lakeClient: mockLakeClient, redisClient: transparentRedis, provisioner }); + const indexer = new Indexer({ fetch: mockFetch as unknown as typeof fetch, provisioner }); const functions: Record = { 'morgs.near/test': { @@ -1037,7 +911,7 @@ CREATE TABLE schema: SIMPLE_SCHEMA, } }; - await indexer.runFunctions(blockHeight, functions, false, { provision: true }); + await indexer.runFunctions(mockStreamerMessage, functions, false, { provision: true }); expect(provisioner.provisionUserApi).not.toHaveBeenCalled(); expect(mockFetch.mock.calls).toMatchSnapshot(); @@ -1051,24 +925,21 @@ CREATE TABLE errors: null, }), })); - const mockLakeClient = { - buildStreamerMessage: jest.fn() - 
.mockReturnValue({ - block: { - chunks: [0], - header: { - height: blockHeight - } - }, - shards: {} - }) - } as unknown as LakeClient; + const mockStreamerMessage = { + block: { + chunks: [0], + header: { + height: blockHeight + } + }, + shards: {} + } as unknown as StreamerMessage; const error = new Error('something went wrong with provisioning'); const provisioner: any = { isUserApiProvisioned: jest.fn().mockReturnValue(false), provisionUserApi: jest.fn().mockRejectedValue(error), }; - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, lakeClient: mockLakeClient, redisClient: transparentRedis, provisioner }); + const indexer = new Indexer({ fetch: mockFetch as unknown as typeof fetch, provisioner }); const functions: Record = { 'morgs.near/test': { @@ -1079,7 +950,7 @@ CREATE TABLE } }; - await expect(indexer.runFunctions(blockHeight, functions, false, { provision: true })).rejects.toThrow(error); + await expect(indexer.runFunctions(mockStreamerMessage, functions, false, { provision: true })).rejects.toThrow(error); expect(mockFetch.mock.calls).toMatchSnapshot(); }); @@ -1091,7 +962,7 @@ CREATE TABLE data: {} }) }); - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); + const indexer = new Indexer({ fetch: mockFetch as unknown as typeof fetch }); // @ts-expect-error legacy test const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, null); @@ -1127,7 +998,7 @@ CREATE TABLE }) }); const role = 'morgs_near'; - const indexer = new Indexer('mainnet', { fetch: mockFetch as unknown as typeof fetch, redisClient: transparentRedis }); + const indexer = new Indexer({ fetch: mockFetch as unknown as typeof fetch }); const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE); const mutation = ` diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index 327c3c8d8..ed033cad8 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -5,17 +5,12 @@ import { Parser } from 'node-sql-parser'; import Provisioner from '../provisioner'; import DmlHandler from '../dml-handler/dml-handler'; -import RedisClient from '../redis-client'; -import LakeClient from '../lake-client/lake-client'; -import { METRICS } from '../metrics'; interface Dependencies { fetch: typeof fetch - lakeClient: LakeClient provisioner: Provisioner DmlHandler: typeof DmlHandler parser: Parser - redisClient: RedisClient }; interface Context { @@ -40,30 +35,26 @@ export default class Indexer { private readonly deps: Dependencies; constructor ( - private readonly network: string, deps?: Partial ) { this.DEFAULT_HASURA_ROLE = 'append'; - this.network = network; this.deps = { fetch, - lakeClient: deps?.lakeClient ?? new LakeClient(this.network), provisioner: new Provisioner(), DmlHandler, parser: new Parser(), - redisClient: deps?.redisClient ?? new RedisClient(), ...deps, }; } async runFunctions ( - blockHeight: number, + streamerMessage: StreamerMessage, functions: Record, isHistorical: boolean, - options: { provision?: boolean } = { provision: false }, - streamerMessage: StreamerMessage | null = null + options: { provision?: boolean } = { provision: false } ): Promise { - const blockWithHelpers = Block.fromStreamerMessage(streamerMessage !== null ? 
streamerMessage : await this.fetchStreamerMessage(blockHeight, isHistorical));
+    const blockHeight = Number(streamerMessage.block.header.height);
+    const blockWithHelpers = Block.fromStreamerMessage(streamerMessage);
 
     const lag = Date.now() - Math.floor(Number(blockWithHelpers.header().timestampNanosec) / 1000000);
 
@@ -141,21 +132,6 @@ export default class Indexer {
     `;
   }
 
-  async fetchStreamerMessage (blockHeight: number, isHistorical: boolean): Promise<{ block: any, shards: any[] }> {
-    if (!isHistorical) {
-      const cachedMessage = await this.deps.redisClient.getStreamerMessage(blockHeight);
-      if (cachedMessage) {
-        METRICS.CACHE_HIT.labels({ type: 'real-time' }).inc();
-
-        const parsedMessage = JSON.parse(cachedMessage);
-        return parsedMessage;
-      } else {
-        METRICS.CACHE_MISS.labels({ type: 'real-time' }).inc();
-      }
-    }
-    return await this.deps.lakeClient.buildStreamerMessage(blockHeight);
-  }
-
   transformIndexerFunction (indexerFunction: string): string {
     return [
       this.enableAwaitTransform,
diff --git a/runner/src/lake-client/lake-client.test.ts b/runner/src/lake-client/lake-client.test.ts
index a5f78b85f..528710175 100644
--- a/runner/src/lake-client/lake-client.test.ts
+++ b/runner/src/lake-client/lake-client.test.ts
@@ -79,7 +79,7 @@ describe('LakeClient', () => {
     } as unknown as S3Client;
     const fetcher = new LakeClient('mainnet', mockS3);
 
-    const streamerMessage = await fetcher.buildStreamerMessage(blockHeight);
+    const streamerMessage = await fetcher.fetchStreamerMessage(blockHeight, true);
 
     expect(mockSend).toHaveBeenCalledTimes(5);
     expect(JSON.stringify(mockSend.mock.calls[0][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({
@@ -96,4 +96,89 @@ describe('LakeClient', () => {
     expect(block.blockHeight).toEqual(blockHeight);
     expect(block.blockHash).toEqual(blockHash);
   });
+
+  // test('Indexer.fetchStreamerMessage() should fetch the message from cache and use it directly', async () => {
+  //   const blockHeight = 85233529;
+  //   const blockHash = 'xyz';
+  //   const mockStreamerMessage = {
+  //     block: {
+  //       chunks: [0],
+  //       header: {
+  //         height: blockHeight,
+  //         hash: blockHash,
+  //       }
+  //     },
+  //     shards: {}
+  //   } as unknown as StreamerMessage;
+  //   const indexer = new Indexer('mainnet');
+
+  //   const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, false);
+
+  //   expect(getMessage).toHaveBeenCalledTimes(1);
+  //   expect(JSON.stringify(getMessage.mock.calls[0])).toEqual(
+  //     `[${blockHeight}]`
+  //   );
+  //   const block = Block.fromStreamerMessage(streamerMessage);
+
+  //   expect(block.blockHeight).toEqual(blockHeight);
+  //   expect(block.blockHash).toEqual(blockHash);
+  // });
+
+  // test('Indexer.fetchStreamerMessage() should fetch the message from S3 upon cache miss', async () => {
+  //   const blockHeight = 85233529;
+  //   const blockHash = 'xyz';
+  //   const mockLakeClient = {
+  //     buildStreamerMessage: jest.fn()
+  //       .mockReturnValue({
+  //         block: {
+  //           chunks: [0],
+  //           header: {
+  //             height: blockHeight,
+  //             hash: blockHash,
+  //           }
+  //         },
+  //         shards: {}
+  //       })
+  //   } as unknown as LakeClient;
+  //   const indexer = new Indexer('mainnet', { lakeClient: mockLakeClient, redisClient: transparentRedis });
+
+  //   const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, false);
+  //   expect(transparentRedis.getStreamerMessage).toHaveBeenCalledTimes(1);
+
+  //   const block = Block.fromStreamerMessage(streamerMessage);
+
+  //   expect(block.blockHeight).toEqual(blockHeight);
+  //   expect(block.blockHash).toEqual(blockHash);
+  // });
+
+  // 
test('Indexer.fetchStreamerMessage() should fetch the message from S3 and not cache if historical', async () => { + // const blockHeight = 85233529; + // const blockHash = 'xyz'; + // const mockLakeClient = { + // buildStreamerMessage: jest.fn() + // .mockReturnValue({ + // block: { + // chunks: [0], + // header: { + // height: blockHeight, + // hash: blockHash, + // } + // }, + // shards: {} + // }) + // } as unknown as LakeClient; + // const mockRedis = { + // getStreamerMessage: jest.fn() + // } as unknown as RedisClient; + // const indexer = new Indexer('mainnet', { lakeClient: mockLakeClient }); + + // const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, true); + + // expect(mockRedis.getStreamerMessage).toHaveBeenCalledTimes(0); + + // const block = Block.fromStreamerMessage(streamerMessage); + + // expect(block.blockHeight).toEqual(blockHeight); + // expect(block.blockHash).toEqual(blockHash); + // }); }); diff --git a/runner/src/lake-client/lake-client.ts b/runner/src/lake-client/lake-client.ts index 337bcd5a7..f034e3e77 100644 --- a/runner/src/lake-client/lake-client.ts +++ b/runner/src/lake-client/lake-client.ts @@ -1,15 +1,20 @@ import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3'; import { type StreamerMessage } from '@near-lake/primitives'; +import { METRICS } from '../metrics'; +import RedisClient from '../redis-client/redis-client'; export default class LakeClient { - private readonly s3Client: S3Client; network: string; + private readonly s3Client: S3Client; + private readonly redisClient: RedisClient; constructor ( network: string = 'mainnet', - s3Client: S3Client = new S3Client() + s3Client: S3Client = new S3Client(), + redisClient: RedisClient = new RedisClient() ) { - this.s3Client = s3Client; this.network = network; + this.s3Client = s3Client; + this.redisClient = redisClient; } // pad with 0s to 12 digits @@ -66,7 +71,20 @@ export default class LakeClient { return value; } - async buildStreamerMessage (blockHeight: number): Promise { + async fetchStreamerMessage (blockHeight: number, isHistorical: boolean): Promise { + if (!isHistorical) { + const cachedMessage = await this.redisClient.getStreamerMessage(blockHeight); + if (cachedMessage) { + METRICS.CACHE_HIT.labels().inc(); + console.log('hit: ', METRICS.CACHE_HIT.get()); + const parsedMessage = JSON.parse(cachedMessage); + return parsedMessage; + } else { + METRICS.CACHE_MISS.labels().inc(); + console.log('miss: ', METRICS.CACHE_MISS.get()); + } + } + const blockPromise = this.fetchBlockPromise(blockHeight); const shardsPromises = await this.fetchShardsPromises(blockHeight, 4); diff --git a/runner/src/metrics.ts b/runner/src/metrics.ts index a2363fab0..075e4a898 100644 --- a/runner/src/metrics.ts +++ b/runner/src/metrics.ts @@ -9,14 +9,12 @@ const BLOCK_WAIT_DURATION = new Gauge({ const CACHE_HIT = new Counter({ name: 'queryapi_runner_cache_hit', - help: 'The number of times cache was hit successfully', - labelNames: ['type'] + help: 'The number of times cache was hit successfully' }); const CACHE_MISS = new Counter({ name: 'queryapi_runner_cache_miss', - help: 'The number of times cache was missed', - labelNames: ['type'] + help: 'The number of times cache was missed' }); const UNPROCESSED_STREAM_MESSAGES = new Gauge({ diff --git a/runner/src/stream-handler/worker.ts b/runner/src/stream-handler/worker.ts index 92e4e1080..38ea51997 100644 --- a/runner/src/stream-handler/worker.ts +++ b/runner/src/stream-handler/worker.ts @@ -5,16 +5,17 @@ import Indexer from '../indexer'; import 
RedisClient from '../redis-client';
 import { METRICS } from '../metrics';
 import type { StreamerMessage } from '@near-lake/primitives';
-import S3StreamerMessageFetcher from '../lake-client/lake-client';
+import LakeClient from '../lake-client/lake-client';
 
 if (isMainThread) {
   throw new Error('Worker should not be run on main thread');
 }
 
 const HISTORICAL_BATCH_SIZE = 100;
-const indexer = new Indexer('mainnet');
+const indexer = new Indexer();
 const redisClient = new RedisClient();
-const s3StreamerMessageFetcher = new S3StreamerMessageFetcher();
+const lakeClient = new LakeClient();
+let isHistorical = false;
 
 interface QueueMessage {
   streamerMessage: StreamerMessage
   streamId: string
 }
 const queue: Array<Promise<QueueMessage>> = [];
 
@@ -29,62 +30,15 @@ void (async function main () {
 
   console.log('Started processing stream: ', streamKey);
 
-  let indexerName = '';
   const streamType = redisClient.getStreamType(streamKey);
-  const isHistorical = streamType === 'historical';
-  if (isHistorical) {
-    await handleHistoricalStream(streamKey);
-    return;
-  }
-
-  while (true) {
-    try {
-      const startTime = performance.now();
-
-      const messages = await redisClient.getNextStreamMessage(streamKey);
-      const indexerConfig = await redisClient.getStreamStorage(streamKey);
-
-      indexerName = `${indexerConfig.account_id}/${indexerConfig.function_name}`;
-
-      if (messages == null) {
-        await sleep(1000);
-        continue;
-      }
-
-      const [{ id, message }] = messages;
+  isHistorical = (streamType === 'historical');
 
-      const functions = {
-        [indexerName]: {
-          account_id: indexerConfig.account_id,
-          function_name: indexerConfig.function_name,
-          code: indexerConfig.code,
-          schema: indexerConfig.schema,
-          provisioned: false,
-        },
-      };
-      await indexer.runFunctions(Number(message.block_height), functions, isHistorical, {
-        provision: true,
-      });
-
-      await redisClient.deleteStreamMessage(streamKey, id);
-
-      METRICS.EXECUTION_DURATION.labels({ indexer: indexerName, type: streamType }).observe(performance.now() - startTime);
-
-      console.log(`Success: ${indexerName}`);
    } catch (err) {
-      await sleep(10000);
-      console.log(`Failed: ${indexerName}`, err);
-    } finally {
-      const unprocessedMessages = await redisClient.getUnprocessedStreamMessages(streamKey);
-      METRICS.UNPROCESSED_STREAM_MESSAGES.labels({ indexer: indexerName, type: streamType }).set(unprocessedMessages?.length ?? 
0); - parentPort?.postMessage(await promClient.register.getMetricsAsJSON()); - } - } + await handleStream(streamKey); })(); -async function handleHistoricalStream (streamKey: string): Promise { - void historicalStreamerMessageQueueProducer(queue, streamKey); - void historicalStreamerMessageQueueConsumer(queue, streamKey); +async function handleStream (streamKey: string): Promise { + void streamerMessageQueueProducer(queue, streamKey); + void streamerMessageQueueConsumer(queue, streamKey); } function incrementId (id: string): string { @@ -92,7 +46,7 @@ function incrementId (id: string): string { return `${Number(main) + 1}-${sequence}`; } -async function historicalStreamerMessageQueueProducer (queue: Array>, streamKey: string): Promise { +async function streamerMessageQueueProducer (queue: Array>, streamKey: string): Promise { let currentBlockHeight: string = '0'; while (true) { @@ -117,7 +71,7 @@ async function historicalStreamerMessageQueueProducer (queue: Array>, streamKey: string): Promise { +async function streamerMessageQueueConsumer (queue: Array>, streamKey: string): Promise { const streamType = redisClient.getStreamType(streamKey); const indexerConfig = await redisClient.getStreamStorage(streamKey); const indexerName = `${indexerConfig.account_id}/${indexerConfig.function_name}`; @@ -148,22 +102,23 @@ async function historicalStreamerMessageQueueConsumer (queue: Array>, blockHeight: number } async function transformStreamerMessageToQueueMessage (blockHeight: number, streamId: string): Promise { - const streamerMessage = await s3StreamerMessageFetcher.buildStreamerMessage(blockHeight); + const streamerMessage = await lakeClient.fetchStreamerMessage(blockHeight, isHistorical); return { streamerMessage, streamId From 044dd27f9606d0089681797b781d66460522ac17 Mon Sep 17 00:00:00 2001 From: Darun Seethammagari Date: Wed, 1 Nov 2023 18:21:58 -0700 Subject: [PATCH 14/24] Complete Lake Client Tests --- runner/src/lake-client/lake-client.test.ts | 224 ++++++++++++--------- 1 file changed, 134 insertions(+), 90 deletions(-) diff --git a/runner/src/lake-client/lake-client.test.ts b/runner/src/lake-client/lake-client.test.ts index 528710175..0c5208ad3 100644 --- a/runner/src/lake-client/lake-client.test.ts +++ b/runner/src/lake-client/lake-client.test.ts @@ -1,6 +1,7 @@ import { Block } from '@near-lake/primitives'; import { GetObjectCommand, type S3Client } from '@aws-sdk/client-s3'; import LakeClient from './lake-client'; +import type RedisClient from '../redis-client'; describe('LakeClient', () => { test('Indexer.fetchBlock() should fetch a block from S3', async () => { @@ -17,10 +18,10 @@ describe('LakeClient', () => { send: mockSend, } as unknown as S3Client; - const fetcher = new LakeClient('mainnet', mockS3); + const client = new LakeClient('mainnet', mockS3); const blockHeight = 84333960; - const block = await fetcher.fetchBlockPromise(blockHeight); + const block = await client.fetchBlockPromise(blockHeight); const params = { Bucket: 'near-lake-data-mainnet', Key: `${blockHeight.toString().padStart(12, '0')}/block.json` @@ -41,7 +42,7 @@ describe('LakeClient', () => { const mockS3 = { send: mockSend, } as unknown as S3Client; - const fetcher = new LakeClient('mainnet', mockS3); + const client = new LakeClient('mainnet', mockS3); const blockHeight = 82699904; const shard = 0; @@ -49,7 +50,7 @@ describe('LakeClient', () => { Bucket: 'near-lake-data-mainnet', Key: `${blockHeight.toString().padStart(12, '0')}/shard_${shard}.json` }; - await fetcher.fetchShardPromise(blockHeight, shard); + 
await client.fetchShardPromise(blockHeight, shard); expect(JSON.stringify(mockSend.mock.calls[0][0])).toMatch(JSON.stringify(new GetObjectCommand(params))); }); @@ -77,9 +78,9 @@ describe('LakeClient', () => { const mockS3 = { send: mockSend, } as unknown as S3Client; - const fetcher = new LakeClient('mainnet', mockS3); + const client = new LakeClient('mainnet', mockS3); - const streamerMessage = await fetcher.fetchStreamerMessage(blockHeight, true); + const streamerMessage = await client.fetchStreamerMessage(blockHeight, true); expect(mockSend).toHaveBeenCalledTimes(5); expect(JSON.stringify(mockSend.mock.calls[0][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({ @@ -97,88 +98,131 @@ describe('LakeClient', () => { expect(block.blockHash).toEqual(blockHash); }); - // test('Indexer.fetchStreamerMessage() should fetch the message from cache and use it directly', async () => { - // const blockHeight = 85233529; - // const blockHash = 'xyz'; - // const mockStreamerMessage = { - // block: { - // chunks: [0], - // header: { - // height: blockHeight, - // hash: blockHash, - // } - // }, - // shards: {} - // } as unknown as StreamerMessage; - // const indexer = new Indexer('mainnet'); - - // const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, false); - - // expect(getMessage).toHaveBeenCalledTimes(1); - // expect(JSON.stringify(getMessage.mock.calls[0])).toEqual( - // `[${blockHeight}]` - // ); - // const block = Block.fromStreamerMessage(streamerMessage); - - // expect(block.blockHeight).toEqual(blockHeight); - // expect(block.blockHash).toEqual(blockHash); - // }); - - // test('Indexer.fetchStreamerMessage() should fetch the message from S3 upon cache miss', async () => { - // const blockHeight = 85233529; - // const blockHash = 'xyz'; - // const mockLakeClient = { - // buildStreamerMessage: jest.fn() - // .mockReturnValue({ - // block: { - // chunks: [0], - // header: { - // height: blockHeight, - // hash: blockHash, - // } - // }, - // shards: {} - // }) - // } as unknown as LakeClient; - // const indexer = new Indexer('mainnet', { lakeClient: mockLakeClient, redisClient: transparentRedis }); - - // const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, false); - // expect(transparentRedis.getStreamerMessage).toHaveBeenCalledTimes(1); - - // const block = Block.fromStreamerMessage(streamerMessage); - - // expect(block.blockHeight).toEqual(blockHeight); - // expect(block.blockHash).toEqual(blockHash); - // }); - - // test('Indexer.fetchStreamerMessage() should fetch the message from S3 and not cache if historical', async () => { - // const blockHeight = 85233529; - // const blockHash = 'xyz'; - // const mockLakeClient = { - // buildStreamerMessage: jest.fn() - // .mockReturnValue({ - // block: { - // chunks: [0], - // header: { - // height: blockHeight, - // hash: blockHash, - // } - // }, - // shards: {} - // }) - // } as unknown as LakeClient; - // const mockRedis = { - // getStreamerMessage: jest.fn() - // } as unknown as RedisClient; - // const indexer = new Indexer('mainnet', { lakeClient: mockLakeClient }); - - // const streamerMessage = await indexer.fetchStreamerMessage(blockHeight, true); - - // expect(mockRedis.getStreamerMessage).toHaveBeenCalledTimes(0); - - // const block = Block.fromStreamerMessage(streamerMessage); - - // expect(block.blockHeight).toEqual(blockHeight); - // expect(block.blockHash).toEqual(blockHash); - // }); + test('fetchStreamerMessage should fetch the message from cache and return it', async () => { + const 
blockHeight = 85233529; + const blockHash = 'xyz'; + const getMessage = jest.fn() + .mockReturnValueOnce(JSON.stringify( + { + block: { + chunks: [0], + header: { + height: blockHeight, + hash: blockHash, + } + }, + shards: {} + } + )); + const mockRedis = { + getStreamerMessage: getMessage + } as unknown as RedisClient; + const mockS3 = {} as unknown as S3Client; + const client = new LakeClient('mainnet', mockS3, mockRedis); + + const streamerMessage = await client.fetchStreamerMessage(blockHeight, false); + + expect(getMessage).toHaveBeenCalledTimes(1); + expect(JSON.stringify(getMessage.mock.calls[0])).toEqual( + `[${blockHeight}]` + ); + const block = Block.fromStreamerMessage(streamerMessage); + + expect(block.blockHeight).toEqual(blockHeight); + expect(block.blockHash).toEqual(blockHash); + }); + + test('fetchStreamerMessage should fetch the block and shards from S3 upon cache miss', async () => { + const blockHeight = 85233529; + const blockHash = 'xyz'; + const mockSend = jest.fn() + .mockReturnValueOnce({ // block + Body: { + transformToString: () => JSON.stringify({ + chunks: [0], + header: { + height: blockHeight, + hash: blockHash, + } + }) + } + }) + .mockReturnValue({ // shard + Body: { + transformToString: () => JSON.stringify({}) + } + }); + const mockS3 = { + send: mockSend, + } as unknown as S3Client; + const transparentRedis = { + getStreamerMessage: jest.fn() + } as unknown as RedisClient; + const client = new LakeClient('mainnet', mockS3, transparentRedis); + + const streamerMessage = await client.fetchStreamerMessage(blockHeight, false); + + expect(mockSend).toHaveBeenCalledTimes(5); + expect(JSON.stringify(mockSend.mock.calls[0][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({ + Bucket: 'near-lake-data-mainnet', + Key: `${blockHeight.toString().padStart(12, '0')}/block.json` + }))); + expect(JSON.stringify(mockSend.mock.calls[1][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({ + Bucket: 'near-lake-data-mainnet', + Key: `${blockHeight.toString().padStart(12, '0')}/shard_0.json` + }))); + expect(transparentRedis.getStreamerMessage).toHaveBeenCalledTimes(1); + + const block = Block.fromStreamerMessage(streamerMessage); + + expect(block.blockHeight).toEqual(blockHeight); + expect(block.blockHash).toEqual(blockHash); + }); + + test('fetchStreamerMessage should fetch the block and shards from S3 and not cache if historical', async () => { + const blockHeight = 85233529; + const blockHash = 'xyz'; + const mockSend = jest.fn() + .mockReturnValueOnce({ // block + Body: { + transformToString: () => JSON.stringify({ + chunks: [0], + header: { + height: blockHeight, + hash: blockHash, + } + }) + } + }) + .mockReturnValue({ // shard + Body: { + transformToString: () => JSON.stringify({}) + } + }); + const mockS3 = { + send: mockSend, + } as unknown as S3Client; + const mockRedis = { + getStreamerMessage: jest.fn() + } as unknown as RedisClient; + const client = new LakeClient('mainnet', mockS3, mockRedis); + + const streamerMessage = await client.fetchStreamerMessage(blockHeight, true); + + expect(mockSend).toHaveBeenCalledTimes(5); + expect(JSON.stringify(mockSend.mock.calls[0][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({ + Bucket: 'near-lake-data-mainnet', + Key: `${blockHeight.toString().padStart(12, '0')}/block.json` + }))); + expect(JSON.stringify(mockSend.mock.calls[1][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({ + Bucket: 'near-lake-data-mainnet', + Key: `${blockHeight.toString().padStart(12, '0')}/shard_0.json` + }))); + 
expect(mockRedis.getStreamerMessage).toHaveBeenCalledTimes(0); + + const block = Block.fromStreamerMessage(streamerMessage); + + expect(block.blockHeight).toEqual(blockHeight); + expect(block.blockHash).toEqual(blockHash); + }); }); From 00132abca83a3bbe3ab13ae169aed20bcf9b3163 Mon Sep 17 00:00:00 2001 From: Darun Seethammagari Date: Wed, 1 Nov 2023 18:47:34 -0700 Subject: [PATCH 15/24] Finalize Metrics for PR --- runner/src/indexer/indexer.ts | 4 +++- runner/src/lake-client/lake-client.ts | 2 -- runner/src/metrics.ts | 7 +++++++ runner/src/stream-handler/worker.ts | 11 +---------- 4 files changed, 11 insertions(+), 13 deletions(-) diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index ed033cad8..72975d2ab 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -5,6 +5,7 @@ import { Parser } from 'node-sql-parser'; import Provisioner from '../provisioner'; import DmlHandler from '../dml-handler/dml-handler'; +import { METRICS } from '../metrics'; interface Dependencies { fetch: typeof fetch @@ -110,7 +111,8 @@ export default class Indexer { await this.writeLog(functionName, blockHeight, 'Error running IndexerFunction', error.message); throw e; } - console.log('Function Code Execution Latency: ', performance.now() - functionCodeExecutionLatency); + METRICS.USER_CODE_EXECUTION_DURATION.labels({ indexer: functionName, type: isHistorical ? 'historical' : 'realtime' }) + .observe(performance.now() - functionCodeExecutionLatency); simultaneousPromises.push(this.writeFunctionState(functionName, blockHeight, isHistorical)); } catch (e) { console.error(`${functionName}: Failed to run function`, e); diff --git a/runner/src/lake-client/lake-client.ts b/runner/src/lake-client/lake-client.ts index f034e3e77..1615ed8b2 100644 --- a/runner/src/lake-client/lake-client.ts +++ b/runner/src/lake-client/lake-client.ts @@ -76,12 +76,10 @@ export default class LakeClient { const cachedMessage = await this.redisClient.getStreamerMessage(blockHeight); if (cachedMessage) { METRICS.CACHE_HIT.labels().inc(); - console.log('hit: ', METRICS.CACHE_HIT.get()); const parsedMessage = JSON.parse(cachedMessage); return parsedMessage; } else { METRICS.CACHE_MISS.labels().inc(); - console.log('miss: ', METRICS.CACHE_MISS.get()); } } diff --git a/runner/src/metrics.ts b/runner/src/metrics.ts index 075e4a898..4022980d7 100644 --- a/runner/src/metrics.ts +++ b/runner/src/metrics.ts @@ -17,6 +17,12 @@ const CACHE_MISS = new Counter({ help: 'The number of times cache was missed' }); +const USER_CODE_EXECUTION_DURATION = new Histogram({ + name: 'queryapi_runner_code_execution_milliseconds', + help: 'Time spent running a user\'s code', + labelNames: ['indexer', 'type'], +}); + const UNPROCESSED_STREAM_MESSAGES = new Gauge({ name: 'queryapi_runner_unprocessed_stream_messages', help: 'Number of Redis Stream messages not yet processed', @@ -39,6 +45,7 @@ export const METRICS = { BLOCK_WAIT_DURATION, CACHE_HIT, CACHE_MISS, + USER_CODE_EXECUTION_DURATION, UNPROCESSED_STREAM_MESSAGES, LAST_PROCESSED_BLOCK, EXECUTION_DURATION, diff --git a/runner/src/stream-handler/worker.ts b/runner/src/stream-handler/worker.ts index 38ea51997..e4ac96fba 100644 --- a/runner/src/stream-handler/worker.ts +++ b/runner/src/stream-handler/worker.ts @@ -41,21 +41,14 @@ async function handleStream (streamKey: string): Promise { void streamerMessageQueueConsumer(queue, streamKey); } -function incrementId (id: string): string { - const [main, sequence] = id.split('-'); - return `${Number(main) + 1}-${sequence}`; 
-} - async function streamerMessageQueueProducer (queue: Array>, streamKey: string): Promise { - let currentBlockHeight: string = '0'; - while (true) { const preFetchCount = HISTORICAL_BATCH_SIZE - queue.length; if (preFetchCount <= 0) { await sleep(300); continue; } - const messages = await redisClient.getNextStreamMessage(streamKey, preFetchCount, currentBlockHeight); + const messages = await redisClient.getNextStreamMessage(streamKey, preFetchCount); if (messages == null) { await sleep(100); continue; @@ -66,8 +59,6 @@ async function streamerMessageQueueProducer (queue: Array> const { id, message } = streamMessage; fetchAndQueue(queue, Number(message.block_height), id); } - - currentBlockHeight = incrementId(messages[messages.length - 1].id); } } From 632152640f019a6d1c41f5c6e4297dfa9cf19ec5 Mon Sep 17 00:00:00 2001 From: Darun Seethammagari Date: Wed, 1 Nov 2023 18:50:55 -0700 Subject: [PATCH 16/24] Tune parameters for waiting --- runner/src/stream-handler/worker.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/runner/src/stream-handler/worker.ts b/runner/src/stream-handler/worker.ts index e4ac96fba..3068f8a59 100644 --- a/runner/src/stream-handler/worker.ts +++ b/runner/src/stream-handler/worker.ts @@ -45,12 +45,12 @@ async function streamerMessageQueueProducer (queue: Array> while (true) { const preFetchCount = HISTORICAL_BATCH_SIZE - queue.length; if (preFetchCount <= 0) { - await sleep(300); + await sleep(300); // Wait for more messages in array to process continue; } const messages = await redisClient.getNextStreamMessage(streamKey, preFetchCount); if (messages == null) { - await sleep(100); + await sleep(1000); // Wait for new messages to appear in stream continue; } console.log(`Fetched ${messages?.length} messages from stream ${streamKey}`); @@ -81,7 +81,7 @@ async function streamerMessageQueueConsumer (queue: Array> const blockStartTime = startTime; const queueMessage = await queue.shift(); if (queueMessage === undefined) { - await sleep(500); + await sleep(1000); // Wait for new message to process continue; } const { streamerMessage, streamId } = queueMessage; From 0e3e26ef30c0a4c064858ed0429911fb6d9c9e53 Mon Sep 17 00:00:00 2001 From: Darun Seethammagari Date: Wed, 1 Nov 2023 19:16:23 -0700 Subject: [PATCH 17/24] Address unit test failures --- runner/src/lake-client/lake-client.test.ts | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/runner/src/lake-client/lake-client.test.ts b/runner/src/lake-client/lake-client.test.ts index 0c5208ad3..b3f8760c7 100644 --- a/runner/src/lake-client/lake-client.test.ts +++ b/runner/src/lake-client/lake-client.test.ts @@ -4,6 +4,10 @@ import LakeClient from './lake-client'; import type RedisClient from '../redis-client'; describe('LakeClient', () => { + const transparentRedis = { + getStreamerMessage: jest.fn() + } as unknown as RedisClient; + test('Indexer.fetchBlock() should fetch a block from S3', async () => { const author = 'dokiacapital.poolv1.near'; const mockData = JSON.stringify({ @@ -18,7 +22,7 @@ describe('LakeClient', () => { send: mockSend, } as unknown as S3Client; - const client = new LakeClient('mainnet', mockS3); + const client = new LakeClient('mainnet', mockS3, transparentRedis); const blockHeight = 84333960; const block = await client.fetchBlockPromise(blockHeight); @@ -42,7 +46,7 @@ describe('LakeClient', () => { const mockS3 = { send: mockSend, } as unknown as S3Client; - const client = new LakeClient('mainnet', mockS3); + const client = new LakeClient('mainnet', 
mockS3, transparentRedis); const blockHeight = 82699904; const shard = 0; @@ -78,7 +82,7 @@ describe('LakeClient', () => { const mockS3 = { send: mockSend, } as unknown as S3Client; - const client = new LakeClient('mainnet', mockS3); + const client = new LakeClient('mainnet', mockS3, transparentRedis); const streamerMessage = await client.fetchStreamerMessage(blockHeight, true); @@ -155,9 +159,6 @@ describe('LakeClient', () => { const mockS3 = { send: mockSend, } as unknown as S3Client; - const transparentRedis = { - getStreamerMessage: jest.fn() - } as unknown as RedisClient; const client = new LakeClient('mainnet', mockS3, transparentRedis); const streamerMessage = await client.fetchStreamerMessage(blockHeight, false); From b155dc34e6f54f9a21579e32d0de7b3a57f67bca Mon Sep 17 00:00:00 2001 From: Darun Seethammagari Date: Fri, 3 Nov 2023 12:35:12 -0700 Subject: [PATCH 18/24] Perform Block conversion in Lake Client and address cleanliness comments --- runner/src/indexer/indexer.test.ts | 44 ++++++------ runner/src/indexer/indexer.ts | 11 ++- runner/src/lake-client/lake-client.test.ts | 75 +++----------------- runner/src/lake-client/lake-client.ts | 31 ++++---- runner/src/redis-client/redis-client.test.ts | 2 +- runner/src/redis-client/redis-client.ts | 2 +- runner/src/stream-handler/worker.ts | 32 ++++----- 7 files changed, 67 insertions(+), 130 deletions(-) diff --git a/runner/src/indexer/indexer.test.ts b/runner/src/indexer/indexer.test.ts index e936d9412..b9313f631 100644 --- a/runner/src/indexer/indexer.test.ts +++ b/runner/src/indexer/indexer.test.ts @@ -1,4 +1,4 @@ -import { type StreamerMessage } from '@near-lake/primitives'; +import { Block, type StreamerMessage } from '@near-lake/primitives'; import type fetch from 'node-fetch'; import Indexer from './indexer'; @@ -181,7 +181,7 @@ CREATE TABLE }), })); const blockHeight = 456; - const mockStreamerMessage = { + const mockBlock = Block.fromStreamerMessage({ block: { chunks: [], header: { @@ -189,7 +189,7 @@ CREATE TABLE } }, shards: {} - } as unknown as StreamerMessage; + } as unknown as StreamerMessage) as unknown as Block; const indexer = new Indexer({ fetch: mockFetch as unknown as typeof fetch }); @@ -201,7 +201,7 @@ CREATE TABLE `, schema: SIMPLE_SCHEMA }; - await indexer.runFunctions(mockStreamerMessage, functions, false); + await indexer.runFunctions(mockBlock, functions, false); expect(mockFetch.mock.calls).toMatchSnapshot(); }); @@ -702,7 +702,7 @@ CREATE TABLE }), }); - const mockStreamerMessage = { + const mockBlock = Block.fromStreamerMessage({ block: { chunks: [0], header: { @@ -710,7 +710,7 @@ CREATE TABLE } }, shards: {} - } as unknown as StreamerMessage; + } as unknown as StreamerMessage) as unknown as Block; const indexer = new Indexer({ fetch: mockFetch as unknown as typeof fetch }); const functions: Record = {}; @@ -747,7 +747,7 @@ CREATE TABLE schema: SIMPLE_SCHEMA }; - await indexer.runFunctions(mockStreamerMessage, functions, false); + await indexer.runFunctions(mockBlock, functions, false); expect(mockFetch.mock.calls).toMatchSnapshot(); }); @@ -781,7 +781,7 @@ CREATE TABLE }), })); const blockHeight = 456; - const mockStreamerMessage = { + const mockBlock = Block.fromStreamerMessage({ block: { chunks: [0], header: { @@ -789,7 +789,7 @@ CREATE TABLE } }, shards: {} - } as unknown as StreamerMessage; + } as unknown as StreamerMessage) as unknown as Block; const indexer = new Indexer({ fetch: mockFetch as unknown as typeof fetch }); const functions: Record = {}; @@ -800,7 +800,7 @@ CREATE TABLE schema: 
SIMPLE_SCHEMA }; - await expect(indexer.runFunctions(mockStreamerMessage, functions, false)).rejects.toThrow(new Error('boom')); + await expect(indexer.runFunctions(mockBlock, functions, false)).rejects.toThrow(new Error('boom')); expect(mockFetch.mock.calls).toMatchSnapshot(); }); @@ -812,7 +812,7 @@ CREATE TABLE errors: null, }), })); - const mockStreamerMessage = { + const mockBlock = Block.fromStreamerMessage({ block: { chunks: [0], header: { @@ -820,7 +820,7 @@ CREATE TABLE } }, shards: {} - } as unknown as StreamerMessage; + } as unknown as StreamerMessage) as unknown as Block; const provisioner: any = { isUserApiProvisioned: jest.fn().mockReturnValue(false), provisionUserApi: jest.fn(), @@ -835,7 +835,7 @@ CREATE TABLE schema: SIMPLE_SCHEMA, } }; - await indexer.runFunctions(mockStreamerMessage, functions, false, { provision: true }); + await indexer.runFunctions(mockBlock, functions, false, { provision: true }); expect(provisioner.isUserApiProvisioned).toHaveBeenCalledWith('morgs.near', 'test'); expect(provisioner.provisionUserApi).toHaveBeenCalledTimes(1); @@ -854,7 +854,7 @@ CREATE TABLE errors: null, }), })); - const mockStreamerMessage = { + const mockBlock = Block.fromStreamerMessage({ block: { chunks: [0], header: { @@ -862,7 +862,7 @@ CREATE TABLE } }, shards: {} - } as unknown as StreamerMessage; + } as unknown as StreamerMessage) as unknown as Block; const provisioner: any = { isUserApiProvisioned: jest.fn().mockReturnValue(true), provisionUserApi: jest.fn(), @@ -875,7 +875,7 @@ CREATE TABLE schema: SIMPLE_SCHEMA, } }; - await indexer.runFunctions(mockStreamerMessage, functions, false, { provision: true }); + await indexer.runFunctions(mockBlock, functions, false, { provision: true }); expect(provisioner.provisionUserApi).not.toHaveBeenCalled(); }); @@ -888,7 +888,7 @@ CREATE TABLE errors: null, }), })); - const mockStreamerMessage = { + const mockBlock = Block.fromStreamerMessage({ block: { chunks: [0], header: { @@ -896,7 +896,7 @@ CREATE TABLE } }, shards: {} - } as unknown as StreamerMessage; + } as unknown as StreamerMessage) as unknown as Block; const provisioner: any = { isUserApiProvisioned: jest.fn().mockReturnValue(true), provisionUserApi: jest.fn(), @@ -911,7 +911,7 @@ CREATE TABLE schema: SIMPLE_SCHEMA, } }; - await indexer.runFunctions(mockStreamerMessage, functions, false, { provision: true }); + await indexer.runFunctions(mockBlock, functions, false, { provision: true }); expect(provisioner.provisionUserApi).not.toHaveBeenCalled(); expect(mockFetch.mock.calls).toMatchSnapshot(); @@ -925,7 +925,7 @@ CREATE TABLE errors: null, }), })); - const mockStreamerMessage = { + const mockBlock = Block.fromStreamerMessage({ block: { chunks: [0], header: { @@ -933,7 +933,7 @@ CREATE TABLE } }, shards: {} - } as unknown as StreamerMessage; + } as unknown as StreamerMessage) as unknown as Block; const error = new Error('something went wrong with provisioning'); const provisioner: any = { isUserApiProvisioned: jest.fn().mockReturnValue(false), @@ -950,7 +950,7 @@ CREATE TABLE } }; - await expect(indexer.runFunctions(mockStreamerMessage, functions, false, { provision: true })).rejects.toThrow(error); + await expect(indexer.runFunctions(mockBlock, functions, false, { provision: true })).rejects.toThrow(error); expect(mockFetch.mock.calls).toMatchSnapshot(); }); diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index 72975d2ab..92b49702e 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -1,6 +1,6 @@ import fetch, { type 
Response } from 'node-fetch'; import { VM } from 'vm2'; -import { Block, type StreamerMessage } from '@near-lake/primitives'; +import { type Block } from '@near-lake/primitives'; import { Parser } from 'node-sql-parser'; import Provisioner from '../provisioner'; @@ -49,15 +49,14 @@ export default class Indexer { } async runFunctions ( - streamerMessage: StreamerMessage, + block: Block, functions: Record, isHistorical: boolean, options: { provision?: boolean } = { provision: false } ): Promise { - const blockHeight = Number(streamerMessage.block.header.height); - const blockWithHelpers = Block.fromStreamerMessage(streamerMessage); + const blockHeight = block.blockHeight; - const lag = Date.now() - Math.floor(Number(blockWithHelpers.header().timestampNanosec) / 1000000); + const lag = Date.now() - Math.floor(Number(block.header().timestampNanosec) / 1000000); const simultaneousPromises: Array> = []; const allMutations: string[] = []; @@ -94,7 +93,7 @@ export default class Indexer { const vm = new VM({ timeout: 3000, allowAsync: true }); const context = this.buildContext(indexerFunction.schema, functionName, blockHeight, hasuraRoleName); - vm.freeze(blockWithHelpers, 'block'); + vm.freeze(block, 'block'); vm.freeze(context, 'context'); vm.freeze(context, 'console'); // provide console.log via context.log diff --git a/runner/src/lake-client/lake-client.test.ts b/runner/src/lake-client/lake-client.test.ts index b3f8760c7..f04a18a28 100644 --- a/runner/src/lake-client/lake-client.test.ts +++ b/runner/src/lake-client/lake-client.test.ts @@ -1,4 +1,3 @@ -import { Block } from '@near-lake/primitives'; import { GetObjectCommand, type S3Client } from '@aws-sdk/client-s3'; import LakeClient from './lake-client'; import type RedisClient from '../redis-client'; @@ -8,58 +7,7 @@ describe('LakeClient', () => { getStreamerMessage: jest.fn() } as unknown as RedisClient; - test('Indexer.fetchBlock() should fetch a block from S3', async () => { - const author = 'dokiacapital.poolv1.near'; - const mockData = JSON.stringify({ - author - }); - const mockSend = jest.fn().mockResolvedValue({ - Body: { - transformToString: () => mockData - } - }); - const mockS3 = { - send: mockSend, - } as unknown as S3Client; - - const client = new LakeClient('mainnet', mockS3, transparentRedis); - - const blockHeight = 84333960; - const block = await client.fetchBlockPromise(blockHeight); - const params = { - Bucket: 'near-lake-data-mainnet', - Key: `${blockHeight.toString().padStart(12, '0')}/block.json` - }; - - expect(mockS3.send).toHaveBeenCalledTimes(1); - expect(JSON.stringify(mockSend.mock.calls[0][0])).toMatch(JSON.stringify(new GetObjectCommand(params))); - expect(block.author).toEqual(author); - }); - - test('Indexer.fetchShard() should fetch a shard from S3', async () => { - const mockData = JSON.stringify({}); - const mockSend = jest.fn().mockResolvedValue({ - Body: { - transformToString: () => mockData - } - }); - const mockS3 = { - send: mockSend, - } as unknown as S3Client; - const client = new LakeClient('mainnet', mockS3, transparentRedis); - - const blockHeight = 82699904; - const shard = 0; - const params = { - Bucket: 'near-lake-data-mainnet', - Key: `${blockHeight.toString().padStart(12, '0')}/shard_${shard}.json` - }; - await client.fetchShardPromise(blockHeight, shard); - - expect(JSON.stringify(mockSend.mock.calls[0][0])).toMatch(JSON.stringify(new GetObjectCommand(params))); - }); - - test('Indexer.fetchStreamerMessage() should fetch the block and shards from S3 upon cache miss', async () => { + 
test('Indexer.fetchBlock() should fetch the block and shards from S3 upon cache miss', async () => { const blockHeight = 85233529; const blockHash = 'xyz'; const mockSend = jest.fn() @@ -84,7 +32,7 @@ describe('LakeClient', () => { } as unknown as S3Client; const client = new LakeClient('mainnet', mockS3, transparentRedis); - const streamerMessage = await client.fetchStreamerMessage(blockHeight, true); + const block = await client.fetchBlock(blockHeight, true); expect(mockSend).toHaveBeenCalledTimes(5); expect(JSON.stringify(mockSend.mock.calls[0][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({ @@ -96,13 +44,11 @@ describe('LakeClient', () => { Key: `${blockHeight.toString().padStart(12, '0')}/shard_0.json` }))); - const block = Block.fromStreamerMessage(streamerMessage); - expect(block.blockHeight).toEqual(blockHeight); expect(block.blockHash).toEqual(blockHash); }); - test('fetchStreamerMessage should fetch the message from cache and return it', async () => { + test('fetchBlock should fetch the streamer message from cache, convert it to block, and return it', async () => { const blockHeight = 85233529; const blockHash = 'xyz'; const getMessage = jest.fn() @@ -124,19 +70,18 @@ describe('LakeClient', () => { const mockS3 = {} as unknown as S3Client; const client = new LakeClient('mainnet', mockS3, mockRedis); - const streamerMessage = await client.fetchStreamerMessage(blockHeight, false); + const block = await client.fetchBlock(blockHeight, false); expect(getMessage).toHaveBeenCalledTimes(1); expect(JSON.stringify(getMessage.mock.calls[0])).toEqual( `[${blockHeight}]` ); - const block = Block.fromStreamerMessage(streamerMessage); expect(block.blockHeight).toEqual(blockHeight); expect(block.blockHash).toEqual(blockHash); }); - test('fetchStreamerMessage should fetch the block and shards from S3 upon cache miss', async () => { + test('fetchBlock should fetch the block and shards from S3 upon cache miss', async () => { const blockHeight = 85233529; const blockHash = 'xyz'; const mockSend = jest.fn() @@ -161,7 +106,7 @@ describe('LakeClient', () => { } as unknown as S3Client; const client = new LakeClient('mainnet', mockS3, transparentRedis); - const streamerMessage = await client.fetchStreamerMessage(blockHeight, false); + const block = await client.fetchBlock(blockHeight, false); expect(mockSend).toHaveBeenCalledTimes(5); expect(JSON.stringify(mockSend.mock.calls[0][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({ @@ -174,13 +119,11 @@ describe('LakeClient', () => { }))); expect(transparentRedis.getStreamerMessage).toHaveBeenCalledTimes(1); - const block = Block.fromStreamerMessage(streamerMessage); - expect(block.blockHeight).toEqual(blockHeight); expect(block.blockHash).toEqual(blockHash); }); - test('fetchStreamerMessage should fetch the block and shards from S3 and not cache if historical', async () => { + test('fetchBlock should not hit cache and instead fetch the block and shards from S3 if historical', async () => { const blockHeight = 85233529; const blockHash = 'xyz'; const mockSend = jest.fn() @@ -208,7 +151,7 @@ describe('LakeClient', () => { } as unknown as RedisClient; const client = new LakeClient('mainnet', mockS3, mockRedis); - const streamerMessage = await client.fetchStreamerMessage(blockHeight, true); + const block = await client.fetchBlock(blockHeight, true); expect(mockSend).toHaveBeenCalledTimes(5); expect(JSON.stringify(mockSend.mock.calls[0][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({ @@ -221,8 +164,6 @@ describe('LakeClient', () => { 
}))); expect(mockRedis.getStreamerMessage).toHaveBeenCalledTimes(0); - const block = Block.fromStreamerMessage(streamerMessage); - expect(block.blockHeight).toEqual(blockHeight); expect(block.blockHash).toEqual(blockHash); }); diff --git a/runner/src/lake-client/lake-client.ts b/runner/src/lake-client/lake-client.ts index 1615ed8b2..999d676d9 100644 --- a/runner/src/lake-client/lake-client.ts +++ b/runner/src/lake-client/lake-client.ts @@ -1,16 +1,13 @@ import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3'; -import { type StreamerMessage } from '@near-lake/primitives'; +import { Block } from '@near-lake/primitives'; import { METRICS } from '../metrics'; -import RedisClient from '../redis-client/redis-client'; +import RedisClient from '../redis-client'; export default class LakeClient { - network: string; - private readonly s3Client: S3Client; - private readonly redisClient: RedisClient; constructor ( - network: string = 'mainnet', - s3Client: S3Client = new S3Client(), - redisClient: RedisClient = new RedisClient() + private readonly network: string = 'mainnet', + private readonly s3Client: S3Client = new S3Client(), + private readonly redisClient: RedisClient = new RedisClient() ) { this.network = network; this.s3Client = s3Client; @@ -18,17 +15,17 @@ export default class LakeClient { } // pad with 0s to 12 digits - normalizeBlockHeight (blockHeight: number): string { + private normalizeBlockHeight (blockHeight: number): string { return blockHeight.toString().padStart(12, '0'); } - async fetchShardsPromises (blockHeight: number, numberOfShards: number): Promise>> { + private async fetchShardsPromises (blockHeight: number, numberOfShards: number): Promise>> { return ([...Array(numberOfShards).keys()].map(async (shardId) => await this.fetchShardPromise(blockHeight, shardId) )); } - async fetchShardPromise (blockHeight: number, shardId: number): Promise { + private async fetchShardPromise (blockHeight: number, shardId: number): Promise { const params = { Bucket: `near-lake-data-${this.network}`, Key: `${this.normalizeBlockHeight(blockHeight)}/shard_${shardId}.json`, @@ -38,7 +35,7 @@ export default class LakeClient { return JSON.parse(shardData, (_key, value) => this.renameUnderscoreFieldsToCamelCase(value)); } - async fetchBlockPromise (blockHeight: number): Promise { + private async fetchBlockPromise (blockHeight: number): Promise { const file = 'block.json'; const folder = this.normalizeBlockHeight(blockHeight); const params = { @@ -50,7 +47,7 @@ export default class LakeClient { return JSON.parse(blockData, (_key, value) => this.renameUnderscoreFieldsToCamelCase(value)); } - renameUnderscoreFieldsToCamelCase (value: Record): Record { + private renameUnderscoreFieldsToCamelCase (value: Record): Record { if (value !== null && typeof value === 'object' && !Array.isArray(value)) { // It's a non-null, non-array object, create a replacement with the keys initially-capped const newValue: any = {}; @@ -71,13 +68,13 @@ export default class LakeClient { return value; } - async fetchStreamerMessage (blockHeight: number, isHistorical: boolean): Promise { + async fetchBlock (blockHeight: number, isHistorical: boolean): Promise { if (!isHistorical) { const cachedMessage = await this.redisClient.getStreamerMessage(blockHeight); if (cachedMessage) { METRICS.CACHE_HIT.labels().inc(); const parsedMessage = JSON.parse(cachedMessage); - return parsedMessage; + return Block.fromStreamerMessage(parsedMessage); } else { METRICS.CACHE_MISS.labels().inc(); } @@ -89,9 +86,9 @@ export default class 
LakeClient { const results = await Promise.all([blockPromise, ...shardsPromises]); const block = results.shift(); const shards = results; - return { + return Block.fromStreamerMessage({ block, shards, - }; + }); } } diff --git a/runner/src/redis-client/redis-client.test.ts b/runner/src/redis-client/redis-client.test.ts index 26030f249..1b985b555 100644 --- a/runner/src/redis-client/redis-client.test.ts +++ b/runner/src/redis-client/redis-client.test.ts @@ -10,7 +10,7 @@ describe('RedisClient', () => { const client = new RedisClient(mockClient); - const message = await client.getNextStreamMessage('streamKey'); + const message = await client.getStreamMessage('streamKey'); expect(mockClient.xRead).toHaveBeenCalledWith( { key: 'streamKey', id: '0' }, diff --git a/runner/src/redis-client/redis-client.ts b/runner/src/redis-client/redis-client.ts index 26fc7cd83..72c58ebb8 100644 --- a/runner/src/redis-client/redis-client.ts +++ b/runner/src/redis-client/redis-client.ts @@ -44,7 +44,7 @@ export default class RedisClient { await this.client.disconnect(); } - async getNextStreamMessage ( + async getStreamMessage ( streamKey: string, count = 1, streamId = this.SMALLEST_STREAM_ID diff --git a/runner/src/stream-handler/worker.ts b/runner/src/stream-handler/worker.ts index 3068f8a59..ea31b3366 100644 --- a/runner/src/stream-handler/worker.ts +++ b/runner/src/stream-handler/worker.ts @@ -4,7 +4,7 @@ import promClient from 'prom-client'; import Indexer from '../indexer'; import RedisClient from '../redis-client'; import { METRICS } from '../metrics'; -import type { StreamerMessage } from '@near-lake/primitives'; +import type { Block } from '@near-lake/primitives'; import LakeClient from '../lake-client/lake-client'; if (isMainThread) { @@ -18,7 +18,7 @@ const lakeClient = new LakeClient(); let isHistorical = false; interface QueueMessage { - streamerMessage: StreamerMessage + block: Block streamId: string } const queue: Array> = []; @@ -37,18 +37,18 @@ void (async function main () { })(); async function handleStream (streamKey: string): Promise { - void streamerMessageQueueProducer(queue, streamKey); - void streamerMessageQueueConsumer(queue, streamKey); + void blockQueueProducer(queue, streamKey); + void blockQueueConsumer(queue, streamKey); } -async function streamerMessageQueueProducer (queue: Array>, streamKey: string): Promise { +async function blockQueueProducer (queue: Array>, streamKey: string): Promise { while (true) { const preFetchCount = HISTORICAL_BATCH_SIZE - queue.length; if (preFetchCount <= 0) { await sleep(300); // Wait for more messages in array to process continue; } - const messages = await redisClient.getNextStreamMessage(streamKey, preFetchCount); + const messages = await redisClient.getStreamMessage(streamKey, preFetchCount); if (messages == null) { await sleep(1000); // Wait for new messages to appear in stream continue; @@ -62,7 +62,7 @@ async function streamerMessageQueueProducer (queue: Array> } } -async function streamerMessageQueueConsumer (queue: Array>, streamKey: string): Promise { +async function blockQueueConsumer (queue: Array>, streamKey: string): Promise { const streamType = redisClient.getStreamType(streamKey); const indexerConfig = await redisClient.getStreamStorage(streamKey); const indexerName = `${indexerConfig.account_id}/${indexerConfig.function_name}`; @@ -84,17 +84,17 @@ async function streamerMessageQueueConsumer (queue: Array> await sleep(1000); // Wait for new message to process continue; } - const { streamerMessage, streamId } = queueMessage; + const { 
block, streamId } = queueMessage; - if (streamerMessage === undefined || streamerMessage?.block.header.height == null) { - console.error('Streamer message does not have block height', streamerMessage); + if (block === undefined || block.blockHeight == null) { + console.error('Block failed to process or does not have block height', block); continue; } METRICS.BLOCK_WAIT_DURATION.labels({ indexer: indexerName, type: streamType }).set(performance.now() - blockStartTime); try { - await indexer.runFunctions(streamerMessage, functions, false, { provision: true }); - METRICS.LAST_PROCESSED_BLOCK.labels({ indexer: indexerName, type: streamType }).set(streamerMessage.block.header.height); + await indexer.runFunctions(block, functions, false, { provision: true }); + METRICS.LAST_PROCESSED_BLOCK.labels({ indexer: indexerName, type: streamType }).set(block.blockHeight); await redisClient.deleteStreamMessage(streamKey, streamId); @@ -114,13 +114,13 @@ async function streamerMessageQueueConsumer (queue: Array> } function fetchAndQueue (queue: Array>, blockHeight: number, id: string): void { - queue.push(transformStreamerMessageToQueueMessage(blockHeight, id)); + queue.push(transformBlockToQueueMessage(blockHeight, id)); } -async function transformStreamerMessageToQueueMessage (blockHeight: number, streamId: string): Promise { - const streamerMessage = await lakeClient.fetchStreamerMessage(blockHeight, isHistorical); +async function transformBlockToQueueMessage (blockHeight: number, streamId: string): Promise { + const block = await lakeClient.fetchBlock(blockHeight, isHistorical); return { - streamerMessage, + block, streamId }; } From 1aa362794bff5b429a6a5acdae8b30e6d09bf87f Mon Sep 17 00:00:00 2001 From: Darun Seethammagari Date: Fri, 3 Nov 2023 12:46:54 -0700 Subject: [PATCH 19/24] Remove Unrelated Metrics --- runner/src/indexer/indexer.ts | 4 ---- runner/src/metrics.ts | 15 --------------- runner/src/stream-handler/worker.ts | 1 - 3 files changed, 20 deletions(-) diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index 92b49702e..fd69a3898 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -5,7 +5,6 @@ import { Parser } from 'node-sql-parser'; import Provisioner from '../provisioner'; import DmlHandler from '../dml-handler/dml-handler'; -import { METRICS } from '../metrics'; interface Dependencies { fetch: typeof fetch @@ -98,7 +97,6 @@ export default class Indexer { vm.freeze(context, 'console'); // provide console.log via context.log const modifiedFunction = this.transformIndexerFunction(indexerFunction.code); - const functionCodeExecutionLatency = performance.now(); try { await vm.run(modifiedFunction); } catch (e) { @@ -110,8 +108,6 @@ export default class Indexer { await this.writeLog(functionName, blockHeight, 'Error running IndexerFunction', error.message); throw e; } - METRICS.USER_CODE_EXECUTION_DURATION.labels({ indexer: functionName, type: isHistorical ? 
'historical' : 'realtime' }) - .observe(performance.now() - functionCodeExecutionLatency); simultaneousPromises.push(this.writeFunctionState(functionName, blockHeight, isHistorical)); } catch (e) { console.error(`${functionName}: Failed to run function`, e); diff --git a/runner/src/metrics.ts b/runner/src/metrics.ts index 4022980d7..2ff612ece 100644 --- a/runner/src/metrics.ts +++ b/runner/src/metrics.ts @@ -17,24 +17,12 @@ const CACHE_MISS = new Counter({ help: 'The number of times cache was missed' }); -const USER_CODE_EXECUTION_DURATION = new Histogram({ - name: 'queryapi_runner_code_execution_milliseconds', - help: 'Time spent running a user\'s code', - labelNames: ['indexer', 'type'], -}); - const UNPROCESSED_STREAM_MESSAGES = new Gauge({ name: 'queryapi_runner_unprocessed_stream_messages', help: 'Number of Redis Stream messages not yet processed', labelNames: ['indexer', 'type'], }); -const LAST_PROCESSED_BLOCK = new Gauge({ - name: 'queryapi_runner_last_processed_block', - help: 'The last block processed by an indexer function', - labelNames: ['indexer', 'type'], -}); - const EXECUTION_DURATION = new Histogram({ name: 'queryapi_runner_execution_duration_milliseconds', help: 'Time taken to execute an indexer function', @@ -45,11 +33,8 @@ export const METRICS = { BLOCK_WAIT_DURATION, CACHE_HIT, CACHE_MISS, - USER_CODE_EXECUTION_DURATION, UNPROCESSED_STREAM_MESSAGES, - LAST_PROCESSED_BLOCK, EXECUTION_DURATION, - }; const aggregatorRegistry = new AggregatorRegistry(); diff --git a/runner/src/stream-handler/worker.ts b/runner/src/stream-handler/worker.ts index ea31b3366..72991334f 100644 --- a/runner/src/stream-handler/worker.ts +++ b/runner/src/stream-handler/worker.ts @@ -94,7 +94,6 @@ async function blockQueueConsumer (queue: Array>, streamKe try { await indexer.runFunctions(block, functions, false, { provision: true }); - METRICS.LAST_PROCESSED_BLOCK.labels({ indexer: indexerName, type: streamType }).set(block.blockHeight); await redisClient.deleteStreamMessage(streamKey, streamId); From b050e4c8024119343c558077eb103b70479e2c1c Mon Sep 17 00:00:00 2001 From: Darun Seethammagari Date: Fri, 3 Nov 2023 16:02:11 -0700 Subject: [PATCH 20/24] Eliminate Global Variables and Clean Up Code --- runner/src/redis-client/index.ts | 2 +- runner/src/redis-client/redis-client.test.ts | 2 +- runner/src/redis-client/redis-client.ts | 4 +- runner/src/stream-handler/worker.ts | 117 +++++++++++-------- 4 files changed, 70 insertions(+), 55 deletions(-) diff --git a/runner/src/redis-client/index.ts b/runner/src/redis-client/index.ts index efa0f96e7..938571c25 100644 --- a/runner/src/redis-client/index.ts +++ b/runner/src/redis-client/index.ts @@ -1 +1 @@ -export { default } from './redis-client'; +export { default, type StreamType } from './redis-client'; diff --git a/runner/src/redis-client/redis-client.test.ts b/runner/src/redis-client/redis-client.test.ts index 1b985b555..36f0a36bf 100644 --- a/runner/src/redis-client/redis-client.test.ts +++ b/runner/src/redis-client/redis-client.test.ts @@ -10,7 +10,7 @@ describe('RedisClient', () => { const client = new RedisClient(mockClient); - const message = await client.getStreamMessage('streamKey'); + const message = await client.getStreamMessages('streamKey'); expect(mockClient.xRead).toHaveBeenCalledWith( { key: 'streamKey', id: '0' }, diff --git a/runner/src/redis-client/redis-client.ts b/runner/src/redis-client/redis-client.ts index 72c58ebb8..9b2ae7309 100644 --- a/runner/src/redis-client/redis-client.ts +++ b/runner/src/redis-client/redis-client.ts @@ 
-14,7 +14,7 @@ interface StreamStorage { schema: string } -type StreamType = 'historical' | 'real-time'; +export type StreamType = 'historical' | 'real-time'; export default class RedisClient { SMALLEST_STREAM_ID = '0'; @@ -44,7 +44,7 @@ export default class RedisClient { await this.client.disconnect(); } - async getStreamMessage ( + async getStreamMessages ( streamKey: string, count = 1, streamId = this.SMALLEST_STREAM_ID diff --git a/runner/src/stream-handler/worker.ts b/runner/src/stream-handler/worker.ts index 72991334f..c7cb52828 100644 --- a/runner/src/stream-handler/worker.ts +++ b/runner/src/stream-handler/worker.ts @@ -2,69 +2,86 @@ import { isMainThread, parentPort, workerData } from 'worker_threads'; import promClient from 'prom-client'; import Indexer from '../indexer'; -import RedisClient from '../redis-client'; +import RedisClient, { type StreamType } from '../redis-client'; import { METRICS } from '../metrics'; import type { Block } from '@near-lake/primitives'; -import LakeClient from '../lake-client/lake-client'; +import LakeClient from '../lake-client'; if (isMainThread) { throw new Error('Worker should not be run on main thread'); } - -const HISTORICAL_BATCH_SIZE = 100; -const indexer = new Indexer(); -const redisClient = new RedisClient(); -const lakeClient = new LakeClient(); -let isHistorical = false; - interface QueueMessage { block: Block - streamId: string + streamMessageId: string +} +type PrefetchQueue = Array>; + +interface WorkerContext { + redisClient: RedisClient + lakeClient: LakeClient + queue: PrefetchQueue + streamKey: string + streamType: StreamType } -const queue: Array> = []; const sleep = async (ms: number): Promise => { await new Promise((resolve) => setTimeout(resolve, ms)); }; void (async function main () { const { streamKey } = workerData; + const redisClient = new RedisClient(); + const workerContext: WorkerContext = { + redisClient, + lakeClient: new LakeClient(), + queue: [], + streamKey, + streamType: redisClient.getStreamType(streamKey), + }; console.log('Started processing stream: ', streamKey); - const streamType = redisClient.getStreamType(streamKey); - isHistorical = (streamType === 'historical'); - - await handleStream(streamKey); + await handleStream(workerContext, streamKey); })(); -async function handleStream (streamKey: string): Promise { - void blockQueueProducer(queue, streamKey); - void blockQueueConsumer(queue, streamKey); +async function handleStream (workerContext: WorkerContext, streamKey: string): Promise { + void blockQueueProducer(workerContext, streamKey); + void blockQueueConsumer(workerContext, streamKey); } -async function blockQueueProducer (queue: Array>, streamKey: string): Promise { +function incrementId (id: string): string { + const [main, sequence] = id.split('-'); + return `${Number(main) + 1}-${sequence}`; +} + +async function blockQueueProducer (workerContext: WorkerContext, streamKey: string): Promise { + const HISTORICAL_BATCH_SIZE = 100; + let streamMessageStartId = '0'; + while (true) { - const preFetchCount = HISTORICAL_BATCH_SIZE - queue.length; + const preFetchCount = HISTORICAL_BATCH_SIZE - workerContext.queue.length; if (preFetchCount <= 0) { - await sleep(300); // Wait for more messages in array to process + await sleep(300); continue; } - const messages = await redisClient.getStreamMessage(streamKey, preFetchCount); + const messages = await workerContext.redisClient.getStreamMessages(streamKey, preFetchCount, streamMessageStartId); if (messages == null) { - await sleep(1000); // Wait for new 
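
One note on the cursor logic introduced here: Redis stream IDs take the form <millisecondsTime>-<sequenceNumber>, and XREAD treats the ID it is handed as exclusive, returning only entries strictly after it. Passing the last-delivered ID back unchanged is therefore already safe, and bumping the milliseconds component, as incrementId does, could in principle step over entries that share a timestamp but carry higher sequence numbers. A sketch of a successor function that advances the sequence part instead (name hypothetical, not part of this patch):

    // Smallest ID strictly greater than the given one, assuming the
    // canonical '<ms>-<seq>' Redis stream ID format.
    function nextStreamId (id: string): string {
      const [ms, seq] = id.split('-');
      return `${ms}-${Number(seq) + 1}`;
    }

    // nextStreamId('1699999999999-0') === '1699999999999-1'
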
messages to appear in stream + await sleep(1000); continue; } console.log(`Fetched ${messages?.length} messages from stream ${streamKey}`); for (const streamMessage of messages) { const { id, message } = streamMessage; - fetchAndQueue(queue, Number(message.block_height), id); + workerContext.queue.push(generateQueueMessage(workerContext, Number(message.block_height), id)); } + + streamMessageStartId = incrementId(messages[messages.length - 1].id); } } -async function blockQueueConsumer (queue: Array>, streamKey: string): Promise { - const streamType = redisClient.getStreamType(streamKey); - const indexerConfig = await redisClient.getStreamStorage(streamKey); +async function blockQueueConsumer (workerContext: WorkerContext, streamKey: string): Promise { + const streamType = workerContext.redisClient.getStreamType(streamKey); + const indexer = new Indexer(); + const indexerConfig = await workerContext.redisClient.getStreamStorage(streamKey); const indexerName = `${indexerConfig.account_id}/${indexerConfig.function_name}`; const functions = { [indexerName]: { @@ -77,25 +94,27 @@ async function blockQueueConsumer (queue: Array>, streamKe }; while (true) { - const startTime = performance.now(); - const blockStartTime = startTime; - const queueMessage = await queue.shift(); - if (queueMessage === undefined) { - await sleep(1000); // Wait for new message to process - continue; - } - const { block, streamId } = queueMessage; - - if (block === undefined || block.blockHeight == null) { - console.error('Block failed to process or does not have block height', block); - continue; - } - METRICS.BLOCK_WAIT_DURATION.labels({ indexer: indexerName, type: streamType }).set(performance.now() - blockStartTime); - + let streamMessageId = ''; try { + const startTime = performance.now(); + const blockStartTime = startTime; + const queueMessage = await workerContext.queue.at(0); + if (queueMessage === undefined) { + await sleep(1000); + continue; + } + const block = queueMessage.block; + streamMessageId = queueMessage.streamMessageId; + + if (block === undefined || block.blockHeight == null) { + console.error('Block failed to process or does not have block height', block); + continue; + } + METRICS.BLOCK_WAIT_DURATION.labels({ indexer: indexerName, type: streamType }).set(performance.now() - blockStartTime); await indexer.runFunctions(block, functions, false, { provision: true }); - await redisClient.deleteStreamMessage(streamKey, streamId); + await workerContext.redisClient.deleteStreamMessage(streamKey, streamMessageId); + await workerContext.queue.shift(); METRICS.EXECUTION_DURATION.labels({ indexer: indexerName, type: streamType }).observe(performance.now() - startTime); @@ -104,7 +123,7 @@ async function blockQueueConsumer (queue: Array>, streamKe await sleep(10000); console.log(`Failed: ${indexerName}`, err); } finally { - const unprocessedMessages = await redisClient.getUnprocessedStreamMessages(streamKey, streamId); + const unprocessedMessages = await workerContext.redisClient.getUnprocessedStreamMessages(streamKey, streamMessageId); METRICS.UNPROCESSED_STREAM_MESSAGES.labels({ indexer: indexerName, type: streamType }).set(unprocessedMessages?.length ?? 
0); parentPort?.postMessage(await promClient.register.getMetricsAsJSON()); @@ -112,14 +131,10 @@ async function blockQueueConsumer (queue: Array>, streamKe } } -function fetchAndQueue (queue: Array>, blockHeight: number, id: string): void { - queue.push(transformBlockToQueueMessage(blockHeight, id)); -} - -async function transformBlockToQueueMessage (blockHeight: number, streamId: string): Promise { - const block = await lakeClient.fetchBlock(blockHeight, isHistorical); +async function generateQueueMessage (workerContext: WorkerContext, blockHeight: number, streamMessageId: string): Promise { + const block = await workerContext.lakeClient.fetchBlock(blockHeight, workerContext.streamType === 'historical'); return { block, - streamId + streamMessageId }; } From 7847eb0c4250a68a8a2e0bb39d763a34c00f2e94 Mon Sep 17 00:00:00 2001 From: Darun Seethammagari Date: Mon, 6 Nov 2023 13:27:59 -0800 Subject: [PATCH 21/24] Experiment with Blocking --- runner/src/metrics.ts | 7 ++++ runner/src/redis-client/redis-client.test.ts | 20 ++++++++++- runner/src/redis-client/redis-client.ts | 5 +-- runner/src/stream-handler/worker.ts | 35 ++++++++++++++------ 4 files changed, 53 insertions(+), 14 deletions(-) diff --git a/runner/src/metrics.ts b/runner/src/metrics.ts index 2ff612ece..e76d4d86d 100644 --- a/runner/src/metrics.ts +++ b/runner/src/metrics.ts @@ -23,6 +23,12 @@ const UNPROCESSED_STREAM_MESSAGES = new Gauge({ labelNames: ['indexer', 'type'], }); +const LAST_PROCESSED_BLOCK_HEIGHT = new Gauge({ + name: 'queryapi_runner_last_processed_block_height', + help: 'Previous block height processed by an indexer', + labelNames: ['indexer', 'type'], +}); + const EXECUTION_DURATION = new Histogram({ name: 'queryapi_runner_execution_duration_milliseconds', help: 'Time taken to execute an indexer function', @@ -34,6 +40,7 @@ export const METRICS = { CACHE_HIT, CACHE_MISS, UNPROCESSED_STREAM_MESSAGES, + LAST_PROCESSED_BLOCK_HEIGHT, EXECUTION_DURATION, }; diff --git a/runner/src/redis-client/redis-client.test.ts b/runner/src/redis-client/redis-client.test.ts index 36f0a36bf..f9d2669d7 100644 --- a/runner/src/redis-client/redis-client.test.ts +++ b/runner/src/redis-client/redis-client.test.ts @@ -14,7 +14,25 @@ describe('RedisClient', () => { expect(mockClient.xRead).toHaveBeenCalledWith( { key: 'streamKey', id: '0' }, - { COUNT: 1 } + { BLOCK: 0, COUNT: 1 } + ); + expect(message).toBeUndefined(); + }); + + it('returns count of messages after id with block', async () => { + const mockClient = { + on: jest.fn(), + connect: jest.fn().mockResolvedValue(null), + xRead: jest.fn().mockResolvedValue(null), + } as any; + + const client = new RedisClient(mockClient); + + const message = await client.getStreamMessages('streamKey', 10, '123-0', 1000); + + expect(mockClient.xRead).toHaveBeenCalledWith( + { key: 'streamKey', id: '123-0' }, + { BLOCK: 1000, COUNT: 10 } ); expect(message).toBeUndefined(); }); diff --git a/runner/src/redis-client/redis-client.ts b/runner/src/redis-client/redis-client.ts index 9b2ae7309..ea763988f 100644 --- a/runner/src/redis-client/redis-client.ts +++ b/runner/src/redis-client/redis-client.ts @@ -47,11 +47,12 @@ export default class RedisClient { async getStreamMessages ( streamKey: string, count = 1, - streamId = this.SMALLEST_STREAM_ID + streamId = this.SMALLEST_STREAM_ID, + block = 0 ): Promise { const results = await this.client.xRead( { key: streamKey, id: streamId }, - { COUNT: count } + { COUNT: count, BLOCK: block } ); return results?.[0].messages as StreamMessage[]; diff --git 
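
For context on the blocking experiment above: XREAD's BLOCK option parks the call server-side for up to the given number of milliseconds waiting for entries newer than the supplied ID, instead of resolving to null immediately, and BLOCK: 0 waits indefinitely. A sketch of the two modes side by side (client setup omitted; key, count, and timeout are illustrative):

    import { createClient } from 'redis';

    type StreamReader = ReturnType<typeof createClient>;

    async function readNow (client: StreamReader, key: string, lastId: string) {
      // Resolves to null right away if nothing newer than lastId exists.
      return await client.xRead({ key, id: lastId }, { COUNT: 10 });
    }

    async function readOrWait (client: StreamReader, key: string, lastId: string) {
      // Holds the connection for up to one second waiting for new entries.
      return await client.xRead({ key, id: lastId }, { COUNT: 10, BLOCK: 1000 });
    }

The trade-off is that a blocked XREAD ties up the connection for the duration of the wait, which is presumably why a later patch in this series backs it out in favour of a short sleep.
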
a/runner/src/stream-handler/worker.ts b/runner/src/stream-handler/worker.ts index c7cb52828..a41552557 100644 --- a/runner/src/stream-handler/worker.ts +++ b/runner/src/stream-handler/worker.ts @@ -52,19 +52,26 @@ function incrementId (id: string): string { return `${Number(main) + 1}-${sequence}`; } -async function blockQueueProducer (workerContext: WorkerContext, streamKey: string): Promise { +async function waitForQueueSpace (workerContext: WorkerContext): Promise { const HISTORICAL_BATCH_SIZE = 100; + return await new Promise((resolve) => { + const intervalId = setInterval(() => { + const preFetchCount = HISTORICAL_BATCH_SIZE - workerContext.queue.length; + if (preFetchCount > 0) { + clearInterval(intervalId); + resolve(preFetchCount); + } + }, 100); + }); +} + +async function blockQueueProducer (workerContext: WorkerContext, streamKey: string): Promise { let streamMessageStartId = '0'; while (true) { - const preFetchCount = HISTORICAL_BATCH_SIZE - workerContext.queue.length; - if (preFetchCount <= 0) { - await sleep(300); - continue; - } - const messages = await workerContext.redisClient.getStreamMessages(streamKey, preFetchCount, streamMessageStartId); + const preFetchCount = await waitForQueueSpace(workerContext); + const messages = await workerContext.redisClient.getStreamMessages(streamKey, preFetchCount, streamMessageStartId, 1000); if (messages == null) { - await sleep(1000); continue; } console.log(`Fetched ${messages?.length} messages from stream ${streamKey}`); @@ -96,13 +103,15 @@ async function blockQueueConsumer (workerContext: WorkerContext, streamKey: stri while (true) { let streamMessageId = ''; try { - const startTime = performance.now(); - const blockStartTime = startTime; + while (workerContext.queue.length === 0) { + await sleep(100); + } const queueMessage = await workerContext.queue.at(0); if (queueMessage === undefined) { - await sleep(1000); continue; } + const startTime = performance.now(); + const blockStartTime = startTime; const block = queueMessage.block; streamMessageId = queueMessage.streamMessageId; @@ -118,6 +127,10 @@ async function blockQueueConsumer (workerContext: WorkerContext, streamKey: stri METRICS.EXECUTION_DURATION.labels({ indexer: indexerName, type: streamType }).observe(performance.now() - startTime); + if (streamType === 'historical') { + METRICS.LAST_PROCESSED_BLOCK_HEIGHT.labels({ indexer: indexerName, type: streamType }).set(block.blockHeight); + } + console.log(`Success: ${indexerName}`); } catch (err) { await sleep(10000); From 43911fd4277841469bc18424e8fa06486da1a6fc Mon Sep 17 00:00:00 2001 From: Darun Seethammagari Date: Wed, 8 Nov 2023 10:52:55 -0800 Subject: [PATCH 22/24] Address More Comments --- runner/src/lake-client/lake-client.ts | 10 ++---- runner/src/redis-client/redis-client.test.ts | 2 +- runner/src/redis-client/redis-client.ts | 2 +- runner/src/stream-handler/worker.ts | 33 +++++++------------- 4 files changed, 17 insertions(+), 30 deletions(-) diff --git a/runner/src/lake-client/lake-client.ts b/runner/src/lake-client/lake-client.ts index 999d676d9..d06d5cef9 100644 --- a/runner/src/lake-client/lake-client.ts +++ b/runner/src/lake-client/lake-client.ts @@ -8,11 +8,7 @@ export default class LakeClient { private readonly network: string = 'mainnet', private readonly s3Client: S3Client = new S3Client(), private readonly redisClient: RedisClient = new RedisClient() - ) { - this.network = network; - this.s3Client = s3Client; - this.redisClient = redisClient; - } + ) {} // pad with 0s to 12 digits private 
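
The constructor change in this hunk relies on TypeScript parameter properties: marking a constructor argument private readonly both declares the field and assigns it, which is what makes the separate field declarations and this.x = x assignments redundant. A minimal illustration (class and fields hypothetical):

    class Example {
      // Each parameter is declared and assigned in a single step; the empty
      // constructor body is intentional.
      constructor (
        private readonly network: string = 'mainnet',
        private readonly retries: number = 3
      ) {}

      describe (): string {
        return `${this.network} (${this.retries} retries)`;
      }
    }

    // new Example().describe() === 'mainnet (3 retries)'
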
normalizeBlockHeight (blockHeight: number): string { @@ -72,11 +68,11 @@ export default class LakeClient { if (!isHistorical) { const cachedMessage = await this.redisClient.getStreamerMessage(blockHeight); if (cachedMessage) { - METRICS.CACHE_HIT.labels().inc(); + METRICS.CACHE_HIT.inc(); const parsedMessage = JSON.parse(cachedMessage); return Block.fromStreamerMessage(parsedMessage); } else { - METRICS.CACHE_MISS.labels().inc(); + METRICS.CACHE_MISS.inc(); } } diff --git a/runner/src/redis-client/redis-client.test.ts b/runner/src/redis-client/redis-client.test.ts index f9d2669d7..7c6d75cd8 100644 --- a/runner/src/redis-client/redis-client.test.ts +++ b/runner/src/redis-client/redis-client.test.ts @@ -28,7 +28,7 @@ describe('RedisClient', () => { const client = new RedisClient(mockClient); - const message = await client.getStreamMessages('streamKey', 10, '123-0', 1000); + const message = await client.getStreamMessages('streamKey', '123-0', 10, 1000); expect(mockClient.xRead).toHaveBeenCalledWith( { key: 'streamKey', id: '123-0' }, diff --git a/runner/src/redis-client/redis-client.ts b/runner/src/redis-client/redis-client.ts index ea763988f..44a0df28e 100644 --- a/runner/src/redis-client/redis-client.ts +++ b/runner/src/redis-client/redis-client.ts @@ -46,8 +46,8 @@ export default class RedisClient { async getStreamMessages ( streamKey: string, - count = 1, streamId = this.SMALLEST_STREAM_ID, + count = 1, block = 0 ): Promise { const results = await this.client.xRead( diff --git a/runner/src/stream-handler/worker.ts b/runner/src/stream-handler/worker.ts index a41552557..0b7b1e939 100644 --- a/runner/src/stream-handler/worker.ts +++ b/runner/src/stream-handler/worker.ts @@ -52,25 +52,17 @@ function incrementId (id: string): string { return `${Number(main) + 1}-${sequence}`; } -async function waitForQueueSpace (workerContext: WorkerContext): Promise { - const HISTORICAL_BATCH_SIZE = 100; - return await new Promise((resolve) => { - const intervalId = setInterval(() => { - const preFetchCount = HISTORICAL_BATCH_SIZE - workerContext.queue.length; - if (preFetchCount > 0) { - clearInterval(intervalId); - resolve(preFetchCount); - } - }, 100); - }); -} - async function blockQueueProducer (workerContext: WorkerContext, streamKey: string): Promise { + const HISTORICAL_BATCH_SIZE = 100; let streamMessageStartId = '0'; while (true) { - const preFetchCount = await waitForQueueSpace(workerContext); - const messages = await workerContext.redisClient.getStreamMessages(streamKey, preFetchCount, streamMessageStartId, 1000); + const preFetchCount = HISTORICAL_BATCH_SIZE - workerContext.queue.length; + if (preFetchCount <= 0) { + await sleep(100); + continue; + } + const messages = await workerContext.redisClient.getStreamMessages(streamKey, streamMessageStartId, preFetchCount, 1000); if (messages == null) { continue; } @@ -86,7 +78,6 @@ async function blockQueueProducer (workerContext: WorkerContext, streamKey: stri } async function blockQueueConsumer (workerContext: WorkerContext, streamKey: string): Promise { - const streamType = workerContext.redisClient.getStreamType(streamKey); const indexer = new Indexer(); const indexerConfig = await workerContext.redisClient.getStreamStorage(streamKey); const indexerName = `${indexerConfig.account_id}/${indexerConfig.function_name}`; @@ -119,16 +110,16 @@ async function blockQueueConsumer (workerContext: WorkerContext, streamKey: stri console.error('Block failed to process or does not have block height', block); continue; } - METRICS.BLOCK_WAIT_DURATION.labels({ 
indexer: indexerName, type: streamType }).set(performance.now() - blockStartTime);
+      METRICS.BLOCK_WAIT_DURATION.labels({ indexer: indexerName, type: workerContext.streamType }).set(performance.now() - blockStartTime);
       await indexer.runFunctions(block, functions, false, { provision: true });

       await workerContext.redisClient.deleteStreamMessage(streamKey, streamMessageId);
       await workerContext.queue.shift();

-      METRICS.EXECUTION_DURATION.labels({ indexer: indexerName, type: streamType }).observe(performance.now() - startTime);
+      METRICS.EXECUTION_DURATION.labels({ indexer: indexerName, type: workerContext.streamType }).observe(performance.now() - startTime);

-      if (streamType === 'historical') {
-        METRICS.LAST_PROCESSED_BLOCK_HEIGHT.labels({ indexer: indexerName, type: streamType }).set(block.blockHeight);
+      if (workerContext.streamType === 'historical') {
+        METRICS.LAST_PROCESSED_BLOCK_HEIGHT.labels({ indexer: indexerName, type: workerContext.streamType }).set(block.blockHeight);
       }

       console.log(`Success: ${indexerName}`);
@@ -137,7 +128,7 @@ async function blockQueueConsumer (workerContext: WorkerContext, streamKey: stri
       console.log(`Failed: ${indexerName}`, err);
     } finally {
       const unprocessedMessages = await workerContext.redisClient.getUnprocessedStreamMessages(streamKey, streamMessageId);
-      METRICS.UNPROCESSED_STREAM_MESSAGES.labels({ indexer: indexerName, type: streamType }).set(unprocessedMessages?.length ?? 0);
+      METRICS.UNPROCESSED_STREAM_MESSAGES.labels({ indexer: indexerName, type: workerContext.streamType }).set(unprocessedMessages?.length ?? 0);

       parentPort?.postMessage(await promClient.register.getMetricsAsJSON());
     }

From 19e94064d4458ab8dfb6ef8c54f50dfaf3a76074 Mon Sep 17 00:00:00 2001
From: Darun Seethammagari
Date: Wed, 8 Nov 2023 12:59:49 -0800
Subject: [PATCH 23/24] Undo Blocking for XRead

---
 runner/src/redis-client/redis-client.test.ts | 2 +-
 runner/src/redis-client/redis-client.ts      | 5 ++---
 runner/src/stream-handler/worker.ts          | 3 ++-
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/runner/src/redis-client/redis-client.test.ts b/runner/src/redis-client/redis-client.test.ts
index 7c6d75cd8..3f5f9623a 100644
--- a/runner/src/redis-client/redis-client.test.ts
+++ b/runner/src/redis-client/redis-client.test.ts
@@ -28,7 +28,7 @@ describe('RedisClient', () => {

     const client = new RedisClient(mockClient);

-    const message = await client.getStreamMessages('streamKey', '123-0', 10, 1000);
+    const message = await client.getStreamMessages('streamKey', '123-0', 10);

     expect(mockClient.xRead).toHaveBeenCalledWith(
       { key: 'streamKey', id: '123-0' },
diff --git a/runner/src/redis-client/redis-client.ts b/runner/src/redis-client/redis-client.ts
index 44a0df28e..3edbde25a 100644
--- a/runner/src/redis-client/redis-client.ts
+++ b/runner/src/redis-client/redis-client.ts
@@ -47,12 +47,11 @@ export default class RedisClient {
   async getStreamMessages (
     streamKey: string,
     streamId = this.SMALLEST_STREAM_ID,
-    count = 1,
-    block = 0
+    count = 1
   ): Promise<StreamMessage[] | undefined> {
     const results = await this.client.xRead(
       { key: streamKey, id: streamId },
-      { COUNT: count, BLOCK: block }
+      { COUNT: count }
     );

     return results?.[0].messages as StreamMessage[];
diff --git a/runner/src/stream-handler/worker.ts b/runner/src/stream-handler/worker.ts
index 0b7b1e939..ae0d29c56 100644
--- a/runner/src/stream-handler/worker.ts
+++ b/runner/src/stream-handler/worker.ts
@@ -62,8 +62,9 @@ async function blockQueueProducer (workerContext: WorkerContext, streamKey: stri
       await sleep(100);
       continue;
     }
-    const messages = await workerContext.redisClient.getStreamMessages(streamKey, streamMessageStartId, preFetchCount, 1000);
+    const messages = await workerContext.redisClient.getStreamMessages(streamKey, streamMessageStartId, preFetchCount);
     if (messages == null) {
+      await sleep(100);
       continue;
     }
     console.log(`Fetched ${messages?.length} messages from stream ${streamKey}`);

From 6cb07b1c8a4c1047b43938a96fadd04c60476b19 Mon Sep 17 00:00:00 2001
From: Darun Seethammagari
Date: Wed, 8 Nov 2023 14:20:51 -0800
Subject: [PATCH 24/24] Fix failing tests

---
 runner/src/redis-client/redis-client.test.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/runner/src/redis-client/redis-client.test.ts b/runner/src/redis-client/redis-client.test.ts
index 3f5f9623a..1abfd262f 100644
--- a/runner/src/redis-client/redis-client.test.ts
+++ b/runner/src/redis-client/redis-client.test.ts
@@ -14,7 +14,7 @@ describe('RedisClient', () => {

     expect(mockClient.xRead).toHaveBeenCalledWith(
       { key: 'streamKey', id: '0' },
-      { BLOCK: 0, COUNT: 1 }
+      { COUNT: 1 }
     );
     expect(message).toBeUndefined();
   });
@@ -32,7 +32,7 @@ describe('RedisClient', () => {

     expect(mockClient.xRead).toHaveBeenCalledWith(
       { key: 'streamKey', id: '123-0' },
-      { BLOCK: 1000, COUNT: 10 }
+      { COUNT: 10 }
     );
     expect(message).toBeUndefined();
   });
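
Stepping back, the shape the series converges on is a worker split into a producer that prefetches blocks into a queue of promises and a consumer that awaits the head of that queue, so block fetches overlap with indexer execution. A condensed, self-contained sketch of that architecture, with the Redis stream read and the S3 block fetch replaced by local stubs (every name here is illustrative rather than the repo's API):

    interface QueueMessage {
      blockHeight: number
      streamMessageId: string
    }

    const BATCH_SIZE = 100;
    const queue: Array<Promise<QueueMessage>> = [];

    const sleep = async (ms: number): Promise<void> => {
      await new Promise((resolve) => setTimeout(resolve, ms));
    };

    // Stub for the Redis stream read; IDs here are not guaranteed unique.
    let height = 0;
    async function readStreamEntries (count: number): Promise<Array<{ id: string, blockHeight: number }>> {
      await sleep(10);
      return Array.from({ length: Math.min(count, 5) }, () => {
        height += 1;
        return { id: `${Date.now()}-0`, blockHeight: height };
      });
    }

    // Stub for the S3 round trip that materializes a block.
    async function prefetchBlock (blockHeight: number, streamMessageId: string): Promise<QueueMessage> {
      await sleep(50);
      return { blockHeight, streamMessageId };
    }

    async function producer (): Promise<void> {
      while (true) {
        const space = BATCH_SIZE - queue.length;
        if (space <= 0) {
          await sleep(100);
          continue;
        }
        const entries = await readStreamEntries(space);
        for (const { id, blockHeight } of entries) {
          // Push the pending fetch; the consumer awaits it later, so fetching
          // upcoming blocks overlaps with processing the current one.
          queue.push(prefetchBlock(blockHeight, id));
        }
      }
    }

    async function consumer (): Promise<void> {
      while (true) {
        if (queue.length === 0) {
          await sleep(100);
          continue;
        }
        const message = await queue[0]; // the prefetch resolves here
        console.log(`processing block ${message.blockHeight}`);
        queue.shift(); // remove only after successful processing
      }
    }

    void producer();
    void consumer();

Shifting the queue only after a successful run mirrors the worker's at-least-once behaviour: on failure the consumer loops back to the same head entry, and the corresponding stream message is deleted only once processing has succeeded.
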