Prod Release 02/11/23 #356

Merged 7 commits on Nov 2, 2023
1 change: 1 addition & 0 deletions .github/workflows/deploy-lambdas.yml
@@ -36,6 +36,7 @@ jobs:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
HASURA_ENDPOINT: ${{ vars.HASURA_ENDPOINT }}
HASURA_ENDPOINT_V2: ${{ vars.HASURA_ENDPOINT_V2 }}
HASURA_ADMIN_SECRET: ${{ secrets.HASURA_ADMIN_SECRET }}
PG_ADMIN_USER: ${{ secrets.PG_ADMIN_USER }}
PG_ADMIN_PASSWORD: ${{ secrets.PG_ADMIN_PASSWORD }}
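
A minimal sketch of how a deployed lambda might consume the new variable, assuming the functions read their configuration from process.env at runtime (the helper name is illustrative and not part of this PR):

export function hasuraV2Endpoint (): string {
  // HASURA_ENDPOINT_V2 is injected by the workflow's env block above.
  const endpoint = process.env.HASURA_ENDPOINT_V2;
  if (endpoint === undefined || endpoint === '') {
    throw new Error('HASURA_ENDPOINT_V2 is not configured');
  }
  return endpoint;
}
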
1 change: 0 additions & 1 deletion frontend/replacement.dev.json
@@ -1,7 +1,6 @@
{
"REPL_ACCOUNT_ID": "dev-queryapi.dataplatform.near",
"REPL_GRAPHQL_ENDPOINT": "https://near-queryapi.dev.api.pagoda.co",
"REPL_GRAPHQL_ENDPOINT_V2": "https://queryapi-hasura-graphql-mainnet-vcqilefdcq-ew.a.run.app",
"REPL_EXTERNAL_APP_URL": "https://queryapi-frontend-vcqilefdcq-ew.a.run.app",
"REPL_REGISTRY_CONTRACT_ID": "dev-queryapi.dataplatform.near"
}
1 change: 0 additions & 1 deletion frontend/replacement.local.json
@@ -1,7 +1,6 @@
{
"REPL_ACCOUNT_ID": "dataplatform.near",
"REPL_GRAPHQL_ENDPOINT": "https://near-queryapi.api.pagoda.co",
"REPL_GRAPHQL_ENDPOINT_V2": "https://queryapi-hasura-graphql-mainnet-vcqilefdcq-ew.a.run.app",
"REPL_EXTERNAL_APP_URL": "http://localhost:3000",
"REPL_REGISTRY_CONTRACT_ID": "queryapi.dataplatform.near"
}
1 change: 0 additions & 1 deletion frontend/replacement.mainnet.json
@@ -1,7 +1,6 @@
{
"REPL_ACCOUNT_ID": "dataplatform.near",
"REPL_GRAPHQL_ENDPOINT": "https://near-queryapi.api.pagoda.co",
"REPL_GRAPHQL_ENDPOINT_V2": "https://queryapi-hasura-graphql-mainnet-24ktefolwq-ew.a.run.app",
"REPL_EXTERNAL_APP_URL": "https://queryapi-frontend-24ktefolwq-ew.a.run.app",
"REPL_REGISTRY_CONTRACT_ID": "queryapi.dataplatform.near"
}
84 changes: 2 additions & 82 deletions frontend/widgets/src/QueryApi.IndexerStatus.jsx
@@ -95,9 +95,6 @@ const TextLink = styled.a`

if (!indexer_name) return "missing indexer_name";

let v1_endpoint = `${REPL_GRAPHQL_ENDPOINT}`;
let v2_endpoint = `${REPL_GRAPHQL_ENDPOINT_V2}`;

State.init({
logs: [],
state: [],
@@ -108,10 +105,9 @@ State.init({
indexer_resPage: 0,
logsPage: 0,
statePage: 0,
v2Toggle: Storage.privateGet("QueryApiV2Toggle") || false,
});

let graphQLEndpoint = state.v2Toggle ? v2_endpoint : v1_endpoint;
let graphQLEndpoint = `${REPL_GRAPHQL_ENDPOINT}`;

function fetchGraphQL(operationsDoc, operationName, variables) {
return asyncFetch(`${graphQLEndpoint}/v1/graphql`, {
@@ -164,12 +160,7 @@ const indexerStateDoc = `
}
`;

const prevV2ToggleSelected = Storage.privateGet("QueryApiV2Toggle");
if (
!state.initialFetch ||
(prevV2ToggleSelected !== state.v2Toggle)
) {
Storage.privateSet("QueryApiV2Toggle", state.v2Toggle);
if (!state.initialFetch) {
State.update({
logs: [],
state: [],
Expand Down Expand Up @@ -256,44 +247,6 @@ const onIndexerResPageChange = (page) => {
State.update({ indexer_resPage: page, currentPage: page });
};

const DisclaimerContainer = styled.div`
padding: 10px;
margin: 0.5px;
text-color: black;
display: flex;
width: 50;
border: 2px solid rgb(240, 240, 240);
border-radius: 8px;
align-items: "center";
margin-bottom: 5px;
`;

const Notice = styled.div`
font-weight: 900;
font-size: 22px;
align-self: flex-start;
margin: 10px 0px 30px;
text-align: center;
padding-bottom: 5px;
border-bottom: 1px solid rgb(240, 240, 241);
color: rgb(36, 39, 42);
`;

const DisclaimerText = styled.p`
font-size: 14px;
line-height: 20px;
font-weight: 400;
color: rgb(17, 24, 28);
word-break: break-word;
width: 700px;
text-align: start;
padding-left: 10px;

@media (max-width: 1024px) {
width: 80%;
}
`;

return (
<>
<Card>
@@ -303,39 +256,6 @@ return (
GraphQL Playground
<i className="bi bi-box-arrow-up-right"></i>
</TextLink>
<div
style={{
marginTop: "5px",
display: "flex",
width: "100%",
justifyContent: "center",
}}
>
<DisclaimerContainer>
<div className="flex">
<Notice>V2 Testing Notice</Notice>
<div style={{ display: "flex" }}>
<DisclaimerText>
QueryAPI is still in beta. We are working on a QueryAPI V2
with faster historical processing, easier access to DB and
more control over your indexer. V2 is running in parallel and
you can see the logs from this new version by toggling this
button.
</DisclaimerText>
<Widget
src={`${REPL_ACCOUNT_ID}/widget/components.toggle`}
props={{
active: state.v2Toggle,
label: "",
onSwitch: () => {
State.update({ v2Toggle: !state.v2Toggle });
},
}}
/>
</div>
</div>
</DisclaimerContainer>
</div>
</Title>

<CardBody>
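
With the V2 toggle gone, every query from this widget goes to the single REPL_GRAPHQL_ENDPOINT. A minimal TypeScript sketch of the request shape fetchGraphQL sends (the widget itself uses the BOS asyncFetch helper; the global fetch and the types here are illustrative):

async function fetchGraphQL (
  endpoint: string,
  operationsDoc: string,
  operationName: string,
  variables: Record<string, unknown>
): Promise<unknown> {
  // Hasura serves GraphQL under /v1/graphql; the body follows the usual
  // GraphQL-over-HTTP shape of query + variables + operationName.
  const res = await fetch(`${endpoint}/v1/graphql`, {
    method: 'POST',
    body: JSON.stringify({ query: operationsDoc, variables, operationName }),
  });
  return await res.json();
}
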
1 change: 1 addition & 0 deletions indexer-js-queue-handler/serverless.yml
@@ -12,6 +12,7 @@ provider:
REGION: ${self:provider.region}
STAGE: ${opt:stage, 'dev'}
HASURA_ENDPOINT: ${env:HASURA_ENDPOINT}
HASURA_ENDPOINT_V2: ${env:HASURA_ENDPOINT_V2}
HASURA_ADMIN_SECRET: ${env:HASURA_ADMIN_SECRET}
PG_ADMIN_USER: ${env:PG_ADMIN_USER}
PG_ADMIN_PASSWORD: ${env:PG_ADMIN_PASSWORD}
5 changes: 2 additions & 3 deletions indexer-js-queue-handler/social-lag-metrics-writer.js
@@ -35,7 +35,7 @@ export const handler = async () => {
},
}),
fetchJson(
`${process.env.HASURA_ENDPOINT}/v1/graphql`,
`${process.env.HASURA_ENDPOINT_V2}/v1/graphql`,
{
query: `{
dataplatform_near_social_feed_posts(
@@ -54,8 +54,7 @@ export const handler = async () => {

const nearSocialBlockHeight = nearSocialResponse[0].blockHeight;
const feedIndexerBlockHeight =
feedIndexerResponse.data.dataplatform_near_social_feed_posts[0]
.block_height;
feedIndexerResponse.data.dataplatform_near_social_feed_posts[0].block_height;

const lag = nearSocialBlockHeight - feedIndexerBlockHeight;

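
For reference, a minimal sketch of the lag computation this handler performs once both heights are in hand (the response shape mirrors the query above; fetching and metric publishing are omitted):

interface FeedIndexerResponse {
  data: {
    dataplatform_near_social_feed_posts: Array<{ block_height: number }>;
  };
}

function computeFeedLag (
  nearSocialBlockHeight: number,
  feedIndexerResponse: FeedIndexerResponse
): number {
  const feedIndexerBlockHeight =
    feedIndexerResponse.data.dataplatform_near_social_feed_posts[0].block_height;
  // A positive value means the feed indexer is behind near-social.
  return nearSocialBlockHeight - feedIndexerBlockHeight;
}
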
21 changes: 14 additions & 7 deletions runner/src/metrics.ts
@@ -1,25 +1,25 @@
import express from 'express';
import promClient from 'prom-client';
import { Gauge, Histogram, Counter, AggregatorRegistry } from 'prom-client';

const UNPROCESSED_STREAM_MESSAGES = new promClient.Gauge({
const UNPROCESSED_STREAM_MESSAGES = new Gauge({
name: 'queryapi_runner_unprocessed_stream_messages',
help: 'Number of Redis Stream messages not yet processed',
labelNames: ['indexer', 'type'],
});

const EXECUTION_DURATION = new promClient.Histogram({
const EXECUTION_DURATION = new Histogram({
name: 'queryapi_runner_execution_duration_milliseconds',
help: 'Time taken to execute an indexer function',
labelNames: ['indexer', 'type'],
});

const CACHE_HIT = new promClient.Counter({
const CACHE_HIT = new Counter({
name: 'queryapi_runner_cache_hit',
help: 'The number of times cache was hit successfully',
labelNames: ['type', 'key']
});

const CACHE_MISS = new promClient.Counter({
const CACHE_MISS = new Counter({
name: 'queryapi_runner_cache_miss',
help: 'The number of times cache was missed',
labelNames: ['type', 'key']
@@ -32,15 +32,22 @@ export const METRICS = {
CACHE_MISS
};

const aggregatorRegistry = new AggregatorRegistry();
const workerMetrics: Record<number, string> = {};

export const registerWorkerMetrics = (workerId: number, metrics: string): void => {
workerMetrics[workerId] = metrics;
};

export const startServer = async (): Promise<void> => {
const app = express();

// https://github.com/DefinitelyTyped/DefinitelyTyped/issues/50871
// eslint-disable-next-line @typescript-eslint/no-misused-promises
app.get('/metrics', async (_req, res) => {
res.set('Content-Type', promClient.register.contentType);
res.set('Content-Type', aggregatorRegistry.contentType);

const metrics = await promClient.register.metrics();
const metrics = await AggregatorRegistry.aggregate(Object.values(workerMetrics)).metrics();
res.send(metrics);
});

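
The switch from the process-local registry to AggregatorRegistry means the main thread now serves a merged view of per-worker metrics. A minimal sketch of that pattern, assuming each worker reports the JSON produced by promClient.register.getMetricsAsJSON() (function and variable names here are illustrative):

import express from 'express';
import { AggregatorRegistry } from 'prom-client';

const aggregatorRegistry = new AggregatorRegistry();

// Latest metrics snapshot per worker, in the JSON form produced by
// promClient.register.getMetricsAsJSON() inside each worker.
const workerMetrics: Record<number, object[]> = {};

export function recordWorkerMetrics (workerId: number, metrics: object[]): void {
  workerMetrics[workerId] = metrics;
}

export function startMetricsServer (port: number): void {
  const app = express();

  app.get('/metrics', async (_req, res) => {
    res.set('Content-Type', aggregatorRegistry.contentType);
    // Merge every worker's snapshot into a single Prometheus exposition.
    const merged = AggregatorRegistry.aggregate(Object.values(workerMetrics));
    res.send(await merged.metrics());
  });

  app.listen(port, () => {
    console.log(`Metrics server listening on ${port}`);
  });
}

The design mirrors prom-client's cluster support: each worker keeps its own default registry, and the parent only aggregates serialized snapshots, so no metric objects are shared across threads.
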
22 changes: 7 additions & 15 deletions runner/src/stream-handler/stream-handler.ts
@@ -1,12 +1,10 @@
import path from 'path';
import { Worker, isMainThread } from 'worker_threads';

import { type Message } from './types';
import { METRICS } from '../metrics';
import { Gauge, Histogram } from 'prom-client';
import { registerWorkerMetrics } from '../metrics';

export default class StreamHandler {
private readonly worker?: Worker;
private readonly worker: Worker;

constructor (
public readonly streamKey: string
@@ -18,27 +16,21 @@ export default class StreamHandler {
},
});

this.worker.on('message', this.handleMessage);
this.worker.on('error', this.handleError);
this.worker.on('message', this.handleMessage.bind(this));
this.worker.on('error', this.handleError.bind(this));
} else {
throw new Error('StreamHandler should not be instantiated in a worker thread');
}
}

private handleError (error: Error): void {
console.log(`Encountered error processing stream: ${this.streamKey}, terminating thread`, error);
this.worker?.terminate().catch(() => {
this.worker.terminate().catch(() => {
console.log(`Failed to terminate thread for stream: ${this.streamKey}`);
});
}

private handleMessage (message: Message): void {
if (METRICS[message.type] instanceof Gauge) {
(METRICS[message.type] as Gauge).labels(message.labels).set(message.value);
}

if (METRICS[message.type] instanceof Histogram) {
(METRICS[message.type] as Histogram).labels(message.labels).observe(message.value);
}
private handleMessage (message: string): void {
registerWorkerMetrics(this.worker.threadId, message);
}
}
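
One small but important detail above is the switch to this.handleMessage.bind(this): a class method passed directly as a listener loses its this. A short illustrative example of the failure mode the bind avoids (stand-alone, not code from this PR):

import { EventEmitter } from 'events';

class Handler {
  constructor (private readonly name: string) {}

  handle (): void {
    // If this method is registered unbound, `this` is the EventEmitter,
    // so `this.name` would be undefined here.
    console.log(`handled by ${this.name}`);
  }
}

const emitter = new EventEmitter();
const handler = new Handler('stream-a');

emitter.on('message', handler.handle.bind(handler));
emitter.emit('message'); // logs "handled by stream-a"
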
9 changes: 0 additions & 9 deletions runner/src/stream-handler/types.ts

This file was deleted.

27 changes: 11 additions & 16 deletions runner/src/stream-handler/worker.ts
@@ -1,8 +1,9 @@
import { isMainThread, parentPort, workerData } from 'worker_threads';
import promClient from 'prom-client';

import Indexer from '../indexer';
import RedisClient from '../redis-client';
import { type Message } from './types';
import { METRICS } from '../metrics';

if (isMainThread) {
throw new Error('Worker should not be run on main thread');
@@ -19,11 +20,12 @@ void (async function main () {
console.log('Started processing stream: ', streamKey);

let indexerName = '';
const streamType = redisClient.getStreamType(streamKey);
const isHistorical = streamType === 'historical';

while (true) {
try {
const startTime = performance.now();
const streamType = redisClient.getStreamType(streamKey);

const messages = await redisClient.getNextStreamMessage(streamKey);
const indexerConfig = await redisClient.getStreamStorage(streamKey);
@@ -46,30 +48,23 @@
provisioned: false,
},
};
await indexer.runFunctions(Number(message.block_height), functions, false, {
await indexer.runFunctions(Number(message.block_height), functions, isHistorical, {
provision: true,
});

await redisClient.deleteStreamMessage(streamKey, id);

const unprocessedMessages = await redisClient.getUnprocessedStreamMessages(streamKey);

parentPort?.postMessage({
type: 'UNPROCESSED_STREAM_MESSAGES',
labels: { indexer: indexerName, type: streamType },
value: unprocessedMessages?.length ?? 0,
} satisfies Message);

parentPort?.postMessage({
type: 'EXECUTION_DURATION',
labels: { indexer: indexerName, type: streamType },
value: performance.now() - startTime,
} satisfies Message);
METRICS.EXECUTION_DURATION.labels({ indexer: indexerName, type: streamType }).observe(performance.now() - startTime);

console.log(`Success: ${indexerName}`);
} catch (err) {
await sleep(10000);
console.log(`Failed: ${indexerName}`, err);
} finally {
const unprocessedMessages = await redisClient.getUnprocessedStreamMessages(streamKey);
METRICS.UNPROCESSED_STREAM_MESSAGES.labels({ indexer: indexerName, type: streamType }).set(unprocessedMessages?.length ?? 0);

parentPort?.postMessage(await promClient.register.getMetricsAsJSON());
}
}
})();
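
On the worker side, metrics are now recorded against the worker's own default registry, and a snapshot is posted to the parent at the end of every iteration, where StreamHandler forwards it to registerWorkerMetrics. A minimal sketch of that reporting pattern (the counter here is illustrative, not one of the PR's metrics):

import { isMainThread, parentPort } from 'worker_threads';
import promClient, { Counter } from 'prom-client';

if (isMainThread) {
  throw new Error('This module should run inside a worker thread');
}

// Illustrative metric; the real worker records EXECUTION_DURATION and
// UNPROCESSED_STREAM_MESSAGES from ../metrics.
const PROCESSED = new Counter({
  name: 'example_worker_processed_total',
  help: 'Messages processed by this worker',
});

export async function reportMetricsOnce (): Promise<void> {
  PROCESSED.inc();
  // getMetricsAsJSON() returns the registry contents in exactly the shape
  // that AggregatorRegistry.aggregate() expects on the main thread.
  parentPort?.postMessage(await promClient.register.getMetricsAsJSON());
}
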