This repository has been archived by the owner on May 28, 2022. It is now read-only.

feature: Indexer #191

Draft · wants to merge 63 commits into base: main
Commits (63)
1773291
feat: created indexer
kekonen Jan 16, 2022
159f18f
feat: added consumer
kekonen Jan 16, 2022
9416ca7
works, but bunch of debug junk
kekonen Jan 17, 2022
bd8f80a
woorks
kekonen Jan 17, 2022
65bf84b
debugging
kekonen Jan 17, 2022
15b5e81
debugging
kekonen Jan 17, 2022
a63a70c
feature: no more subscription
kekonen Jan 18, 2022
7bffcb5
chore: increased time
kekonen Jan 18, 2022
7c53aa5
feature: metrics
kekonen Jan 18, 2022
c98c43c
chore: a bit cleaning, prettier
kekonen Jan 19, 2022
dd3790a
chore: reorg stuff
kekonen Jan 19, 2022
0977f82
feature: db for messages
kekonen Jan 21, 2022
ecf37f5
feature: more db
kekonen Jan 21, 2022
046e8a4
db and nonce
kekonen Jan 23, 2022
1b66d2b
feature: sender, received event
kekonen Jan 23, 2022
1a9f660
message time
kekonen Jan 25, 2022
9f71642
feature: evm tx
kekonen Jan 25, 2022
d8e5a5c
feature: db request
kekonen Jan 25, 2022
ebc7d8f
feature: docker
kekonen Jan 25, 2022
237ffce
chore: ethereup rpc provider
kekonen Jan 25, 2022
f7282da
chore: script
kekonen Jan 25, 2022
9352b21
added docker-compose config and turned on indexer metrics
yourbuddyconner Jan 26, 2022
da03c8e
chore: env handling
kekonen Jan 26, 2022
8a778ad
chore: env example
kekonen Jan 26, 2022
e7f45f0
chore: allow big tables for postgres
kekonen Jan 26, 2022
946ff7f
fix: correct names
kekonen Jan 26, 2022
0f8035e
minor nits, almost works
yourbuddyconner Jan 27, 2022
bb3b1e1
fix: nits
kekonen Jan 27, 2022
afcf907
feature: home health
kekonen Jan 27, 2022
f783320
fix: nits TS
kekonen Jan 27, 2022
3f71a4a
fix: nits health restore, though might not need
kekonen Jan 27, 2022
3e0f480
fix: error handling
kekonen Jan 27, 2022
f27a748
feature: retry throws warns
kekonen Jan 27, 2022
181fcf1
chore: prettier
kekonen Jan 27, 2022
44ced31
pagination fix
yourbuddyconner Jan 28, 2022
ba23d08
limit concurrent number of requests to 20
yourbuddyconner Jan 28, 2022
4e5e699
concurrent requests per-endpoint
yourbuddyconner Jan 28, 2022
f92fcfe
error logs
yourbuddyconner Jan 28, 2022
5010e32
removed extra pagination
kekonen Jan 28, 2022
9b4b809
integrity test for multiple replicas
kekonen Jan 29, 2022
e6891dd
column length
kekonen Jan 29, 2022
9a510fb
retries everywhere
kekonen Jan 30, 2022
76e64b3
retries, timeout, refetch on failed integrity
kekonen Jan 31, 2022
e7ce901
debug logs
kekonen Jan 31, 2022
2a62a46
chore: env.exmaple, docker-compose
kekonen Jan 31, 2022
d12f4ca
chore: wrapped getBlockNumber retry
kekonen Feb 1, 2022
5a0037b
Daniil/tx monitor prisma (#156)
kekonen Feb 3, 2022
e7a61bb
Daniil/tx monitor prisma (#178)
kekonen Feb 6, 2022
a85dbaa
chore: log and docker-compose
kekonen Feb 7, 2022
892759d
feature: hostgrams
kekonen Feb 8, 2022
122eb87
fix: hostgrams and ts
kekonen Feb 8, 2022
37e7f9e
feature: gas usage metrics
kekonen Feb 10, 2022
08a5f21
refactor: metrics
kekonen Feb 11, 2022
c70a808
fix: pagination
kekonen Feb 11, 2022
3706672
fix: reorg
kekonen Feb 14, 2022
1bc7a18
feature: rpc and db metrics
kekonen Feb 14, 2022
556e092
feature: rpc metrics
kekonen Feb 14, 2022
4477c84
feature: logs improved, debug api
kekonen Feb 16, 2022
2fbca40
fix: db load
kekonen Feb 17, 2022
58bcd53
fix: db load
kekonen Feb 17, 2022
1fc1919
fix: call stack size, naming
kekonen Mar 2, 2022
15f0b0f
fix: naming
kekonen Mar 4, 2022
2ac21ae
fix: naming
kekonen Mar 4, 2022
12 changes: 12 additions & 0 deletions tools/indexer/.env.example
@@ -0,0 +1,12 @@
ETHEREUM_RPC=https://eth-mainnet.alchemyapi.io/v2/xxx
MOONBEAM_RPC=https://moonbeam.api.onfinality.io/public
ENVIRONMENT=development # or production or staging

PORT=3000
BATCH_SIZE=5000

PGHOST=pg1
PGUSER=postgres
PGDATABASE=postgres
PGPASSWORD=postgres
PGPORT=5432
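
For context, a minimal sketch of how these variables might be loaded and validated at startup. The variable names mirror the example above; the `loadConfig` / `requireEnv` helpers are hypothetical and not part of this PR (the PG* variables are presumably picked up directly by the Postgres client via its standard environment variables).

```typescript
// Hypothetical config loader -- illustration only, not part of this PR.
import * as dotenv from "dotenv";

dotenv.config();

export interface IndexerConfig {
  ethereumRpc: string;
  moonbeamRpc: string;
  environment: "development" | "staging" | "production";
  port: number;
  batchSize: number;
}

// Throw early if a required variable is missing from the environment.
function requireEnv(name: string): string {
  const value = process.env[name];
  if (!value) throw new Error(`Missing required environment variable: ${name}`);
  return value;
}

export function loadConfig(): IndexerConfig {
  return {
    ethereumRpc: requireEnv("ETHEREUM_RPC"),
    moonbeamRpc: requireEnv("MOONBEAM_RPC"),
    environment: (process.env.ENVIRONMENT ??
      "development") as IndexerConfig["environment"],
    port: parseInt(process.env.PORT ?? "3000", 10),
    batchSize: parseInt(process.env.BATCH_SIZE ?? "5000", 10),
  };
}
```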
4 changes: 4 additions & 0 deletions tools/indexer/.gitignore
@@ -0,0 +1,4 @@
node_modules
# Keep environment variables out of version control
.env
postgres-data*
29 changes: 29 additions & 0 deletions tools/indexer/Dockerfile
@@ -0,0 +1,29 @@
FROM node:16
ENV NODE_ENV=production

WORKDIR /app

COPY package.json ./package.json
COPY package-lock.json ./package-lock.json


# START should delete later
COPY nomad-xyz-sdk-1.2.4.tgz ./nomad-xyz-sdk-1.2.4.tgz
RUN npm i


RUN npm i ./nomad-xyz-sdk-1.2.4.tgz

# RUN rm ./nomad-xyz-sdk-1.2.4.tgz
# END should delete later


COPY api ./api
COPY main.ts ./main.ts

COPY prisma ./prisma
RUN npm run build

COPY core ./core

CMD [ "npm", "run", "start" ]
80 changes: 80 additions & 0 deletions tools/indexer/api/index.ts
@@ -0,0 +1,80 @@
import express from "express";
import { DB, MsgRequest } from "../core/db";
import * as dotenv from "dotenv";
import Logger from "bunyan";
import promBundle from 'express-prom-bundle';
import { prefix } from "../core/metrics";

dotenv.config({});

function fail(res: any, code: number, reason: string) {
return res.status(code).json({ error: reason });
}

const PORT = process.env.PORT;

export async function run(db: DB, logger: Logger) {
const app = express();

const log = (
req: express.Request,
res: express.Response,
next: express.NextFunction
) => {
logger.info(`request to ${req.url}`);
next();
};

const metricsMiddleware = promBundle({
httpDurationMetricName: prefix + '_api',
buckets: [0.1, 0.3, 0.6, 1, 1.5, 2.5, 5],
includeMethod: true,
includePath: true,
metricsPath: '/metrics',
});
app.use(metricsMiddleware);

// app.use(promMid({
// metricsPath: '/metrics',
// collectDefaultMetrics: true,
// requestDurationBuckets: [0.1, 0.5, 1, 1.5],
// requestLengthBuckets: [512, 1024, 5120, 10240, 51200, 102400],
// responseLengthBuckets: [512, 1024, 5120, 10240, 51200, 102400],
// prefix: prefix + '_api',
// }));

app.get("/healthcheck", log, (req, res) => {
res.send("OK!");
});

app.get("/tx/:tx", log, async (req, res) => {
const messages = await db.getMessageByEvm(req.params.tx);
return res.json(messages.map(m => m.serialize()));
});

app.get("/hash/:hash", log, async (req, res) => {
const message = await db.getMessageByHash(req.params.hash);
if (!message) return res.status(404).json({});
return res.json(message.serialize());
});

app.get(
"/tx",
log,
async (req: express.Request<{}, {}, {}, MsgRequest>, res) => {
const { size } = req.query;

if (size && size > 30) return fail(res, 403, "maximum page size is 30");

const messages = await db.getMessages(req.query);

return res.json(messages.map(m => m.serialize()));
}
);

app.listen(PORT, () => {
logger.info(`Server is running at http://localhost:${PORT}`);
});
}
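
For illustration, a small client sketch exercising the endpoints defined above. It assumes Node 18+ (global `fetch`) and an indexer API listening on localhost:3000; the `INDEXER_API` variable and the placeholder hash are not part of this PR.

```typescript
// Hypothetical API client -- illustration only, not part of this PR.
const BASE_URL = process.env.INDEXER_API ?? "http://localhost:3000";

async function getJson<T>(path: string): Promise<T> {
  const res = await fetch(`${BASE_URL}${path}`);
  if (!res.ok) throw new Error(`GET ${path} failed with status ${res.status}`);
  return (await res.json()) as T;
}

async function main() {
  // Liveness probe.
  const health = await fetch(`${BASE_URL}/healthcheck`);
  console.log(await health.text());

  // Paginated message listing; sizes above 30 are rejected with 403.
  const page = await getJson<unknown[]>("/tx?size=30");
  console.log(`fetched ${page.length} messages`);

  // Lookup by EVM transaction hash (placeholder hash, for illustration only).
  const byTx = await getJson<unknown[]>("/tx/0xabc123");
  console.log(byTx);
}

main().catch(console.error);
```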
102 changes: 102 additions & 0 deletions tools/indexer/core/api.ts
@@ -0,0 +1,102 @@
import express from "express";
import { DB, MsgRequest } from "./db";
import * as dotenv from "dotenv";
import Logger from "bunyan";
import { Orchestrator } from "./orchestrator";
import { Processor } from "./consumer";
import { replacer } from "./utils";
dotenv.config({});

function fail(res: any, code: number, reason: string) {
return res.status(code).json({ error: reason });
}

const PORT = process.env.DEBUG_PORT || '1337';

export async function run(o: Orchestrator, logger: Logger) {
const app = express();

const log = (
req: express.Request,
res: express.Response,
next: express.NextFunction
) => {
logger.info(`request to ${req.url}`);
next();
};

app.get("/healthcheck", log, (req, res) => {
res.send("OK!");
});

app.get("/hash/:hash", log, async (req, res) => {
const p = (o.consumer as Processor);
const message = p.getMsg(req.params.hash);
if (message) {
return res.json(message.serialize());
} else {
return res.status(404).json({});
}
});

app.get("/tx/:tx", log, async (req, res) => {
const p = (o.consumer as Processor);
const message = Array.from(p.messages).find(m => m.tx && m.tx! === req.params.tx);
if (message) {
return res.json(message.serialize());
} else {
return res.status(404).json({});
}
});

app.get("/status", log, async (req, res) => {
const promises: Promise<[number, {
lastIndexed: number;
numMessages: number;
numRpcFailures: number;
}]>[] = Array.from(o.indexers.entries()).map(async ([domain, indexer]): Promise<[number, {
lastIndexed: number;
numMessages: number;
numRpcFailures: number;
}]> => {
return [domain, {
lastIndexed: indexer.lastIndexed.valueOf(),
numMessages: await o.db.getMessageCount(domain),
numRpcFailures: indexer.failureCounter.num()
}]
});
const entries = await Promise.all(
promises
);

const x = new Map(
entries
);
return res.json(JSON.stringify(x, replacer));
})

app.get("/msg/:origin/:state", log, async (req, res) => {

const {origin: originStr, state: stateStr} = req.params;
let origin: number, state: number;

try {
origin = parseInt(originStr);
state = parseInt(stateStr);
} catch(e) {
return res.status(407).json({error: `One of the params (origin or stage) is invalid`});
}

const p = (o.consumer as Processor);
const messages = Array.from(p.messages).filter(m => m.origin === origin && m.state === state);
if (messages.length) {
return res.json(messages.map(m => m.serialize()));
} else {
return res.status(404).json({});
}
});

app.listen(PORT, () => {
logger.info(`Server is running at http://localhost:${PORT}`);
});
}
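
The `/status` handler above serializes a `Map` via `JSON.stringify` with a `replacer` imported from `./utils`, which is not included in this diff. A plausible Map-aware replacer could look like the sketch below; this is an assumption about the helper's behavior, not the PR's actual implementation.

```typescript
// Hypothetical Map-aware JSON replacer -- illustration only, not part of this PR.
export function replacer(_key: string, value: unknown): unknown {
  if (value instanceof Map) {
    // Serialize Maps as plain objects keyed by their stringified keys.
    return Object.fromEntries(
      Array.from(value.entries()).map(([k, v]) => [String(k), v])
    );
  }
  return value;
}

// Example: a Map<number, {...}> like the one built by /status.
const status = new Map([[1000, { lastIndexed: 123456 }]]);
console.log(JSON.stringify(status, replacer)); // {"1000":{"lastIndexed":123456}}
```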