Skip to content

Commit

Permalink
Merge branch 'develop' into refactor/sources-ea
Browse files Browse the repository at this point in the history
  • Loading branch information
justinkaseman authored Feb 4, 2022
2 parents 007d18d + 67c1c23 commit 24934c5
Show file tree
Hide file tree
Showing 69 changed files with 310 additions and 301 deletions.
5 changes: 5 additions & 0 deletions .changeset/giant-elephants-drop.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'@chainlink/coinpaprika-adapter': minor
---

update rate limits for coinpaprika
156 changes: 17 additions & 139 deletions packages/core/bootstrap/src/index.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
import {
AdapterMetricsMeta,
AdapterRequest,
AdapterErrorLog,
AdapterContext,
Execute,
ExecuteSync,
Expand All @@ -12,24 +10,20 @@ import {
Config,
} from '@chainlink/types'
import { combineReducers, Store } from 'redux'
import { Cache, withCache } from './lib/cache'
import * as cacheWarmer from './lib/cache-warmer'
import { WARMUP_REQUEST_ID } from './lib/cache-warmer/config'
import {
AdapterError,
logger as Logger,
Requester,
Validator,
Builder,
normalizeInput,
} from './lib/external-adapter'
import { Cache, withCache } from './lib/middleware/cache'
import * as cacheWarmer from './lib/middleware/cache-warmer'
import { AdapterError, logger as Logger, Requester, Validator, Builder } from './lib/modules'
import * as metrics from './lib/metrics'
import * as RateLimit from './lib/rate-limit'
import * as burstLimit from './lib/burst-limit'
import * as RateLimit from './lib/middleware/rate-limit'
import * as burstLimit from './lib/middleware/burst-limit'
import * as ioLogger from './lib/middleware/io-logger'
import * as statusCode from './lib/middleware/status-code'
import * as debug from './lib/middleware/debugger'
import * as normalize from './lib/middleware/normalize'
import * as server from './lib/server'
import { configureStore } from './lib/store'
import * as util from './lib/util'
import * as ws from './lib/ws'
import * as ws from './lib/middleware/ws'
import http from 'http'

const REDUX_MIDDLEWARE = ['burstLimit', 'cacheWarmer', 'rateLimit', 'ws'] as const
Expand Down Expand Up @@ -61,122 +55,6 @@ export const storeSlice = (slice: ReduxMiddleware): Store =>
dispatch: (a) => store.dispatch(a),
} as Store)

// Make sure data has the same statusCode as the one we got as a result
const withStatusCode: Middleware = async (execute, context) => async (input) => {
  const response = await execute(input, context)
  const { statusCode, data, ...rest } = response

  // Only object payloads that carry their own statusCode need syncing
  const hasNestedStatusCode = !!data && typeof data === 'object' && !!data.statusCode
  if (!hasNestedStatusCode) {
    return { ...rest, statusCode, data }
  }

  // Overwrite the nested status code so it always mirrors the top-level one
  return { ...rest, statusCode, data: { ...data, statusCode } }
}

// Log adapter input & output data
const withLogger: Middleware = async (execute, context) => async (input: AdapterRequest) => {
  Logger.debug('Input: ', { input })
  try {
    const result = await execute(input, context)
    Logger.debug(`Output: [${result.statusCode}]: `, { output: result.data })
    return result
  } catch (error) {
    // Build a structured error record before rethrowing so callers still
    // see the original failure
    const errorLog: AdapterErrorLog = {
      message: error.toString(),
      jobRunID: input.id,
      params: input.data,
      feedID: metrics.util.getFeedId(input),
      url: error.url,
      errorResponse: error.errorResponse,
    }

    // Attach progressively more error detail at more verbose log levels
    switch (Logger.level) {
      case 'debug':
        errorLog.stack = error.stack
        break
      case 'trace':
        errorLog.rawError = error.cause
        errorLog.stack = undefined
        break
    }

    Logger.error(errorLog)
    throw error
  }
}

// Records Prometheus metrics (request counter + duration histogram) around
// every adapter execution, labelling each observation with a feed id derived
// from the input. The same feed id is forwarded downstream via `metricsMeta`.
const withMetrics: Middleware = async (execute, context) => async (input: AdapterRequest) => {
  const feedId = metrics.util.getFeedId(input)
  const metricsMeta: AdapterMetricsMeta = {
    feedId,
  }

  // Starts the duration timer immediately and returns a callback that, when
  // invoked, fills in the outcome labels, stops the timer, and increments the
  // request counter — called exactly once on both success and failure paths.
  const recordMetrics = () => {
    const labels: Parameters<typeof metrics.httpRequestsTotal.labels>[0] = {
      // Flag cache-warmer traffic (identified by its fixed request id) so it
      // can be distinguished from real requests
      is_cache_warming: String(input.id === WARMUP_REQUEST_ID),
      method: 'POST',
      feed_id: feedId,
    }
    const end = metrics.httpRequestDurationSeconds.startTimer()

    return (props: {
      providerStatusCode?: number
      statusCode?: number
      type?: metrics.HttpRequestType
    }) => {
      labels.type = props.type
      labels.status_code = metrics.util.normalizeStatusCode(props.statusCode)
      labels.provider_status_code = metrics.util.normalizeStatusCode(props.providerStatusCode)
      end()
      metrics.httpRequestsTotal.labels(labels).inc()
    }
  }

  const record = recordMetrics()
  try {
    const result = await execute({ ...input, metricsMeta }, context)
    record({
      statusCode: result.statusCode,
      // A maxAge on the result (either on data or on the response itself)
      // marks the response as a cache hit rather than a provider round trip
      type:
        result.data.maxAge || (result as any).maxAge
          ? metrics.HttpRequestType.CACHE_HIT
          : metrics.HttpRequestType.DATA_PROVIDER_HIT,
    })
    return { ...result, metricsMeta: { ...result.metricsMeta, ...metricsMeta } }
  } catch (error) {
    const providerStatusCode: number | undefined = error.cause?.response?.status
    // NOTE(review): a present provider status is reported as adapter status
    // 200, otherwise 500 — presumably "provider answered" vs "adapter failed";
    // confirm against the serving layer.
    record({
      statusCode: providerStatusCode ? 200 : 500,
      providerStatusCode,
      type: providerStatusCode
        ? metrics.HttpRequestType.DATA_PROVIDER_HIT
        : metrics.HttpRequestType.ADAPTER_ERROR,
    })
    throw error
  }
}

// Strip the `debug` property from responses unless debug output is enabled
export const withDebug: Middleware = async (execute, context) => async (input: AdapterRequest) => {
  const result = await execute(input, context)
  if (util.isDebug()) return result
  // Drop the debug payload before the response reaches the caller
  const { debug: _omitted, ...withoutDebug } = result
  return withoutDebug
}

export const withNormalizedInput: <C extends Config>(
endpointSelector?: (request: AdapterRequest) => APIEndpoint<C>,
) => Middleware = (endpointSelector) => async (execute, context) => async (input: AdapterRequest) => {
const normalizedInput = endpointSelector ? normalizeInput(input, endpointSelector(input)) : input
return execute(normalizedInput, context)
}

export const makeMiddleware = <C extends Config>(
execute: Execute,
makeWsHandler?: MakeWSHandler,
Expand All @@ -185,22 +63,22 @@ export const makeMiddleware = <C extends Config>(
const warmerMiddleware = [
withCache(storeSlice('burstLimit')),
RateLimit.withRateLimit(storeSlice('rateLimit')),
withStatusCode,
withNormalizedInput(endpointSelector),
].concat(metrics.METRICS_ENABLED ? [withMetrics] : [])
statusCode.withStatusCode,
normalize.withNormalizedInput(endpointSelector),
].concat(metrics.METRICS_ENABLED ? [metrics.withMetrics] : [])

return [
withLogger,
ioLogger.withIOLogger,
withCache(storeSlice('burstLimit')),
cacheWarmer.withCacheWarmer(storeSlice('cacheWarmer'), warmerMiddleware, {
store: storeSlice('ws'),
makeWSHandler: makeWsHandler,
})(execute),
ws.withWebSockets(storeSlice('ws'), makeWsHandler),
RateLimit.withRateLimit(storeSlice('rateLimit')),
withStatusCode,
withNormalizedInput(endpointSelector),
].concat(metrics.METRICS_ENABLED ? [withMetrics, withDebug] : [withDebug])
statusCode.withStatusCode,
normalize.withNormalizedInput(endpointSelector),
].concat(metrics.METRICS_ENABLED ? [metrics.withMetrics, debug.withDebug] : [debug.withDebug])
}

// Wrap raw Execute function with middleware
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { getRandomRequiredEnv, getRandomEnv, getEnv } from '../util'
import { Config } from '@chainlink/types'
import { logger } from './logger'
import { logger } from '../modules/logger'

const ENV_API_KEY = 'API_KEY'
const ENV_API_ENDPOINT = 'API_ENDPOINT'
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { logger } from '../external-adapter'
import { logger } from '../../modules'
import limits from './limits.json'

export const DEFAULT_MINUTE_RATE_LIMIT = 60
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -379,9 +379,22 @@
"http": {
"free": {
"rateLimit1s": 10,
"rateLimit1m": 600,
"rateLimit1h": 34246.5,
"note": "600/m and 25M/mo for free"
"rateLimit1h": 69.44,
"note": "50k/mo for free"
},
"pro": {
"rateLimit1s": 10,
"rateLimit1h": 347.22,
"note": "250k/mo for pro"
},
"business": {
"rateLimit1s": 10,
"rateLimit1h": 1388.888,
"note": "1mil/mo for business"
},
"enterprise": {
"rateLimit1s": 10,
"note": "unlimited per month"
}
},
"ws": {}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import Ajv, { JSONSchemaType } from 'ajv'
import { logger } from '../external-adapter'
import { logger } from '../modules'
import path from 'path'
import { AdapterRequestData } from '@chainlink/types'

Expand Down
58 changes: 57 additions & 1 deletion packages/core/bootstrap/src/lib/metrics/index.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import * as client from 'prom-client'
import { parseBool } from '../util'
export * as util from './util'
import { WARMUP_REQUEST_ID } from '../middleware/cache-warmer/config'
import * as util from './util'
import { Middleware, AdapterRequest, AdapterMetricsMeta } from '@chainlink/types'

export const setupMetrics = (name: string): void => {
client.collectDefaultMetrics()
Expand All @@ -12,6 +14,58 @@ export const setupMetrics = (name: string): void => {

export const METRICS_ENABLED = parseBool(process.env.EXPERIMENTAL_METRICS_ENABLED)

// Records Prometheus metrics (request counter + duration histogram) around
// every adapter execution, labelling each observation with a feed id derived
// from the input. The same feed id is forwarded downstream via `metricsMeta`.
export const withMetrics: Middleware =
  async (execute, context) => async (input: AdapterRequest) => {
    const feedId = util.getFeedId(input)
    const metricsMeta: AdapterMetricsMeta = { feedId }

    // Start timing now; the returned callback fills in the outcome labels,
    // stops the timer, and bumps the counter — invoked exactly once on both
    // the success and the failure path.
    const beginObservation = () => {
      const labels: Parameters<typeof httpRequestsTotal.labels>[0] = {
        // Cache-warmer traffic is identified by its fixed request id
        is_cache_warming: String(input.id === WARMUP_REQUEST_ID),
        method: 'POST',
        feed_id: feedId,
      }
      const stopTimer = httpRequestDurationSeconds.startTimer()

      return (outcome: {
        providerStatusCode?: number
        statusCode?: number
        type?: HttpRequestType
      }) => {
        labels.type = outcome.type
        labels.status_code = util.normalizeStatusCode(outcome.statusCode)
        labels.provider_status_code = util.normalizeStatusCode(outcome.providerStatusCode)
        stopTimer()
        httpRequestsTotal.labels(labels).inc()
      }
    }

    const observe = beginObservation()
    try {
      const result = await execute({ ...input, metricsMeta }, context)
      // A maxAge (on data or on the response itself) marks a cache hit
      const servedFromCache = Boolean(result.data.maxAge || (result as any).maxAge)
      observe({
        statusCode: result.statusCode,
        type: servedFromCache ? HttpRequestType.CACHE_HIT : HttpRequestType.DATA_PROVIDER_HIT,
      })
      return { ...result, metricsMeta: { ...result.metricsMeta, ...metricsMeta } }
    } catch (error) {
      const providerStatusCode: number | undefined = error.cause?.response?.status
      observe({
        statusCode: providerStatusCode ? 200 : 500,
        providerStatusCode,
        type: providerStatusCode
          ? HttpRequestType.DATA_PROVIDER_HIT
          : HttpRequestType.ADAPTER_ERROR,
      })
      throw error
    }
  }

export enum HttpRequestType {
CACHE_HIT = 'cacheHit',
DATA_PROVIDER_HIT = 'dataProviderHit',
Expand Down Expand Up @@ -62,3 +116,5 @@ export const httpRateLimit = new client.Counter({
name: 'http_requests_rate_limit',
help: 'The number of denied requests because of server rate limiting',
})

export * as util from './util'
2 changes: 1 addition & 1 deletion packages/core/bootstrap/src/lib/metrics/util.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { AdapterRequest } from '@chainlink/types'
import { logger, Validator } from '../external-adapter'
import { logger, Validator } from '../modules'
import { excludableAdapterRequestProperties } from '../util'
import * as crypto from 'crypto'

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import {
} from './reducer'
import * as actions from './actions'
import { WARMUP_BATCH_REQUEST_ID } from '../cache-warmer/config'
import { AdapterError, logger } from '../external-adapter'
import { AdapterError, logger } from '../../modules'

export * as actions from './actions'
export * as reducer from './reducer'
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import objectHash from 'object-hash'
import { getHashOpts } from '../util'
import { logger } from '../external-adapter'
import { getHashOpts } from '../../util'
import { logger } from '../../modules'

export const WARMUP_REQUEST_ID = '9001'
export const WARMUP_BATCH_REQUEST_ID = '9002'
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ import {
takeUntil,
withLatestFrom,
} from 'rxjs/operators'
import { RootState } from '../..'
import { RootState } from '../../..'
import {
warmupExecute,
warmupFailed,
Expand Down Expand Up @@ -43,7 +43,7 @@ import {
import { concatenateBatchResults, getSubscriptionKey, splitIntoBatches } from './util'
import { getTTL, getMaxAgeOverride } from '../cache/ttl'
import * as metrics from './metrics'
import { getFeedId } from '../metrics/util'
import { getFeedId } from '../../metrics/util'
import { PayloadAction } from '@reduxjs/toolkit'

export interface EpicDependencies {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
import { AdapterRequest, Execute, MakeWSHandler, Middleware } from '@chainlink/types'
import { Store } from 'redux'
import { withMiddleware } from '../../index'
import { logger } from '../external-adapter'
import { getFeedId } from '../metrics/util'
import * as util from '../util'
import { withMiddleware } from '../../../index'
import { logger } from '../../modules'
import { getFeedId } from '../../metrics/util'
import * as util from '../../util'
import { getWSConfig } from '../ws/config'
import { getSubsId, RootState as WSState } from '../ws/reducer'
import { separateBatches } from '../ws/utils'
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { AdapterRequest, Execute } from '@chainlink/types'
import { combineReducers, createReducer } from '@reduxjs/toolkit'
import { logger } from '../external-adapter'
import { logger } from '../../modules'
import * as actions from './actions'
import { getSubscriptionKey } from './util'
import { merge, uniq } from 'lodash'
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { WarmupExecutePayload, WarmupSubscribedPayload } from './actions'
import { get } from './config'
import { BatchableProperty, SubscriptionData } from './reducer'
import { AdapterRequest, AdapterResponse } from '@chainlink/types'
import { hash } from '../util'
import { hash } from '../../util'

const conf = get()
export function getSubscriptionKey(
Expand Down
Loading

0 comments on commit 24934c5

Please sign in to comment.