Skip to content

Commit

Permalink
Compress and compact response
Browse files Browse the repository at this point in the history
  • Loading branch information
Tcintra committed Dec 13, 2023
1 parent 472036c commit f1a02b6
Show file tree
Hide file tree
Showing 3 changed files with 41 additions and 12 deletions.
35 changes: 25 additions & 10 deletions src/app/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,56 +4,71 @@
from flask import Flask, jsonify, request
from flask.wrappers import Response
from flask_sqlalchemy import SQLAlchemy
from flask_compress import Compress
from ..db.datahandler import DataHandler
from ..configs import URI

db = SQLAlchemy()

app = Flask(__name__)
Compress(app) # add gzip compress middleware

app.config["SQLALCHEMY_DATABASE_URI"] = URI
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False


db.init_app(app)
app.json.compact = True # type: ignore [attr-defined]


# TODO add response LRU caching
@app.route("/quotes", methods=["GET"])
def get_quotes() -> Response:
"""
Get 1Inch quotes.
Parameters
----------
pair : tuple | None
The pair to get quotes for. If not provided,
all pairs are returned.
start : int | None
start : int | None, default=None
The start timestamp to get quotes for. If not provided,
all quotes until `end` are returned.
end : int | None
end : int | None, default=None
The end timestamp to get quotes for. If not provided,
all quotes from `start` are returned.
cols : list | None
pair : tuple | None, default=None
The pair to get quotes for. If not provided,
all pairs are returned.
cols : list | None, default=None
The columns to return. If not provided, all columns are returned.
process : bool
process : bool, default=True
Whether to process the quotes. If processed, the returned quotes
will be grouped by `hour` and a `price_impact` column will be added.
Refer to :func:`src.db.datahandler.DataHandler.process_quotes`.
include-ref-price : bool, default=False
Whether to include the inferred reference price for the
price impact calc.
Returns
-------
flask.wrappers.Response
The quotes.
"""
pair = request.args.get("pair", type=tuple)
start = request.args.get("start", type=int)
end = request.args.get("end", type=int)
pair = request.args.get("pair", type=tuple)
cols = request.args.get("cols", type=list)
process = request.args.get("process", True, type=bool)
include_ref_price = request.args.get("include-ref-price", False, type=bool)

with DataHandler() as datahandler:
try:
return jsonify(
datahandler.get_quotes(
pair, start, end, cols=cols, process=process
pair,
start,
end,
cols=cols,
process=process,
include_ref_price=include_ref_price,
).to_dict(orient="records")
)
except Exception as e: # pylint: disable=broad-except
Expand Down
14 changes: 12 additions & 2 deletions src/db/datahandler.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,6 +147,7 @@ def get_quotes(
end: int | None = None,
cols: List[str] | None = None,
process: bool = False,
include_ref_price: bool = False,
) -> pd.DataFrame:
"""
Get 1inch quotes from database. Filter
Expand All @@ -173,10 +174,12 @@ def get_quotes(
return pd.DataFrame()
results = pd.DataFrame.from_dict(results)
if process:
results = self.process_quotes(results)
results = self.process_quotes(results, include_ref_price)
return results

def process_quotes(self, df: pd.DataFrame) -> pd.DataFrame:
def process_quotes(
self, df: pd.DataFrame, include_ref_price: bool = False
) -> pd.DataFrame:
"""
Performs the following processing steps:
1. Create datetime index (floored to hours).
Expand Down Expand Up @@ -212,6 +215,13 @@ def process_quotes(self, df: pd.DataFrame) -> pd.DataFrame:
"reference_price"
]

drop = ["hour"]

if not include_ref_price:
drop.append("reference_price")

df.drop(columns=drop, inplace=True)

df.set_index(["src", "dst"], inplace=True)
df.sort_index(inplace=True)

Expand Down
4 changes: 4 additions & 0 deletions src/db/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,3 +46,7 @@ class Quote(Base):
price = Column(Float)
protocols = Column(JSONB)
timestamp = Column(Integer)

# TODO migrate db using Alembic and apply below index
# (index was created manually via CLI for now.)
# idx_timestamp = Index('idx_timestamp', timestamp)

0 comments on commit f1a02b6

Please sign in to comment.