test: fix fuzzing by cleaning out tokenMap fields from schema (#34)
fubuloubu authored Jul 18, 2023
1 parent 2df521e commit 4bfbe4d
Showing 1 changed file with 11 additions and 8 deletions.
19 changes: 11 additions & 8 deletions tests/functional/test_schema_fuzzing.py
@@ -1,5 +1,5 @@
 import pytest
-import requests
+import requests  # type: ignore[import]
 from hypothesis import HealthCheck, given, settings
 from hypothesis_jsonschema import from_schema
 from pydantic import ValidationError
@@ -9,13 +9,16 @@
 TOKENLISTS_SCHEMA = "https://uniswap.org/tokenlist.schema.json"
 
 
-def clean_iso_timestamps(tl: dict) -> dict:
-    """
-    Timestamps can be in any format, and our processing handles it okay
-    However, for testing purposes, we want the output format to line up,
-    and unfortunately there is some ambiguity in ISO timestamp formats.
-    """
+def clean_data(tl: dict) -> dict:
+    # NOTE: Timestamps can be in any format, and our processing handles it okay
+    # However, for testing purposes, we want the output format to line up,
+    # and unfortunately there is some ambiguity in ISO timestamp formats.
     tl["timestamp"] = tl["timestamp"].replace("Z", "+00:00")
+
+    # NOTE: We do not implement `tokenMap` schema version yet
+    if "tokenMap" in tl:
+        del tl["tokenMap"]
+
     return tl
 
 
@@ -24,7 +27,7 @@ def clean_iso_timestamps(tl: dict) -> dict:
 @settings(suppress_health_check=(HealthCheck.too_slow,))
 def test_schema(token_list):
     try:
-        assert TokenList.parse_obj(token_list).dict() == clean_iso_timestamps(token_list)
+        assert TokenList.parse_obj(token_list).dict() == clean_data(token_list)
 
     except (ValidationError, ValueError):
         pass  # Expect these kinds of errors
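
Why the new `tokenMap` cleanup is needed: the `TokenList` model apparently declares no `tokenMap` field, so any fuzzer-generated `tokenMap` key is silently dropped during the `parse_obj(...).dict()` round-trip, and the equality assertion could never hold against the uncleaned input. A minimal sketch of that failure mode, assuming pydantic v1 semantics (suggested by the diff's `parse_obj`/`dict` calls) and using a hypothetical stand-in model, not the project's actual `TokenList`:

import pydantic  # pydantic v1 API assumed

class Example(pydantic.BaseModel):
    timestamp: str  # stand-in for the fields TokenList actually declares

raw = {"timestamp": "2023-07-18T00:00:00+00:00", "tokenMap": {}}

# pydantic v1 ignores unknown fields by default, so `tokenMap` vanishes on round-trip:
assert Example.parse_obj(raw).dict() == {"timestamp": "2023-07-18T00:00:00+00:00"}

# ...which is why comparing against the raw, uncleaned input would fail:
assert Example.parse_obj(raw).dict() != raw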
