From 2df521e5a4592e82f795850dffae97f2743296b9 Mon Sep 17 00:00:00 2001
From: El De-dog-lo <3859395+fubuloubu@users.noreply.github.com>
Date: Tue, 18 Jul 2023 00:18:01 -0400
Subject: [PATCH] fix: handle extensions (#27)

* WIP

* fix: bridge addresses are optional

* fix: update validators for extensions to function correctly

* test: fix warning

* refactor: don't serialize URLs to AnyUrl class

* fix: also include unparsed dicts

* test: remove test skips

* style: fix type extensions issue

* fix: there was a update reference bug, so flatten structure a bit

* test: skip new key that we don't want to handle yet

* chore: deprecate Python 3.7

* style: ignore typing error for the moment
---
 .github/workflows/test.yaml               |  2 +-
 setup.py                                  |  3 +-
 tests/functional/test_uniswap_examples.py | 17 ++---
 tokenlists/_cli.py                        |  2 +
 tokenlists/typing.py                      | 88 ++++++++++++++++++-----
 5 files changed, 85 insertions(+), 27 deletions(-)

diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
index e02a780..890ebfb 100644
--- a/.github/workflows/test.yaml
+++ b/.github/workflows/test.yaml
@@ -55,7 +55,7 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest, macos-latest]  # eventually add `windows-latest`
-        python-version: [3.7, 3.8, 3.9, "3.10", "3.11"]
+        python-version: [3.8, 3.9, "3.10", "3.11"]

     steps:
     - uses: actions/checkout@v3
diff --git a/setup.py b/setup.py
index 2fde40e..cd5cd40 100644
--- a/setup.py
+++ b/setup.py
@@ -64,7 +64,7 @@
     long_description_content_type="text/markdown",
     url="https://github.com/ApeWorX/py-tokenlists",
     include_package_data=True,
-    python_requires=">=3.7.2,<4",
+    python_requires=">=3.8,<4",
     install_requires=[
         "importlib-metadata ; python_version<'3.8'",
         "click>=8.1.3,<9",
@@ -89,7 +89,6 @@
         "Operating System :: MacOS",
         "Operating System :: POSIX",
         "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.7",
         "Programming Language :: Python :: 3.8",
         "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: 3.10",
diff --git a/tests/functional/test_uniswap_examples.py b/tests/functional/test_uniswap_examples.py
index 5937d6d..f733daa 100644
--- a/tests/functional/test_uniswap_examples.py
+++ b/tests/functional/test_uniswap_examples.py
@@ -2,13 +2,16 @@

 import github
 import pytest
-import requests
+import requests  # type: ignore[import]
 from pydantic import ValidationError

 from tokenlists import TokenList

 # NOTE: Must export GITHUB_ACCESS_TOKEN
-UNISWAP_REPO = github.Github(os.environ["GITHUB_ACCESS_TOKEN"]).get_repo("Uniswap/token-lists")
+UNISWAP_REPO = github.Github(auth=github.Auth.Token(os.environ["GITHUB_ACCESS_TOKEN"])).get_repo(
+    "Uniswap/token-lists"
+)
+
 UNISWAP_RAW_URL = "https://raw.githubusercontent.com/Uniswap/token-lists/master/test/schema/"


@@ -19,12 +22,10 @@ def test_uniswap_tokenlists(token_list_name):
     token_list = requests.get(UNISWAP_RAW_URL + token_list_name).json()

-    if token_list_name in (
-        "example-crosschain.tokenlist.json",
-        "extensions-valid-object.tokenlist.json",
-    ):
-        # TODO: Unskip once can handle object extensions
-        pytest.skip("https://github.com/ApeWorX/py-tokenlists/issues/20")
+    if token_list_name == "example.tokenlist.json":
+        # NOTE: No idea why this breaking change was necessary
+        #       https://github.com/Uniswap/token-lists/pull/420
+        token_list.pop("tokenMap")

     if "invalid" not in token_list_name:
         assert TokenList.parse_obj(token_list).dict() == token_list

diff --git a/tokenlists/_cli.py b/tokenlists/_cli.py
index 2ca4b3e..a642c02 100644
--- a/tokenlists/_cli.py
+++ b/tokenlists/_cli.py
@@ -1,3 +1,5 @@
+# TODO: Seems like Click 8.1.5 introduced this
+# mypy: disable-error-code=attr-defined
 import re

 import click
diff --git a/tokenlists/typing.py b/tokenlists/typing.py
index 8636e2f..6c0197b 100644
--- a/tokenlists/typing.py
+++ b/tokenlists/typing.py
@@ -1,6 +1,6 @@
 from datetime import datetime
 from itertools import chain
-from typing import Dict, List, Optional
+from typing import Any, Dict, List, Optional

 from pydantic import AnyUrl
 from pydantic import BaseModel as _BaseModel
@@ -28,15 +28,73 @@ class Config:
         froze = True


+class BridgeInfo(BaseModel):
+    tokenAddress: TokenAddress
+    originBridgeAddress: Optional[TokenAddress] = None
+    destBridgeAddress: Optional[TokenAddress] = None
+
+
 class TokenInfo(BaseModel):
     chainId: ChainId
     address: TokenAddress
     name: TokenName
     decimals: TokenDecimals
     symbol: TokenSymbol
-    logoURI: Optional[AnyUrl] = None
+    logoURI: Optional[str] = None
     tags: Optional[List[TagId]] = None
-    extensions: Optional[dict] = None
+    extensions: Optional[Dict[str, Any]] = None
+
+    @validator("logoURI")
+    def validate_uri(cls, v: Optional[str]) -> Optional[str]:
+        if v is None:
+            return v
+
+        if "://" not in v or not AnyUrl(v, scheme=v.split("://")[0]):
+            raise ValueError(f"'{v}' is not a valid URI")
+
+        return v
+
+    @validator("extensions", pre=True)
+    def parse_extensions(cls, v: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
+        # 1. Check extension depth first
+        def extension_depth(obj: Optional[Dict[str, Any]]) -> int:
+            if not isinstance(obj, dict) or len(obj) == 0:
+                return 0
+
+            return 1 + max(extension_depth(v) for v in obj.values())
+
+        if (depth := extension_depth(v)) > 3:
+            raise ValueError(f"Extension depth is greater than 3: {depth}")
+
+        # 2. Parse valid extensions
+        if v and "bridgeInfo" in v:
+            raw_bridge_info = v.pop("bridgeInfo")
+            v["bridgeInfo"] = {int(k): BridgeInfo.parse_obj(v) for k, v in raw_bridge_info.items()}
+
+        return v
+
+    @validator("extensions")
+    def extensions_must_contain_allowed_types(
+        cls, d: Optional[Dict[str, Any]]
+    ) -> Optional[Dict[str, Any]]:
+        if not d:
+            return d
+
+        # NOTE: `extensions` is mapping from `str` to either:
+        #       - a parsed `dict` type (e.g. `BaseModel`)
+        #       - a "simple" type (e.g. dict, string, integer or boolean value)
+        for key, val in d.items():
+            if val is not None and not isinstance(val, (BaseModel, str, int, bool, dict)):
+                raise ValueError(f"Incorrect extension field value: {val}")
+
+        return d
+
+    @property
+    def bridge_info(self) -> Optional[BridgeInfo]:
+        if self.extensions and "bridgeInfo" in self.extensions:
+            return self.extensions["bridgeInfo"]  # type: ignore
+
+        return None

     @validator("address")
     def address_must_hex(cls, v: str):
@@ -57,18 +115,6 @@ def decimals_must_be_uint8(cls, v: TokenDecimals):

         return v

-    @validator("extensions")
-    def extensions_must_contain_simple_types(cls, d: Optional[dict]) -> Optional[dict]:
-        if not d:
-            return d
-
-        # `extensions` is `Dict[str, Union[str, int, bool, None]]`, but pydantic mutates entries
-        for val in d.values():
-            if not isinstance(val, (str, int, bool)) and val is not None:
-                raise ValueError(f"Incorrect extension field value: {val}")
-
-        return d
-

 class Tag(BaseModel):
     name: str
@@ -109,7 +155,7 @@ class TokenList(BaseModel):
     tokens: List[TokenInfo]
     keywords: Optional[List[str]] = None
     tags: Optional[Dict[TagId, Tag]] = None
-    logoURI: Optional[AnyUrl] = None
+    logoURI: Optional[str] = None

     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
@@ -134,6 +180,16 @@ class Config:
         # NOTE: Not frozen as we may need to dynamically modify this
         froze = False

+    @validator("logoURI")
+    def validate_uri(cls, v: Optional[str]) -> Optional[str]:
+        if v is None:
+            return v
+
+        if "://" not in v or not AnyUrl(v, scheme=v.split("://")[0]):
+            raise ValueError(f"'{v}' is not a valid URI")
+
+        return v
+
     def dict(self, *args, **kwargs) -> dict:
         data = super().dict(*args, **kwargs)
         # NOTE: This was the easiest way to make sure this property returns isoformat
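
The following is a minimal usage sketch, not part of the patch above, showing what the new
extension handling in `TokenInfo` enables. It assumes the pydantic v1 API (`parse_obj`) used
throughout the diff, and that the `address_must_hex` validator accepts a checksummed hex
address; the token and bridge addresses are illustrative placeholders only.

    from tokenlists.typing import TokenInfo

    # Sketch under the assumptions stated above; not the author's test code.
    token = TokenInfo.parse_obj(
        {
            "chainId": 1,
            "address": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",  # placeholder address
            "name": "USD Coin",
            "decimals": 6,
            "symbol": "USDC",
            "extensions": {
                # JSON keys arrive as strings; `parse_extensions` (pre=True) converts
                # them to ints and parses each value into a `BridgeInfo` model.
                "bridgeInfo": {
                    "10": {"tokenAddress": "0x7F5c764cBc14f9669B88837ca1490cCa17c31607"}
                }
            },
        }
    )

    # After validation the extension holds parsed models keyed by chain ID,
    # and the `bridge_info` property returns that same parsed mapping.
    bridge = token.extensions["bridgeInfo"][10]
    print(bridge.tokenAddress)

    # Nesting an extension more than 3 levels deep raises a ValueError in
    # `parse_extensions`; other values pass through if they are str, int,
    # bool, dict, None, or an already-parsed model.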