diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 165018d1..67169206 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -22,7 +22,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
         package:
           - "hooks/openfeature-hooks-opentelemetry"
           - "providers/openfeature-provider-flagd"
@@ -38,9 +38,10 @@ jobs:
           python-version: ${{ matrix.python-version }}
           cache: "pip"

-      - uses: bufbuild/buf-setup-action@v1.28.1
+      - uses: bufbuild/buf-action@v1
         with:
           github_token: ${{ github.token }}
+          setup_only: true

       - name: Install hatch
         run: pip install hatch
@@ -49,8 +50,9 @@ jobs:
         run: hatch run cov
         working-directory: ${{ matrix.package }}

-      - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v4.3.0
+      - if: matrix.python-version == '3.11'
+        name: Upload coverage to Codecov
+        uses: codecov/codecov-action@v4.5.0
         with:
           name: Code Coverage for ${{ matrix.package }} on Python ${{ matrix.python-version }}
           directory: ${{ matrix.package }}
@@ -69,7 +71,7 @@ jobs:
           cache: "pip"

       - name: Run pre-commit
-        uses: pre-commit/action@v3.0.0
+        uses: pre-commit/action@v3.0.1

   sast:
     runs-on: ubuntu-latest
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ac517000..d2ec28d6 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,14 +1,14 @@
 default_stages: [commit]
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.3.3
+    rev: v0.6.3
    hooks:
       - id: ruff
         args: [--fix]
       - id: ruff-format

   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.5.0
+    rev: v4.6.0
     hooks:
       - id: check-toml
       - id: check-yaml
@@ -16,7 +16,7 @@ repos:
       - id: check-merge-conflict

   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.9.0
+    rev: v1.11.2
     hooks:
       - id: mypy
         args: [--python-version=3.8]
@@ -25,6 +25,7 @@ repos:
           - opentelemetry-api
           - types-protobuf
           - types-PyYAML
+          - types-requests
           - mmh3
           - semver
           - panzi-json-logic
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index edcfb397..fc1bf3c1 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,4 +1,5 @@
 {
   "hooks/openfeature-hooks-opentelemetry": "0.1.3",
-  "providers/openfeature-provider-flagd": "0.1.5"
+  "providers/openfeature-provider-flagd": "0.1.5",
+  "providers/openfeature-provider-ofrep": "0.1.0"
 }
diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/__init__.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/__init__.py
index 53e17938..d0d46f59 100644
--- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/__init__.py
+++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/__init__.py
@@ -1,7 +1,5 @@
 import typing

-from typing_extensions import Protocol
-
 from openfeature.evaluation_context import EvaluationContext
 from openfeature.flag_evaluation import FlagResolutionDetails

@@ -9,7 +7,7 @@
 from .in_process import InProcessResolver


-class AbstractResolver(Protocol):
+class AbstractResolver(typing.Protocol):
     def shutdown(self) -> None: ...

     def resolve_boolean_details(
diff --git a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/custom_ops.py b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/custom_ops.py
index 17763615..44bb5a7a 100644
--- a/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/custom_ops.py
+++ b/providers/openfeature-provider-flagd/src/openfeature/contrib/provider/flagd/resolvers/process/custom_ops.py
@@ -1,5 +1,6 @@
 import logging
 import typing
+from dataclasses import dataclass

 import mmh3
 import semver
@@ -10,6 +11,12 @@
 logger = logging.getLogger("openfeature.contrib")


+@dataclass
+class Fraction:
+    variant: str
+    weight: int = 1
+
+
 def fractional(data: dict, *args: JsonLogicArg) -> typing.Optional[str]:
     if not args:
         logger.error("No arguments provided to fractional operator.")
@@ -32,28 +39,51 @@ def fractional(data: dict, *args: JsonLogicArg) -> typing.Optional[str]:
         return None

     hash_ratio = abs(mmh3.hash(bucket_by)) / (2**31 - 1)
-    bucket = int(hash_ratio * 100)
+    bucket = hash_ratio * 100

+    total_weight = 0
+    fractions = []
     for arg in args:
-        if (
-            not isinstance(arg, (tuple, list))
-            or len(arg) != 2
-            or not isinstance(arg[0], str)
-            or not isinstance(arg[1], int)
-        ):
-            logger.error("Fractional variant weights must be (str, int) tuple")
-            return None
-    variant_weights: typing.Tuple[typing.Tuple[str, int]] = args  # type: ignore[assignment]
-
-    range_end = 0
-    for variant, weight in variant_weights:
-        range_end += weight
+        fraction = _parse_fraction(arg)
+        if fraction:
+            fractions.append(fraction)
+            total_weight += fraction.weight
+
+    range_end: float = 0
+    for fraction in fractions:
+        range_end += fraction.weight * 100 / total_weight
         if bucket < range_end:
-            return variant
+            return fraction.variant

     return None


+def _parse_fraction(arg: JsonLogicArg) -> typing.Optional[Fraction]:
+    if not isinstance(arg, (tuple, list)) or not arg:
+        logger.error(
+            "Fractional variant weights must be (str, int) tuple or [str] list"
+        )
+        return None
+
+    if not isinstance(arg[0], str):
+        logger.error(
+            "Fractional variant identifier (first element) isn't of type 'str'"
+        )
+        return None
+
+    if len(arg) >= 2 and not isinstance(arg[1], int):
+        logger.error(
+            "Fractional variant weight value (second element) isn't of type 'int'"
+        )
+        return None
+
+    fraction = Fraction(variant=arg[0])
+    if len(arg) >= 2:
+        fraction.weight = arg[1]
+
+    return fraction
+
+
 def starts_with(data: dict, *args: JsonLogicArg) -> typing.Optional[bool]:
     def f(s1: str, s2: str) -> bool:
         return s1.startswith(s2)
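A note on the reworked fractional operator above: weights no longer have to sum to 100 (each variant gets a share of the bucket space proportional to its weight), and a bare ["variant"] entry now defaults to weight 1. The following standalone sketch reproduces the bucketing math for illustration; pick_variant is a hypothetical name, not part of this patch:

    import mmh3

    def pick_variant(bucket_by, *variants):
        # Each entry is a bare name (weight defaults to 1) or a (name, weight)
        # pair, mirroring the patched _parse_fraction behavior.
        parsed = [(v, 1) if isinstance(v, str) else (v[0], v[1]) for v in variants]
        total = sum(weight for _, weight in parsed)

        # Same hashing as the operator: signed 32-bit murmur3 mapped into [0, 100).
        bucket = abs(mmh3.hash(bucket_by)) / (2**31 - 1) * 100

        range_end = 0.0
        for name, weight in parsed:
            # Each variant claims a slice of [0, 100) proportional to its weight.
            range_end += weight * 100 / total
            if bucket < range_end:
                return name
        return None

    # Weights 1/1/2 yield a 25/25/50 split without summing to 100.
    print(pick_variant("flag-keyuser-123", "red", "blue", ("green", 2)))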
diff --git a/providers/openfeature-provider-flagd/tests/flags/invalid-fractional-args-wrong-content.json b/providers/openfeature-provider-flagd/tests/flags/invalid-fractional-args-wrong-content.json
new file mode 100644
index 00000000..a40e34ba
--- /dev/null
+++ b/providers/openfeature-provider-flagd/tests/flags/invalid-fractional-args-wrong-content.json
@@ -0,0 +1,16 @@
+{
+  "flags": {
+    "basic-flag": {
+      "state": "ENABLED",
+      "variants": {
+        "default": "default",
+        "true": "true",
+        "false": "false"
+      },
+      "defaultVariant": "default",
+      "targeting": {
+        "fractional": [[]]
+      }
+    }
+  }
+}
diff --git a/providers/openfeature-provider-flagd/tests/flags/invalid-fractional-weights-strings.json b/providers/openfeature-provider-flagd/tests/flags/invalid-fractional-weights-strings.json
new file mode 100644
index 00000000..2f62796e
--- /dev/null
+++ b/providers/openfeature-provider-flagd/tests/flags/invalid-fractional-weights-strings.json
@@ -0,0 +1,19 @@
+{
+  "flags": {
+    "basic-flag": {
+      "state": "ENABLED",
+      "variants": {
+        "default": "default",
+        "true": "true",
+        "false": "false"
+      },
+      "defaultVariant": "default",
+      "targeting": {
+        "fractional": [
+          ["a", "one"],
+          ["b", "one"]
+        ]
+      }
+    }
+  }
+}
diff --git a/providers/openfeature-provider-flagd/tests/test_errors.py b/providers/openfeature-provider-flagd/tests/test_errors.py
index 4adb332e..3e576e8a 100644
--- a/providers/openfeature-provider-flagd/tests/test_errors.py
+++ b/providers/openfeature-provider-flagd/tests/test_errors.py
@@ -48,7 +48,9 @@ def test_file_load_errors(file_name: str):
         "invalid-semver-args.json",
         "invalid-stringcomp-args.json",
         "invalid-fractional-args.json",
+        "invalid-fractional-args-wrong-content.json",
         "invalid-fractional-weights.json",
+        "invalid-fractional-weights-strings.json",
     ],
 )
 def test_json_logic_parse_errors(file_name: str):
diff --git a/providers/openfeature-provider-ofrep/CHANGELOG.md b/providers/openfeature-provider-ofrep/CHANGELOG.md
index 825c32f0..f5e2aaa7 100644
--- a/providers/openfeature-provider-ofrep/CHANGELOG.md
+++ b/providers/openfeature-provider-ofrep/CHANGELOG.md
@@ -1 +1,11 @@
 # Changelog
+
+## 0.1.0 (2024-10-02)
+
+
+### ✨ New Features
+
+* add initial skeleton for OFREP provider ([#86](https://github.com/open-feature/python-sdk-contrib/issues/86)) ([00a5a18](https://github.com/open-feature/python-sdk-contrib/commit/00a5a18a76ef1435de67f312e384a97823bd185b))
+* implement basic functionality of OFREP provider ([#88](https://github.com/open-feature/python-sdk-contrib/issues/88)) ([cb42cc0](https://github.com/open-feature/python-sdk-contrib/commit/cb42cc0001e19793f391351a1ce5bafe1831025f))
+
+## Changelog
diff --git a/providers/openfeature-provider-ofrep/pyproject.toml b/providers/openfeature-provider-ofrep/pyproject.toml
index 42418256..693ec7bf 100644
--- a/providers/openfeature-provider-ofrep/pyproject.toml
+++ b/providers/openfeature-provider-ofrep/pyproject.toml
@@ -18,6 +18,7 @@ classifiers = [
 keywords = []
 dependencies = [
   "openfeature-sdk>=0.7.0",
+  "requests"
 ]
 requires-python = ">=3.8"

@@ -30,6 +31,8 @@ Homepage = "https://github.com/open-feature/python-sdk-contrib"
 dependencies = [
   "coverage[toml]>=6.5",
   "pytest",
+  "requests-mock",
+  "types-requests",
 ]

 [tool.hatch.envs.default.scripts]
diff --git a/providers/openfeature-provider-ofrep/src/openfeature/contrib/provider/ofrep/__init__.py b/providers/openfeature-provider-ofrep/src/openfeature/contrib/provider/ofrep/__init__.py
index 5201dd5d..90002d9f 100644
--- a/providers/openfeature-provider-ofrep/src/openfeature/contrib/provider/ofrep/__init__.py
+++ b/providers/openfeature-provider-ofrep/src/openfeature/contrib/provider/ofrep/__init__.py
@@ -1,49 +1,219 @@
-from typing import List, Optional, Union
+import re
+from datetime import datetime, timedelta, timezone
+from email.utils import parsedate_to_datetime
+from typing import Any, Callable, Dict, List, NoReturn, Optional, Tuple, Type, Union
+from urllib.parse import urljoin
+
+import requests
+from requests.exceptions import JSONDecodeError

 from openfeature.evaluation_context import EvaluationContext
-from openfeature.flag_evaluation import FlagResolutionDetails
+from openfeature.exception import (
+    ErrorCode,
+    FlagNotFoundError,
+    GeneralError,
+    InvalidContextError,
+    OpenFeatureError,
+    ParseError,
+    TargetingKeyMissingError,
+    TypeMismatchError,
+)
+from openfeature.flag_evaluation import FlagResolutionDetails, FlagType, Reason
 from openfeature.hook import Hook
 from openfeature.provider import AbstractProvider, Metadata

+__all__ = ["OFREPProvider"]
+
+
+TypeMap = Dict[
+    FlagType,
+    Union[
+        Type[bool],
+        Type[int],
+        Type[float],
+        Type[str],
+        Tuple[Type[dict], Type[list]],
+    ],
+]
+

 class OFREPProvider(AbstractProvider):
+    def __init__(
+        self,
+        base_url: str,
+        *,
+        headers_factory: Optional[Callable[[], Dict[str, str]]] = None,
+        timeout: float = 5.0,
+    ):
+        self.base_url = base_url
+        self.headers_factory = headers_factory
+        self.timeout = timeout
+        self.retry_after: Optional[datetime] = None
+        self.session = requests.Session()
+
     def get_metadata(self) -> Metadata:
         return Metadata(name="OpenFeature Remote Evaluation Protocol Provider")

     def get_provider_hooks(self) -> List[Hook]:
         return []

-    def resolve_boolean_details(  # type: ignore[empty-body]
+    def resolve_boolean_details(
         self,
         flag_key: str,
         default_value: bool,
         evaluation_context: Optional[EvaluationContext] = None,
-    ) -> FlagResolutionDetails[bool]: ...
+    ) -> FlagResolutionDetails[bool]:
+        return self._resolve(
+            FlagType.BOOLEAN, flag_key, default_value, evaluation_context
+        )

-    def resolve_string_details(  # type: ignore[empty-body]
+    def resolve_string_details(
         self,
         flag_key: str,
         default_value: str,
         evaluation_context: Optional[EvaluationContext] = None,
-    ) -> FlagResolutionDetails[str]: ...
+    ) -> FlagResolutionDetails[str]:
+        return self._resolve(
+            FlagType.STRING, flag_key, default_value, evaluation_context
+        )

-    def resolve_integer_details(  # type: ignore[empty-body]
+    def resolve_integer_details(
         self,
         flag_key: str,
         default_value: int,
         evaluation_context: Optional[EvaluationContext] = None,
-    ) -> FlagResolutionDetails[int]: ...
+    ) -> FlagResolutionDetails[int]:
+        return self._resolve(
+            FlagType.INTEGER, flag_key, default_value, evaluation_context
+        )

-    def resolve_float_details(  # type: ignore[empty-body]
+    def resolve_float_details(
         self,
         flag_key: str,
         default_value: float,
         evaluation_context: Optional[EvaluationContext] = None,
-    ) -> FlagResolutionDetails[float]: ...
+    ) -> FlagResolutionDetails[float]:
+        return self._resolve(
+            FlagType.FLOAT, flag_key, default_value, evaluation_context
+        )

-    def resolve_object_details(  # type: ignore[empty-body]
+    def resolve_object_details(
         self,
         flag_key: str,
         default_value: Union[dict, list],
         evaluation_context: Optional[EvaluationContext] = None,
-    ) -> FlagResolutionDetails[Union[dict, list]]: ...
+    ) -> FlagResolutionDetails[Union[dict, list]]:
+        return self._resolve(
+            FlagType.OBJECT, flag_key, default_value, evaluation_context
+        )
+
+    def _resolve(
+        self,
+        flag_type: FlagType,
+        flag_key: str,
+        default_value: Union[bool, str, int, float, dict, list],
+        evaluation_context: Optional[EvaluationContext] = None,
+    ) -> FlagResolutionDetails[Any]:
+        now = datetime.now(timezone.utc)
+        if self.retry_after and now <= self.retry_after:
+            raise GeneralError(
+                f"OFREP evaluation paused due to TooManyRequests until {self.retry_after}"
+            )
+        elif self.retry_after:
+            self.retry_after = None
+
+        try:
+            response = self.session.post(
+                urljoin(self.base_url, f"/ofrep/v1/evaluate/flags/{flag_key}"),
+                json=_build_request_data(evaluation_context),
+                timeout=self.timeout,
+                headers=self.headers_factory() if self.headers_factory else None,
+            )
+            response.raise_for_status()
+
+        except requests.RequestException as e:
+            self._handle_error(e)
+
+        try:
+            data = response.json()
+        except JSONDecodeError as e:
+            raise ParseError(str(e)) from e
+
+        _typecheck_flag_value(data["value"], flag_type)
+
+        return FlagResolutionDetails(
+            value=data["value"],
+            reason=Reason[data["reason"]],
+            variant=data["variant"],
+            flag_metadata=data["metadata"],
+        )
+
+    def _handle_error(self, exception: requests.RequestException) -> NoReturn:
+        response = exception.response
+        if response is None:
+            raise GeneralError(str(exception)) from exception
+
+        if response.status_code == 429:
+            retry_after = response.headers.get("Retry-After")
+            self.retry_after = _parse_retry_after(retry_after)
+            raise GeneralError(
+                f"Rate limited, retry after: {retry_after}"
+            ) from exception
+
+        try:
+            data = response.json()
+        except JSONDecodeError:
+            raise ParseError(str(exception)) from exception
+
+        error_code = ErrorCode(data["errorCode"])
+        error_details = data["errorDetails"]
+
+        if response.status_code == 404:
+            raise FlagNotFoundError(error_details) from exception
+
+        if error_code == ErrorCode.PARSE_ERROR:
+            raise ParseError(error_details) from exception
+        if error_code == ErrorCode.TARGETING_KEY_MISSING:
+            raise TargetingKeyMissingError(error_details) from exception
+        if error_code == ErrorCode.INVALID_CONTEXT:
+            raise InvalidContextError(error_details) from exception
+        if error_code == ErrorCode.GENERAL:
+            raise GeneralError(error_details) from exception
+
+        raise OpenFeatureError(error_code, error_details) from exception
+
+
+def _build_request_data(
+    evaluation_context: Optional[EvaluationContext],
+) -> Dict[str, Any]:
+    data: Dict[str, Any] = {}
+    if evaluation_context:
+        data["context"] = {}
+        if evaluation_context.targeting_key:
+            data["context"]["targetingKey"] = evaluation_context.targeting_key
+        data["context"].update(evaluation_context.attributes)
+    return data
+
+
+def _parse_retry_after(retry_after: Optional[str]) -> Optional[datetime]:
+    if retry_after is None:
+        return None
+    if re.match(r"^\s*[0-9]+\s*$", retry_after):
+        seconds = int(retry_after)
+        return datetime.now(timezone.utc) + timedelta(seconds=seconds)
+    return parsedate_to_datetime(retry_after)
+
+
+def _typecheck_flag_value(value: Any, flag_type: FlagType) -> None:
+    type_map: TypeMap = {
+        FlagType.BOOLEAN: bool,
+        FlagType.STRING: str,
+        FlagType.OBJECT: (dict, list),
+        FlagType.FLOAT: float,
+        FlagType.INTEGER: int,
+    }
+    _type = type_map.get(flag_type)
+    if not _type:
+        raise GeneralError(error_message="Unknown flag type")
+    if not isinstance(value, _type):
+        raise TypeMismatchError(f"Expected type {_type} but got {type(value)}")
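Two details of the error path above are worth spelling out: a 429 response stores a deadline parsed from the Retry-After header, and _resolve raises immediately until that deadline passes. Retry-After can be either delta-seconds or an HTTP-date (RFC 9110); a standalone illustration of the parsing logic, with assumed example values:

    import re
    from datetime import datetime, timedelta, timezone
    from email.utils import parsedate_to_datetime

    def parse_retry_after(retry_after):
        if retry_after is None:
            return None
        if re.match(r"^\s*[0-9]+\s*$", retry_after):
            # Delta-seconds form: an offset from the current time.
            return datetime.now(timezone.utc) + timedelta(seconds=int(retry_after))
        # HTTP-date form, parsed into a timezone-aware datetime.
        return parsedate_to_datetime(retry_after)

    print(parse_retry_after("120"))                            # now + 2 minutes
    print(parse_retry_after("Wed, 02 Oct 2024 07:28:00 GMT"))  # fixed deadline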
diff --git a/providers/openfeature-provider-ofrep/tests/conftest.py b/providers/openfeature-provider-ofrep/tests/conftest.py
new file mode 100644
index 00000000..33dae884
--- /dev/null
+++ b/providers/openfeature-provider-ofrep/tests/conftest.py
@@ -0,0 +1,8 @@
+import pytest
+
+from openfeature.contrib.provider.ofrep import OFREPProvider
+
+
+@pytest.fixture
+def ofrep_provider():
+    return OFREPProvider("http://localhost:8080")
diff --git a/providers/openfeature-provider-ofrep/tests/test_provider.py b/providers/openfeature-provider-ofrep/tests/test_provider.py
new file mode 100644
index 00000000..ca7dbba1
--- /dev/null
+++ b/providers/openfeature-provider-ofrep/tests/test_provider.py
@@ -0,0 +1,166 @@
+import pytest
+
+from openfeature.contrib.provider.ofrep import OFREPProvider
+from openfeature.evaluation_context import EvaluationContext
+from openfeature.exception import (
+    FlagNotFoundError,
+    GeneralError,
+    InvalidContextError,
+    ParseError,
+    TypeMismatchError,
+)
+from openfeature.flag_evaluation import FlagResolutionDetails, Reason
+
+
+def test_provider_init():
+    OFREPProvider(
+        "http://localhost:8080",
+        headers_factory=lambda: {"Authorization": "Bearer token"},
+    )
+
+
+@pytest.mark.parametrize(
+    "flag_type, resolved_value, default_value, get_method",
+    (
+        (bool, True, False, "resolve_boolean_details"),
+        (str, "String", "default", "resolve_string_details"),
+        (int, 100, 0, "resolve_integer_details"),
+        (float, 10.23, 0.0, "resolve_float_details"),
+        (
+            dict,
+            {
+                "String": "string",
+                "Number": 2,
+                "Boolean": True,
+            },
+            {},
+            "resolve_object_details",
+        ),
+        (
+            list,
+            ["string1", "string2"],
+            [],
+            "resolve_object_details",
+        ),
+    ),
+)
+def test_provider_successful_resolution(
+    flag_type, resolved_value, default_value, get_method, ofrep_provider, requests_mock
+):
+    requests_mock.post(
+        "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key",
+        json={
+            "key": "flag_key",
+            "reason": "TARGETING_MATCH",
+            "variant": str(resolved_value),
+            "metadata": {"foo": "bar"},
+            "value": resolved_value,
+        },
+    )
+
+    resolution = getattr(ofrep_provider, get_method)("flag_key", default_value)
+
+    assert resolution == FlagResolutionDetails(
+        value=resolved_value,
+        reason=Reason.TARGETING_MATCH,
+        variant=str(resolved_value),
+        flag_metadata={"foo": "bar"},
+    )
+
+
+def test_provider_flag_not_found(ofrep_provider, requests_mock):
+    requests_mock.post(
+        "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key",
+        status_code=404,
+        json={
+            "key": "flag_key",
+            "errorCode": "FLAG_NOT_FOUND",
+            "errorDetails": "Flag 'flag_key' not found",
+        },
+    )
+
+    with pytest.raises(FlagNotFoundError):
+        ofrep_provider.resolve_boolean_details("flag_key", False)
+
+
+def test_provider_invalid_context(ofrep_provider, requests_mock):
+    requests_mock.post(
+        "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key",
+        status_code=400,
+        json={
+            "key": "flag_key",
+            "errorCode": "INVALID_CONTEXT",
+            "errorDetails": "Invalid context provided",
+        },
+    )
+
+    with pytest.raises(InvalidContextError):
+        ofrep_provider.resolve_boolean_details("flag_key", False)
+
+
+def test_provider_invalid_response(ofrep_provider, requests_mock):
+    requests_mock.post(
+        "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key", text="invalid"
+    )
+
+    with pytest.raises(ParseError):
+        ofrep_provider.resolve_boolean_details("flag_key", False)
+
+
+def test_provider_evaluation_context(ofrep_provider, requests_mock):
+    def match_request_json(request):
+        return request.json() == {"context": {"targetingKey": "1", "foo": "bar"}}
+
+    requests_mock.post(
+        "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key",
+        json={
+            "key": "flag_key",
+            "reason": "TARGETING_MATCH",
+            "variant": "true",
+            "metadata": {},
+            "value": True,
+        },
+        additional_matcher=match_request_json,
+    )
+
+    context = EvaluationContext("1", {"foo": "bar"})
+    resolution = ofrep_provider.resolve_boolean_details(
+        "flag_key", False, evaluation_context=context
+    )
+
+    assert resolution == FlagResolutionDetails(
+        value=True,
+        reason=Reason.TARGETING_MATCH,
+        variant="true",
+    )
+
+
+def test_provider_retry_after_shortcircuit_resolution(ofrep_provider, requests_mock):
+    requests_mock.post(
+        "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key",
+        status_code=429,
+        headers={"Retry-After": "1"},
+    )
+
+    with pytest.raises(GeneralError, match="Rate limited, retry after: 1"):
+        ofrep_provider.resolve_boolean_details("flag_key", False)
+    with pytest.raises(
+        GeneralError, match="OFREP evaluation paused due to TooManyRequests"
+    ):
+        ofrep_provider.resolve_boolean_details("flag_key", False)
+
+
+def test_provider_typecheck_flag_value(ofrep_provider, requests_mock):
+    requests_mock.post(
+        "http://localhost:8080/ofrep/v1/evaluate/flags/flag_key",
+        json={
+            "key": "flag_key",
+            "reason": "TARGETING_MATCH",
+            "variant": "true",
+            "metadata": {},
+            "value": "true",
+        },
+    )
+
+    with pytest.raises(TypeMismatchError):
+        ofrep_provider.resolve_boolean_details("flag_key", False)
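For context, a minimal end-to-end usage sketch of the new provider with the OpenFeature Python SDK; the URL, port, and token are placeholders, and flagd is only one example of an OFREP-capable backend:

    from openfeature import api
    from openfeature.contrib.provider.ofrep import OFREPProvider

    # Register the OFREP provider globally. headers_factory is invoked on
    # every evaluation request, so short-lived auth tokens can be refreshed here.
    api.set_provider(
        OFREPProvider(
            "http://localhost:8016",
            headers_factory=lambda: {"Authorization": "Bearer <token>"},
            timeout=2.0,
        )
    )

    client = api.get_client()
    enabled = client.get_boolean_value("my-flag", False)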