From 958dfef4029289a69ed599bba26323048c54852b Mon Sep 17 00:00:00 2001 From: Wim De Clercq Date: Wed, 9 Oct 2024 11:19:49 +0200 Subject: [PATCH 1/6] Refactor crabpy_pyramid include. Issue #218 --- .gitignore | 4 + crabpy_pyramid/__init__.py | 235 ++---------------- crabpy_pyramid/adressenregister.py | 138 ++++++++++ crabpy_pyramid/capakey.py | 106 ++++++++ crabpy_pyramid/tests/__init__.py | 0 .../tests/renderers/test_capakey.py | 231 ++++++++--------- crabpy_pyramid/tests/test_capakey.py | 138 ++++------ crabpy_pyramid/tests/test_functional.py | 8 +- crabpy_pyramid/tests/test_utils.py | 86 +++---- crabpy_pyramid/tests/tests.py | 99 -------- crabpy_pyramid/utils.py | 73 +++--- 11 files changed, 509 insertions(+), 609 deletions(-) create mode 100644 crabpy_pyramid/adressenregister.py create mode 100644 crabpy_pyramid/capakey.py create mode 100644 crabpy_pyramid/tests/__init__.py delete mode 100644 crabpy_pyramid/tests/tests.py diff --git a/.gitignore b/.gitignore index 1d9ca06..4c9d905 100644 --- a/.gitignore +++ b/.gitignore @@ -6,6 +6,7 @@ # Packages *.egg *.egg-info +.venv dist build eggs @@ -43,4 +44,7 @@ dogpile_data # Rope .ropeproject + +# Editors .idea/ +.vscode/ diff --git a/crabpy_pyramid/__init__.py b/crabpy_pyramid/__init__.py index 5067c3e..b2e5047 100644 --- a/crabpy_pyramid/__init__.py +++ b/crabpy_pyramid/__init__.py @@ -1,192 +1,19 @@ import logging -import os from collections.abc import Sequence -from crabpy.client import AdressenRegisterClient -from crabpy.gateway.adressenregister import Gateway -from crabpy.gateway.capakey import CapakeyRestGateway from pyramid.config import Configurator from pyramid.settings import asbool -from zope.interface import Interface -from crabpy_pyramid.renderers.adressenregister import ( - json_item_renderer as adresreg_json_item_renderer, -) -from crabpy_pyramid.renderers.adressenregister import ( - json_list_renderer as adresreg_json_list_renderer, -) -from crabpy_pyramid.renderers.capakey import ( - json_item_renderer as capakey_json_item_renderer, -) -from crabpy_pyramid.renderers.capakey import ( - json_list_renderer as capakey_json_list_renderer, -) -log = logging.getLogger(__name__) +LOG = logging.getLogger(__name__) GENERATE_ETAG_ROUTE_NAMES = set() -class ICapakey(Interface): - pass - - -class IAdressenregister(Interface): - pass - - -def _parse_settings(settings): - defaults = { - "capakey.include": False, - "adressenregister.include": True, - "adressenregister.base_url": "https://api.basisregisters.vlaanderen.be", - "adressenregister.api_key": None, - "cache.file.root": "/tmp/dogpile_data", - } - args = defaults.copy() - if "crabpy.adressenregister.api_key" not in settings: - log.warning( - "No adressenregister.api_key set in settings. " - "The api might stop working after reaching the limit of x requests per day." - ) - - # booelean settings - for short_key_name in ( - "capakey.include", - "adressenregister.include", - ): - key_name = "crabpy.%s" % short_key_name - if key_name in settings: - args[short_key_name] = asbool( - settings.get(key_name, defaults.get(short_key_name)) - ) - - # string setting - for short_key_name in ( - "proxy.http", - "proxy.https", - "cache.file.root", - "adressenregister.base_url", - "adressenregister.api_key", - ): - key_name = "crabpy.%s" % short_key_name - if key_name in settings: - args[short_key_name] = settings.get(key_name) - - # cache configuration - for short_key_name in ( - "capakey.cache_config", - "adressenregister.cache_config", - ): - key_name = "crabpy.%s." 
% short_key_name - cache_config = {} - for skey in settings.keys(): - if skey.startswith(key_name): - cache_config[skey[len(key_name) :]] = settings.get(skey) - if cache_config: - args[short_key_name] = cache_config - - log.debug(settings) - log.debug(args) - return args - - -def _filter_settings(settings, prefix): - """ - Filter all settings to only return settings that start with a certain - prefix. - - :param dict settings: A settings dictionary. - :param str prefix: A prefix. - """ - ret = {} - for skey in settings.keys(): - if skey.startswith(prefix): - key = skey[len(prefix) :] - ret[key] = settings[skey] - return ret - - -def _build_capakey(registry, settings): - capakey = registry.queryUtility(ICapakey) - if capakey is not None: - return capakey - if "cache_config" in settings: - cache_config = settings["cache_config"] - del settings["cache_config"] - else: - cache_config = {} - gateway = CapakeyRestGateway(cache_config=cache_config) - - registry.registerUtility(gateway, ICapakey) - return registry.queryUtility(ICapakey) - - -def _build_adressenregister(registry, settings): - adressenregister = registry.queryUtility(IAdressenregister) - if adressenregister is not None: - return adressenregister - if "cache_config" in settings: - cache_config = settings["cache_config"] - del settings["cache_config"] - else: - cache_config = None - gateway = Gateway( - client=AdressenRegisterClient(settings["base_url"], settings["api_key"]), - cache_settings=cache_config, - ) - - registry.registerUtility(gateway, IAdressenregister) - return registry.queryUtility(IAdressenregister) - - -def get_capakey(registry): - """ - Get the Capakey Gateway - - :rtype: :class:`crabpy.gateway.capakey.CapakeyRestGateway` - """ - # argument might be a config or a request - regis = getattr(registry, "registry", None) - if regis is None: - regis = registry - - return regis.queryUtility(ICapakey) - - -def get_adressenregister(registry): - """ - Get the Adresssenregister Gateway - - :rtype: :class:`crabpy.gateway.adressenregister.Gateway` - # argument might be a config or a request - """ - # argument might be a config or a request - regis = getattr(registry, "registry", None) - if regis is None: - regis = registry - - return regis.queryUtility(IAdressenregister) - - -def _get_proxy_settings(settings): - base_settings = {} - http = settings.get("proxy.http", None) - https = settings.get("proxy.https", None) - if http or https: - base_settings["proxy"] = {} - if "proxy.http" in settings: - base_settings["proxy"]["http"] = settings["proxy.http"] - log.info("HTTP proxy: %s" % base_settings["proxy"]["http"]) - if "proxy.https" in settings: - base_settings["proxy"]["https"] = settings["proxy.https"] - log.info("HTTPS proxy: %s" % base_settings["proxy"]["https"]) - return base_settings - - def add_route(config, name, pattern, *args, **kwargs): """ - Adds a pyramid route to the config. All args and kwargs will be - passed on to config.add_route. + Add a pyramid route to the config with etag tween support. + + All args and kwargs will be passed on to config.add_route. This exists so the default behaviour of including crabpy will still be to cache all crabpy routes. @@ -197,8 +24,7 @@ def add_route(config, name, pattern, *args, **kwargs): def conditional_http_tween_factory(handler, registry): """ - Tween that adds ETag headers and tells Pyramid to enable - conditional responses where appropriate. + Tween that automatically adds ETag headers enables conditional responses. 
""" settings = registry.settings if hasattr(registry, "settings") else {} if "generate_etag_for.list" in settings: @@ -233,62 +59,33 @@ def conditional_http_tween(request): return conditional_http_tween -def includeme(config): +def includeme(config: Configurator): """ Include `crabpy_pyramid` in this `Pyramid` application. :param pyramid.config.Configurator config: A Pyramid configurator. """ - - settings = _parse_settings(config.registry.settings) - base_settings = _get_proxy_settings(settings) - + settings = config.registry.settings # http caching tween if not settings.get("etag_tween_disabled", False): config.add_tween("crabpy_pyramid.conditional_http_tween_factory") - # create cache - root = settings.get("cache.file.root", "/tmp/dogpile_data") - if not os.path.exists(root): - os.makedirs(root) - - capakey_settings = dict(_filter_settings(settings, "capakey."), **base_settings) - if "include" in capakey_settings: - log.info( + # capakey + if "crabpy.capakey.include" in settings: + LOG.info( "The 'capakey.include' setting is deprecated. Capakey will " "always be included." ) - log.info("Adding CAPAKEY Gateway.") - config.add_renderer("capakey_listjson", capakey_json_list_renderer) - config.add_renderer("capakey_itemjson", capakey_json_item_renderer) - _build_capakey(config.registry, capakey_settings) - config.add_request_method(get_capakey, "capakey_gateway") - config.add_directive("get_capakey", get_capakey) - config.include("crabpy_pyramid.routes.capakey") - config.scan("crabpy_pyramid.views.capakey") - - # adressenregister wordt afgekort tot adresreg - adresreg_settings = dict( - _filter_settings(settings, "adressenregister."), **base_settings - ) + config.include("crabpy_pyramid.capakey") - if adresreg_settings["include"]: - log.info("Adding adressen register Gateway.") - del adresreg_settings["include"] - config.add_renderer("adresreg_listjson", adresreg_json_list_renderer) - config.add_renderer("adresreg_itemjson", adresreg_json_item_renderer) - _build_adressenregister(config.registry, adresreg_settings) - config.add_directive("get_adressenregister", get_adressenregister) - config.add_request_method(get_adressenregister, "adressenregister_gateway") - config.include("crabpy_pyramid.routes.adressenregister") - config.scan("crabpy_pyramid.views.adressenregister") - config.scan("crabpy_pyramid.views.exceptions") + # adressenregister + if "crabpy.adressenregister.include" in settings: + if asbool(settings["crabpy.adressenregister.include"]): + config.include("crabpy_pyramid.adressenregister") def main(global_config, **settings): - """ - This function returns a Pyramid WSGI application. - """ + """Create a Pyramid WSGI application.""" config = Configurator(settings=settings) includeme(config) diff --git a/crabpy_pyramid/adressenregister.py b/crabpy_pyramid/adressenregister.py new file mode 100644 index 0000000..32bbdf6 --- /dev/null +++ b/crabpy_pyramid/adressenregister.py @@ -0,0 +1,138 @@ +""" +The adressenregister component for crabpy pyramid. 
+ +Known settings: +crabpy.adressenregister.include +crabpy.adressenregister.base_url +crabpy.adressenregister.api_key +crabpy.adressenregister.cache_config.* +""" + +import functools +import logging +from dataclasses import dataclass +from typing import Any +from typing import Mapping + +from crabpy.client import AdressenRegisterClient +from crabpy.gateway.adressenregister import Gateway +from pyramid.config import Configurator +from pyramid.registry import Registry +from pyramid.request import Request +from zope.interface import Interface + +from crabpy_pyramid.renderers.adressenregister import json_item_renderer +from crabpy_pyramid.renderers.adressenregister import json_list_renderer + + +LOG = logging.getLogger(__name__) + + +class IAdressenregister(Interface): + pass + + +@dataclass +class ParsedSettings: + settings: dict[str, str] + cache_config: dict[str, str] | None + + +def includeme(config: Configurator): + LOG.info("Adding adressenregister to the application.") + add_renderers(config) + build_adressenregister(config) + add_routes(config) + add_views(config) + + +def parse_settings(settings: Mapping[str, Any]) -> ParsedSettings: + parsed_settings = { # defaults + "base_url": "https://api.basisregisters.vlaanderen.be", + "api_key": None, + } + + # remove the `crabpy.adressenregister.` prefix from known settings + for short_key_name in ("base_url", "api_key"): + key_name = f"crabpy.adressenregister.{short_key_name}" + if key_name in settings: + parsed_settings[short_key_name] = settings.get(key_name) + + # cache configuration + prefix = "crabpy.adressenregister.cache_config." + cutoff = len(prefix) + cache_config = {} + for key, value in settings.items(): + if key.startswith(prefix): + cache_config[key[cutoff:]] = value + + LOG.debug(f"{ParsedSettings=}") + if "api_key" not in parsed_settings: + LOG.warning( + "No adressenregister api_key set in settings. " + "The api might stop working after reaching the limit of x requests per day." + ) + return ParsedSettings(settings=parsed_settings, cache_config=cache_config or None) + + +def build_adressenregister(config: Configurator) -> Gateway: + """ + Create an adressenregister Gateway and set it up for pyramid usage. + + This method does 3 things: + - Create the gateway + - Register it as a utility in the registry under `IAdressenregister` + - Add a request method `adressenregister_gateway` + """ + registry: Registry = config.registry # type: ignore + adressenregister = registry.queryUtility(IAdressenregister) + if adressenregister is not None: + return adressenregister + + # Start building + settings = registry.settings + parsed_settings = parse_settings(registry.settings) + settings = parsed_settings.settings + client = AdressenRegisterClient(settings["base_url"], settings["api_key"]) + gateway = Gateway(client=client, cache_settings=parsed_settings.cache_config) + + registry.registerUtility(gateway, IAdressenregister) + config.add_request_method(get_adressenregister, "adressenregister_gateway") + return gateway + + +def get_adressenregister(registry: Registry | Request) -> Gateway: + regis = getattr(registry, "registry", None) + if regis is None: + regis = registry + + return regis.queryUtility(IAdressenregister) + + +@functools.singledispatch +def get_adressenregister(arg) -> Gateway: + raise NotImplementedError(f"Invalid argument {arg}. 
Pass a request or registry.") + + +@get_adressenregister.register +def _(registry: Registry) -> Gateway: + return registry.queryUtility(IAdressenregister) + + +@get_adressenregister.register +def _(request: Request) -> Gateway: + return request.registry.queryUtility(IAdressenregister) + + +def add_views(config: Configurator): + config.scan("crabpy_pyramid.views.adressenregister") + config.scan("crabpy_pyramid.views.exceptions") + + +def add_routes(config: Configurator): + config.include("crabpy_pyramid.routes.adressenregister") + + +def add_renderers(config: Configurator): + config.add_renderer("adresreg_listjson", json_list_renderer) + config.add_renderer("adresreg_itemjson", json_item_renderer) diff --git a/crabpy_pyramid/capakey.py b/crabpy_pyramid/capakey.py new file mode 100644 index 0000000..22d828b --- /dev/null +++ b/crabpy_pyramid/capakey.py @@ -0,0 +1,106 @@ +""" +The capakey component for crabpy pyramid. + +Known settings: +crabpy.capakey.cache_config.* +""" + +import functools +import logging +from dataclasses import dataclass +from typing import Any +from typing import Mapping + +from crabpy.gateway.capakey import CapakeyRestGateway +from pyramid.config import Configurator +from pyramid.registry import Registry +from pyramid.request import Request +from zope.interface import Interface + +from crabpy_pyramid.renderers.capakey import json_item_renderer +from crabpy_pyramid.renderers.capakey import json_list_renderer + + +LOG = logging.getLogger(__name__) + + +class ICapakey(Interface): + pass + + +@dataclass +class ParsedSettings: + cache_config: dict[str, str] | None + + +def includeme(config: Configurator): + LOG.info("Adding capakey to the application.") + add_renderers(config) + build_capakey(config) + add_routes(config) + add_views(config) + + +def parse_settings(settings: Mapping[str, Any]) -> ParsedSettings: + # cache configuration + prefix = "crabpy.capakey.cache_config." + cutoff = len(prefix) + cache_config = {} + for key, value in settings.items(): + if key.startswith(prefix): + cache_config[key[cutoff:]] = value + + LOG.debug(f"{ParsedSettings=}") + return ParsedSettings(cache_config=cache_config or None) + + +def add_views(config: Configurator): + config.scan("crabpy_pyramid.views.capakey") + + +def add_routes(config: Configurator): + config.include("crabpy_pyramid.routes.capakey") + + +def add_renderers(config: Configurator): + config.add_renderer("capakey_listjson", json_list_renderer) + config.add_renderer("capakey_itemjson", json_item_renderer) + + +@functools.singledispatch +def get_capakey(arg) -> CapakeyRestGateway: + raise NotImplementedError(f"Invalid argument {arg}. Pass a request or registry.") + + +@get_capakey.register +def _(registry: Registry) -> CapakeyRestGateway: + return registry.queryUtility(ICapakey) + + +@get_capakey.register +def _(request: Request) -> CapakeyRestGateway: + return request.registry.queryUtility(ICapakey) + + +def build_capakey(config: Configurator) -> CapakeyRestGateway: + """ + Create an capakey Gateway and set it up for pyramid usage. 
+ + This method does 3 things: + - Create the gateway + - Register it as a utility in the registry under `ICapakey` + - Add a request method `capakey_gateway` + """ + registry: Registry = config.registry # type: ignore + capakey = get_capakey(registry) + if capakey is not None: + return capakey + + # Start building + settings = registry.settings + parsed_settings = parse_settings(registry.settings) + gateway = CapakeyRestGateway(cache_config=parsed_settings.cache_config) + + registry.registerUtility(gateway, ICapakey) + config.add_request_method(get_capakey, "capakey_gateway") + return gateway diff --git a/crabpy_pyramid/tests/__init__.py b/crabpy_pyramid/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/crabpy_pyramid/tests/renderers/test_capakey.py b/crabpy_pyramid/tests/renderers/test_capakey.py index 02c0bb4..21d5c9b 100644 --- a/crabpy_pyramid/tests/renderers/test_capakey.py +++ b/crabpy_pyramid/tests/renderers/test_capakey.py @@ -5,8 +5,6 @@ .. versionadded:: 0.1.0 """ -from __future__ import unicode_literals - import json import unittest @@ -28,87 +26,64 @@ def tearDown(self): del self.renderer def test_list_gemeenten(self): - gemeenten = [ - Gemeente(44021, 'Gent'), - Gemeente(31043, 'Knokke-Heist') - ] + gemeenten = [Gemeente(44021, "Gent"), Gemeente(31043, "Knokke-Heist")] dump = self.renderer(gemeenten, {}) self.assertEquals( json.loads(dump), - [ - { - 'id': 44021, - 'naam': 'Gent' - }, { - 'id': 31043, - 'naam': 'Knokke-Heist' - } - ] + [{"id": 44021, "naam": "Gent"}, {"id": 31043, "naam": "Knokke-Heist"}], ) def test_list_afdelingen(self): afdelingen = [ - Afdeling(44021, 'GENT 1 AFD', Gemeente(44021, 'Gent')), - Afdeling(31043, 'KNOKKE-HEIST 1 AFD', Gemeente(31043, 'Knokke-Heist')) + Afdeling(44021, "GENT 1 AFD", Gemeente(44021, "Gent")), + Afdeling(31043, "KNOKKE-HEIST 1 AFD", Gemeente(31043, "Knokke-Heist")), ] dump = self.renderer(afdelingen, {}) self.assertEquals( json.loads(dump), [ { - 'id': 44021, - 'naam': 'GENT 1 AFD', - 'gemeente': { - 'id': 44021, - 'naam': 'Gent' - } - }, { - 'id': 31043, - 'naam': 'KNOKKE-HEIST 1 AFD', - 'gemeente': { - 'id': 31043, - 'naam': 'Knokke-Heist' - } - } - ] + "id": 44021, + "naam": "GENT 1 AFD", + "gemeente": {"id": 44021, "naam": "Gent"}, + }, + { + "id": 31043, + "naam": "KNOKKE-HEIST 1 AFD", + "gemeente": {"id": 31043, "naam": "Knokke-Heist"}, + }, + ], ) def test_list_secties(self): - secties = [ - Sectie('A', Afdeling(44021, 'GENT 1 AFD', Gemeente(44021, 'Gent'))) - ] + secties = [Sectie("A", Afdeling(44021, "GENT 1 AFD", Gemeente(44021, "Gent")))] dump = self.renderer(secties, {}) self.assertEquals( json.loads(dump), [ { - 'id': 'A', - 'afdeling': { - 'id': 44021, - 'naam': 'GENT 1 AFD', - 'gemeente': { - 'id': 44021, - 'naam': 'Gent' - } - } + "id": "A", + "afdeling": { + "id": 44021, + "naam": "GENT 1 AFD", + "gemeente": {"id": 44021, "naam": "Gent"}, + }, } - ] + ], ) def test_list_percelen(self): percelen = [ Perceel( - '1154/02C000', + "1154/02C000", Sectie( - 'A', + "A", Afdeling( - 46013, - 'KRUIBEKE 1 AFD/KRUIBEKE/', - Gemeente(46013, 'Kruibeke') - ) + 46013, "KRUIBEKE 1 AFD/KRUIBEKE/", Gemeente(46013, "Kruibeke") + ), ), - '46013A1154/02C000', - '46013_A_1154_C_000_02' + "46013A1154/02C000", + "46013_A_1154_C_000_02", ) ] dump = self.renderer(percelen, {}) @@ -116,22 +91,19 @@ def test_list_percelen(self): json.loads(dump), [ { - 'id': '1154/02C000', - 'sectie': { - 'id': 'A', - 'afdeling': { - 'id': 46013, - 'naam': 'KRUIBEKE 1 AFD/KRUIBEKE/', - 'gemeente': { - 'id': 46013, - 'naam': 
'Kruibeke' - } - } + "id": "1154/02C000", + "sectie": { + "id": "A", + "afdeling": { + "id": 46013, + "naam": "KRUIBEKE 1 AFD/KRUIBEKE/", + "gemeente": {"id": 46013, "naam": "Kruibeke"}, + }, }, - 'percid': '46013_A_1154_C_000_02', - 'capakey': '46013A1154/02C000' + "percid": "46013_A_1154_C_000_02", + "capakey": "46013A1154/02C000", } - ] + ], ) @@ -146,109 +118,112 @@ def tearDown(self): def test_item_gemeente(self): g = Gemeente( 44021, - 'Gent', + "Gent", (104154.2225, 197300.703), - (94653.453, 185680.984, 113654.992, 208920.422) + (94653.453, 185680.984, 113654.992, 208920.422), ) dump = self.renderer(g, {}) self.assertEquals( json.loads(dump), { - 'id': 44021, - 'naam': 'Gent', - 'centroid': [104154.2225, 197300.703], - 'bounding_box': [94653.453, 185680.984, 113654.992, 208920.422] - } + "id": 44021, + "naam": "Gent", + "centroid": [104154.2225, 197300.703], + "bounding_box": [94653.453, 185680.984, 113654.992, 208920.422], + }, ) def test_item_afdeling(self): a = Afdeling( 44021, - 'GENT 1 AFD', - Gemeente(44021, 'Gent'), + "GENT 1 AFD", + Gemeente(44021, "Gent"), (104893.06375, 196022.244094), - (104002.076625, 194168.3415, 105784.050875, 197876.146688) + (104002.076625, 194168.3415, 105784.050875, 197876.146688), ) dump = self.renderer(a, {}) self.assertEquals( json.loads(dump), { - 'id': 44021, - 'naam': 'GENT 1 AFD', - 'gemeente': { - 'id': 44021, - 'naam': 'Gent' - }, - 'centroid': [104893.06375, 196022.244094], - 'bounding_box': [104002.076625, 194168.3415, 105784.050875, 197876.146688] - } + "id": 44021, + "naam": "GENT 1 AFD", + "gemeente": {"id": 44021, "naam": "Gent"}, + "centroid": [104893.06375, 196022.244094], + "bounding_box": [ + 104002.076625, + 194168.3415, + 105784.050875, + 197876.146688, + ], + }, ) def test_item_sectie(self): s = Sectie( - 'A', - Afdeling(44021, 'GENT 1 AFD', Gemeente(44021, 'Gent')), + "A", + Afdeling(44021, "GENT 1 AFD", Gemeente(44021, "Gent")), (104893.06375, 196022.244094), - (104002.076625, 194168.3415, 105784.050875, 197876.146688) + (104002.076625, 194168.3415, 105784.050875, 197876.146688), ) dump = self.renderer(s, {}) self.assertEquals( json.loads(dump), { - 'id': 'A', - 'afdeling': { - 'id': 44021, - 'naam': 'GENT 1 AFD', - 'gemeente': { - 'id': 44021, - 'naam': 'Gent' - }, + "id": "A", + "afdeling": { + "id": 44021, + "naam": "GENT 1 AFD", + "gemeente": {"id": 44021, "naam": "Gent"}, }, - 'centroid': [104893.06375, 196022.244094], - 'bounding_box': [104002.076625, 194168.3415, 105784.050875, 197876.146688] - } + "centroid": [104893.06375, 196022.244094], + "bounding_box": [ + 104002.076625, + 194168.3415, + 105784.050875, + 197876.146688, + ], + }, ) def test_item_perceel(self): p = Perceel( - id='1154/02C000', + id="1154/02C000", sectie=Sectie( - 'A', + "A", Afdeling( - 46013, - 'KRUIBEKE 1 AFD/KRUIBEKE/', - Gemeente(46013, 'Kruibeke') - ) + 46013, "KRUIBEKE 1 AFD/KRUIBEKE/", Gemeente(46013, "Kruibeke") + ), ), - capakey='40613A1154/02C000', percid='40613_A_1154_C_000_02', - capatype='capaty', cashkey='cashkey', + capakey="40613A1154/02C000", + percid="40613_A_1154_C_000_02", + capatype="capaty", + cashkey="cashkey", centroid=(104893.06375, 196022.244094), bounding_box=(104002.076625, 194168.3415, 105784.050875, 197876.146688), - shape={'shape': 'one'} + shape={"shape": "one"}, ) dump = self.renderer(p, {}) self.assertEquals( json.loads(dump), { - 'id': '1154/02C000', - 'sectie': { - 'id': 'A', - 'afdeling': { - 'id': 46013, - 'naam': 'KRUIBEKE 1 AFD/KRUIBEKE/', - 'gemeente': { - 'id': 46013, - 'naam': 'Kruibeke' - }, + "id": 
"1154/02C000", + "sectie": { + "id": "A", + "afdeling": { + "id": 46013, + "naam": "KRUIBEKE 1 AFD/KRUIBEKE/", + "gemeente": {"id": 46013, "naam": "Kruibeke"}, }, }, - 'capakey': '40613A1154/02C000', - 'percid': '40613_A_1154_C_000_02', - 'centroid': [104893.06375, 196022.244094], - 'bounding_box': [104002.076625, - 194168.3415, - 105784.050875, - 197876.146688], - 'shape': {'shape': 'one'} - } + "capakey": "40613A1154/02C000", + "percid": "40613_A_1154_C_000_02", + "centroid": [104893.06375, 196022.244094], + "bounding_box": [ + 104002.076625, + 194168.3415, + 105784.050875, + 197876.146688, + ], + "shape": {"shape": "one"}, + }, ) diff --git a/crabpy_pyramid/tests/test_capakey.py b/crabpy_pyramid/tests/test_capakey.py index f498d44..7e3bd26 100644 --- a/crabpy_pyramid/tests/test_capakey.py +++ b/crabpy_pyramid/tests/test_capakey.py @@ -1,102 +1,72 @@ # -*- coding: utf-8 -*- -''' +""" Testing of the capakey specific aspects. .. versionadded:: 0.1.0 -''' +""" -from crabpy.gateway.capakey import CapakeyRestGateway - -from crabpy_pyramid import ( - get_capakey, - _parse_settings, - _filter_settings, - _build_capakey, - ICapakey -) - -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa - - -class TestRegistry(object): - - def __init__(self, settings=None): +import unittest - if settings is None: - self.settings = {} - else: - self.settings = settings +from crabpy.gateway.capakey import CapakeyRestGateway +from pyramid import testing +from pyramid.registry import Registry - self.capakey = None +from crabpy_pyramid import capakey +from crabpy_pyramid.capakey import build_capakey +from crabpy_pyramid.capakey import get_capakey +from crabpy_pyramid.capakey import ICapakey - def queryUtility(self, iface): - return self.capakey - def registerUtility(self, capakey, iface): - self.capakey = capakey +class TestGetAndBuild(unittest.TestCase): + def setUp(self): + self.config = testing.setUp() + self.registry: Registry = self.config.registry # type: ignore -class TestGetAndBuild(unittest.TestCase): + def tearDown(self): + testing.tearDown() def test_get_capakey(self): - r = TestRegistry() - G = CapakeyRestGateway() - r.registerUtility(G, ICapakey) - G2 = get_capakey(r) - self.assertIsInstance(G, CapakeyRestGateway) - self.assertIsInstance(G2, CapakeyRestGateway) - self.assertEqual(G, G2) + gateway = CapakeyRestGateway() + self.registry.registerUtility(gateway, ICapakey) + gateway_2 = get_capakey(self.registry) + self.assertEqual(gateway, gateway_2) def test_build_capakey_already_exists(self): - r = TestRegistry() - G = CapakeyRestGateway() - r.registerUtility(G, ICapakey) - G2 = _build_capakey(r, {}) - self.assertIsInstance(G, CapakeyRestGateway) - self.assertIsInstance(G2, CapakeyRestGateway) - self.assertEqual(G, G2) + gateway = CapakeyRestGateway() + self.registry.registerUtility(gateway, ICapakey) + gateway_2 = build_capakey(self.config) + self.assertEqual(gateway, gateway_2) - def test_build_capakey_default_settings(self): - r = TestRegistry() - G = CapakeyRestGateway() - r.registerUtility(G, ICapakey) - G2 = _build_capakey(r, {}) - self.assertIsInstance(G, CapakeyRestGateway) - self.assertIsInstance(G2, CapakeyRestGateway) - self.assertEqual(G, G2) - - def test_build_capakey_custom_settings(self): + def test_parse_settings(self): settings = { - 'crabpy.cache.file.root': './dogpile_data/', - 'crabpy.capakey.permanent.backend': 'dogpile.cache.dbm', - 'crabpy.capakey.permanent.expiration_time': 604800, - 'crabpy.capakey.permanent.arguments.filename': 
'dogpile_data/capakey_permanent.dbm', - 'crabpy.capakey.long.backend': 'dogpile.cache.dbm', - 'crabpy.capakey.long.expiration_time': 86400, - 'crabpy.capakey.long.arguments.filename': 'dogpile_data/capakey_long.dbm', - 'crabpy.capakey.short.backend': 'dogpile.cache.dbm', - 'crabpy.capakey.short.expiration_time': 3600, - 'crabpy.capakey.short.arguments.filename': 'dogpile_data/capakey_short.dbm' + "crabpy.cache.file.root": "./dogpile_data/", + "crabpy.capakey.cache_config.permanent.backend": "dogpile.cache.dbm", + "crabpy.capakey.cache_config.permanent.expiration_time": 604800, + "crabpy.capakey.cache_config.permanent.arguments.filename": ( + "dogpile_data/capakey_permanent.dbm" + ), + "crabpy.capakey.cache_config.long.backend": "dogpile.cache.dbm", + "crabpy.capakey.cache_config.long.expiration_time": 86400, + "crabpy.capakey.cache_config.long.arguments.filename": ( + "dogpile_data/capakey_long.dbm" + ), + "crabpy.capakey.cache_config.short.backend": "dogpile.cache.dbm", + "crabpy.capakey.cache_config.short.expiration_time": 3600, + "crabpy.capakey.cache_config.short.arguments.filename": ( + "dogpile_data/capakey_short.dbm" + ), } - r = TestRegistry(settings) - capakey_settings = _filter_settings(_parse_settings(settings), 'capakey.') - if 'include' in capakey_settings: - del capakey_settings['include'] - G = _build_capakey(r, capakey_settings) - self.assertIsInstance(G, CapakeyRestGateway) - - -class TestSettings(unittest.TestCase): - - def _assert_contains_all_keys(self, args): - self.assertIn('proxy.http', args) - - def test_get_all_settings(self): - settings = { - 'crabpy.proxy.http': 'test' + parsed_settings = capakey.parse_settings(settings) + self.assertIsNotNone(parsed_settings) + expected = { + "long.arguments.filename": "dogpile_data/capakey_long.dbm", + "long.backend": "dogpile.cache.dbm", + "long.expiration_time": 86400, + "permanent.arguments.filename": "dogpile_data/capakey_permanent.dbm", + "permanent.backend": "dogpile.cache.dbm", + "permanent.expiration_time": 604800, + "short.arguments.filename": "dogpile_data/capakey_short.dbm", + "short.backend": "dogpile.cache.dbm", + "short.expiration_time": 3600, } - args = _parse_settings(settings) - self._assert_contains_all_keys(args) - self.assertEqual('test', args['proxy.http']) + self.assertEqual(expected, parsed_settings.cache_config) diff --git a/crabpy_pyramid/tests/test_functional.py b/crabpy_pyramid/tests/test_functional.py index 2cb8d6e..1b0fec4 100644 --- a/crabpy_pyramid/tests/test_functional.py +++ b/crabpy_pyramid/tests/test_functional.py @@ -13,7 +13,6 @@ from pyramid import testing from webtest import TestApp -import crabpy_pyramid from crabpy_pyramid import main from crabpy_pyramid.tests.fixtures.adressenregister import adres from crabpy_pyramid.tests.fixtures.adressenregister import adressen @@ -45,21 +44,20 @@ def run_integration_tests(section): settings = { - "crabpy.cache.file.root": os.path.join(os.path.dirname(__file__), "dogpile_data"), "crabpy.capakey.cache_config.permanent.backend": "dogpile.cache.dbm", "crabpy.capakey.cache_config.permanent.expiration_time": 604800, "crabpy.capakey.cache_config.permanent.arguments.filename": os.path.join( - os.path.dirname(__file__), "dogpile_data", "capakey_permanent.dbm" + "/tmp", "capakey_permanent.dbm" ), "crabpy.capakey.cache_config.long.backend": "dogpile.cache.dbm", "crabpy.capakey.cache_config.long.expiration_time": 86400, "crabpy.capakey.cache_config.long.arguments.filename": os.path.join( - os.path.dirname(__file__), "dogpile_data", "capakey_long.dbm" + "/tmp", 
"capakey_long.dbm" ), "crabpy.capakey.cache_config.short.backend": "dogpile.cache.dbm", "crabpy.capakey.cache_config.short.expiration_time": 3600, "crabpy.capakey.cache_config.short.arguments.filename": os.path.join( - os.path.dirname(__file__), "dogpile_data", "capakey_short.dbm" + "/tmp", "capakey_short.dbm" ), "crabpy.adressenregister.include": True, "crabpy.adressenregister.base_url": "https://api.basisregisters.vlaanderen.be", diff --git a/crabpy_pyramid/tests/test_utils.py b/crabpy_pyramid/tests/test_utils.py index 7fd2b7f..056622f 100644 --- a/crabpy_pyramid/tests/test_utils.py +++ b/crabpy_pyramid/tests/test_utils.py @@ -1,93 +1,87 @@ # -*- coding: utf-8 -*- -''' +""" Tests for the utility module. .. versionadded:: 0.1.0 -''' - -from crabpy_pyramid.utils import ( - parse_range_header, - range_return -) +""" import unittest from pyramid.testing import DummyRequest +from crabpy_pyramid import utils +from crabpy_pyramid.utils import parse_range_header +from crabpy_pyramid.utils import range_return + + class UtilsTests(unittest.TestCase): def test_parse_range_header(self): headers = [ - { - 'header': 'items=0-19', - 'result': { - 'start': 0, - 'finish': 19, - 'count': 20 - } - }, { - 'header': 'items:0-19', - 'result': False - }, { - 'header': 'test', - 'result': False - }, { - 'header': 'items=t-t', - 'result': False - }, { - 'header': 'items=10-0', - 'result': { - 'start': 10, - 'finish': 10, - 'count': 1 - } - }] + {"header": "items=0-19", "result": {"start": 0, "finish": 19, "count": 20}}, + {"header": "items:0-19", "result": False}, + {"header": "test", "result": False}, + {"header": "items=t-t", "result": False}, + {"header": "items=10-0", "result": {"start": 10, "finish": 10, "count": 1}}, + ] for header in headers: - res = parse_range_header(header['header']) - self.assertEquals(res, header['result']) + res = parse_range_header(header["header"]) + self.assertEquals(res, header["result"]) def test_range_return_no_range(self): items = range(10) req = DummyRequest() filtered = range_return(req, items) self.assertEquals(items, filtered) - self.assertEquals(req.response.headers['Content-Range'], 'items 0-9/10') + self.assertEquals(req.response.headers["Content-Range"], "items 0-9/10") def test_range_return_filtered(self): items = range(10) req = DummyRequest() - req.headers['Range'] = 'items=0-4' + req.headers["Range"] = "items=0-4" filtered = range_return(req, items) self.assertEquals(items[0:5], filtered) - self.assertEquals(req.response.headers['Content-Range'], 'items 0-4/10') + self.assertEquals(req.response.headers["Content-Range"], "items 0-4/10") def test_range_return_x_or_not(self): items = range(10) req = DummyRequest() - req.headers['Range'] = 'items=0-4' + req.headers["Range"] = "items=0-4" filtered_no_x = range_return(req, items) req_x = DummyRequest() - req_x.headers['X-Range'] = 'items=0-4' + req_x.headers["X-Range"] = "items=0-4" filtered_x = range_return(req_x, items) self.assertEquals(filtered_x, filtered_no_x) def test_range_return_large_request(self): items = range(10) req = DummyRequest() - req.headers['Range'] = 'items=0-100' + req.headers["Range"] = "items=0-100" filtered = range_return(req, items) self.assertEquals(items, filtered) - self.assertEquals(req.response.headers['Content-Range'], 'items 0-9/10') - + self.assertEquals(req.response.headers["Content-Range"], "items 0-9/10") + def test_range_return_max_return(self): - items = range (9999) + items = range(9999) req = DummyRequest() - req.headers['Range'] = 'items=0-9999' + req.headers["Range"] = 
"items=0-9999" filtered = range_return(req, items) self.assertEquals(items[0:5000], filtered) - self.assertEqual(req.response.headers['Content-Range'], 'items 0-4999/9999') + self.assertEqual(req.response.headers["Content-Range"], "items 0-4999/9999") items = range(14999) - req.headers['Range'] = 'items=5000-15000' + req.headers["Range"] = "items=5000-15000" filtered = range_return(req, items) self.assertEqual(items[5000:10000], filtered) - self.assertEqual(req.response.headers['Content-Range'], 'items 5000-9999/14999') + self.assertEqual(req.response.headers["Content-Range"], "items 5000-9999/14999") + + def test_filter_settings(self): + settings = utils.filter_settings( + { + "cache.file.root": "/tmp", + "capakey.include": False, + }, + "capakey.", + ) + self.assertEquals(1, len(settings)) + self.assertFalse(settings["include"]) + self.assertNotIn("cache.file.root", settings) diff --git a/crabpy_pyramid/tests/tests.py b/crabpy_pyramid/tests/tests.py deleted file mode 100644 index cd2798b..0000000 --- a/crabpy_pyramid/tests/tests.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Testing of the initialization. -.. versionadded:: 0.1.0 -""" - -from pyramid import testing -from crabpy.gateway.capakey import CapakeyRestGateway -from crabpy.gateway.crab import CrabGateway -import os -import warnings - -from crabpy_pyramid import ( - includeme, - ICapakey, - _filter_settings, - _get_proxy_settings, -) - -import warnings - -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa - - -class TestSettings(unittest.TestCase): - def setUp(self): - self.config = testing.setUp( - settings={ - "crabpy.capakey.include": True, - "crabpy.cache.file.root": "./dogpile_data/", - "crabpy.capakey.cache_config.permanent.backend": "dogpile.cache.dbm", - "crabpy.capakey.cache_config.permanent.expiration_time": 604800, - "crabpy.capakey.cache_config.permanent.arguments.filename": "dogpile_data/capakey_permanent.dbm", - "crabpy.capakey.cache_config.long.backend": "dogpile.cache.dbm", - "crabpy.capakey.cache_config.long.expiration_time": 86400, - "crabpy.capakey.cache_config.long.arguments.filename": "dogpile_data/capakey_long.dbm", - "crabpy.capakey.cache_config.short.backend": "dogpile.cache.dbm", - "crabpy.capakey.cache_config.short.expiration_time": 3600, - "crabpy.capakey.cache_config.short.arguments.filename": "dogpile_data/capakey_short.dbm", - } - ) - - def tearDown(self): - del self.config - - def test_filter_settings(self): - settings = _filter_settings( - { - "cache.file.root": "/tmp", - "capakey.include": False, - }, - "capakey.", - ) - self.assertEquals(1, len(settings)) - self.assertFalse(settings["include"]) - self.assertNotIn("cache.file.root", settings) - - def test_filter_settings_with_proxy(self): - settings = { - "proxy.http": "http://proxy.example.com:3128", - "proxy.https": "https://httpsproxy.example.com:3128", - "crab.cache_config.permanent.backend": "dogpile.cache.dbm", - } - base_settings = _get_proxy_settings(settings) - crab_settings = dict(_filter_settings(settings, "crab."), **base_settings) - self.assertIn("proxy", crab_settings) - self.assertIn("http", crab_settings["proxy"]) - self.assertIn("https", crab_settings["proxy"]) - - def test_empty_proxy_settings(self): - settings = { - "proxy.http": "", - "proxy.https": "", - } - base_settings = _get_proxy_settings(settings) - self.assertNotIn("proxy", base_settings) - - def test_includeme_existing_root(self): - includeme(self.config) - capakey = self.config.registry.queryUtility(ICapakey) - 
self.assertIsInstance(capakey, CapakeyRestGateway) - - def test_includeme_nonexisting_root(self): - root = "./testdir/" - self.config.registry.settings["crabpy.cache.file.root"] = root - includeme(self.config) - capakey = self.config.registry.queryUtility(ICapakey) - self.assertIsInstance(capakey, CapakeyRestGateway) - os.rmdir(root) - - def test_directive_was_added(self): - includeme(self.config) - r = self.config.registry.settings - self.assertEqual( - "dogpile.cache.dbm", r["crabpy.capakey.cache_config.permanent.backend"] - ) diff --git a/crabpy_pyramid/utils.py b/crabpy_pyramid/utils.py index 9e66d62..c92039c 100644 --- a/crabpy_pyramid/utils.py +++ b/crabpy_pyramid/utils.py @@ -7,6 +7,7 @@ import re + MAX_NUMBER_ITEMS = 5000 @@ -14,12 +15,12 @@ def parse_range_header(range): """ Parse a range header as used by the dojo Json Rest store. - :param str range: The content of the range header to be parsed. + :param str range: The content of the range header to be parsed. eg. `items=0-9` :returns: A dict with keys start, finish and number or `False` if the range is invalid. """ - match = re.match('^items=([0-9]+)-([0-9]+)$', range) + match = re.match("^items=([0-9]+)-([0-9]+)$", range) if match: start = int(match.group(1)) @@ -27,11 +28,7 @@ def parse_range_header(range): if finish < start: finish = start - return { - 'start': start, - 'finish': finish, - 'count': finish - start + 1 - } + return {"start": start, "finish": finish, "count": finish - start + 1} else: return False @@ -45,31 +42,33 @@ def range_return(request, items): :rtype: list """ - if ('Range' in request.headers): - range = parse_range_header(request.headers['Range']) - elif 'X-Range' in request.headers: - range = parse_range_header(request.headers['X-Range']) + if "Range" in request.headers: + range = parse_range_header(request.headers["Range"]) + elif "X-Range" in request.headers: + range = parse_range_header(request.headers["X-Range"]) else: - range = { - 'start': 0, - 'finish': MAX_NUMBER_ITEMS - 1, - 'count': MAX_NUMBER_ITEMS - } - filtered = items[range['start']:range['finish'] + 1] - if len(filtered) < range['count']: + range = {"start": 0, "finish": MAX_NUMBER_ITEMS - 1, "count": MAX_NUMBER_ITEMS} + filtered = items[range["start"] : range["finish"] + 1] + if len(filtered) < range["count"]: # Something was stripped, deal with it - range['count'] = len(filtered) - range['finish'] = range['start'] + range['count'] - 1 - if range['finish'] - range['start'] + 1 >= MAX_NUMBER_ITEMS: - range['finish'] = range['start'] + MAX_NUMBER_ITEMS - 1 - filtered = items[range['start']:range['finish'] + 1] - - request.response.headers['Content-Range'] = 'items %d-%d/%d' % (range['start'], range['finish'], len(items)) - request.response.headers['X-Content-Range'] = request.response.headers['Content-Range'] + range["count"] = len(filtered) + range["finish"] = range["start"] + range["count"] - 1 + if range["finish"] - range["start"] + 1 >= MAX_NUMBER_ITEMS: + range["finish"] = range["start"] + MAX_NUMBER_ITEMS - 1 + filtered = items[range["start"] : range["finish"] + 1] + + request.response.headers["Content-Range"] = "items %d-%d/%d" % ( + range["start"], + range["finish"], + len(items), + ) + request.response.headers["X-Content-Range"] = request.response.headers[ + "Content-Range" + ] return filtered -def set_http_caching(request, gateway='crab', region='permanent'): +def set_http_caching(request, gateway="crab", region="permanent"): """ Set an HTTP Cache Control header on a request. 
@@ -78,9 +77,27 @@ def set_http_caching(request, gateway='crab', region='permanent'): :param str region: What caching region to use? Defaults to `permanent`. :rtype: pyramid.request.Request """ - crabpy_exp = request.registry.settings.get('crabpy.%s.cache_config.%s.expiration_time' % (gateway, region), None) + crabpy_exp = request.registry.settings.get( + "crabpy.%s.cache_config.%s.expiration_time" % (gateway, region), None + ) if crabpy_exp is None: return request ctime = int(int(crabpy_exp) * 1.05) request.response.cache_expires(ctime, public=True) return request + + +def filter_settings(settings, prefix): + """ + Filter all settings to only return settings that start with a certain + prefix. + + :param dict settings: A settings dictionary. + :param str prefix: A prefix. + """ + ret = {} + for skey in settings.keys(): + if skey.startswith(prefix): + key = skey[len(prefix) :] + ret[key] = settings[skey] + return ret From d958268da1d1bb28d045c0d612160dbc53c4f040 Mon Sep 17 00:00:00 2001 From: Wim De Clercq Date: Wed, 9 Oct 2024 11:51:31 +0200 Subject: [PATCH 2/6] travis --- .travis.yml | 7 +++---- .../tests/renderers/test_capakey.py | 16 +++++++-------- crabpy_pyramid/tests/test_utils.py | 20 +++++++++---------- 3 files changed, 21 insertions(+), 22 deletions(-) diff --git a/.travis.yml b/.travis.yml index 9061cab..db3a276 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,12 +1,11 @@ sudo: false language: python python: - - 3.8 + - 3.11 install: - pip install -r requirements-dev.txt - - python setup.py develop - - pip install nose coverage nose-testconfig coveralls webtest + - pip install -e . script: - nosetests --nologcapture --config nose_cover.cfg --tc-file nose_travis.ini + cd crabpy_pyramid; python -m unittest discover -s tests after_success: coveralls diff --git a/crabpy_pyramid/tests/renderers/test_capakey.py b/crabpy_pyramid/tests/renderers/test_capakey.py index 21d5c9b..ddd8306 100644 --- a/crabpy_pyramid/tests/renderers/test_capakey.py +++ b/crabpy_pyramid/tests/renderers/test_capakey.py @@ -28,7 +28,7 @@ def tearDown(self): def test_list_gemeenten(self): gemeenten = [Gemeente(44021, "Gent"), Gemeente(31043, "Knokke-Heist")] dump = self.renderer(gemeenten, {}) - self.assertEquals( + self.assertEqual( json.loads(dump), [{"id": 44021, "naam": "Gent"}, {"id": 31043, "naam": "Knokke-Heist"}], ) @@ -39,7 +39,7 @@ def test_list_afdelingen(self): Afdeling(31043, "KNOKKE-HEIST 1 AFD", Gemeente(31043, "Knokke-Heist")), ] dump = self.renderer(afdelingen, {}) - self.assertEquals( + self.assertEqual( json.loads(dump), [ { @@ -58,7 +58,7 @@ def test_list_afdelingen(self): def test_list_secties(self): secties = [Sectie("A", Afdeling(44021, "GENT 1 AFD", Gemeente(44021, "Gent")))] dump = self.renderer(secties, {}) - self.assertEquals( + self.assertEqual( json.loads(dump), [ { @@ -87,7 +87,7 @@ def test_list_percelen(self): ) ] dump = self.renderer(percelen, {}) - self.assertEquals( + self.assertEqual( json.loads(dump), [ { @@ -123,7 +123,7 @@ def test_item_gemeente(self): (94653.453, 185680.984, 113654.992, 208920.422), ) dump = self.renderer(g, {}) - self.assertEquals( + self.assertEqual( json.loads(dump), { "id": 44021, @@ -142,7 +142,7 @@ def test_item_afdeling(self): (104002.076625, 194168.3415, 105784.050875, 197876.146688), ) dump = self.renderer(a, {}) - self.assertEquals( + self.assertEqual( json.loads(dump), { "id": 44021, @@ -166,7 +166,7 @@ def test_item_sectie(self): (104002.076625, 194168.3415, 105784.050875, 197876.146688), ) dump = self.renderer(s, {}) - self.assertEquals( + 
self.assertEqual( json.loads(dump), { "id": "A", @@ -203,7 +203,7 @@ def test_item_perceel(self): shape={"shape": "one"}, ) dump = self.renderer(p, {}) - self.assertEquals( + self.assertEqual( json.loads(dump), { "id": "1154/02C000", diff --git a/crabpy_pyramid/tests/test_utils.py b/crabpy_pyramid/tests/test_utils.py index 056622f..710314f 100644 --- a/crabpy_pyramid/tests/test_utils.py +++ b/crabpy_pyramid/tests/test_utils.py @@ -26,22 +26,22 @@ def test_parse_range_header(self): ] for header in headers: res = parse_range_header(header["header"]) - self.assertEquals(res, header["result"]) + self.assertEqual(res, header["result"]) def test_range_return_no_range(self): items = range(10) req = DummyRequest() filtered = range_return(req, items) - self.assertEquals(items, filtered) - self.assertEquals(req.response.headers["Content-Range"], "items 0-9/10") + self.assertEqual(items, filtered) + self.assertEqual(req.response.headers["Content-Range"], "items 0-9/10") def test_range_return_filtered(self): items = range(10) req = DummyRequest() req.headers["Range"] = "items=0-4" filtered = range_return(req, items) - self.assertEquals(items[0:5], filtered) - self.assertEquals(req.response.headers["Content-Range"], "items 0-4/10") + self.assertEqual(items[0:5], filtered) + self.assertEqual(req.response.headers["Content-Range"], "items 0-4/10") def test_range_return_x_or_not(self): items = range(10) @@ -51,22 +51,22 @@ def test_range_return_x_or_not(self): req_x = DummyRequest() req_x.headers["X-Range"] = "items=0-4" filtered_x = range_return(req_x, items) - self.assertEquals(filtered_x, filtered_no_x) + self.assertEqual(filtered_x, filtered_no_x) def test_range_return_large_request(self): items = range(10) req = DummyRequest() req.headers["Range"] = "items=0-100" filtered = range_return(req, items) - self.assertEquals(items, filtered) - self.assertEquals(req.response.headers["Content-Range"], "items 0-9/10") + self.assertEqual(items, filtered) + self.assertEqual(req.response.headers["Content-Range"], "items 0-9/10") def test_range_return_max_return(self): items = range(9999) req = DummyRequest() req.headers["Range"] = "items=0-9999" filtered = range_return(req, items) - self.assertEquals(items[0:5000], filtered) + self.assertEqual(items[0:5000], filtered) self.assertEqual(req.response.headers["Content-Range"], "items 0-4999/9999") items = range(14999) req.headers["Range"] = "items=5000-15000" @@ -82,6 +82,6 @@ def test_filter_settings(self): }, "capakey.", ) - self.assertEquals(1, len(settings)) + self.assertEqual(1, len(settings)) self.assertFalse(settings["include"]) self.assertNotIn("cache.file.root", settings) From 022fc7d92a9e65efeb4e64e199070be836ffa4ed Mon Sep 17 00:00:00 2001 From: Wim De Clercq Date: Wed, 9 Oct 2024 11:54:20 +0200 Subject: [PATCH 3/6] travis --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index db3a276..ac564c4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,7 @@ sudo: false language: python python: - - 3.11 + - 3.11-dev install: - pip install -r requirements-dev.txt - pip install -e . 
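
Note on usage: with PATCH 1/6 applied, capakey is always included and the
adressenregister component stays opt-in behind its include flag. A minimal
sketch of wiring crabpy_pyramid into a host application under these patches;
the cache filename, port and API key below are placeholders and are not taken
from this series:

    from wsgiref.simple_server import make_server
    from pyramid.config import Configurator

    settings = {
        # capakey is always included; only its dogpile cache is configurable.
        "crabpy.capakey.cache_config.long.backend": "dogpile.cache.dbm",
        "crabpy.capakey.cache_config.long.expiration_time": 86400,
        "crabpy.capakey.cache_config.long.arguments.filename": "/tmp/capakey_long.dbm",
        # adressenregister remains opt-in via the include flag.
        "crabpy.adressenregister.include": "true",
        "crabpy.adressenregister.base_url": "https://api.basisregisters.vlaanderen.be",
        "crabpy.adressenregister.api_key": "example-api-key",  # placeholder value
    }

    config = Configurator(settings=settings)
    config.include("crabpy_pyramid")  # runs the new includeme() from PATCH 1/6
    app = config.make_wsgi_app()

    if __name__ == "__main__":
        make_server("127.0.0.1", 6543, app).serve_forever()
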
From bb6069f35b4ac4d2b4448573b712e014f7489ba2 Mon Sep 17 00:00:00 2001 From: Wim De Clercq Date: Wed, 9 Oct 2024 11:55:48 +0200 Subject: [PATCH 4/6] travis --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index ac564c4..2d3eee8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,8 @@ +dist: focal sudo: false language: python python: - - 3.11-dev + - 3.11 install: - pip install -r requirements-dev.txt - pip install -e . From 674f8ebba1136b3c4f299191c7c9ab07c33496c2 Mon Sep 17 00:00:00 2001 From: Wim De Clercq Date: Wed, 9 Oct 2024 13:37:00 +0200 Subject: [PATCH 5/6] review fixes --- crabpy_pyramid/adressenregister.py | 8 -------- crabpy_pyramid/capakey.py | 1 - crabpy_pyramid/renderers/adressenregister.py | 3 ++- crabpy_pyramid/utils.py | 10 ++++++++-- 4 files changed, 10 insertions(+), 12 deletions(-) diff --git a/crabpy_pyramid/adressenregister.py b/crabpy_pyramid/adressenregister.py index 32bbdf6..ef6ca04 100644 --- a/crabpy_pyramid/adressenregister.py +++ b/crabpy_pyramid/adressenregister.py @@ -101,14 +101,6 @@ def build_adressenregister(config: Configurator) -> Gateway: return gateway -def get_adressenregister(registry: Registry | Request) -> Gateway: - regis = getattr(registry, "registry", None) - if regis is None: - regis = registry - - return regis.queryUtility(IAdressenregister) - - @functools.singledispatch def get_adressenregister(arg) -> Gateway: raise NotImplementedError(f"Invalid argument {arg}. Pass a request or registry.") diff --git a/crabpy_pyramid/capakey.py b/crabpy_pyramid/capakey.py index 22d828b..a8ea046 100644 --- a/crabpy_pyramid/capakey.py +++ b/crabpy_pyramid/capakey.py @@ -97,7 +97,6 @@ def build_capakey(config: Configurator) -> CapakeyRestGateway: return capakey # Start building - settings = registry.settings parsed_settings = parse_settings(registry.settings) gateway = CapakeyRestGateway(cache_config=parsed_settings.cache_config) diff --git a/crabpy_pyramid/renderers/adressenregister.py b/crabpy_pyramid/renderers/adressenregister.py index 0a7fe71..e3fa218 100644 --- a/crabpy_pyramid/renderers/adressenregister.py +++ b/crabpy_pyramid/renderers/adressenregister.py @@ -1,7 +1,8 @@ -import pycountry +import pycountry.db from crabpy.gateway import adressenregister from pyramid.renderers import JSON + json_list_renderer = JSON() json_item_renderer = JSON() diff --git a/crabpy_pyramid/utils.py b/crabpy_pyramid/utils.py index c92039c..0bc22e8 100644 --- a/crabpy_pyramid/utils.py +++ b/crabpy_pyramid/utils.py @@ -6,12 +6,14 @@ """ import re +from typing import Literal +from typing import Sequence MAX_NUMBER_ITEMS = 5000 -def parse_range_header(range): +def parse_range_header(range) -> dict[str, int] | Literal[False]: """ Parse a range header as used by the dojo Json Rest store. @@ -33,7 +35,7 @@ def parse_range_header(range): return False -def range_return(request, items): +def range_return(request, items: Sequence): """ Determine what range of objects to return. 
@@ -44,8 +46,12 @@ def range_return(request, items): """ if "Range" in request.headers: range = parse_range_header(request.headers["Range"]) + if not range: + raise ValueError(request.headers["Range"]) elif "X-Range" in request.headers: range = parse_range_header(request.headers["X-Range"]) + if not range: + raise ValueError(request.headers["X-Range"]) else: range = {"start": 0, "finish": MAX_NUMBER_ITEMS - 1, "count": MAX_NUMBER_ITEMS} filtered = items[range["start"] : range["finish"] + 1] From 3bdf006406f73ffd9deacdf53ca816c5db84dd6a Mon Sep 17 00:00:00 2001 From: Wim De Clercq Date: Wed, 9 Oct 2024 13:55:15 +0200 Subject: [PATCH 6/6] more changes --- crabpy_pyramid/capakey.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crabpy_pyramid/capakey.py b/crabpy_pyramid/capakey.py index a8ea046..4700c99 100644 --- a/crabpy_pyramid/capakey.py +++ b/crabpy_pyramid/capakey.py @@ -98,7 +98,11 @@ def build_capakey(config: Configurator) -> CapakeyRestGateway: # Start building parsed_settings = parse_settings(registry.settings) - gateway = CapakeyRestGateway(cache_config=parsed_settings.cache_config) + kwargs = {} + if parsed_settings.cache_config: + kwargs["cache_config"] = parsed_settings.cache_config + + gateway = CapakeyRestGateway(**kwargs) registry.registerUtility(gateway, ICapakey) config.add_request_method(get_capakey, "capakey_gateway")
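
After PATCH 5/6 removes the duplicate get_adressenregister definition, only the
functools.singledispatch accessors remain, so both a Registry and a Request
resolve to the same registered utility. A short sketch of consuming the
gateways set up by this series; the route name and view function are invented
for illustration, and list_gemeenten() is assumed to exist on crabpy's
CapakeyRestGateway:

    from pyramid.view import view_config

    from crabpy_pyramid.adressenregister import get_adressenregister
    from crabpy_pyramid.capakey import get_capakey


    @view_config(route_name="example_gemeenten", renderer="capakey_listjson")
    def example_gemeenten(request):
        # capakey_gateway() is the request method added by build_capakey().
        gateway = request.capakey_gateway()
        return gateway.list_gemeenten()  # assumed crabpy gateway method


    def example_accessors(config):
        # The singledispatch accessors accept a Registry as well as a Request.
        capakey = get_capakey(config.registry)
        adresreg = get_adressenregister(config.registry)
        return capakey, adresreg
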