diff --git a/.bandit.yaml b/.bandit.yaml
deleted file mode 100644
index 1bf9b483e..000000000
--- a/.bandit.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-skips:
- - B101 # Use of assert detected.
diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml
index 87a56ad72..78baf1ccc 100644
--- a/.github/workflows/main.yaml
+++ b/.github/workflows/main.yaml
@@ -37,7 +37,7 @@ jobs:
- uses: actions/setup-python@v5
with:
- python-version: '3.13'
+ python-version: '3.11'
- run: python3 -m pip install --requirement=ci/requirements.txt
- uses: actions/cache@v4
@@ -46,7 +46,7 @@ jobs:
key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
restore-keys: "pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}\npre-commit-"
- run: pre-commit run --all-files
- - run: git diff --exit-code --patch > /tmp/pre-commit.patch || true
+ - run: git diff --exit-code --patch > /tmp/pre-commit.patch || true && git reset --hard
if: failure()
- uses: actions/upload-artifact@v4
with:
@@ -64,6 +64,14 @@ jobs:
- name: Checks
run: make checks
+ - run: git diff --exit-code --patch > /tmp/ruff.patch || true && git reset --hard
+ if: failure()
+ - uses: actions/upload-artifact@v4
+ with:
+ name: Apply Ruff lint fix.patch
+ path: /tmp/ruff.patch
+ retention-days: 1
+ if: failure()
- name: Tests
run: make tests
@@ -78,6 +86,7 @@ jobs:
retention-days: 5
if: failure()
+ - run: git reset --hard
- name: Publish
run: c2cciutils-publish
if: env.HAS_SECRETS == 'HAS_SECRETS'
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 5d7152033..32930c19c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -96,37 +96,22 @@ repos:
rev: v0.1.8
hooks:
- id: ripsecrets
- - repo: https://github.com/asottile/pyupgrade
- rev: v3.19.0
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.7.3
hooks:
- - id: pyupgrade
- args:
- - --py39-plus
- - repo: https://github.com/PyCQA/autoflake
- rev: v2.3.1
- hooks:
- - id: autoflake
- - repo: https://github.com/PyCQA/isort
- rev: 5.13.2
- hooks:
- - id: isort
- args:
- - --project=tilecloud
- - repo: https://github.com/psf/black
- rev: 24.10.0
- hooks:
- - id: black
+ - id: ruff-format
- repo: https://github.com/PyCQA/prospector
rev: v1.13.0
hooks:
- id: prospector
args:
- - --tool=pydocstyle
+ - --tool=ruff
- --die-on-tool-error
- --output-format=pylint
additional_dependencies:
- - prospector-profile-duplicated==1.6.0 # pypi
- - prospector-profile-utils==1.10.1 # pypi
+ - prospector-profile-duplicated==1.7.0 # pypi
+ - prospector-profile-utils==1.10.3 # pypi
+ - ruff==0.7.3 # pypi
- repo: https://github.com/sbrunner/jsonschema-validator
rev: 0.3.2
hooks:
@@ -136,3 +121,7 @@ repos:
rev: 39.10.1
hooks:
- id: renovate-config-validator
+ - repo: https://github.com/sbrunner/python-versions-hook
+ rev: 0.7.0
+ hooks:
+ - id: python-versions
diff --git a/.prospector.yaml b/.prospector.yaml
index fc3bae6fc..336ffe536 100644
--- a/.prospector.yaml
+++ b/.prospector.yaml
@@ -2,23 +2,17 @@ inherits:
- utils:base
- utils:no-design-checks
- utils:fix
+ - utils:unsafe
+ - utils:c2cwsgiutils
- duplicated
-doc-warnings: true
-
ignore-paths:
- tilecloud_chain/configuration.py
-pylint:
- disable:
- - missing-timeout # done globally by c2cwsgiutils
-
-pydocstyle:
- disable:
- - D202 # No blank lines allowed after function docstring
- - D104 # Missing docstring in public package
- - D107 # Missing docstring in __init__
+mypy:
+ options:
+ python-version: '3.10'
-bandit:
+ruff:
options:
- config: .bandit.yaml
+ target-version: py310
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 844ab8538..0fee572c0 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -1,5 +1,3 @@
-version: '2.2'
-
services:
db:
image: camptocamp/postgres:17-postgis-3
diff --git a/gunicorn.conf.py b/gunicorn.conf.py
index 027eb9654..ad9456908 100644
--- a/gunicorn.conf.py
+++ b/gunicorn.conf.py
@@ -75,7 +75,6 @@ def on_starting(server: gunicorn.arbiter.Arbiter) -> None:
Called just before the master process is initialized.
"""
-
del server
prometheus.start()
@@ -87,7 +86,6 @@ def post_fork(server: gunicorn.arbiter.Arbiter, worker: gunicorn.workers.base.Wo
Called just after a worker has been forked.
"""
-
del server, worker
prometheus.cleanup()
@@ -99,7 +97,6 @@ def child_exit(server: gunicorn.arbiter.Arbiter, worker: gunicorn.workers.base.W
Called just after a worker has been exited, in the master process.
"""
-
del server
multiprocess.mark_process_dead(worker.pid) # type: ignore [no-untyped-call]
diff --git a/package-lock.json b/package-lock.json
index 2d5d5e29f..c8dab415a 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -6,7 +6,7 @@
"": {
"devDependencies": {
"commander": "12.1.0",
- "puppeteer": "23.5.3"
+ "puppeteer": "23.7.1"
}
},
"node_modules/@babel/code-frame": {
@@ -35,13 +35,13 @@
}
},
"node_modules/@puppeteer/browsers": {
- "version": "2.4.0",
- "resolved": "https://registry.npmjs.org/@puppeteer/browsers/-/browsers-2.4.0.tgz",
- "integrity": "sha512-x8J1csfIygOwf6D6qUAZ0ASk3z63zPb7wkNeHRerCMh82qWKUrOgkuP005AJC8lDL6/evtXETGEJVcwykKT4/g==",
+ "version": "2.4.1",
+ "resolved": "https://registry.npmjs.org/@puppeteer/browsers/-/browsers-2.4.1.tgz",
+ "integrity": "sha512-0kdAbmic3J09I6dT8e9vE2JOCSt13wHCW5x/ly8TSt2bDtuIWe2TgLZZDHdcziw9AVCzflMAXCrVyRIhIs44Ng==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
- "debug": "^4.3.6",
+ "debug": "^4.3.7",
"extract-zip": "^2.0.1",
"progress": "^2.0.3",
"proxy-agent": "^6.4.0",
@@ -410,9 +410,9 @@
}
},
"node_modules/devtools-protocol": {
- "version": "0.0.1342118",
- "resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1342118.tgz",
- "integrity": "sha512-75fMas7PkYNDTmDyb6PRJCH7ILmHLp+BhrZGeMsa4bCh40DTxgCz2NRy5UDzII4C5KuD0oBMZ9vXKhEl6UD/3w==",
+ "version": "0.0.1354347",
+ "resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1354347.tgz",
+ "integrity": "sha512-BlmkSqV0V84E2WnEnoPnwyix57rQxAM5SKJjf4TbYOCGLAWtz8CDH8RIaGOjPgPCXo2Mce3kxSY497OySidY3Q==",
"dev": true,
"license": "BSD-3-Clause"
},
@@ -945,18 +945,18 @@
}
},
"node_modules/puppeteer": {
- "version": "23.5.3",
- "resolved": "https://registry.npmjs.org/puppeteer/-/puppeteer-23.5.3.tgz",
- "integrity": "sha512-FghmfBsr/UUpe48OiCg1gV3W4vVfQJKjQehbF07SjnQvEpWcvPTah1nykfGWdOQQ1ydJPIXcajzWN7fliCU3zw==",
+ "version": "23.7.1",
+ "resolved": "https://registry.npmjs.org/puppeteer/-/puppeteer-23.7.1.tgz",
+ "integrity": "sha512-jS6XehagMvxQ12etwY/4EOYZ0Sm8GAsrtGhdQn4AqpJAyHc3RYl7tGd4QYh/MmShDw8sF9FWYQqGidhoXaqokQ==",
"dev": true,
"hasInstallScript": true,
"license": "Apache-2.0",
"dependencies": {
- "@puppeteer/browsers": "2.4.0",
+ "@puppeteer/browsers": "2.4.1",
"chromium-bidi": "0.8.0",
"cosmiconfig": "^9.0.0",
- "devtools-protocol": "0.0.1342118",
- "puppeteer-core": "23.5.3",
+ "devtools-protocol": "0.0.1354347",
+ "puppeteer-core": "23.7.1",
"typed-query-selector": "^2.12.0"
},
"bin": {
@@ -967,16 +967,16 @@
}
},
"node_modules/puppeteer-core": {
- "version": "23.5.3",
- "resolved": "https://registry.npmjs.org/puppeteer-core/-/puppeteer-core-23.5.3.tgz",
- "integrity": "sha512-V58MZD/B3CwkYsqSEQlHKbavMJptF04fzhMdUpiCRCmUVhwZNwSGEPhaiZ1f8I3ABQUirg3VNhXVB6Z1ubHXtQ==",
+ "version": "23.7.1",
+ "resolved": "https://registry.npmjs.org/puppeteer-core/-/puppeteer-core-23.7.1.tgz",
+ "integrity": "sha512-Om/qCZhd+HLoAr7GltrRAZpS3uOXwHu7tXAoDbNcJADHjG2zeAlDArgyIPXYGG4QB/EQUHk13Q6RklNxGM73Pg==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
- "@puppeteer/browsers": "2.4.0",
+ "@puppeteer/browsers": "2.4.1",
"chromium-bidi": "0.8.0",
"debug": "^4.3.7",
- "devtools-protocol": "0.0.1342118",
+ "devtools-protocol": "0.0.1354347",
"typed-query-selector": "^2.12.0",
"ws": "^8.18.0"
},
diff --git a/package.json b/package.json
index 44d856eb8..dcd7bd51f 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
{
"devDependencies": {
"commander": "12.1.0",
- "puppeteer": "23.5.3"
+ "puppeteer": "23.7.1"
},
"type": "module"
}
diff --git a/poetry.lock b/poetry.lock
index 481c74ee6..1823817e3 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
[[package]]
name = "alembic"
@@ -220,24 +220,21 @@ virtualenv = ["virtualenv (>=20.0.35)"]
[[package]]
name = "c2cwsgiutils"
-version = "6.0.9"
+version = "6.1.3"
description = "Common utilities for Camptocamp WSGI applications"
optional = false
-python-versions = ">=3.9"
+python-versions = ">=3.10"
files = [
- {file = "c2cwsgiutils-6.0.9-py3-none-any.whl", hash = "sha256:77e91d0bb74a62ab58b10e493b34a94e4a223dd33f35edef30d7b79d5424c374"},
- {file = "c2cwsgiutils-6.0.9.tar.gz", hash = "sha256:f93ee7cb03a7d72cbb6699710963eb0d0cba25b0afae9a56ff5617b639b2b11d"},
+ {file = "c2cwsgiutils-6.1.3-py3-none-any.whl", hash = "sha256:3e537c6ef3f72bd00ef519aa5f8e4948c7f1f584a95e99aa8159388805674ae6"},
+ {file = "c2cwsgiutils-6.1.3.tar.gz", hash = "sha256:afa5c7c5450512d3661be2375325cb673b8b7c2e520d487582927d099589d311"},
]
[package.dependencies]
alembic = {version = "*", optional = true, markers = "extra == \"standard\" or extra == \"alembic\" or extra == \"all\""}
cee_syslog_handler = "*"
-certifi = "*"
cornice = {version = "*", optional = true, markers = "extra == \"standard\" or extra == \"webserver\" or extra == \"all\""}
gunicorn = {version = "*", optional = true, markers = "extra == \"standard\" or extra == \"webserver\" or extra == \"all\""}
-idna = "*"
objgraph = {version = "*", optional = true, markers = "extra == \"debug\" or extra == \"all\""}
-pillow = "*"
prometheus-client = {version = "*", optional = true, markers = "extra == \"standard\" or extra == \"webserver\" or extra == \"all\""}
psycopg2 = {version = "*", optional = true, markers = "extra == \"standard\" or extra == \"webserver\" or extra == \"all\""}
pyjwt = {version = "*", optional = true, markers = "extra == \"standard\" or extra == \"oauth2\" or extra == \"all\""}
@@ -249,13 +246,10 @@ redis = {version = "*", optional = true, markers = "extra == \"standard\" or ext
requests = "*"
requests-oauthlib = {version = "*", optional = true, markers = "extra == \"standard\" or extra == \"oauth2\" or extra == \"all\""}
scikit-image = {version = "*", optional = true, markers = "extra == \"test-images\""}
-scipy = "*"
sentry-sdk = {version = "*", optional = true, markers = "extra == \"standard\" or extra == \"sentry\" or extra == \"all\""}
SQLAlchemy = {version = "*", optional = true, markers = "extra == \"standard\" or extra == \"webserver\" or extra == \"all\""}
SQLAlchemy-Utils = {version = "*", optional = true, markers = "extra == \"standard\" or extra == \"webserver\" or extra == \"all\""}
ujson = "*"
-urllib3 = "*"
-webob = "*"
"zope.interface" = {version = "*", optional = true, markers = "extra == \"standard\" or extra == \"webserver\" or extra == \"all\""}
"zope.sqlalchemy" = {version = "*", optional = true, markers = "extra == \"standard\" or extra == \"webserver\" or extra == \"all\""}
@@ -1044,13 +1038,13 @@ referencing = ">=0.31.0"
[[package]]
name = "jsonschema-validator-new"
-version = "0.1.0"
+version = "0.3.2"
description = "Tool to validate files against a JSON Schema"
optional = false
-python-versions = ">=3.8,<4"
+python-versions = ">=3.9"
files = [
- {file = "jsonschema_validator_new-0.1.0-py3-none-any.whl", hash = "sha256:f93e1a4ca92079f28fbad2b43a1733b5d51c6cc4c7fd327689a63a90b77cf4ff"},
- {file = "jsonschema_validator_new-0.1.0.tar.gz", hash = "sha256:deac226a3288222e9f1eb73267e496e33a598270529b31629f2407620a6dde37"},
+ {file = "jsonschema_validator_new-0.3.2-py3-none-any.whl", hash = "sha256:1b18a2b2a6115c00ec879d2dc6f99fc657f1086f62b0abef1484165ff19b1b8d"},
+ {file = "jsonschema_validator_new-0.3.2.tar.gz", hash = "sha256:622a93406d7ebc14ab40f54f83688ccbf0ee37acf81b9543c82914c5077555ba"},
]
[package.dependencies]
@@ -1664,19 +1658,18 @@ twisted = ["twisted"]
[[package]]
name = "prospector"
-version = "1.12.1"
+version = "1.13.1"
description = "Prospector is a tool to analyse Python code by aggregating the result of other tools."
optional = false
python-versions = "<4.0,>=3.9"
files = [
- {file = "prospector-1.12.1-py3-none-any.whl", hash = "sha256:e2440b51f40626cbaea80edd97263d8c0a71a79e729415fb505096d4d39e2287"},
- {file = "prospector-1.12.1.tar.gz", hash = "sha256:b9bb4bcdd77b943c597ee4f374960e851cdd2a0b4b60eaeeaf0da465facafc60"},
+ {file = "prospector-1.13.1-py3-none-any.whl", hash = "sha256:119d5f6ba8cbdb19fb4dcb426deaf6b060410bdc1e96c106517ea65e71b5f225"},
+ {file = "prospector-1.13.1.tar.gz", hash = "sha256:b72bd657c82b1aed2cc7cfeceea847f248ba3755332878821b2c59441aa4b6a0"},
]
[package.dependencies]
bandit = {version = ">=1.5.1", optional = true, markers = "extra == \"with-bandit\" or extra == \"with_everything\""}
dodgy = ">=0.2.1,<0.3.0"
-flake8 = "*"
GitPython = ">=3.1.27,<4.0.0"
mccabe = ">=0.7.0,<0.8.0"
mypy = {version = ">=0.600", optional = true, markers = "extra == \"with-mypy\" or extra == \"with_everything\""}
@@ -1691,40 +1684,45 @@ pylint-django = ">=2.6.1"
pylint-flask = "0.6"
pyroma = {version = ">=2.4", optional = true, markers = "extra == \"with-pyroma\" or extra == \"with_everything\""}
PyYAML = "*"
-requirements-detector = ">=1.3.1"
+requirements-detector = ">=1.3.2"
+ruff = {version = "*", optional = true, markers = "extra == \"with-ruff\" or extra == \"with_everything\""}
setoptconf-tmp = ">=0.3.1,<0.4.0"
toml = ">=0.10.2,<0.11.0"
[package.extras]
with-bandit = ["bandit (>=1.5.1)"]
-with-everything = ["bandit (>=1.5.1)", "mypy (>=0.600)", "pyright (>=1.1.3)", "pyroma (>=2.4)", "vulture (>=1.5)"]
+with-everything = ["bandit (>=1.5.1)", "mypy (>=0.600)", "pyright (>=1.1.3)", "pyroma (>=2.4)", "ruff", "vulture (>=1.5)"]
with-mypy = ["mypy (>=0.600)"]
with-pyright = ["pyright (>=1.1.3)"]
with-pyroma = ["pyroma (>=2.4)"]
+with-ruff = ["ruff"]
with-vulture = ["vulture (>=1.5)"]
[[package]]
name = "prospector-profile-duplicated"
-version = "1.6.0"
+version = "1.7.0"
description = "Profile that can be used to disable the duplicated or conflict rules between Prospector and other tools"
optional = false
python-versions = "*"
files = [
- {file = "prospector_profile_duplicated-1.6.0-py2.py3-none-any.whl", hash = "sha256:bf6a6aae0c7de48043b95e4d42e23ccd090c6c7115b6ee8c8ca472ffb1a2022b"},
- {file = "prospector_profile_duplicated-1.6.0.tar.gz", hash = "sha256:9c2d541076537405e8b2484cb6222276a2df17492391b6af1b192695770aab83"},
+ {file = "prospector_profile_duplicated-1.7.0-py2.py3-none-any.whl", hash = "sha256:6b78a320df8b00a1379d46df07ad5d03d8226ef348b948a8fe3ce9dc0b7ea765"},
+ {file = "prospector_profile_duplicated-1.7.0.tar.gz", hash = "sha256:0f97ca8802a7ad6987e8d5b4cdbbdaae26e333a389cdbe266a2e4e34a15c1464"},
]
[[package]]
name = "prospector-profile-utils"
-version = "1.9.1"
+version = "1.11.1"
description = "Some utility Prospector profiles."
optional = false
-python-versions = "*"
+python-versions = "<4.0,>=3.9"
files = [
- {file = "prospector_profile_utils-1.9.1-py2.py3-none-any.whl", hash = "sha256:b458d8c4d59bdb1547e4630a2c6de4971946c4f0999443db6a9eef6d216b26b8"},
- {file = "prospector_profile_utils-1.9.1.tar.gz", hash = "sha256:008efa6797a85233fd8093dcb9d86f5fa5d89673e431c15cb1496a91c9b2c601"},
+ {file = "prospector_profile_utils-1.11.1-py3-none-any.whl", hash = "sha256:de81a27358f8f8cb5714c1e7c67580659696de5da12a1b0bc6c074b40b035c45"},
+ {file = "prospector_profile_utils-1.11.1.tar.gz", hash = "sha256:39de89087df14afbb218882bdc2b51b5919878399289bfc8d7ae4f1c10c517e6"},
]
+[package.dependencies]
+prospector = ">=1.13.0"
+
[[package]]
name = "psycopg2"
version = "2.9.10"
@@ -2250,20 +2248,20 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"]
[[package]]
name = "requirements-detector"
-version = "1.3.1"
+version = "1.3.2"
description = "Python tool to find and list requirements of a Python project"
optional = false
python-versions = "<4.0,>=3.8"
files = [
- {file = "requirements_detector-1.3.1-py3-none-any.whl", hash = "sha256:3ef72e1c5c3ad11100058e8f074a5762a4902985e698099d2e7f1283758d4045"},
- {file = "requirements_detector-1.3.1.tar.gz", hash = "sha256:b89e34faf0e4d17f5736923918bd5401949cbe723294ccfefd698b3cda28e676"},
+ {file = "requirements_detector-1.3.2-py3-none-any.whl", hash = "sha256:e7595a32a21e5273dd54d3727bfef4591bbb96de341f6d95c9671981440876ee"},
+ {file = "requirements_detector-1.3.2.tar.gz", hash = "sha256:af5a3ea98ca703d14cf7b66751b2aeb3656d02d9e5fc1c97d7d4da02b057b601"},
]
[package.dependencies]
astroid = ">=3.0,<4.0"
packaging = ">=21.3"
semver = ">=3.0.0,<4.0.0"
-toml = ">=0.10.2,<0.11.0"
+toml = {version = ">=0.10.2,<0.11.0", markers = "python_version < \"3.11\""}
[[package]]
name = "rich"
@@ -2464,6 +2462,33 @@ files = [
{file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"},
]
+[[package]]
+name = "ruff"
+version = "0.7.3"
+description = "An extremely fast Python linter and code formatter, written in Rust."
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "ruff-0.7.3-py3-none-linux_armv6l.whl", hash = "sha256:34f2339dc22687ec7e7002792d1f50712bf84a13d5152e75712ac08be565d344"},
+ {file = "ruff-0.7.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:fb397332a1879b9764a3455a0bb1087bda876c2db8aca3a3cbb67b3dbce8cda0"},
+ {file = "ruff-0.7.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:37d0b619546103274e7f62643d14e1adcbccb242efda4e4bdb9544d7764782e9"},
+ {file = "ruff-0.7.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d59f0c3ee4d1a6787614e7135b72e21024875266101142a09a61439cb6e38a5"},
+ {file = "ruff-0.7.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:44eb93c2499a169d49fafd07bc62ac89b1bc800b197e50ff4633aed212569299"},
+ {file = "ruff-0.7.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d0242ce53f3a576c35ee32d907475a8d569944c0407f91d207c8af5be5dae4e"},
+ {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6b6224af8b5e09772c2ecb8dc9f3f344c1aa48201c7f07e7315367f6dd90ac29"},
+ {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c50f95a82b94421c964fae4c27c0242890a20fe67d203d127e84fbb8013855f5"},
+ {file = "ruff-0.7.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f3eff9961b5d2644bcf1616c606e93baa2d6b349e8aa8b035f654df252c8c67"},
+ {file = "ruff-0.7.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8963cab06d130c4df2fd52c84e9f10d297826d2e8169ae0c798b6221be1d1d2"},
+ {file = "ruff-0.7.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:61b46049d6edc0e4317fb14b33bd693245281a3007288b68a3f5b74a22a0746d"},
+ {file = "ruff-0.7.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:10ebce7696afe4644e8c1a23b3cf8c0f2193a310c18387c06e583ae9ef284de2"},
+ {file = "ruff-0.7.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3f36d56326b3aef8eeee150b700e519880d1aab92f471eefdef656fd57492aa2"},
+ {file = "ruff-0.7.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5d024301109a0007b78d57ab0ba190087b43dce852e552734ebf0b0b85e4fb16"},
+ {file = "ruff-0.7.3-py3-none-win32.whl", hash = "sha256:4ba81a5f0c5478aa61674c5a2194de8b02652f17addf8dfc40c8937e6e7d79fc"},
+ {file = "ruff-0.7.3-py3-none-win_amd64.whl", hash = "sha256:588a9ff2fecf01025ed065fe28809cd5a53b43505f48b69a1ac7707b1b7e4088"},
+ {file = "ruff-0.7.3-py3-none-win_arm64.whl", hash = "sha256:1713e2c5545863cdbfe2cbce21f69ffaf37b813bfd1fb3b90dc9a6f1963f5a8c"},
+ {file = "ruff-0.7.3.tar.gz", hash = "sha256:e1d1ba2e40b6e71a61b063354d04be669ab0d39c352461f3d789cac68b54a313"},
+]
+
[[package]]
name = "s3transfer"
version = "0.10.3"
@@ -3394,4 +3419,4 @@ test = ["zope.testing"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.13"
-content-hash = "a678808a9c161741a762ef4aee6bd2779dcf4dbb2b1777b9db6f79a7a4024117"
+content-hash = "5c2e3266149bc78ac67411b9e826b5eda0badb941a367bea94523ec3c4e034ed"
diff --git a/pyproject.toml b/pyproject.toml
index 38e3c41b1..0c10a3855 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,13 +1,6 @@
[tool.black]
line-length = 110
-target-version = ["py39"]
-
-[tool.mypy]
-python_version = "3.9"
-warn_redundant_casts = true
-warn_unused_ignores = true
-ignore_missing_imports = true
-strict = true
+target-version = ["py310"]
[tool.isort]
profile = "black"
@@ -15,6 +8,13 @@ line_length = 110
known_first_party = "tilecloud"
known_third_party = "c2cwsgiutils"
+[tool.ruff]
+line-length = 110
+target-version = "py310"
+
+[tool.ruff.lint.pydocstyle]
+convention = "numpy"
+
[tool.poetry]
name = "tilecloud-chain"
version = "0.0.0"
@@ -26,16 +26,19 @@ license = "BSD-2-Clause"
keywords = ["gis", "tilecloud", "chain"]
packages = [{ include = "tilecloud_chain" }]
classifiers = [
- "Development Status :: 5 - Production/Stable",
- "Environment :: Web Environment",
- "Framework :: Pyramid",
- "Intended Audience :: Other Audience",
- "License :: OSI Approved :: BSD License",
- "Operating System :: OS Independent",
- "Programming Language :: Python",
- "Programming Language :: Python :: 3",
- "Topic :: Scientific/Engineering :: GIS",
- "Typing :: Typed",
+ 'Development Status :: 5 - Production/Stable',
+ 'Environment :: Web Environment',
+ 'Framework :: Pyramid',
+ 'Intended Audience :: Other Audience',
+ 'License :: OSI Approved :: BSD License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.10',
+ 'Programming Language :: Python :: 3.11',
+ 'Programming Language :: Python :: 3.12',
+ 'Topic :: Scientific/Engineering :: GIS',
+ 'Typing :: Typed',
]
include = ["tilecloud_chain/py.typed", "tilecloud_chain/*.rst", "tilecloud_chain/*.md"]
@@ -56,7 +59,7 @@ main = "tilecloud_chain.server:main"
[tool.poetry.dependencies]
# Minimal version should also be set in the jsonschema-gentypes.yaml file
python = ">=3.10,<3.13"
-c2cwsgiutils = { version = "6.0.9", extras = ["standard", "broadcast", "oauth2", "debug"] }
+c2cwsgiutils = { version = "6.1.3", extras = ["standard", "broadcast", "oauth2", "debug"] }
pyramid-mako = "1.1.0"
python-dateutil = "2.9.0.post0"
tilecloud = { version = "1.12.3", extras = ["azure", "aws", "redis", "wsgi"] }
@@ -65,16 +68,16 @@ PyYAML = "6.0.2"
Shapely = "2.0.6"
jsonschema = "4.23.0"
pyramid = "2.0.2"
-jsonschema-validator-new = "0.1.0"
+jsonschema-validator-new = "0.3.2"
azure-storage-blob = "12.23.1"
waitress = "3.0.1"
certifi = "2024.8.30"
[tool.poetry.group.dev.dependencies]
-prospector = { extras = ["with_mypy", "with_bandit", "with_pyroma"], version = "1.12.1" }
-prospector-profile-duplicated = "1.6.0"
-prospector-profile-utils = "1.9.1"
-c2cwsgiutils = { version = "6.0.9", extras = ["test_images"] }
+prospector = { extras = ["with_mypy", "with_bandit", "with_pyroma", "with_ruff"], version = "1.13.1" }
+prospector-profile-duplicated = "1.7.0"
+prospector-profile-utils = "1.11.1"
+c2cwsgiutils = { version = "6.1.3", extras = ["test_images"] }
scikit-image = { version = "0.24.0" }
pytest = "8.3.3"
testfixtures = "8.3.0"
diff --git a/requirements.txt b/requirements.txt
index 9fb63d255..0eb4f606f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,5 +2,5 @@ poetry==1.8.4
poetry-plugin-export==1.8.0
poetry-dynamic-versioning==1.4.1
poetry-plugin-tweak-dependencies-version==1.5.2
-pip==24.2
+pip==24.3.1
poetry-plugin-drop-python-upper-constraint==0.1.0
diff --git a/tilecloud_chain/__init__.py b/tilecloud_chain/__init__.py
index fcfb0bc23..e83cdc7d8 100644
--- a/tilecloud_chain/__init__.py
+++ b/tilecloud_chain/__init__.py
@@ -1,6 +1,4 @@
-"""
-TileCloud Chain.
-"""
+"""TileCloud Chain."""
import collections
import json
@@ -19,7 +17,7 @@
import threading
import time
from argparse import ArgumentParser, Namespace
-from collections.abc import Iterable, Iterator
+from collections.abc import Callable, Iterable, Iterator
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime, timedelta
from fractions import Fraction
@@ -27,7 +25,7 @@
from io import BytesIO
from itertools import product
from math import ceil, sqrt
-from typing import IO, TYPE_CHECKING, Any, Callable, Optional, TextIO, TypedDict, Union, cast
+from typing import IO, TYPE_CHECKING, Any, TextIO, TypedDict, cast
import boto3
import botocore.client
@@ -35,8 +33,9 @@
import c2cwsgiutils.setup_process
import jsonschema_validator
import psycopg2
+import tilecloud.filter.error
from azure.identity import DefaultAzureCredential
-from azure.storage.blob import BlobServiceClient, ContainerClient, ContentSettings
+from azure.storage.blob import BlobServiceClient, ContainerClient
from c2cwsgiutils import sentry
from PIL import Image
from prometheus_client import Counter, Summary
@@ -45,10 +44,6 @@
from shapely.geometry.polygon import Polygon
from shapely.ops import unary_union
from shapely.wkb import loads as loads_wkb
-
-import tilecloud.filter.error
-import tilecloud_chain.configuration
-import tilecloud_chain.security
from tilecloud import BoundingPyramid, Tile, TileCoord, TileGrid, TileStore, consume
from tilecloud.filter.error import LogErrors, MaximumConsecutiveErrors
from tilecloud.filter.logger import Logger
@@ -61,6 +56,9 @@
from tilecloud.store.redis import RedisTileStore
from tilecloud.store.s3 import S3TileStore
from tilecloud.store.sqs import SQSTileStore, maybe_stop
+
+import tilecloud_chain.configuration
+import tilecloud_chain.security
from tilecloud_chain import configuration
from tilecloud_chain.multitilestore import MultiTileStore
from tilecloud_chain.timedtilestore import TimedTileStoreWrapper
@@ -74,14 +72,13 @@
def formatted_metadata(tile: Tile) -> str:
"""Get human readable string of the metadata."""
-
metadata = dict(tile.metadata)
if "tiles" in metadata:
- metadata["tiles"] = metadata["tiles"].keys() # type: ignore
+ metadata["tiles"] = metadata["tiles"].keys() # type: ignore[attr-defined]
return " ".join([f"{k}={metadata[k]}" for k in sorted(metadata.keys())])
-setattr(Tile, "formated_metadata", property(formatted_metadata))
+Tile.formated_metadata = property(formatted_metadata) # type: ignore[method-assign,assignment]
def add_common_options(
@@ -183,7 +180,7 @@ def add_common_options(
def get_tile_matrix_identifier(
- grid: tilecloud_chain.configuration.Grid, resolution: Optional[float] = None, zoom: Optional[int] = None
+ grid: tilecloud_chain.configuration.Grid, resolution: float | None = None, zoom: int | None = None
) -> str:
"""Get an identifier for a tile matrix."""
if grid is None or grid.get("matrix_identifier", configuration.MATRIX_IDENTIFIER_DEFAULT) == "zoom":
@@ -232,7 +229,8 @@ def __init__(
_LOGGER, logging.ERROR, "Error in tile: %(tilecoord)s, %(formated_metadata)s, %(error)r"
)
- def __call__(self, tile: Optional[Tile]) -> Optional[Tile]:
+ def __call__(self, tile: Tile | None) -> Tile | None:
+ """Run the tile generation."""
if tile is None:
return None
@@ -253,7 +251,12 @@ def __call__(self, tile: Optional[Tile]) -> Optional[Tile]:
tile.error = e
else:
tile = func(tile)
- _LOGGER.debug("[%s] %s in %s", tilecoord, func.time_message if getattr(func, "time_message", None) is not None else func, str(datetime.now() - n)) # type: ignore
+ _LOGGER.debug(
+ "[%s] %s in %s",
+ tilecoord,
+ func.time_message if getattr(func, "time_message", None) is not None else func, # type: ignore
+ str(datetime.now() - n),
+ )
if tile is None:
_LOGGER.debug("[%s] Drop", tilecoord)
return None
@@ -300,6 +303,7 @@ def __init__(self, db: Any) -> None:
self.db = db
def __call__(self) -> None:
+ """Close the database."""
self.db.close()
@@ -326,7 +330,7 @@ def __init__(self, config: tilecloud_chain.configuration.Configuration, mtime: f
class DatedGeoms:
"""Geoms with timestamps to be able to invalidate it on configuration change."""
- def __init__(self, geoms: dict[Union[str, int], BaseGeometry], mtime: float) -> None:
+ def __init__(self, geoms: dict[str | int, BaseGeometry], mtime: float) -> None:
self.geoms = geoms
self.mtime = mtime
@@ -354,8 +358,8 @@ class MissingErrorFileException(Exception):
class LoggingInformation(TypedDict):
"""Logging information."""
- host: Optional[str]
- layer: Optional[str]
+ host: str | None
+ layer: str | None
meta_tilecoord: str
@@ -365,7 +369,7 @@ class LoggingInformation(TypedDict):
class JsonLogHandler(c2cwsgiutils.pyramid_logging.JsonLogHandler):
"""Log to stdout in JSON."""
- def __init__(self, stream: Optional[TextIO] = None):
+ def __init__(self, stream: TextIO | None = None):
super().__init__(stream)
self.addFilter(TileFilter())
@@ -374,6 +378,7 @@ class TileFilter(logging.Filter):
"""A logging filter that adds request information to CEE logs."""
def filter(self, record: Any) -> bool:
+ """Add the request information to the log record."""
thread_id = threading.current_thread().native_id
assert thread_id is not None
log_info = LOGGING_CONTEXT.get(os.getpid(), {}).get(thread_id)
@@ -414,30 +419,30 @@ def get_azure_container_client(container: str) -> ContainerClient:
class TileGeneration:
"""Base class of all the tile generation."""
- tilestream: Optional[Iterator[Tile]] = None
+ tilestream: Iterator[Tile] | None = None
duration: timedelta = timedelta()
error = 0
- queue_store: Optional[TileStore] = None
+ queue_store: TileStore | None = None
daemon = False
def __init__(
self,
- config_file: Optional[str] = None,
- options: Optional[Namespace] = None,
- layer_name: Optional[str] = None,
- base_config: Optional[tilecloud_chain.configuration.Configuration] = None,
+ config_file: str | None = None,
+ options: Namespace | None = None,
+ layer_name: str | None = None,
+ base_config: tilecloud_chain.configuration.Configuration | None = None,
configure_logging: bool = True,
multi_thread: bool = True,
maxconsecutive_errors: bool = True,
):
self.geoms_cache: dict[str, dict[str, DatedGeoms]] = {}
- self._close_actions: list["Close"] = []
+ self._close_actions: list[Close] = []
self.error_lock = threading.Lock()
self.error_files_: dict[str, TextIO] = {}
self.functions_tiles: list[Callable[[Tile], Tile]] = []
self.functions_metatiles: list[Callable[[Tile], Tile]] = []
self.functions = self.functions_metatiles
- self.metatilesplitter_thread_pool: Optional[ThreadPoolExecutor] = None
+ self.metatilesplitter_thread_pool: ThreadPoolExecutor | None = None
self.multi_thread = multi_thread
self.maxconsecutive_errors = maxconsecutive_errors
self.grid_cache: dict[str, dict[str, DatedTileGrid]] = {}
@@ -445,13 +450,22 @@ def __init__(
self.config_file = config_file
self.base_config = base_config
self.configs: dict[str, DatedConfig] = {}
- self.hosts_cache: Optional[DatedHosts] = None
+ self.hosts_cache: DatedHosts | None = None
self.options: Namespace = options or collections.namedtuple( # type: ignore
"Options",
["verbose", "debug", "quiet", "bbox", "zoom", "test", "near", "time", "geom", "ignore_error"],
)(
- False, False, False, None, None, None, None, None, True, False # type: ignore
+ False, # type: ignore
+ False,
+ False,
+ None,
+ None,
+ None,
+ None,
+ None,
+ True,
+ False,
)
del options
if not hasattr(self.options, "bbox"):
@@ -469,51 +483,50 @@ def __init__(
if not hasattr(self.options, "ignore_error"):
self.options.ignore_error = False
- if configure_logging:
- if os.environ.get("CI", "false").lower() != "true":
- ###
- # logging configuration
- # https://docs.python.org/3/library/logging.config.html#logging-config-dictschema
- ###
- logging.config.dictConfig(
- {
- "version": 1,
- "root": {
- "level": os.environ["OTHER_LOG_LEVEL"],
- "handlers": [os.environ["LOG_TYPE"]],
+ if configure_logging and os.environ.get("CI", "false").lower() != "true":
+ ###
+ # logging configuration
+ # https://docs.python.org/3/library/logging.config.html#logging-config-dictschema
+ ###
+ logging.config.dictConfig(
+ {
+ "version": 1,
+ "root": {
+ "level": os.environ["OTHER_LOG_LEVEL"],
+ "handlers": [os.environ["LOG_TYPE"]],
+ },
+ "loggers": {
+ "gunicorn.error": {"level": os.environ["GUNICORN_LOG_LEVEL"]},
+ # "level = INFO" logs SQL queries.
+ # "level = DEBUG" logs SQL queries and results.
+ # "level = WARN" logs neither. (Recommended for production systems.)
+ "sqlalchemy.engine": {"level": os.environ["SQL_LOG_LEVEL"]},
+ "c2cwsgiutils": {"level": os.environ["C2CWSGIUTILS_LOG_LEVEL"]},
+ "tilecloud": {"level": os.environ["TILECLOUD_LOG_LEVEL"]},
+ "tilecloud_chain": {"level": os.environ["TILECLOUD_CHAIN_LOG_LEVEL"]},
+ },
+ "handlers": {
+ "console": {
+ "class": "logging.StreamHandler",
+ "formatter": "generic",
+ "stream": "ext://sys.stdout",
},
- "loggers": {
- "gunicorn.error": {"level": os.environ["GUNICORN_LOG_LEVEL"]},
- # "level = INFO" logs SQL queries.
- # "level = DEBUG" logs SQL queries and results.
- # "level = WARN" logs neither. (Recommended for production systems.)
- "sqlalchemy.engine": {"level": os.environ["SQL_LOG_LEVEL"]},
- "c2cwsgiutils": {"level": os.environ["C2CWSGIUTILS_LOG_LEVEL"]},
- "tilecloud": {"level": os.environ["TILECLOUD_LOG_LEVEL"]},
- "tilecloud_chain": {"level": os.environ["TILECLOUD_CHAIN_LOG_LEVEL"]},
+ "json": {
+ "class": "tilecloud_chain.JsonLogHandler",
+ "formatter": "generic",
+ "stream": "ext://sys.stdout",
},
- "handlers": {
- "console": {
- "class": "logging.StreamHandler",
- "formatter": "generic",
- "stream": "ext://sys.stdout",
- },
- "json": {
- "class": "tilecloud_chain.JsonLogHandler",
- "formatter": "generic",
- "stream": "ext://sys.stdout",
- },
- },
- "formatters": {
- "generic": {
- "format": "%(asctime)s [%(process)d] [%(levelname)-5.5s] %(message)s",
- "datefmt": "[%Y-%m-%d %H:%M:%S %z]",
- "class": "logging.Formatter",
- }
- },
- }
- )
- sentry.includeme()
+ },
+ "formatters": {
+ "generic": {
+ "format": "%(asctime)s [%(process)d] [%(levelname)-5.5s] %(message)s",
+ "datefmt": "[%Y-%m-%d %H:%M:%S %z]",
+ "class": "logging.Formatter",
+ }
+ },
+ }
+ )
+ sentry.includeme()
assert "generation" in self.get_main_config().config, self.get_main_config().config
@@ -553,7 +566,7 @@ def __init__(
assert layer_name is not None
self.create_log_tiles_error(layer_name)
- def get_host_config_file(self, host: Optional[str]) -> Optional[str]:
+ def get_host_config_file(self, host: str | None) -> str | None:
"""Get the configuration file name for the given host."""
if self.config_file:
return self.config_file
@@ -565,7 +578,7 @@ def get_host_config_file(self, host: Optional[str]) -> Optional[str]:
_LOGGER.debug("For the host %s, use config file: %s", host, config_file)
return config_file
- def get_host_config(self, host: Optional[str]) -> DatedConfig:
+ def get_host_config(self, host: str | None) -> DatedConfig:
"""Get the configuration for the given host."""
config_file = self.get_host_config_file(host)
if not config_file:
@@ -584,7 +597,7 @@ def get_config(
self,
config_file: str,
ignore_error: bool = True,
- base_config: Optional[tilecloud_chain.configuration.Configuration] = None,
+ base_config: tilecloud_chain.configuration.Configuration | None = None,
) -> DatedConfig:
"""Get the validated configuration for the file name, with cache management."""
assert config_file
@@ -596,7 +609,7 @@ def get_config(
else:
sys.exit(1)
- config: Optional[DatedConfig] = self.configs.get(config_file)
+ config: DatedConfig | None = self.configs.get(config_file)
if config is not None and config.mtime == config_path.stat().st_mtime:
return config
@@ -650,7 +663,7 @@ def _get_config(
self,
config_file: str,
ignore_error: bool,
- base_config: Optional[tilecloud_chain.configuration.Configuration] = None,
+ base_config: tilecloud_chain.configuration.Configuration | None = None,
) -> tuple[DatedConfig, bool]:
"""Get the validated configuration for the file name."""
with open(config_file, encoding="utf-8") as f:
@@ -745,16 +758,14 @@ def validate_config(self, config: DatedConfig, ignore_error: bool) -> bool:
_LOGGER.error("The layer '%s' is of type Mapnik/Grid, that can't support matatiles.", lname)
error = True
- if error:
- if not (
- ignore_error
- or os.environ.get("TILEGENERATION_IGNORE_CONFIG_ERROR", "FALSE").lower() == "true"
- ):
- sys.exit(1)
+ if error and not (
+ ignore_error or os.environ.get("TILEGENERATION_IGNORE_CONFIG_ERROR", "FALSE").lower() == "true"
+ ):
+ sys.exit(1)
return not (error or errors)
- def init(self, queue_store: Optional[TileStore] = None, daemon: bool = False) -> None:
+ def init(self, queue_store: TileStore | None = None, daemon: bool = False) -> None:
"""Initialize the tile generation."""
self.queue_store = queue_store
self.daemon = daemon
@@ -771,7 +782,7 @@ def _primefactors(x: int) -> list[int]:
loop += 1
return factorlist
- def _resolution_scale(self, resolutions: Union[list[float], list[int]]) -> int:
+ def _resolution_scale(self, resolutions: list[float] | list[int]) -> int:
prime_fact = {}
for resolution in resolutions:
denominator = Fraction(str(resolution)).denominator
@@ -864,7 +875,6 @@ def get_store(
for dimension in layer["dimensions"]:
metadata["dimension_" + dimension["name"]] = dimension["default"]
import bsddb3 as bsddb # pylint: disable=import-outside-toplevel,import-error
-
from tilecloud.store.bsddb import BSDDBTileStore # pylint: disable=import-outside-toplevel
# on bsddb file
@@ -896,7 +906,7 @@ def get_store(
@staticmethod
def get_grid_name(
- config: DatedConfig, layer: tilecloud_chain.configuration.Layer, name: Optional[Any] = None
+ config: DatedConfig, layer: tilecloud_chain.configuration.Layer, name: Any | None = None
) -> tilecloud_chain.configuration.Grid:
"""Get the grid name."""
if name is None:
@@ -904,7 +914,7 @@ def get_grid_name(
return config.config["grids"][name]
- def get_tilesstore(self, cache: Optional[str] = None) -> TimedTileStoreWrapper:
+ def get_tilesstore(self, cache: str | None = None) -> TimedTileStoreWrapper:
"""Get the tile store."""
gene = self
@@ -944,13 +954,13 @@ def log_tiles(tile: Tile) -> Tile:
elif not self.options.quiet and getattr(self.options, "role", None) != "server":
self.imap(Logger(_LOGGER, logging.INFO, "%(tilecoord)s, %(formated_metadata)s"))
- def add_metatile_splitter(self, store: Optional[TileStore] = None) -> None:
+ def add_metatile_splitter(self, store: TileStore | None = None) -> None:
"""Add a metatile splitter to the chain."""
assert self.functions != self.functions_tiles, "add_metatile_splitter should not be called twice"
if store is None:
gene = self
- def get_splitter(config_file: str, layer_name: str) -> Optional[MetaTileSplitterTileStore]:
+ def get_splitter(config_file: str, layer_name: str) -> MetaTileSplitterTileStore | None:
config = gene.get_config(config_file)
layer = config.config["layers"][layer_name]
if layer.get("meta"):
@@ -1007,7 +1017,7 @@ def meta_get(metatile: Tile) -> Tile:
self.imap(meta_get)
self.functions = self.functions_tiles
- def create_log_tiles_error(self, layer: str) -> Optional[TextIO]:
+ def create_log_tiles_error(self, layer: str) -> TextIO | None:
"""Create the error file for the given layer."""
if "error_file" in self.get_main_config().config.get("generation", {}):
now = datetime.now()
@@ -1027,11 +1037,11 @@ def close(self) -> None:
for file_ in self.error_files_.values():
file_.close()
- def get_log_tiles_error_file(self, layer: str) -> Optional[TextIO]:
+ def get_log_tiles_error_file(self, layer: str) -> TextIO | None:
"""Get the error file for the given layer."""
return self.error_files_[layer] if layer in self.error_files_ else self.create_log_tiles_error(layer)
- def log_tiles_error(self, tile: Optional[Tile] = None, message: Optional[str] = None) -> None:
+ def log_tiles_error(self, tile: Tile | None = None, message: str | None = None) -> None:
"""Log the error message for the given tile."""
if tile is None:
return
@@ -1071,8 +1081,8 @@ def get_grid(self, config: DatedConfig, grid_name: str) -> TileGrid:
return tilegrid
def get_geoms(
- self, config: DatedConfig, layer_name: str, host: Optional[str] = None
- ) -> dict[Union[str, int], BaseGeometry]:
+ self, config: DatedConfig, layer_name: str, host: str | None = None
+ ) -> dict[str | int, BaseGeometry]:
"""Get the geometries for the given layer."""
dated_geoms = self.geoms_cache.get(config.file, {}).get(layer_name)
if dated_geoms is not None and config.mtime == dated_geoms.mtime:
@@ -1120,7 +1130,7 @@ def get_geoms(
else:
extent = config.config["grids"][layer["grid"]]["bbox"]
- geoms: dict[Union[str, int], BaseGeometry] = {}
+ geoms: dict[str | int, BaseGeometry] = {}
if extent:
geom = Polygon(
(
@@ -1130,7 +1140,7 @@ def get_geoms(
(extent[2], extent[1]),
)
)
- for z, r in enumerate(config.config["grids"][layer["grid"]]["resolutions"]):
+ for z, _ in enumerate(config.config["grids"][layer["grid"]]["resolutions"]):
geoms[z] = geom
if self.options.near is None and self.options.geom:
@@ -1138,7 +1148,7 @@ def get_geoms(
with _GEOMS_GET_SUMMARY.labels(layer_name, host if host else self.options.host).time():
connection = psycopg2.connect(g["connection"])
cursor = connection.cursor()
- sql = f"SELECT ST_AsBinary(geom) FROM (SELECT {g['sql']}) AS g" # nosec
+ sql = f"SELECT ST_AsBinary(geom) FROM (SELECT {g['sql']}) AS g" # nosec # noqa: S608
_LOGGER.info("Execute SQL: %s.", sql)
cursor.execute(sql)
geom_list = [loads_wkb(bytes(r[0])) for r in cursor.fetchall()]
@@ -1301,11 +1311,11 @@ def counter_size(self) -> "CountSize":
self.imap(count)
return count
- def process(self, name: Optional[str] = None, key: str = "post_process") -> None:
+ def process(self, name: str | None = None, key: str = "post_process") -> None:
"""Add a process to the tilestream."""
gene = self
- def get_process(config_file: str, layer_name: str) -> Optional[Process]:
+ def get_process(config_file: str, layer_name: str) -> Process | None:
config = gene.get_config(config_file)
layer = config.config["layers"][layer_name]
name_ = name
@@ -1317,12 +1327,12 @@ def get_process(config_file: str, layer_name: str) -> Optional[Process]:
self.imap(MultiAction(get_process))
- def get(self, store: TileStore, time_message: Optional[str] = None) -> None:
+ def get(self, store: TileStore, time_message: str | None = None) -> None:
"""Get the tiles from the store."""
assert store is not None
self.imap(store.get_one, time_message)
- def put(self, store: TileStore, time_message: Optional[str] = None) -> None:
+ def put(self, store: TileStore, time_message: str | None = None) -> None:
"""Put the tiles in the store."""
assert store is not None
@@ -1332,7 +1342,7 @@ def put_internal(tile: Tile) -> Tile:
self.imap(put_internal, time_message)
- def delete(self, store: TileStore, time_message: Optional[str] = None) -> None:
+ def delete(self, store: TileStore, time_message: str | None = None) -> None:
"""Delete the tiles from the store."""
assert store is not None
@@ -1342,14 +1352,14 @@ def delete_internal(tile: Tile) -> Tile:
self.imap(delete_internal, time_message)
- def imap(self, func: Any, time_message: Optional[str] = None) -> None:
+ def imap(self, func: Any, time_message: str | None = None) -> None:
"""Add a function to the tilestream."""
assert func is not None
class Func:
"""Function with an additional field used to names it in timing messages."""
- def __init__(self, func: Callable[[Tile], Tile], time_message: Optional[str]) -> None:
+ def __init__(self, func: Callable[[Tile], Tile], time_message: str | None) -> None:
self.func = func
self.time_message = time_message
@@ -1361,7 +1371,7 @@ def __str__(self) -> str:
self.functions.append(Func(func, time_message))
- def consume(self, test: Optional[int] = None) -> None:
+ def consume(self, test: int | None = None) -> None:
"""Consume the tilestream."""
assert self.tilestream is not None
@@ -1446,7 +1456,8 @@ def __init__(self) -> None:
self.nb = 0
self.lock = threading.Lock()
- def __call__(self, tile: Optional[Tile] = None) -> Optional[Tile]:
+ def __call__(self, tile: Tile | None = None) -> Tile | None:
+ """Count the number of generated tile."""
with self.lock:
self.nb += 1
return tile
@@ -1460,7 +1471,8 @@ def __init__(self) -> None:
self.size = 0
self.lock = threading.Lock()
- def __call__(self, tile: Optional[Tile] = None) -> Optional[Tile]:
+ def __call__(self, tile: Tile | None = None) -> Tile | None:
+ """Count the number of generated tile and measure the total generated size."""
if tile and tile.data:
with self.lock:
self.nb += 1
@@ -1481,9 +1493,9 @@ def __init__(
self,
size: int,
sha1code: str,
- store: Optional[TileStore] = None,
- queue_store: Optional[TileStore] = None,
- count: Optional[Count] = None,
+ store: TileStore | None = None,
+ queue_store: TileStore | None = None,
+ count: Count | None = None,
) -> None:
self.size = size
self.sha1code = sha1code
@@ -1491,9 +1503,10 @@ def __init__(
self.queue_store = queue_store
self.count = count
- def __call__(self, tile: Tile) -> Optional[Tile]:
+ def __call__(self, tile: Tile) -> Tile | None:
+ """Drop the tile if the size and hash are the same as the specified ones."""
assert tile.data
- if len(tile.data) != self.size or sha1(tile.data).hexdigest() != self.sha1code: # nosec
+ if len(tile.data) != self.size or sha1(tile.data).hexdigest() != self.sha1code: # noqa: S324
return tile
else:
if self.store is not None:
@@ -1526,12 +1539,13 @@ class MultiAction:
def __init__(
self,
- get_action: Callable[[str, str], Optional[Callable[[Tile], Optional[Tile]]]],
+ get_action: Callable[[str, str], Callable[[Tile], Tile | None] | None],
) -> None:
self.get_action = get_action
- self.actions: dict[tuple[str, str], Optional[Callable[[Tile], Optional[Tile]]]] = {}
+ self.actions: dict[tuple[str, str], Callable[[Tile], Tile | None] | None] = {}
- def __call__(self, tile: Tile) -> Optional[Tile]:
+ def __call__(self, tile: Tile) -> Tile | None:
+ """Run the action."""
layer = tile.metadata["layer"]
config_file = tile.metadata["config_file"]
action = self.actions.get((config_file, layer))
@@ -1547,11 +1561,12 @@ def __call__(self, tile: Tile) -> Optional[Tile]:
class HashLogger:
"""Log the tile size and hash."""
- def __init__(self, block: str, out: Optional[IO[str]]) -> None:
+ def __init__(self, block: str, out: IO[str] | None) -> None:
self.block = block
self.out = out
def __call__(self, tile: Tile) -> Tile:
+ """Log the tile size and hash."""
ref = None
try:
assert tile.data
@@ -1572,7 +1587,7 @@ def __call__(self, tile: Tile) -> Tile:
f"""Tile: {tile.tilecoord} {tile.formated_metadata}
{self.block}:
size: {len(tile.data)}
- hash: {sha1(tile.data).hexdigest()}""", # nosec
+    hash: {sha1(tile.data).hexdigest()}""",  # noqa: S324
file=self.out,
)
return tile
@@ -1596,7 +1611,8 @@ def filter(self, tilecoord: TileCoord) -> bool:
nb = round(tilecoord.z + tilecoord.x / tilecoord.n + tilecoord.y / tilecoord.n)
return nb % self.nb_process == self.process_nb
- def __call__(self, tile: Tile) -> Optional[Tile]:
+ def __call__(self, tile: Tile) -> Tile | None:
+ """Filter the tile."""
return tile if self.filter(tile.tilecoord) else None
@@ -1610,7 +1626,7 @@ def __init__(
self.gene = gene
def filter_tilecoord(
- self, config: DatedConfig, tilecoord: TileCoord, layer_name: str, host: Optional[str] = None
+ self, config: DatedConfig, tilecoord: TileCoord, layer_name: str, host: str | None = None
) -> bool:
"""Filter the tilecoord."""
layer = config.config["layers"][layer_name]
@@ -1628,7 +1644,8 @@ def filter_tilecoord(
tile_grid.extent(tilecoord, grid["resolutions"][tilecoord.z] * px_buffer)
).intersects(geoms[tilecoord.z])
- def __call__(self, tile: Tile) -> Optional[Tile]:
+ def __call__(self, tile: Tile) -> Tile | None:
+ """Filter the tile on a geometry."""
return (
tile
if self.filter_tilecoord(self.gene.get_tile_config(tile), tile.tilecoord, tile.metadata["layer"])
@@ -1647,7 +1664,8 @@ class DropEmpty:
def __init__(self, gene: TileGeneration) -> None:
self.gene = gene
- def __call__(self, tile: Tile) -> Optional[Tile]:
+ def __call__(self, tile: Tile) -> Tile | None:
+ """Filter the enpty tile."""
config = self.gene.get_tile_config(tile)
if not tile or not tile.data:
_LOGGER.error(
@@ -1704,7 +1722,8 @@ def __init__(self, config: tilecloud_chain.configuration.ProcessCommand, options
self.config = config
self.options = options
- def __call__(self, tile: Tile) -> Optional[Tile]:
+ def __call__(self, tile: Tile) -> Tile | None:
+ """Process the tile."""
if tile and tile.data:
fd_in, name_in = tempfile.mkstemp()
with open(name_in, "wb") as file_in:
@@ -1739,7 +1758,9 @@ def __call__(self, tile: Tile) -> Optional[Tile]:
}
_LOGGER.debug("[%s] process: %s", tile.tilecoord, command)
result = subprocess.run( # pylint: disable=subprocess-run-check
- command, shell=True, capture_output=True # nosec
+ command,
+ shell=True,
+ capture_output=True, # nosec
)
if result.returncode != 0:
tile.error = (
@@ -1772,6 +1793,7 @@ def __init__(self, tiles_file: str):
self.tiles_file = open(tiles_file, encoding="utf-8") # pylint: disable=consider-using-with
def list(self) -> Iterator[Tile]:
+ """List the tiles."""
while True:
line = self.tiles_file.readline()
if not line:
@@ -1794,13 +1816,16 @@ def list(self) -> Iterator[Tile]:
metadata=dict([cast(tuple[str, str], e.split("=")) for e in splitted_line[1:]]),
)
- def get_one(self, tile: Tile) -> Optional[Tile]:
+ def get_one(self, tile: Tile) -> Tile | None:
+ """Get the tile."""
raise NotImplementedError()
def put_one(self, tile: Tile) -> Tile:
+ """Put the tile."""
raise NotImplementedError()
def delete_one(self, tile: Tile) -> Tile:
+ """Delete the tile."""
raise NotImplementedError()
@@ -1815,7 +1840,6 @@ def _await_message(_: Any) -> bool:
def get_queue_store(config: DatedConfig, daemon: bool) -> TimedTileStoreWrapper:
"""Get the quue tile store."""
-
queue_store = config.config.get("queue_store", configuration.QUEUE_STORE_DEFAULT)
if queue_store == "postgresql":
@@ -1856,7 +1880,7 @@ def get_queue_store(config: DatedConfig, daemon: bool) -> TimedTileStoreWrapper:
if url is not None:
tilestore_kwargs["url"] = url
else:
- sentinels: list[tuple[str, Union[str, int]]] = []
+ sentinels: list[tuple[str, str | int]] = []
if "TILECLOUD_CHAIN_REDIS_SENTINELs" in os.environ:
sentinels_string = os.environ["TILECLOUD_CHAIN_REDIS_SENTINELS"]
sentinels_tmp = [s.split(":") for s in sentinels_string.split(",")]
diff --git a/tilecloud_chain/configuration.py b/tilecloud_chain/configuration.py
index ee5ae6963..b633f7dd3 100644
--- a/tilecloud_chain/configuration.py
+++ b/tilecloud_chain/configuration.py
@@ -1,6 +1,4 @@
-"""
-Automatically generated file from a JSON schema.
-"""
+"""Automatically generated file from a JSON schema."""
from typing import Any, Literal, TypedDict, Union
@@ -545,7 +543,7 @@ class CloudfrontCost(TypedDict, total=False):
The CloudFront cost (main configuration)
"""
- get: Union[int, float]
+ get: int | float
"""
CloudFront Get.
@@ -554,7 +552,7 @@ class CloudfrontCost(TypedDict, total=False):
default: 0.009
"""
- download: Union[int, float]
+ download: int | float
"""
CloudFront Download.
@@ -575,7 +573,7 @@ class Configuration(TypedDict, total=False):
"""
grids: dict[str, "Grid"]
- r"""
+ """
Grids.
The WMTS grid definitions by grid name, see https://github.com/camptocamp/tilecloud-chain/blob/master/tilecloud_chain/USAGE.rst#configure-grids
@@ -585,7 +583,7 @@ class Configuration(TypedDict, total=False):
"""
caches: dict[str, "Cache"]
- r"""
+ """
Caches.
The tiles caches definitions by name, see https://github.com/camptocamp/tilecloud-chain/blob/master/tilecloud_chain/USAGE.rst#configure-caches
@@ -595,7 +593,7 @@ class Configuration(TypedDict, total=False):
"""
layers: dict[str, "Layer"]
- r"""
+ """
Layers.
The layers definitions by name, see https://github.com/camptocamp/tilecloud-chain/blob/master/tilecloud_chain/USAGE.rst#configure-layers
@@ -942,7 +940,7 @@ class Grid(TypedDict, total=False):
The scale used to build a FreeTileGrid typically '2'
"""
- resolutions: Required[list[Union[int, float]]]
+ resolutions: Required[list[int | float]]
"""
Resolutions.
@@ -951,7 +949,7 @@ class Grid(TypedDict, total=False):
Required property
"""
- bbox: Required[list[Union[int, float]]]
+ bbox: Required[list[int | float]]
"""
Bounding box.
@@ -1069,7 +1067,7 @@ class Info(TypedDict, total=False):
"""
-LayerBoundingBox = list[Union[int, float]]
+LayerBoundingBox = list[int | float]
"""
Layer bounding box.
@@ -1087,7 +1085,7 @@ class LayerCost(TypedDict, total=False):
The rules used to calculate the cost
"""
- tileonly_generation_time: Union[int, float]
+ tileonly_generation_time: int | float
"""
tile only generation time.
@@ -1096,7 +1094,7 @@ class LayerCost(TypedDict, total=False):
default: 40
"""
- tile_generation_time: Union[int, float]
+ tile_generation_time: int | float
"""
tile generation time.
@@ -1105,7 +1103,7 @@ class LayerCost(TypedDict, total=False):
default: 30
"""
- metatile_generation_time: Union[int, float]
+ metatile_generation_time: int | float
"""
Meta tile generation time.
@@ -1114,7 +1112,7 @@ class LayerCost(TypedDict, total=False):
default: 30
"""
- tile_size: Union[int, float]
+ tile_size: int | float
"""
Cost tile size.
@@ -1125,7 +1123,7 @@ class LayerCost(TypedDict, total=False):
LayerDimensionName = str
-r"""
+"""
Layer dimension name.
The dimension name
@@ -1237,7 +1235,7 @@ class LayerEmptyTileDetection(TypedDict, total=False):
LayerLegendMime = str
-r"""
+"""
Layer legend MIME.
The mime type used to store the generated legend
@@ -1335,7 +1333,7 @@ class LayerMapnik(TypedDict, total=False):
"""
wmts_style: Required["LayerWmtsStyle"]
- r"""
+ """
Layer WMTS style.
The WMTS style
@@ -1346,7 +1344,7 @@ class LayerMapnik(TypedDict, total=False):
"""
mime_type: Required["LayerMimeType"]
- r"""
+ """
Layer MIME type.
The MIME type of the tiles
@@ -1380,7 +1378,7 @@ class LayerMapnik(TypedDict, total=False):
"""
legend_mime: "LayerLegendMime"
- r"""
+ """
Layer legend MIME.
The mime type used to store the generated legend
@@ -1528,7 +1526,7 @@ class LayerMapnik(TypedDict, total=False):
LayerMimeType = str
-r"""
+"""
Layer MIME type.
The MIME type of the tiles
@@ -1662,7 +1660,7 @@ class LayerWms(TypedDict, total=False):
"""
wmts_style: Required["LayerWmtsStyle"]
- r"""
+ """
Layer WMTS style.
The WMTS style
@@ -1673,7 +1671,7 @@ class LayerWms(TypedDict, total=False):
"""
mime_type: Required["LayerMimeType"]
- r"""
+ """
Layer MIME type.
The MIME type of the tiles
@@ -1707,7 +1705,7 @@ class LayerWms(TypedDict, total=False):
"""
legend_mime: "LayerLegendMime"
- r"""
+ """
Layer legend MIME.
The mime type used to store the generated legend
@@ -1823,7 +1821,7 @@ class LayerWms(TypedDict, total=False):
LayerWmtsStyle = str
-r"""
+"""
Layer WMTS style.
The WMTS style
@@ -1987,7 +1985,7 @@ class Openlayers(TypedDict, total=False):
default: +proj=somerc +lat_0=46.95240555555556 +lon_0=7.439583333333333 +k_0=1 +x_0=2600000 +y_0=1200000 +ellps=bessel +towgs84=674.374,15.056,405.346,0,0,0,0 +units=m +no_defs
"""
- center_x: Union[int, float]
+ center_x: int | float
"""
Center x.
@@ -1996,7 +1994,7 @@ class Openlayers(TypedDict, total=False):
default: 2600000
"""
- center_y: Union[int, float]
+ center_y: int | float
"""
Center y.
@@ -2005,7 +2003,7 @@ class Openlayers(TypedDict, total=False):
default: 1200000
"""
- zoom: Union[int, float]
+ zoom: int | float
"""
Map initial zoom.
@@ -2322,7 +2320,7 @@ class S3Cost(TypedDict, total=False):
The S3 cost (main configuration)
"""
- storage: Union[int, float]
+ storage: int | float
"""
S3 Storage.
@@ -2331,7 +2329,7 @@ class S3Cost(TypedDict, total=False):
default: 0.125
"""
- put: Union[int, float]
+ put: int | float
"""
S3 Put.
@@ -2340,7 +2338,7 @@ class S3Cost(TypedDict, total=False):
default: 0.01
"""
- get: Union[int, float]
+ get: int | float
"""
S3 Get.
@@ -2349,7 +2347,7 @@ class S3Cost(TypedDict, total=False):
default: 0.01
"""
- download: Union[int, float]
+ download: int | float
"""
S3 Download.
@@ -2616,7 +2614,7 @@ class SqsCost(TypedDict, total=False):
The SQS cost, see https://github.com/camptocamp/tilecloud-chain/blob/master/tilecloud_chain/USAGE.rst#configure-sqs (main configuration)
"""
- request: Union[int, float]
+ request: int | float
"""
Request.
@@ -2659,7 +2657,7 @@ class SqsCost(TypedDict, total=False):
_GenerateItem = str
-r""" pattern: ^[a-zA-Z0-9_\-\+~\.]+$ """
+""" pattern: ^[a-zA-Z0-9_\-\+~\.]+$ """
_HeadersAdditionalproperties = str
@@ -2688,7 +2686,7 @@ class SqsCost(TypedDict, total=False):
class _LayerDimensionsItem(TypedDict, total=False):
name: Required["LayerDimensionName"]
- r"""
+ """
Layer dimension name.
The dimension name
@@ -2717,7 +2715,7 @@ class _LayerDimensionsItem(TypedDict, total=False):
"""
default: Required[str]
- r"""
+ """
Default.
The default value present in the capabilities
@@ -2747,14 +2745,14 @@ class _LayerGeometriesItem(TypedDict, total=False):
Required property
"""
- min_resolution: Union[int, float]
+ min_resolution: int | float
"""
Min resolution.
The min resolution where the query is valid
"""
- max_resolution: Union[int, float]
+ max_resolution: int | float
"""
Max resolution.
@@ -2764,7 +2762,7 @@ class _LayerGeometriesItem(TypedDict, total=False):
class _LayerLegendsItem(TypedDict, total=False):
mime_type: Required[str]
- r"""
+ """
MIME type.
The mime type used in the WMS request
@@ -2882,4 +2880,4 @@ class _ProcessCommandItem(TypedDict, total=False):
_ValuesItem = str
-r""" pattern: ^[a-zA-Z0-9_\-\+~\.]+$ """
+""" pattern: ^[a-zA-Z0-9_\-\+~\.]+$ """
diff --git a/tilecloud_chain/controller.py b/tilecloud_chain/controller.py
index b5cf90edd..25fef274a 100644
--- a/tilecloud_chain/controller.py
+++ b/tilecloud_chain/controller.py
@@ -1,6 +1,4 @@
-"""
-Generate the contextual file like the legends.
-"""
+"""Generate the contextual file like the legends."""
import concurrent.futures
import logging
@@ -14,22 +12,22 @@
from hashlib import sha1
from io import BytesIO, StringIO
from math import exp, log
-from typing import IO, Literal, Optional, Union, cast
+from typing import IO, Literal, cast
from urllib.parse import urlencode, urljoin
import botocore.exceptions
import requests
import ruamel.yaml
+import tilecloud.store.redis
+import tilecloud.store.s3
from azure.core.exceptions import ResourceNotFoundError
from azure.storage.blob import ContentSettings
from bottle import jinja2_template
from PIL import Image
from prometheus_client import Summary
+from tilecloud.lib.PIL_ import FORMAT_BY_CONTENT_TYPE
-import tilecloud.store.redis
-import tilecloud.store.s3
import tilecloud_chain.configuration
-from tilecloud.lib.PIL_ import FORMAT_BY_CONTENT_TYPE
from tilecloud_chain import (
DatedConfig,
TileGeneration,
@@ -44,9 +42,8 @@
_GET_STATUS_SUMMARY = Summary("tilecloud_chain_get_status", "Number of get_stats", ["type", "queue"])
-def main(args: Optional[list[str]] = None, out: Optional[IO[str]] = None) -> None:
+def main(args: list[str] | None = None, out: IO[str] | None = None) -> None:
"""Generate the contextual file like the legends."""
-
del out
try:
@@ -101,16 +98,14 @@ def main(args: Optional[list[str]] = None, out: Optional[IO[str]] = None) -> Non
except SystemExit:
raise
- except: # pylint: disable=bare-except
+ except: # pylint: disable=bare-except # noqa: E722
_LOGGER.exception("Exit with exception")
if os.environ.get("TESTS", "false").lower() == "true":
raise
sys.exit(1)
-def _send(
- data: Union[bytes, str], path: str, mime_type: str, cache: tilecloud_chain.configuration.Cache
-) -> None:
+def _send(data: bytes | str, path: str, mime_type: str, cache: tilecloud_chain.configuration.Cache) -> None:
if cache["type"] == "s3":
cache_s3 = cast(tilecloud_chain.configuration.CacheS3, cache)
client = tilecloud.store.s3.get_client(cache_s3.get("host"))
@@ -153,7 +148,7 @@ def _send(
f.write(data)
-def _get(path: str, cache: tilecloud_chain.configuration.Cache) -> Optional[bytes]:
+def _get(path: str, cache: tilecloud_chain.configuration.Cache) -> bytes | None:
if cache["type"] == "s3":
cache_s3 = cast(tilecloud_chain.configuration.CacheS3, cache)
client = tilecloud.store.s3.get_client(cache_s3.get("host"))
@@ -200,10 +195,9 @@ def _validate_generate_wmts_capabilities(
def get_wmts_capabilities(
- gene: TileGeneration, cache_name: str, exit_: bool = False, config: Optional[DatedConfig] = None
-) -> Optional[str]:
+ gene: TileGeneration, cache_name: str, exit_: bool = False, config: DatedConfig | None = None
+) -> str | None:
"""Get the WMTS capabilities for a configuration file."""
-
start = time.perf_counter()
if config is None:
assert gene.config_file
@@ -273,7 +267,7 @@ def _legend_metadata(
layer: tilecloud_chain.configuration.Layer,
base_url: str,
path: str,
-) -> Optional[tilecloud_chain.Legend]:
+) -> tilecloud_chain.Legend | None:
img = _get(path, cache)
if img is not None:
new_legend: tilecloud_chain.Legend = {
@@ -298,9 +292,9 @@ def _legend_metadata(
def _fill_legend(
gene: TileGeneration,
cache: tilecloud_chain.configuration.Cache,
- server: Optional[tilecloud_chain.configuration.Server],
+ server: tilecloud_chain.configuration.Server | None,
base_urls: list[str],
- config: Optional[DatedConfig] = None,
+ config: DatedConfig | None = None,
) -> None:
if config is None:
assert gene.config_file
@@ -336,7 +330,7 @@ def _fill_legend(
)
] = (layer_name, resolution)
- legend_image_metadata: dict[str, dict[float, Optional[tilecloud_chain.Legend]]] = {}
+ legend_image_metadata: dict[str, dict[float, tilecloud_chain.Legend | None]] = {}
for future in concurrent.futures.as_completed(legend_image_future):
layer_name, resolution = legend_image_future[future]
try:
@@ -349,12 +343,12 @@ def _fill_legend(
_LOGGER.debug("Get %i legend images in %s", len(legend_image_future), time.perf_counter() - start)
for layer_name, layer in config.config["layers"].items():
- previous_legend: Optional[tilecloud_chain.Legend] = None
+ previous_legend: tilecloud_chain.Legend | None = None
previous_resolution = None
if "legend_mime" in layer and "legend_extension" in layer and layer_name not in gene.layer_legends:
gene.layer_legends[layer_name] = []
legends = gene.layer_legends[layer_name]
- for zoom, resolution in enumerate(config.config["grids"][layer["grid"]]["resolutions"]):
+ for _, resolution in enumerate(config.config["grids"][layer["grid"]]["resolutions"]):
new_legend = legend_image_metadata.get(layer_name, {}).get(resolution)
if new_legend is not None:
@@ -374,61 +368,60 @@ def _generate_legend_images(gene: TileGeneration) -> None:
cache = config.config["caches"][gene.options.cache]
for layer_name, layer in config.config["layers"].items():
- if "legend_mime" in layer and "legend_extension" in layer:
- if layer["type"] == "wms":
- session = requests.session()
- session.headers.update(layer["headers"])
- previous_hash = None
- for zoom, resolution in enumerate(config.config["grids"][layer["grid"]]["resolutions"]):
- legends = []
- for wmslayer in layer["layers"].split(","):
- response = session.get(
- layer["url"]
- + "?"
- + urlencode(
- {
- "SERVICE": "WMS",
- "VERSION": layer.get("version", "1.0.0"),
- "REQUEST": "GetLegendGraphic",
- "LAYER": wmslayer,
- "FORMAT": layer["legend_mime"],
- "TRANSPARENT": "TRUE" if layer["legend_mime"] == "image/png" else "FALSE",
- "STYLE": layer["wmts_style"],
- "SCALE": resolution / 0.00028,
- }
- )
+ if "legend_mime" in layer and "legend_extension" in layer and layer["type"] == "wms":
+ session = requests.session()
+ session.headers.update(layer["headers"])
+ previous_hash = None
+ for zoom, resolution in enumerate(config.config["grids"][layer["grid"]]["resolutions"]):
+ legends = []
+ for wmslayer in layer["layers"].split(","):
+ response = session.get(
+ layer["url"]
+ + "?"
+ + urlencode(
+ {
+ "SERVICE": "WMS",
+ "VERSION": layer.get("version", "1.0.0"),
+ "REQUEST": "GetLegendGraphic",
+ "LAYER": wmslayer,
+ "FORMAT": layer["legend_mime"],
+ "TRANSPARENT": "TRUE" if layer["legend_mime"] == "image/png" else "FALSE",
+ "STYLE": layer["wmts_style"],
+ "SCALE": resolution / 0.00028,
+ }
)
- try:
- legends.append(Image.open(BytesIO(response.content)))
- except Exception: # pylint: disable=broad-exception-caught
- _LOGGER.warning(
- "Unable to read legend image for layer '%s'-'%s', resolution '%s': %s",
- layer_name,
- wmslayer,
- resolution,
- response.content,
- exc_info=True,
- )
- width = max(i.size[0] for i in legends) if legends else 0
- height = sum(i.size[1] for i in legends) if legends else 0
- image = Image.new("RGBA", (width, height))
- y = 0
- for i in legends:
- image.paste(i, (0, y))
- y += i.size[1]
- string_io = BytesIO()
- image.save(string_io, FORMAT_BY_CONTENT_TYPE[layer["legend_mime"]])
- result = string_io.getvalue()
- new_hash = sha1(result).hexdigest() # nosec
- if new_hash != previous_hash:
- previous_hash = new_hash
- _send(
- result,
- f"1.0.0/{layer_name}/{layer['wmts_style']}/"
- f"legend{zoom}.{layer['legend_extension']}",
- layer["legend_mime"],
- cache,
+ )
+ try:
+ legends.append(Image.open(BytesIO(response.content)))
+ except Exception: # pylint: disable=broad-exception-caught
+ _LOGGER.warning(
+ "Unable to read legend image for layer '%s'-'%s', resolution '%s': %s",
+ layer_name,
+ wmslayer,
+ resolution,
+ response.content,
+ exc_info=True,
)
+ width = max(i.size[0] for i in legends) if legends else 0
+ height = sum(i.size[1] for i in legends) if legends else 0
+ image = Image.new("RGBA", (width, height))
+ y = 0
+ for i in legends:
+ image.paste(i, (0, y))
+ y += i.size[1]
+ string_io = BytesIO()
+ image.save(string_io, FORMAT_BY_CONTENT_TYPE[layer["legend_mime"]])
+ result = string_io.getvalue()
+ new_hash = sha1(result).hexdigest() # nosec # noqa: S303
+ if new_hash != previous_hash:
+ previous_hash = new_hash
+ _send(
+ result,
+ f"1.0.0/{layer_name}/{layer['wmts_style']}/"
+ f"legend{zoom}.{layer['legend_extension']}",
+ layer["legend_mime"],
+ cache,
+ )
def status(gene: TileGeneration) -> None:
@@ -440,7 +433,7 @@ def get_status(gene: TileGeneration) -> list[str]:
"""Get the tile generation status."""
config = gene.get_main_config()
store = get_queue_store(config, False)
- type_: Union[Literal["redis"], Literal["sqs"]] = "redis" if "redis" in config.config else "sqs"
+ type_: Literal["redis"] | Literal["sqs"] = "redis" if "redis" in config.config else "sqs"
conf = config.config[type_]
with _GET_STATUS_SUMMARY.labels(type_, conf.get("queue", configuration.REDIS_QUEUE_DEFAULT)).time():
status_ = store.get_status()
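
A minimal, self-contained sketch of the legend handling that the reworked _generate_legend_images keeps (now one indentation level shallower): stack the per-layer legend images vertically, then fingerprint the result so unchanged legends are not re-uploaded. Names are illustrative, Pillow is assumed installed and the image list non-empty.

from hashlib import sha1
from io import BytesIO

from PIL import Image


def stack_legends(images: list[Image.Image]) -> bytes:
    """Paste the legend images vertically into a single PNG and return its bytes."""
    width = max(image.size[0] for image in images)
    height = sum(image.size[1] for image in images)
    combined = Image.new("RGBA", (width, height))
    y = 0
    for image in images:
        combined.paste(image, (0, y))
        y += image.size[1]
    buffer = BytesIO()
    combined.save(buffer, "PNG")
    return buffer.getvalue()


def legend_changed(data: bytes, previous_hash: str | None) -> tuple[bool, str]:
    """Tell whether the stitched legend differs from the previously published one."""
    new_hash = sha1(data).hexdigest()  # fingerprint only, not a security use
    return new_hash != previous_hash, new_hash
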
diff --git a/tilecloud_chain/copy_.py b/tilecloud_chain/copy_.py
index c4cacdd9c..29d41071c 100644
--- a/tilecloud_chain/copy_.py
+++ b/tilecloud_chain/copy_.py
@@ -1,6 +1,4 @@
-"""
-Copy the tiles from a cache to an other.
-"""
+"""Copy the tiles from a cache to an other."""
import logging
import os
@@ -29,9 +27,7 @@ def copy(
destination: str,
task_name: str,
) -> None:
- """
- Copy the tiles from a cache to an other.
- """
+ """Copy the tiles from a cache to an other."""
self._copy(options, gene, layer, source, destination, task_name)
def _copy(
@@ -120,7 +116,7 @@ def main() -> None:
copy.copy(options, gene, layer, options.source, options.dest, "copy")
except SystemExit:
raise
- except: # pylint: disable=bare-except
+ except: # pylint: disable=bare-except # noqa: E722
logger.exception("Exit with exception")
if os.environ.get("TESTS", "false").lower() == "true":
raise
@@ -155,6 +151,6 @@ def process() -> None:
copy.copy(options, gene, layer, options.cache, options.cache, "process")
except SystemExit:
raise
- except: # pylint: disable=bare-except
+ except: # pylint: disable=bare-except # noqa: E722
logger.exception("Exit with exception")
sys.exit(1)
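
The same bare-except suppression (`# noqa: E722` added next to the existing pylint disable) recurs in every command entry point of this diff. A minimal sketch of the pattern, with a hypothetical run() body:

import logging
import sys

logger = logging.getLogger(__name__)


def run() -> None:
    """Hypothetical command body."""


def main() -> None:
    """Entry point: let SystemExit through, log anything else, exit non-zero."""
    try:
        run()
    except SystemExit:
        raise
    except:  # pylint: disable=bare-except # noqa: E722
        logger.exception("Exit with exception")
        sys.exit(1)


main()
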
diff --git a/tilecloud_chain/cost.py b/tilecloud_chain/cost.py
index 7af34396c..68f86b5d0 100644
--- a/tilecloud_chain/cost.py
+++ b/tilecloud_chain/cost.py
@@ -1,15 +1,13 @@
-"""
-Calculate the cost of the generation.
-"""
+"""Calculate the cost of the generation."""
import logging
import sys
from argparse import ArgumentParser, Namespace
from collections.abc import Iterable, Iterator
from datetime import timedelta
-from typing import Optional
from tilecloud import Tile, TileStore
+
from tilecloud_chain import Run, TileGeneration, add_common_options, configuration
from tilecloud_chain.format import duration_format
@@ -92,7 +90,7 @@ def main() -> None:
# gene.config['cost'].get("request_per_layers", configuration.REQUESTS_PER_LAYERS_DEFAULT) * tile_size)
except SystemExit:
raise
- except: # pylint: disable=bare-except
+ except: # pylint: disable=bare-except # noqa: E722
logger.exception("Exit with exception")
sys.exit(1)
@@ -150,7 +148,7 @@ def count_metatile(tile: Tile) -> Tile:
class MetaTileSplitter(TileStore):
"""Convert the metatile flow to tile flow."""
- def get(self, tiles: Iterable[Optional[Tile]]) -> Iterator[Tile]:
+ def get(self, tiles: Iterable[Tile | None]) -> Iterator[Tile]:
assert tiles is not None
for metatile in tiles:
assert metatile is not None
@@ -160,7 +158,7 @@ def get(self, tiles: Iterable[Optional[Tile]]) -> Iterator[Tile]:
def put_one(self, tile: Tile) -> Tile:
raise NotImplementedError
- def get_one(self, tile: Tile) -> Optional[Tile]:
+ def get_one(self, tile: Tile) -> Tile | None:
raise NotImplementedError
def delete_one(self, tile: Tile) -> Tile:
@@ -228,10 +226,7 @@ def count_tile(tile: Tile) -> Tile:
print(f"S3 PUT: {c:0.2f} [$]")
if "sqs" in gene.get_main_config().config:
- if meta:
- nb_sqs = nb_metatiles[z] * 3
- else:
- nb_sqs = nb_tile * 3
+ nb_sqs = nb_metatiles[z] * 3 if meta else nb_tile * 3
c = (
nb_sqs
* gene.get_main_config().config["cost"]["sqs"].get("request", configuration.REQUEST_DEFAULT)
diff --git a/tilecloud_chain/database_logger.py b/tilecloud_chain/database_logger.py
index 9ba896c1e..d6ade67cd 100644
--- a/tilecloud_chain/database_logger.py
+++ b/tilecloud_chain/database_logger.py
@@ -1,6 +1,4 @@
-"""
-Log the generated tiles in a database.
-"""
+"""Log the generated tiles in a database."""
import logging
import sys
@@ -8,9 +6,9 @@
import psycopg2.sql
from prometheus_client import Summary
+from tilecloud import Tile
import tilecloud_chain.configuration
-from tilecloud import Tile
_LOGGER = logging.getLogger(__name__)
@@ -101,6 +99,7 @@ def __init__(self, config: tilecloud_chain.configuration.Logging, daemon: bool)
(self.run,) = cursor.fetchone()
def __call__(self, tile: Tile) -> Tile:
+ """Log the generated tiles in a database."""
tile.metadata["run"] = self.run
return tile
@@ -109,6 +108,7 @@ class DatabaseLogger(DatabaseLoggerCommon):
"""Log the generated tiles in a database."""
def __call__(self, tile: Tile) -> Tile:
+ """Log the generated tiles in a database."""
if tile is None:
_LOGGER.warning("The tile is None")
return None
@@ -123,26 +123,25 @@ def __call__(self, tile: Tile) -> Tile:
layer = tile.metadata.get("layer", "- No layer -")
run = tile.metadata.get("run", -1)
- with _INSERT_SUMMARY.labels(layer).time():
- with self.connection.cursor() as cursor:
- try:
- cursor.execute(
- psycopg2.sql.SQL(
- "INSERT INTO {} (layer, run, action, tile) "
- "VALUES (%(layer)s, %(run)s, %(action)s::varchar(7), %(tile)s)"
- ).format(psycopg2.sql.Identifier(self.schema), psycopg2.sql.Identifier(self.table)),
- {"layer": layer, "action": action, "tile": str(tile.tilecoord), "run": run},
- )
- except psycopg2.IntegrityError:
- self.connection.rollback()
- cursor.execute(
- psycopg2.sql.SQL(
- "UPDATE {} SET action = %(action)s "
- "WHERE layer = %(layer)s AND run = %(run)s AND tile = %(tile)s"
- ).format(psycopg2.sql.Identifier(self.schema), psycopg2.sql.Identifier(self.table)),
- {"layer": layer, "action": action, "tile": str(tile.tilecoord), "run": run},
- )
+ with _INSERT_SUMMARY.labels(layer).time(), self.connection.cursor() as cursor:
+ try:
+ cursor.execute(
+ psycopg2.sql.SQL(
+ "INSERT INTO {} (layer, run, action, tile) "
+ "VALUES (%(layer)s, %(run)s, %(action)s::varchar(7), %(tile)s)"
+ ).format(psycopg2.sql.Identifier(self.schema), psycopg2.sql.Identifier(self.table)),
+ {"layer": layer, "action": action, "tile": str(tile.tilecoord), "run": run},
+ )
+ except psycopg2.IntegrityError:
+ self.connection.rollback()
+ cursor.execute(
+ psycopg2.sql.SQL(
+ "UPDATE {} SET action = %(action)s "
+ "WHERE layer = %(layer)s AND run = %(run)s AND tile = %(tile)s"
+ ).format(psycopg2.sql.Identifier(self.schema), psycopg2.sql.Identifier(self.table)),
+ {"layer": layer, "action": action, "tile": str(tile.tilecoord), "run": run},
+ )
- self.connection.commit()
+ self.connection.commit()
return tile
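
The DatabaseLogger change above merges two nested `with` blocks into a single statement. A small standalone sketch of that form, using stand-in context managers (the timer and cursor here are illustrative, not the real objects):

from collections.abc import Iterator
from contextlib import contextmanager


@contextmanager
def timed(label: str) -> Iterator[None]:
    """Stand-in for the metrics timer context manager."""
    print(f"timing {label}")
    yield


@contextmanager
def cursor() -> Iterator[list[str]]:
    """Stand-in for the database cursor context manager."""
    yield []


def insert_row(value: str) -> None:
    # One statement instead of two nested "with" blocks.
    with timed("insert"), cursor() as cur:
        cur.append(value)


insert_row("tile 0/0/0")
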
diff --git a/tilecloud_chain/expiretiles.py b/tilecloud_chain/expiretiles.py
index 23884a737..414d876a9 100644
--- a/tilecloud_chain/expiretiles.py
+++ b/tilecloud_chain/expiretiles.py
@@ -1,6 +1,4 @@
-"""
-Import the osm2pgsql expire-tiles file to Postgres.
-"""
+"""Import the osm2pgsql expire-tiles file to Postgres."""
import logging
import sys
@@ -9,8 +7,8 @@
import psycopg2.sql
from shapely.geometry import MultiPolygon, Polygon
from shapely.ops import unary_union
-
from tilecloud.grid.quad import QuadTileGrid
+
from tilecloud_chain import parse_tilecoord
logger = logging.getLogger(__name__)
@@ -184,6 +182,6 @@ def main() -> None:
print("Import successful")
except SystemExit:
raise
- except: # pylint: disable=bare-except
+ except: # pylint: disable=bare-except # noqa: E722
logger.exception("Exit with exception")
sys.exit(1)
diff --git a/tilecloud_chain/format.py b/tilecloud_chain/format.py
index 95fa74254..a88cb52e9 100644
--- a/tilecloud_chain/format.py
+++ b/tilecloud_chain/format.py
@@ -1,6 +1,4 @@
-"""
-Format functions.
-"""
+"""Format functions."""
from datetime import timedelta
diff --git a/tilecloud_chain/generate.py b/tilecloud_chain/generate.py
index 96e572e7f..3d9f5cbf2 100644
--- a/tilecloud_chain/generate.py
+++ b/tilecloud_chain/generate.py
@@ -1,6 +1,4 @@
-"""
-Generate the tiles, generate the queue, ...
-"""
+"""Generate the tiles, generate the queue, ..."""
import logging
import os
@@ -9,19 +7,20 @@
import sys
import threading
from argparse import ArgumentParser, Namespace
+from collections.abc import Callable
from datetime import datetime
from getpass import getuser
-from typing import IO, Callable, Optional, cast
+from typing import IO, cast
import boto3
import prometheus_client
-
import tilecloud.filter.error
-import tilecloud_chain
from tilecloud import Tile, TileCoord, TileStore
from tilecloud.filter.logger import Logger
from tilecloud.layout.wms import WMSTileLayout
from tilecloud.store.url import URLTileStore
+
+import tilecloud_chain
from tilecloud_chain import (
Count,
CountSize,
@@ -52,6 +51,7 @@ def __init__(self, gene: TileGeneration):
self.gene = gene
def __call__(self, tile: Tile) -> Tile:
+ """Add logs tile context."""
tilecloud_chain.LOGGING_CONTEXT.setdefault(os.getpid(), {})[threading.current_thread().native_id] = { # type: ignore
"host": tile.metadata.get("host"),
"layer": tile.metadata.get("layer"),
@@ -65,15 +65,15 @@ class Generate:
"""Generate the tiles, generate the queue, ..."""
def __init__(
- self, options: Namespace, gene: TileGeneration, out: Optional[IO[str]], server: bool = False
+ self, options: Namespace, gene: TileGeneration, out: IO[str] | None, server: bool = False
) -> None:
- self._count_metatiles: Optional[Count] = None
- self._count_metatiles_dropped: Optional[Count] = None
- self._count_tiles: Optional[Count] = None
- self._count_tiles_dropped: Optional[Count] = None
- self._count_tiles_stored: Optional[CountSize] = None
- self._queue_tilestore: Optional[TileStore] = None
- self._cache_tilestore: Optional[TileStore] = None
+ self._count_metatiles: Count | None = None
+ self._count_metatiles_dropped: Count | None = None
+ self._count_tiles: Count | None = None
+ self._count_tiles_dropped: Count | None = None
+ self._count_tiles_stored: CountSize | None = None
+ self._queue_tilestore: TileStore | None = None
+ self._cache_tilestore: TileStore | None = None
self._options = options
self._gene = gene
self.out = out
@@ -89,7 +89,7 @@ def __init__(
if self._options.role != "master" and not server:
self._generate_tiles()
- def gene(self, layer_name: Optional[str] = None) -> None:
+ def gene(self, layer_name: str | None = None) -> None:
"""Generate the tiles."""
if self._count_tiles is not None:
self._count_tiles.nb = 0
@@ -151,9 +151,7 @@ def _generate_init(self) -> None:
assert self._cache_tilestore is not None
def add_local_process_filter(self) -> None:
- """
- Add the local process filter to the gene.
- """
+ """Add the local process filter to the gene."""
self._gene.imap(
LocalProcessFilter(
self._gene.get_main_config()
@@ -163,7 +161,7 @@ def add_local_process_filter(self) -> None:
)
)
- def _generate_queue(self, layer_name: Optional[str]) -> None:
+ def _generate_queue(self, layer_name: str | None) -> None:
if self._options.tiles:
self._gene.set_store(TilesFileStore(self._options.tiles))
return
@@ -243,7 +241,7 @@ def _generate_tiles(self) -> None:
assert self._count_metatiles_dropped is not None
self._gene.imap(MultiAction(HashDropperGetter(self, True, self._count_metatiles_dropped)))
- def add_elapsed_togenerate(metatile: Tile) -> Optional[Tile]:
+ def add_elapsed_togenerate(metatile: Tile) -> Tile | None:
if metatile is not None:
metatile.elapsed_togenerate = metatile.tilecoord.n**2 # type: ignore
return metatile
@@ -309,9 +307,7 @@ def delete_from_store(tile: Tile) -> Tile:
self._gene.init(self._queue_tilestore, daemon=self._options.daemon)
def generate_consume(self) -> None:
- """
- Consume the tiles and log the time if needed.
- """
+ """Consume the tiles and log the time if needed."""
if self._options.time is not None:
options = self._options
@@ -342,10 +338,8 @@ def __call__(self, tile: Tile) -> Tile:
else:
self._gene.consume()
- def generate_resume(self, layer_name: Optional[str]) -> None:
- """
- Generate the resume message and close the tilestore connection.
- """
+ def generate_resume(self, layer_name: str | None) -> None:
+ """Generate the resume message and close the tilestore connection."""
config = self._gene.get_config(self._gene.config_file) if self._gene.config_file is not None else None
if self._options.time is None:
layer = None
@@ -441,7 +435,8 @@ class TilestoreGetter:
def __init__(self, gene: Generate):
self.gene = gene
- def __call__(self, config_file: str, layer_name: str) -> Optional[TileStore]:
+ def __call__(self, config_file: str, layer_name: str) -> TileStore | None:
+ """Get the tilestore based on the layername config file any layer type."""
config = self.gene._gene.get_config(config_file)
layer = config.config["layers"][layer_name]
if layer["type"] == "wms":
@@ -449,7 +444,7 @@ def __call__(self, config_file: str, layer_name: str) -> Optional[TileStore]:
if "STYLES" not in params:
params["STYLES"] = ",".join(layer["wmts_style"] for _ in layer["layers"].split(","))
if layer.get("generate_salt", False):
- params["SALT"] = str(random.randint(0, 999999)) # nosec
+ params["SALT"] = str(random.randint(0, 999999)) # nosec # noqa: S311
# Get the metatile image from the WMS server
return TimedTileStoreWrapper(
@@ -476,6 +471,7 @@ def __call__(self, config_file: str, layer_name: str) -> Optional[TileStore]:
elif layer["type"] == "mapnik":
try:
from tilecloud.store.mapnik_ import MapnikTileStore # pylint: disable=import-outside-toplevel
+
from tilecloud_chain.mapnik_ import ( # pylint: disable=import-outside-toplevel
MapnikDropActionTileStore,
)
@@ -542,7 +538,7 @@ def detach() -> None:
sys.exit(1)
-def main(args: Optional[list[str]] = None, out: Optional[IO[str]] = None) -> None:
+def main(args: list[str] | None = None, out: IO[str] | None = None) -> None:
"""Run the tiles generation."""
try:
parser = ArgumentParser(
@@ -648,7 +644,7 @@ def main(args: Optional[list[str]] = None, out: Optional[IO[str]] = None) -> Non
gene.close()
except SystemExit:
raise
- except: # pylint: disable=bare-except
+ except: # pylint: disable=bare-except # noqa: E722
_LOGGER.exception("Exit with exception")
if os.environ.get("TESTS", "false").lower() == "true":
raise
@@ -666,7 +662,7 @@ def __init__(self, gene: Generate, meta: bool, count: Count):
self.meta = meta
self.count = count
- def __call__(self, config_file: str, layer_name: str) -> Callable[[Tile], Optional[Tile]]:
+ def __call__(self, config_file: str, layer_name: str) -> Callable[[Tile], Tile | None]:
"""Call."""
layer = self.gene._gene.get_config(config_file).config["layers"][layer_name]
conf_name = "empty_metatile_detection" if self.meta else "empty_tile_detection"
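
Like the other modules in this diff, generate.py now imports Callable from collections.abc instead of typing (the typing alias is the deprecated spelling). A tiny illustration of that import together with the new union syntax; retry() and its behaviour are invented for the example:

from collections.abc import Callable


def retry(operation: Callable[[], int], attempts: int = 3) -> int | None:
    """Run the operation up to `attempts` times; return its result, or None on failure."""
    for _ in range(attempts):
        try:
            return operation()
        except RuntimeError:
            continue
    return None


print(retry(lambda: 42))  # 42
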
diff --git a/tilecloud_chain/internal_mapcache.py b/tilecloud_chain/internal_mapcache.py
index 8d2f9b1e7..4775e3e5e 100644
--- a/tilecloud_chain/internal_mapcache.py
+++ b/tilecloud_chain/internal_mapcache.py
@@ -1,6 +1,4 @@
-"""
-Internal Mapcache.
-"""
+"""Internal Mapcache."""
import collections
import contextlib
@@ -12,13 +10,13 @@
import sys
import threading
from collections.abc import Iterator
-from typing import TYPE_CHECKING, Any, Optional, TypeVar, Union, cast
+from typing import TYPE_CHECKING, Any, TypeVar, cast
import redis.sentinel
from prometheus_client import Summary
+from tilecloud import Tile, TileCoord, TileStore
import tilecloud_chain.configuration
-from tilecloud import Tile, TileCoord, TileStore
from tilecloud_chain import Run, configuration
from tilecloud_chain.generate import Generate
@@ -70,7 +68,7 @@ def __init__(self, config: tilecloud_chain.configuration.Redis, **kwargs: Any):
self._master = redis.Redis.from_url(url, **connection_kwargs) # type: ignore
self._slave = self._master
else:
- sentinels: list[tuple[str, Union[str, int]]] = []
+ sentinels: list[tuple[str, str | int]] = []
if "TILECLOUD_CHAIN_REDIS_SENTINELs" in os.environ:
sentinels_string = os.environ["TILECLOUD_CHAIN_REDIS_SENTINELS"]
sentinels_tmp = [s.split(":") for s in sentinels_string.split(",")]
@@ -91,7 +89,7 @@ def __init__(self, config: tilecloud_chain.configuration.Redis, **kwargs: Any):
self._prefix = config.get("prefix", tilecloud_chain.configuration.PREFIX_DEFAULT)
self._expiration = config.get("expiration", tilecloud_chain.configuration.EXPIRATION_DEFAULT)
- def get_one(self, tile: Tile) -> Optional[Tile]:
+ def get_one(self, tile: Tile) -> Tile | None:
"""See in superclass."""
key = self._get_key(tile)
data = self._slave.get(key)
@@ -169,15 +167,13 @@ def __init__(self, tilegeneration: tilecloud_chain.TileGeneration) -> None:
generator._generate_tiles()
self.run = Run(tilegeneration, tilegeneration.functions_metatiles)
- def read_from_cache(self, tile: Tile) -> Optional[Tile]:
+ def read_from_cache(self, tile: Tile) -> Tile | None:
"""Get the tile from the cache (Redis)."""
-
with _GET_TILE.labels("redis").time():
return self._cache_store.get_one(tile)
def compute_tile(self, tile: Tile) -> None:
"""Create the tile."""
-
with _GET_TILE.labels("wms").time():
self.run(tile)
if tile.error:
@@ -265,7 +261,7 @@ def fetch(
"Try to get the tile %s %s, from the available: '%s'",
tile.tilecoord,
tile.formated_metadata,
- ", ".join([str(e) for e in tiles.keys()]),
+ ", ".join([str(e) for e in tiles]),
)
raise
diff --git a/tilecloud_chain/mapnik_.py b/tilecloud_chain/mapnik_.py
index 647cee840..92a33b2c2 100644
--- a/tilecloud_chain/mapnik_.py
+++ b/tilecloud_chain/mapnik_.py
@@ -1,9 +1,8 @@
-"""
-MapnikTileStore with drop action if the generated tile is empty.
-"""
+"""MapnikTileStore with drop action if the generated tile is empty."""
import logging
-from typing import Any, Callable, Optional
+from collections.abc import Callable
+from typing import Any
from tilecloud import Tile, TileStore
from tilecloud.store.mapnik_ import MapnikTileStore
@@ -16,9 +15,9 @@ class MapnikDropActionTileStore(MapnikTileStore):
def __init__(
self,
- store: Optional[TileStore] = None,
- queue_store: Optional[TileStore] = None,
- count: Optional[list[Callable[[Optional[Tile]], Any]]] = None,
+ store: TileStore | None = None,
+ queue_store: TileStore | None = None,
+ count: list[Callable[[Tile | None], Any]] | None = None,
**kwargs: Any,
) -> None:
"""Initialize."""
@@ -27,7 +26,7 @@ def __init__(
self.count = count or []
MapnikTileStore.__init__(self, **kwargs)
- def get_one(self, tile: Tile) -> Optional[Tile]:
+ def get_one(self, tile: Tile) -> Tile | None:
"""See in superclass."""
result = MapnikTileStore.get_one(self, tile)
if result is None:
diff --git a/tilecloud_chain/multitilestore.py b/tilecloud_chain/multitilestore.py
index fee10416e..fd87cf618 100644
--- a/tilecloud_chain/multitilestore.py
+++ b/tilecloud_chain/multitilestore.py
@@ -1,11 +1,8 @@
-"""
-Redirect to the corresponding Tilestore for the layer and config file.
-"""
+"""Redirect to the corresponding Tilestore for the layer and config file."""
import logging
-from collections.abc import Iterable, Iterator
+from collections.abc import Callable, Iterable, Iterator
from itertools import chain, groupby, starmap
-from typing import Callable, Optional
from tilecloud import Tile, TileStore
@@ -15,13 +12,13 @@
class MultiTileStore(TileStore):
"""Redirect to the corresponding Tilestore for the layer and config file."""
- def __init__(self, get_store: Callable[[str, str], Optional[TileStore]]) -> None:
+ def __init__(self, get_store: Callable[[str, str], TileStore | None]) -> None:
"""Initialize."""
TileStore.__init__(self)
self.get_store = get_store
- self.stores: dict[tuple[str, str], Optional[TileStore]] = {}
+ self.stores: dict[tuple[str, str], TileStore | None] = {}
- def _get_store(self, config_file: str, layer: str) -> Optional[TileStore]:
+ def _get_store(self, config_file: str, layer: str) -> TileStore | None:
store = self.stores.get((config_file, layer))
if store is None:
store = self.get_store(config_file, layer)
@@ -73,7 +70,7 @@ def put_one(self, tile: Tile) -> Tile:
assert store is not None
return store.put_one(tile)
- def get_one(self, tile: Tile) -> Optional[Tile]:
+ def get_one(self, tile: Tile) -> Tile | None:
"""
Add data to ``tile``, or return ``None`` if ``tile`` is not in the store.
@@ -86,10 +83,10 @@ def get_one(self, tile: Tile) -> Optional[Tile]:
assert store is not None
return store.get_one(tile)
- def get(self, tiles: Iterable[Optional[Tile]]) -> Iterator[Optional[Tile]]:
+ def get(self, tiles: Iterable[Tile | None]) -> Iterator[Tile | None]:
"""See in superclass."""
- def apply(key: tuple[str, str], tiles: Iterator[Tile]) -> Iterable[Optional[Tile]]:
+ def apply(key: tuple[str, str], tiles: Iterator[Tile]) -> Iterable[Tile | None]:
store = self._get_store(*key)
if store is None:
return tiles
@@ -118,6 +115,6 @@ def apply(key: tuple[str, str], tiles: Iterator[Tile]) -> Iterator[Tile]:
return chain.from_iterable(starmap(apply, groupby(tiles, self._get_layer)))
@staticmethod
- def _get_layer(tile: Optional[Tile]) -> tuple[str, str]:
+ def _get_layer(tile: Tile | None) -> tuple[str, str]:
assert tile is not None
return (tile.metadata["config_file"], tile.metadata["layer"])
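
MultiTileStore.get above keeps dispatching consecutive tiles that share the same (config_file, layer) key through groupby/starmap, so each group triggers a single store lookup. A self-contained sketch of that dispatch shape with plain tuples instead of tiles:

from collections.abc import Iterator
from itertools import chain, groupby, starmap


def dispatch(items: list[tuple[str, int]]) -> list[str]:
    """Group consecutive items by their key and handle each group in a single call."""

    def key(item: tuple[str, int]) -> str:
        return item[0]

    def apply(group_key: str, group: Iterator[tuple[str, int]]) -> list[str]:
        # One lookup per group, mirroring the single store lookup per layer.
        return [f"{group_key}:{value}" for _, value in group]

    return list(chain.from_iterable(starmap(apply, groupby(items, key))))


assert dispatch([("a", 1), ("a", 2), ("b", 3)]) == ["a:1", "a:2", "b:3"]
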
diff --git a/tilecloud_chain/security.py b/tilecloud_chain/security.py
index 97f2b674e..973abc3c4 100644
--- a/tilecloud_chain/security.py
+++ b/tilecloud_chain/security.py
@@ -1,9 +1,6 @@
-"""
-Security policy for the pyramid application.
-"""
+"""Security policy for the pyramid application."""
import os
-from typing import Optional, Union
import c2cwsgiutils.auth
import pyramid.request
@@ -14,22 +11,22 @@
class User:
"""The user definition."""
- login: Optional[str]
- name: Optional[str]
- url: Optional[str]
+ login: str | None
+ name: str | None
+ url: str | None
is_auth: bool
- token: Optional[str]
+ token: str | None
is_admin: bool
request: pyramid.request.Request
def __init__(
self,
auth_type: str,
- login: Optional[str],
- name: Optional[str],
- url: Optional[str],
+ login: str | None,
+ name: str | None,
+ url: str | None,
is_auth: bool,
- token: Optional[str],
+ token: str | None,
request: pyramid.request.Request,
) -> None:
self.auth_type = auth_type
@@ -42,9 +39,7 @@ def __init__(
self.is_admin = c2cwsgiutils.auth.check_access(self.request)
def has_access(self, auth_config: AuthConfig) -> bool:
- """
- Check if the user has access to the tenant.
- """
+ """Check if the user has access to the tenant."""
if self.is_admin:
return True
if "github_repository" in auth_config:
@@ -58,7 +53,6 @@ class SecurityPolicy:
def identity(self, request: pyramid.request.Request) -> User:
"""Return app-specific user object."""
-
if not hasattr(request, "user"):
if "TEST_USER" in os.environ:
user = User(
@@ -81,12 +75,11 @@ def identity(self, request: pyramid.request.Request) -> User:
c2cuser.get("token"),
request,
)
- setattr(request, "user", user)
+ request.user = user
return request.user # type: ignore
- def authenticated_userid(self, request: pyramid.request.Request) -> Optional[str]:
+ def authenticated_userid(self, request: pyramid.request.Request) -> str | None:
"""Return a string ID for the user."""
-
identity = self.identity(request)
if identity is None:
@@ -96,9 +89,8 @@ def authenticated_userid(self, request: pyramid.request.Request) -> Optional[str
def permits(
self, request: pyramid.request.Request, context: AuthConfig, permission: str
- ) -> Union[Allowed, Denied]:
+ ) -> Allowed | Denied:
"""Allow access to everything if signed in."""
-
identity = self.identity(request)
if identity is None:
diff --git a/tilecloud_chain/server.py b/tilecloud_chain/server.py
index c6436eb35..bfc264157 100644
--- a/tilecloud_chain/server.py
+++ b/tilecloud_chain/server.py
@@ -1,6 +1,4 @@
-"""
-The server to serve the tiles.
-"""
+"""The server to serve the tiles."""
# Copyright (c) 2013-2024 by Stéphane Brunner
# All rights reserved.
@@ -35,7 +33,7 @@
import mimetypes
import os
import time
-from typing import TYPE_CHECKING, Any, Generic, Optional, TypeVar, Union, cast
+from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast
from urllib.parse import parse_qs, urlencode
import botocore.exceptions
@@ -43,6 +41,7 @@
import pyramid.response
import pyramid.session
import requests
+import tilecloud.store.s3
from azure.core.exceptions import ResourceNotFoundError
from c2cwsgiutils import health_check
from prometheus_client import Summary
@@ -51,11 +50,10 @@
from pyramid.request import Request
from pyramid.router import Router
from pyramid_mako import add_mako_renderer
+from tilecloud import Tile, TileCoord
-import tilecloud.store.s3
import tilecloud_chain.configuration
import tilecloud_chain.security
-from tilecloud import Tile, TileCoord
from tilecloud_chain import (
TileGeneration,
configuration,
@@ -71,9 +69,8 @@
_TILEGENERATION = None
-def init_tilegeneration(config_file: Optional[str]) -> None:
+def init_tilegeneration(config_file: str | None) -> None:
"""Initialize the tile generation."""
-
global _TILEGENERATION # pylint: disable=global-statement
if _TILEGENERATION is None:
if config_file is not None:
@@ -117,7 +114,7 @@ def __init__(self, store: tilecloud.TileStore, mtime: float) -> None:
class DatedFilter:
"""Filter with timestamp to be able to invalidate it on configuration change."""
- def __init__(self, layer_filter: Optional[tilecloud_chain.IntersectGeometryFilter], mtime: float) -> None:
+ def __init__(self, layer_filter: tilecloud_chain.IntersectGeometryFilter | None, mtime: float) -> None:
"""Initialize."""
self.filter = layer_filter
self.mtime = mtime
@@ -130,7 +127,7 @@ def __init__(self) -> None:
"""Initialize."""
try:
self.filter_cache: dict[str, dict[str, DatedFilter]] = {}
- self.s3_client_cache: dict[str, "botocore.client.S3"] = {}
+ self.s3_client_cache: dict[str, botocore.client.S3] = {}
self.store_cache: dict[str, dict[str, DatedStore]] = {}
assert _TILEGENERATION
@@ -196,7 +193,7 @@ def get_layers(config: tilecloud_chain.DatedConfig) -> list[str]:
def get_filter(
self, config: tilecloud_chain.DatedConfig, layer_name: str
- ) -> Optional[tilecloud_chain.IntersectGeometryFilter]:
+ ) -> tilecloud_chain.IntersectGeometryFilter | None:
"""Get the filter from the config."""
dated_filter = self.filter_cache.get(config.file, {}).get(layer_name)
@@ -339,13 +336,12 @@ def __call__(
def serve(
self,
- path: Optional[list[str]],
+ path: list[str] | None,
params: dict[str, str],
config: tilecloud_chain.DatedConfig,
**kwargs: Any,
) -> Response:
"""Serve the WMTS requests."""
-
if not config or not config.config:
return self.error(
config,
@@ -636,7 +632,7 @@ def forward(
self,
config: tilecloud_chain.DatedConfig,
url: str,
- headers: Optional[Any] = None,
+ headers: Any | None = None,
no_cache: bool = False,
**kwargs: Any,
) -> Response:
@@ -647,7 +643,7 @@ def forward(
headers["Cache-Control"] = "no-cache"
headers["Pragma"] = "no-cache"
- response = requests.get(url, headers=headers) # nosec
+ response = requests.get(url, headers=headers)
if response.status_code == 200:
response_headers = dict(response.headers)
if no_cache:
@@ -673,27 +669,24 @@ def error(
self,
config: tilecloud_chain.DatedConfig,
code: int,
- message: Optional[Union[Exception, str]] = "",
+ message: Exception | str | None = "",
**kwargs: Any,
) -> Response:
"""Build the error, should be implemented in a sub class."""
-
raise NotImplementedError
def response(
self,
config: tilecloud_chain.DatedConfig,
data: bytes,
- headers: Optional[dict[str, str]] = None,
+ headers: dict[str, str] | None = None,
**kwargs: Any,
) -> Response:
"""Build the response, should be implemented in a sub class."""
-
raise NotImplementedError
def get_host(self, **kwargs: Any) -> str:
"""Get the host used in Prometheus stats and in the JSON logs, should be implemented in a sub class."""
-
del kwargs
return "localhost"
@@ -719,7 +712,7 @@ def error(
self,
config: tilecloud_chain.DatedConfig,
code: int,
- message: Optional[Union[Exception, str]] = "",
+ message: Exception | str | None = "",
**kwargs: Any,
) -> list[bytes]:
"""Build the error."""
@@ -731,7 +724,7 @@ def response(
self,
config: tilecloud_chain.DatedConfig,
data: bytes,
- headers: Optional[dict[str, str]] = None,
+ headers: dict[str, str] | None = None,
**kwargs: Any,
) -> list[bytes]:
"""Build the response."""
@@ -744,7 +737,7 @@ def response(
def app_factory(
global_config: Any,
- configfile: Optional[str] = os.environ.get("TILEGENERATION_CONFIGFILE"),
+ configfile: str | None = os.environ.get("TILEGENERATION_CONFIGFILE"),
**local_conf: Any,
) -> WsgiServer:
"""Create the WSGI server."""
@@ -769,7 +762,7 @@ def error(
self,
config: tilecloud_chain.DatedConfig,
code: int,
- message: Optional[Union[Exception, str]] = None,
+ message: Exception | str | None = None,
**kwargs: Any,
) -> pyramid.response.Response:
"""Build the Pyramid response on error."""
@@ -794,7 +787,7 @@ def response(
self,
config: tilecloud_chain.DatedConfig,
data: bytes,
- headers: Optional[dict[str, str]] = None,
+ headers: dict[str, str] | None = None,
**kwargs: Any,
) -> pyramid.response.Response:
"""Build the Pyramid response."""
@@ -809,6 +802,7 @@ def response(
return request.response
def get_host(self, **kwargs: Any) -> str:
+ """Get the host used in Prometheus stats and in the JSON logs."""
request: pyramid.request.Request = kwargs["request"]
assert isinstance(request.host, str)
return request.host
diff --git a/tilecloud_chain/store/postgresql.py b/tilecloud_chain/store/postgresql.py
index 77dffe9cd..dc88f5627 100644
--- a/tilecloud_chain/store/postgresql.py
+++ b/tilecloud_chain/store/postgresql.py
@@ -1,6 +1,4 @@
-"""
-PostgreSQL queue.
-"""
+"""PostgreSQL queue."""
# Copyright (c) 2023-2024 by Camptocamp
# All rights reserved.
@@ -28,7 +26,6 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
import io
import logging
import multiprocessing
@@ -47,8 +44,8 @@
from sqlalchemy import JSON, Column, DateTime, Integer, Unicode, and_
from sqlalchemy.engine import create_engine
from sqlalchemy.orm import DeclarativeBase, sessionmaker
-
from tilecloud import Tile, TileCoord, TileStore
+
from tilecloud_chain import DatedConfig, configuration, controller, generate
_LOGGER = logging.getLogger(__name__)
@@ -139,7 +136,6 @@ class Job(Base):
def __repr__(self) -> str:
"""Return the representation of the job."""
-
return f"Job {self.id} {self.name} [{self.status}]"
@@ -167,7 +163,6 @@ class Queue(Base):
def __repr__(self) -> str:
"""Return the representation of the queue entry."""
-
return f"Queue {self.job_id}.{self.id} zoom={self.zoom} [{self.status}]"
@@ -196,7 +191,7 @@ def _start_job(
if command0 == "generate-tiles":
add_role = "--get-hash" not in arguments and "--get-bbox" not in arguments
- for arg in arguments.keys():
+ for arg in arguments:
if arg.startswith("-") and arg not in allowed_arguments:
job.status = _STATUS_ERROR # type: ignore[assignment]
job.error = ( # type: ignore[attr-defined]
@@ -253,7 +248,7 @@ def _start_job(
_LOGGER.info("Run the command `%s`", display_command)
- completed_process = subprocess.run( # nosec # pylint: disable=subprocess-run-check
+ completed_process = subprocess.run( # nosec # pylint: disable=subprocess-run-check # noqa: S603
final_command,
capture_output=True,
env=env,
@@ -308,7 +303,6 @@ def __init__(
def create_job(self, name: str, command: str, config_filename: str) -> None:
"""Create a job."""
-
with self.SessionMaker() as session:
job = Job(name=name, command=command, config_filename=config_filename)
session.add(job)
@@ -316,7 +310,6 @@ def create_job(self, name: str, command: str, config_filename: str) -> None:
def retry(self, job_id: int, config_filename: str) -> None:
"""Retry a job."""
-
with self.SessionMaker() as session:
nb_job = (
session.query(Job)
@@ -341,7 +334,6 @@ def retry(self, job_id: int, config_filename: str) -> None:
def cancel(self, job_id: int, config_filename: str) -> None:
"""Cancel a job."""
-
with self.SessionMaker() as session:
job = (
session.query(Job)
@@ -371,7 +363,6 @@ def get_status(self, config_filename: str) -> list[tuple[Job, list[dict[str, int
- the status of the job
- the last 5 meta tiles errors
"""
-
result = []
with self.SessionMaker() as session:
for job in (
@@ -433,7 +424,6 @@ def _maintenance(self) -> None:
- manage the too long pending tile generation
- Create the job list to be processed
"""
-
with _MAINTENANCE_SUMMARY.time():
# Restart the too long pending jobs (queue generation)
with self.SessionMaker() as session:
@@ -520,7 +510,6 @@ def _maintenance(self) -> None:
def list(self) -> Iterator[Tile]:
"""List the meta tiles in the queue."""
-
while True:
if not self.jobs:
self._maintenance()
@@ -548,7 +537,10 @@ def list(self) -> Iterator[Tile]:
continue
sqlalchemy_tile.status = _STATUS_PENDING # type: ignore[assignment]
sqlalchemy_tile.started_at = datetime.now() # type: ignore[assignment]
- meta_tile = _decode_message(sqlalchemy_tile.meta_tile, postgresql_id=sqlalchemy_tile.id) # type: ignore[arg-type]
+ meta_tile = _decode_message(
+ sqlalchemy_tile.meta_tile, # type: ignore[arg-type]
+ postgresql_id=sqlalchemy_tile.id,
+ )
session.commit()
yield meta_tile
except Exception: # pylint: disable=broad-except
@@ -558,7 +550,6 @@ def list(self) -> Iterator[Tile]:
def put_one(self, tile: Tile) -> Tile:
"""Put the meta tile in the queue."""
-
with self.SessionMaker() as session:
session.add(
Queue(
@@ -572,14 +563,12 @@ def put_one(self, tile: Tile) -> Tile:
def put(self, tiles: Iterable[Tile]) -> Iterator[Tile]:
"""Put the meta tiles in the queue."""
-
for meta_tile in tiles:
self.put_one(meta_tile)
yield meta_tile
def delete_one(self, tile: Tile) -> Tile:
"""Delete the meta tile from the queue."""
-
with self.SessionMaker() as session:
if tile.error:
sqlalchemy_tile = (
@@ -601,7 +590,6 @@ def delete_one(self, tile: Tile) -> Tile:
def delete_all(self) -> None:
"""Delete all the queue."""
-
with self.SessionMaker() as session:
session.query(Queue).delete()
session.commit()
@@ -613,7 +601,6 @@ def get_one(self, tile: Tile) -> Tile:
def get_postgresql_queue_store(config: DatedConfig) -> PostgresqlTileStore:
"""Get the postgreSQL queue tile store."""
-
conf = config.config.get("postgresql", {})
sqlalchemy_url = os.environ.get("TILECLOUD_CHAIN_SQLALCHEMY_URL", conf.get("sqlalchemy_url"))
assert sqlalchemy_url is not None
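
A hedged sketch of the subprocess pattern used by _start_job above: run a command without check=True, capture its output, and inspect the return code explicitly (the suppression comments mirror the diff; run_command and the error handling are illustrative).

import subprocess


def run_command(command: list[str]) -> str:
    """Run the command and return stdout, raising on a non-zero exit code."""
    completed = subprocess.run(  # pylint: disable=subprocess-run-check # noqa: S603
        command,
        capture_output=True,
        text=True,
    )
    if completed.returncode != 0:
        raise RuntimeError(completed.stderr)
    return completed.stdout


print(run_command(["echo", "hello"]))  # assumes a POSIX echo is available
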
diff --git a/tilecloud_chain/templates/openlayers.html b/tilecloud_chain/templates/openlayers.html
index 819e9b918..33a870286 100644
--- a/tilecloud_chain/templates/openlayers.html
+++ b/tilecloud_chain/templates/openlayers.html
@@ -72,8 +72,8 @@
>
diff --git a/tilecloud_chain/tests/__init__.py b/tilecloud_chain/tests/__init__.py
index 0ff9b9a9d..dd5769654 100644
--- a/tilecloud_chain/tests/__init__.py
+++ b/tilecloud_chain/tests/__init__.py
@@ -4,9 +4,10 @@
import shutil
import sys
import traceback
+from collections.abc import Callable
from io import StringIO
from logging import config
-from typing import Any, Callable, Union
+from typing import Any, Union
from unittest import TestCase
import yaml
@@ -37,7 +38,7 @@ def assert_result_equals(self, result: str, expected: str, regex: bool = False)
result = re.sub("\n[^\n]*\r", "\n", result)
result = re.sub("^[^\n]*\r", "", result)
result = result.split("\n")
- for n, test in enumerate(zip(expected, result)):
+ for n, test in enumerate(zip(expected, result, strict=False)):
if test[0] != "PASS...":
try:
if regex:
@@ -55,9 +56,7 @@ def assert_result_equals(self, result: str, expected: str, regex: bool = False)
raise e
self.assertEqual(len(expected), len(result), repr(result))
- def run_cmd(
- self, cmd: Union[list[str], str], main_func: Callable, get_error: bool = False
- ) -> tuple[str, str]:
+ def run_cmd(self, cmd: list[str] | str, main_func: Callable, get_error: bool = False) -> tuple[str, str]:
old_stdout = sys.stdout
sys.stdout = mystdout = StringIO()
old_stderr = sys.stderr
@@ -78,15 +77,12 @@ def run_cmd(
return mystdout.getvalue(), mystderr.getvalue()
def assert_cmd_equals(
- self, cmd: Union[list[str], str], main_func: Callable, empty_err: bool = False, **kargs: Any
+ self, cmd: list[str] | str, main_func: Callable, empty_err: bool = False, **kargs: Any
) -> None:
out, err = self.run_cmd(cmd, main_func)
if empty_err:
self.assertEqual(err, "")
- if isinstance(out, bytes):
- out = out.decode("utf-8")
- else:
- out = str(out)
+ out = out.decode("utf-8") if isinstance(out, bytes) else str(out)
self.assert_result_equals(result=out, **kargs)
def assert_cmd_exit_equals(self, cmd: str, main_func: Callable) -> None:
@@ -99,7 +95,7 @@ def assert_cmd_exit_equals(self, cmd: str, main_func: Callable) -> None:
def assert_main_equals(
self,
- cmd: Union[list[str], str],
+ cmd: list[str] | str,
main_func: Callable,
expected: list[list[str]] = None,
get_error: bool = False,
@@ -148,7 +144,7 @@ def assert_main_except_equals(
except AssertionError:
raise
except Exception:
- assert False, traceback.format_exc()
+ raise AssertionError(traceback.format_exc())
if expected:
for expect in expected:
@@ -180,7 +176,7 @@ def assert_tiles_generated_deleted(
) -> None:
self.assert_cmd_equals(expected=expected, **kargs)
count = 0
- for path, dirs, files in os.walk(directory):
+ for path, _dirs, files in os.walk(directory):
if len(files) != 0:
log.info((path, files))
print((path, files))
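
The test helpers above now pass strict=False explicitly to zip() (a Python 3.10+ keyword). A small standalone illustration of the difference; the lists are made up:

expected = ["a", "b", "c"]
result = ["a", "b"]

pairs = list(zip(expected, result, strict=False))
assert pairs == [("a", "a"), ("b", "b")]  # shorter iterable wins, no error

try:
    list(zip(expected, result, strict=True))
except ValueError:
    pass  # strict=True reports the length mismatch instead of truncating
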
diff --git a/tilecloud_chain/tests/test_copy.py b/tilecloud_chain/tests/test_copy.py
index 40f48477f..19f6cfc7a 100644
--- a/tilecloud_chain/tests/test_copy.py
+++ b/tilecloud_chain/tests/test_copy.py
@@ -68,8 +68,8 @@ def test_process(self) -> None:
self.assertEqual(statinfo.st_size, 755)
self.assert_cmd_equals(
- cmd=".build/venv/bin/generate_process {} -c "
- "tilegeneration/test-copy.yaml --cache src optipng".format(d),
+ cmd=f".build/venv/bin/generate_process {d} -c "
+ "tilegeneration/test-copy.yaml --cache src optipng",
main_func=copy_.process,
regex=True,
expected=(
diff --git a/tilecloud_chain/tests/test_expiretiles.py b/tilecloud_chain/tests/test_expiretiles.py
index a56cabffd..2d65fbafc 100644
--- a/tilecloud_chain/tests/test_expiretiles.py
+++ b/tilecloud_chain/tests/test_expiretiles.py
@@ -142,7 +142,7 @@ def parse_coord(coord: str) -> tuple[float, float]:
log_capture.check()
def test_expire_tiles_empty(self) -> None:
- with LogCapture("tilecloud_chain", level=30) as log_capture:
+ with LogCapture("tilecloud_chain", level=30):
self.assert_cmd_equals(
cmd=[
".build/venv/bin/import_expiretiles",
diff --git a/tilecloud_chain/tests/test_generate.py b/tilecloud_chain/tests/test_generate.py
index 67e6b1629..a61434cfe 100644
--- a/tilecloud_chain/tests/test_generate.py
+++ b/tilecloud_chain/tests/test_generate.py
@@ -4,8 +4,8 @@
import pytest
from testfixtures import LogCapture
-
from tilecloud.store.redis import RedisTileStore
+
from tilecloud_chain import controller, generate
from tilecloud_chain.tests import CompareCase
@@ -30,8 +30,8 @@ def test_get_hash(self) -> None:
with LogCapture("tilecloud_chain", level=30) as log_capture:
for d in ("-d", ""):
self.assert_cmd_equals(
- cmd=".build/venv/bin/generate_tiles {} --get-hash 4/0/0 "
- "-c tilegeneration/test.yaml -l point".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} --get-hash 4/0/0 "
+ "-c tilegeneration/test.yaml -l point",
main_func=generate.main,
expected="""Tile: 4/0/0:+8/+8 config_file=tilegeneration/test.yaml dimension_DATE=2012 host=localhost layer=point
empty_metatile_detection:
@@ -50,8 +50,8 @@ def test_get_wrong_hash(self) -> None:
for d in ("-d", "-q"):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_cmd_exit_equals(
- cmd=".build/venv/bin/generate_tiles {} --get-hash 0/7/5 "
- "-c tilegeneration/test.yaml -l all".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} --get-hash 0/7/5 "
+ "-c tilegeneration/test.yaml -l all",
main_func=generate.main,
)
log_capture.check(
@@ -66,22 +66,22 @@ def test_get_bbox(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_cmd_equals(
- cmd=".build/venv/bin/generate_tiles {} "
- "-c tilegeneration/test.yaml --get-bbox 4/4/4 -l point".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "-c tilegeneration/test.yaml --get-bbox 4/4/4 -l point",
main_func=generate.main,
expected="""Tile bounds: [425120,343600,426400,344880]
""",
)
self.assert_cmd_equals(
- cmd=".build/venv/bin/generate_tiles {} "
- "-c tilegeneration/test.yaml --get-bbox 4/4/4:+1/+1 -l point".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "-c tilegeneration/test.yaml --get-bbox 4/4/4:+1/+1 -l point",
main_func=generate.main,
expected="""Tile bounds: [425120,343600,426400,344880]
""",
)
self.assert_cmd_equals(
- cmd=".build/venv/bin/generate_tiles {} "
- "-c tilegeneration/test.yaml --get-bbox 4/4/4:+2/+2 -l point".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "-c tilegeneration/test.yaml --get-bbox 4/4/4:+2/+2 -l point",
main_func=generate.main,
expected="""Tile bounds: [425120,342320,427680,344880]
""",
@@ -93,8 +93,8 @@ def test_hash_mapnik(self):
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_cmd_equals(
- cmd=".build/venv/bin/generate_tiles {} "
- "--get-hash 4/0/0 -c tilegeneration/test.yaml -l mapnik".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "--get-hash 4/0/0 -c tilegeneration/test.yaml -l mapnik",
main_func=generate.main,
expected="""Tile: 4/0/0 config_file=tilegeneration/test.yaml
empty_tile_detection:
@@ -108,8 +108,8 @@ def test_hash_mapnik_grid(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_cmd_equals(
- cmd=".build/venv/bin/generate_tiles {} "
- "--get-hash 4/0/0 -c tilegeneration/test.yaml -l all".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "--get-hash 4/0/0 -c tilegeneration/test.yaml -l all",
main_func=generate.main,
expected="""Tile: 4/0/0 config_file=tilegeneration/test.yaml dimension_DATE=2012 host=localhost layer=all
empty_metatile_detection:
@@ -171,8 +171,8 @@ def test_test_dimensions(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
- cmd=".build/venv/bin/generate_tiles %s -c tilegeneration/test-nosns.yaml -t 1 "
- "--dimensions DATE=2013" % d,
+ cmd=f".build/venv/bin/generate_tiles {d} -c tilegeneration/test-nosns.yaml -t 1 "
+ "--dimensions DATE=2013",
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/%s/default/2013/swissgrid_5/%i/%i/%i.png",
@@ -300,12 +300,12 @@ def test_zoom_identifier(self) -> None:
y3 = [e[1] for e in xy3]
for d in ("-d", ""):
self.assert_tiles_generated(
- cmd=".build/venv/bin/generate_tiles {} "
- "-c tilegeneration/test-nosns.yaml -t 1 -l polygon2 -z 0".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "-c tilegeneration/test-nosns.yaml -t 1 -l polygon2 -z 0",
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/%s/default/2012/swissgrid_01/%s/%i/%i.png",
- tiles=list(zip(repeat("polygon2", len(x)), repeat("1", len(x)), x, y)),
+ tiles=list(zip(repeat("polygon2", len(x)), repeat("1", len(x)), x, y, strict=False)),
regex=True,
expected=r"""The tile generation of layer 'polygon2 \(DATE=2012\)' is finish
Nb generated metatiles: 1
@@ -322,12 +322,14 @@ def test_zoom_identifier(self) -> None:
""",
)
self.assert_tiles_generated(
- cmd=".build/venv/bin/generate_tiles {} "
- "-c tilegeneration/test-nosns.yaml -t 1 -l polygon2 -z 1".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "-c tilegeneration/test-nosns.yaml -t 1 -l polygon2 -z 1",
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/%s/default/2012/swissgrid_01/%s/%i/%i.png",
- tiles=list(zip(repeat("polygon2", len(x2)), repeat("0_2", len(x2)), x2, y2)),
+ tiles=list(
+ zip(repeat("polygon2", len(x2)), repeat("0_2", len(x2)), x2, y2, strict=False)
+ ),
regex=True,
expected=r"""The tile generation of layer 'polygon2 \(DATE=2012\)' is finish
Nb generated metatiles: 1
@@ -344,12 +346,14 @@ def test_zoom_identifier(self) -> None:
""",
)
self.assert_tiles_generated(
- cmd=".build/venv/bin/generate_tiles {} "
- "-c tilegeneration/test-nosns.yaml -t 1 -l polygon2 -z 2".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "-c tilegeneration/test-nosns.yaml -t 1 -l polygon2 -z 2",
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/%s/default/2012/swissgrid_01/%s/%i/%i.png",
- tiles=list(zip(repeat("polygon2", len(x3)), repeat("0_1", len(x3)), x3, y3)),
+ tiles=list(
+ zip(repeat("polygon2", len(x3)), repeat("0_1", len(x3)), x3, y3, strict=False)
+ ),
regex=True,
expected=r"""The tile generation of layer 'polygon2 \(DATE=2012\)' is finish
Nb generated metatiles: 1
@@ -371,8 +375,8 @@ def test_empty_bbox(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
- cmd=".build/venv/bin/generate_tiles %s -c tilegeneration/test-nosns.yaml "
- "-l point_hash --bbox 700000 250000 800000 300000" % d,
+ cmd=f".build/venv/bin/generate_tiles {d} -c tilegeneration/test-nosns.yaml "
+ "-l point_hash --bbox 700000 250000 800000 300000",
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/%s",
@@ -401,8 +405,8 @@ def test_zoom(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
- cmd=".build/venv/bin/generate_tiles {} "
- "-c tilegeneration/test-nosns.yaml -l point_hash --zoom 1".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "-c tilegeneration/test-nosns.yaml -l point_hash --zoom 1",
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/%s/default/2012/swissgrid_5/%i/%i/%i.png",
@@ -428,8 +432,8 @@ def test_zoom_range(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
- cmd=".build/venv/bin/generate_tiles {} "
- "-c tilegeneration/test-nosns.yaml -l point_hash --zoom 1-3".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "-c tilegeneration/test-nosns.yaml -l point_hash --zoom 1-3",
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/%s/default/2012/swissgrid_5/%i/%i/%i.png",
@@ -463,8 +467,8 @@ def test_no_zoom(self) -> None:
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
cmd=(
- ".build/venv/bin/generate_tiles {} -c tilegeneration/test-nosns.yaml -l point_hash"
- ).format(d),
+ f".build/venv/bin/generate_tiles {d} -c tilegeneration/test-nosns.yaml -l point_hash"
+ ),
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/%s/default/2012/swissgrid_5/%i/%i/%i.png",
@@ -499,8 +503,8 @@ def test_py_buffer(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
- cmd=".build/venv/bin/generate_tiles %s -c tilegeneration/test-nosns.yaml "
- "-l point_px_buffer --zoom 0-2" % d,
+ cmd=f".build/venv/bin/generate_tiles {d} -c tilegeneration/test-nosns.yaml "
+ "-l point_px_buffer --zoom 0-2",
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/point_px_buffer/default/2012/swissgrid_5/%i/%i/%i.png",
@@ -527,8 +531,8 @@ def test_zoom_list(self) -> None:
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
cmd=(
- ".build/venv/bin/generate_tiles %s -c tilegeneration/test-nosns.yaml "
- "-l point_hash --zoom 0,2,3" % d
+ f".build/venv/bin/generate_tiles {d} -c tilegeneration/test-nosns.yaml "
+ "-l point_hash --zoom 0,2,3"
),
main_func=generate.main,
directory="/tmp/tiles/",
@@ -562,8 +566,8 @@ def test_layer_bbox(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
- cmd=".build/venv/bin/generate_tiles {} "
- "-c tilegeneration/test-nosns.yaml -l polygon -z 0".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "-c tilegeneration/test-nosns.yaml -l polygon -z 0",
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/polygon/default/2012/swissgrid_5/0/%i/%i.png",
@@ -583,9 +587,9 @@ def test_layer_bbox(self) -> None:
)
self.assert_tiles_generated(
- cmd=".build/venv/bin/generate_tiles %s "
+ cmd=f".build/venv/bin/generate_tiles {d} "
"-c tilegeneration/test-nosns.yaml -l polygon -z 0"
- " -b 550000 170000 560000 180000" % d,
+ " -b 550000 170000 560000 180000",
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/polygon/default/2012/swissgrid_5/0/%i/%i.png",
@@ -605,9 +609,9 @@ def test_layer_bbox(self) -> None:
)
self.assert_tiles_generated(
- cmd=".build/venv/bin/generate_tiles %s "
+ cmd=f".build/venv/bin/generate_tiles {d} "
"-c tilegeneration/test-nosns.yaml -l polygon -z 0"
- " -b 550000.0 170000.0 560000.0 180000.0" % d,
+ " -b 550000.0 170000.0 560000.0 180000.0",
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/polygon/default/2012/swissgrid_5/0/%i/%i.png",
@@ -627,8 +631,8 @@ def test_layer_bbox(self) -> None:
)
self.assert_tiles_generated(
- cmd=".build/venv/bin/generate_tiles {} "
- "-c tilegeneration/test-nosns.yaml -l all -z 0".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "-c tilegeneration/test-nosns.yaml -l all -z 0",
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/all/default/2012/swissgrid_5/0/%i/%i.png",
@@ -652,8 +656,8 @@ def test_hash_generation(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
- cmd=".build/venv/bin/generate_tiles {} "
- "-c tilegeneration/test-nosns.yaml -l point_hash -z 0".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "-c tilegeneration/test-nosns.yaml -l point_hash -z 0",
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/point_hash/default/2012/swissgrid_5/0/%i/%i.png",
@@ -680,8 +684,8 @@ def test_mapnik(self):
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
- cmd=".build/venv/bin/generate_tiles {} "
- "-c tilegeneration/test-nosns.yaml -l mapnik -z 0".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "-c tilegeneration/test-nosns.yaml -l mapnik -z 0",
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/mapnik/default/2012/swissgrid_5/0/%i/%i.png",
@@ -706,8 +710,8 @@ def test_mapnik_grid(self):
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
- cmd=".build/venv/bin/generate_tiles {} "
- "-c tilegeneration/test-nosns.yaml -l mapnik_grid -z 0".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "-c tilegeneration/test-nosns.yaml -l mapnik_grid -z 0",
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/mapnik_grid/default/2012/swissgrid_5/0/%i/%i.json",
@@ -750,8 +754,8 @@ def test_mapnik_grid_drop(self):
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_tiles_generated(
- cmd=".build/venv/bin/generate_tiles {} "
- "-c tilegeneration/test-nosns.yaml -l mapnik_grid_drop -z 0".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "-c tilegeneration/test-nosns.yaml -l mapnik_grid_drop -z 0",
main_func=generate.main,
directory="/tmp/tiles/",
tiles_pattern="1.0.0/mapnik_grid_drop/default/2012/swissgrid_5/0/%i/%i.json",
@@ -790,8 +794,8 @@ def test_verbose(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.run_cmd(
- cmd=".build/venv/bin/generate_tiles {} "
- "-c tilegeneration/test-nosns.yaml -t 2 -v -l polygon".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "-c tilegeneration/test-nosns.yaml -t 2 -v -l polygon",
main_func=generate.main,
)
log_capture.check()
@@ -800,8 +804,8 @@ def test_time(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_cmd_equals(
- cmd=".build/venv/bin/generate_tiles {} "
- "-c tilegeneration/test.yaml --time 2 -l polygon".format(d),
+ cmd=f".build/venv/bin/generate_tiles {d} "
+ "-c tilegeneration/test.yaml --time 2 -l polygon",
main_func=generate.main,
expected=r"""size: 770
size: 862
@@ -820,8 +824,7 @@ def test_time_layer_bbox(self) -> None:
for d in ("-d", ""):
with LogCapture("tilecloud_chain", level=30) as log_capture:
self.assert_cmd_equals(
- cmd=".build/venv/bin/generate_tiles {} "
- "-c tilegeneration/test.yaml --time 2 -l all".format(d),
+                    cmd=f".build/venv/bin/generate_tiles {d} -c tilegeneration/test.yaml --time 2 -l all",
main_func=generate.main,
expected=r"""size: 1010
size: 1010
@@ -864,8 +867,8 @@ def test_delete_meta(self) -> None:
)
self.assert_tiles_generated_deleted(
cmd=(
- ".build/venv/bin/generate_tiles %s -c tilegeneration/test-nosns.yaml "
- "-l point_hash_no_meta -z 0" % d
+ f".build/venv/bin/generate_tiles {d} -c tilegeneration/test-nosns.yaml "
+ "-l point_hash_no_meta -z 0"
),
main_func=generate.main,
directory="/tmp/tiles/",
@@ -895,8 +898,8 @@ def test_delete_no_meta(self) -> None:
)
self.assert_tiles_generated_deleted(
cmd=(
- ".build/venv/bin/generate_tiles %s -c tilegeneration/test-nosns.yaml "
- "-l point_hash_no_meta -z 0" % d
+ f".build/venv/bin/generate_tiles {d} -c tilegeneration/test-nosns.yaml "
+ "-l point_hash_no_meta -z 0"
),
main_func=generate.main,
directory="/tmp/tiles/",
diff --git a/tilecloud_chain/tests/test_postgresql.py b/tilecloud_chain/tests/test_postgresql.py
index 829a6c3d9..bb4758adc 100644
--- a/tilecloud_chain/tests/test_postgresql.py
+++ b/tilecloud_chain/tests/test_postgresql.py
@@ -5,8 +5,8 @@
from sqlalchemy import and_
from sqlalchemy.engine import create_engine
from sqlalchemy.orm import sessionmaker
-
from tilecloud import Tile, TileCoord
+
from tilecloud_chain import DatedConfig
from tilecloud_chain.store.postgresql import (
_STATUS_CANCELLED,
diff --git a/tilecloud_chain/tests/test_serve.py b/tilecloud_chain/tests/test_serve.py
index 4510bf5e3..f6a86201a 100644
--- a/tilecloud_chain/tests/test_serve.py
+++ b/tilecloud_chain/tests/test_serve.py
@@ -6,7 +6,7 @@
from pyramid.testing import DummyRequest
from testfixtures import LogCapture
-from tilecloud_chain import controller, generate, server
+from tilecloud_chain import generate, server
from tilecloud_chain.server import PyramidView, app_factory
from tilecloud_chain.tests import CompareCase
diff --git a/tilecloud_chain/timedtilestore.py b/tilecloud_chain/timedtilestore.py
index b61261071..e2acb90d5 100644
--- a/tilecloud_chain/timedtilestore.py
+++ b/tilecloud_chain/timedtilestore.py
@@ -1,16 +1,13 @@
-"""
-A wrapper around a TileStore that adds timer metrics.
-"""
+"""A wrapper around a TileStore that adds timer metrics."""
import time
from collections.abc import Iterable, Iterator
-from typing import Any, Optional, TypeVar, cast
+from typing import Any, TypeVar, cast
from prometheus_client import Summary
-
from tilecloud import BoundingPyramid, Tile, TileStore
-_OptionalTileOrNot = TypeVar("_OptionalTileOrNot", Optional[Tile], Tile)
+_OptionalTileOrNot = TypeVar("_OptionalTileOrNot", Tile | None, Tile)
_TILESTORE_OPERATION_SUMMARY = Summary(
"tilecloud_chain_tilestore", "Number of tilestore contains", ["layer", "host", "store", "operation"]
@@ -51,7 +48,6 @@ def _time_iteration(
def __contains__(self, tile: Tile) -> bool:
"""See in superclass."""
-
with _TILESTORE_OPERATION_SUMMARY.labels(
tile.metadata.get("layer", "none"),
tile.metadata.get("host", "none"),
@@ -62,7 +58,6 @@ def __contains__(self, tile: Tile) -> bool:
def __len__(self) -> int:
"""See in superclass."""
-
with _LEN_SUMMARY.labels(
self._store_name,
).time():
@@ -70,12 +65,10 @@ def __len__(self) -> int:
def delete(self, tiles: Iterable[Tile]) -> Iterator[Tile]:
"""See in superclass."""
-
return self._time_iteration(self._tile_store.delete(tiles), "delete")
def delete_one(self, tile: Tile) -> Tile:
"""See in superclass."""
-
with _TILESTORE_OPERATION_SUMMARY.labels(
tile.metadata.get("layer", "none"),
tile.metadata.get("host", "none"),
@@ -86,22 +79,18 @@ def delete_one(self, tile: Tile) -> Tile:
def list(self) -> Iterable[Tile]:
"""See in superclass."""
-
return cast(Iterable[Tile], self._time_iteration(self._tile_store.list(), "list"))
- def get(self, tiles: Iterable[Optional[Tile]]) -> Iterator[Optional[Tile]]:
+ def get(self, tiles: Iterable[Tile | None]) -> Iterator[Tile | None]:
"""See in superclass."""
-
return self._time_iteration(self._tile_store.get(tiles), "get")
- def get_all(self) -> Iterator[Optional[Tile]]:
+ def get_all(self) -> Iterator[Tile | None]:
"""See in superclass."""
-
return self._time_iteration(self._tile_store.get_all(), "get_all")
- def get_one(self, tile: Tile) -> Optional[Tile]:
+ def get_one(self, tile: Tile) -> Tile | None:
"""See in superclass."""
-
with _TILESTORE_OPERATION_SUMMARY.labels(
tile.metadata.get("layer", "none"), tile.metadata.get("host", "none"), self._store_name, "get_one"
).time():
@@ -109,12 +98,10 @@ def get_one(self, tile: Tile) -> Optional[Tile]:
def put(self, tiles: Iterable[Tile]) -> Iterator[Tile]:
"""See in superclass."""
-
return cast(Iterator[Tile], self._time_iteration(self._tile_store.put(tiles), "put"))
def put_one(self, tile: Tile) -> Tile:
"""See in superclass."""
-
with _TILESTORE_OPERATION_SUMMARY.labels(
tile.metadata.get("layer", "none"), tile.metadata.get("host", "none"), self._store_name, "put_one"
).time():
@@ -122,20 +109,16 @@ def put_one(self, tile: Tile) -> Tile:
def __getattr__(self, item: str) -> Any:
"""See in superclass."""
-
return getattr(self._tile_store, item)
def get_bounding_pyramid(self) -> BoundingPyramid:
"""See in superclass."""
-
return self._tile_store.get_bounding_pyramid()
- def get_cheap_bounding_pyramid(self) -> Optional[BoundingPyramid]:
+ def get_cheap_bounding_pyramid(self) -> BoundingPyramid | None:
"""See in superclass."""
-
return self._tile_store.get_cheap_bounding_pyramid()
def __str__(self) -> str:
"""Get string representation."""
-
return f"tilecloud_chain.timedtilestore.TimedTileStoreWrapper: {self._tile_store}"
diff --git a/tilecloud_chain/views/admin.py b/tilecloud_chain/views/admin.py
index 9d863f4df..3fbfbc965 100644
--- a/tilecloud_chain/views/admin.py
+++ b/tilecloud_chain/views/admin.py
@@ -1,6 +1,4 @@
-"""
-The admin views.
-"""
+"""The admin views."""
# Copyright (c) 2018-2024 by Camptocamp
# All rights reserved.
@@ -28,7 +26,6 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
import io
import json
import logging
@@ -37,7 +34,8 @@
import re
import shlex
import subprocess # nosec
-from typing import IO, Any, Callable
+from collections.abc import Callable
+from typing import IO, Any
from urllib.parse import urljoin
import pyramid.httpexceptions
@@ -87,7 +85,6 @@ def _check_access(self, rase_on_no_access: bool = True) -> tuple[bool, tilecloud
@view_config(route_name="admin_slash", renderer="tilecloud_chain:templates/admin_index.html") # type: ignore
def index(self) -> dict[str, Any]:
"""Get the admin index page."""
-
assert self.gene
has_access, config = self._check_access(False)
server_config = config.config.get("server", {})
@@ -148,7 +145,7 @@ def run(self) -> pyramid.response.Response:
.config.get("server", {})
.get("allowed_arguments", configuration.ALLOWED_ARGUMENTS_DEFAULT)
)
- for arg in arguments.keys():
+ for arg in arguments:
if arg.startswith("-") and arg not in allowed_arguments:
self.request.response.status_code = 400
return {
@@ -187,7 +184,7 @@ def run(self) -> pyramid.response.Response:
proc.join()
return return_dict
- completed_process = subprocess.run( # nosec # pylint: disable=subprocess-run-check
+ completed_process = subprocess.run( # nosec # pylint: disable=subprocess-run-check # noqa: S603
final_command,
capture_output=True,
env=env,
@@ -220,7 +217,6 @@ def run(self) -> pyramid.response.Response:
@view_config(route_name="admin_create_job", renderer="fast_json") # type: ignore[misc]
def create_job(self) -> dict[str, Any]:
"""Create a job."""
-
if "TEST_USER" not in os.environ:
auth_view(self.request)
self._check_access()
@@ -252,7 +248,6 @@ def create_job(self) -> dict[str, Any]:
@view_config(route_name="admin_cancel_job", renderer="fast_json") # type: ignore[misc]
def cancel_job(self) -> dict[str, Any]:
"""Cancel a job."""
-
if "TEST_USER" not in os.environ:
auth_view(self.request)
self._check_access()
@@ -346,7 +341,7 @@ def _parse_stdout(stdout: str) -> list[str]:
                     full_message = json_message["full_message"].replace("\n", "<br>")
                     msg += f"<br>{full_message}"
stdout_parsed.append(msg)
- except: # pylint: disable=bare-except
+ except: # pylint: disable=bare-except # noqa: E722
stdout_parsed.append(line)
return stdout_parsed
@@ -359,9 +354,8 @@ def _format_output(string: str, max_length: int = 1000) -> str:
if line.startswith("{"):
try:
parsed = json.loads(line)
- if "source_facility" in parsed:
- if not parsed.startswith("tilecloud"):
- continue
+ if "source_facility" in parsed and not parsed.startswith("tilecloud"):
+ continue
if result:
result += "\n"