From 400cc49f74f4b7d23d58c6a39f3796364ebe8db6 Mon Sep 17 00:00:00 2001 From: StepSecurity Bot Date: Thu, 18 Jan 2024 16:56:18 +0000 Subject: [PATCH 01/37] [StepSecurity] Apply security best practices Signed-off-by: StepSecurity Bot --- .github/dependabot.yml | 6 ++++++ .github/workflows/actions-versions-updater.yml | 4 ++-- .github/workflows/add-to-project.yml | 2 +- .github/workflows/bump-version.yml | 4 ++-- .github/workflows/cache-cleaner.yml | 2 +- .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/dependency-review.yml | 2 +- .github/workflows/first_pull_request.yml | 2 +- .github/workflows/label.yml | 2 +- .github/workflows/label_on_approval.yml | 8 ++++---- .github/workflows/main.yml | 16 ++++++++-------- .github/workflows/publish-mastodon.yml | 6 +++--- .github/workflows/publish-pypi.yml | 6 +++--- .github/workflows/tag-testpypi.yml | 6 +++--- .github/workflows/testdata_version.yml | 10 +++++----- .github/workflows/upstream.yml | 6 +++--- .pre-commit-config.yaml | 4 ++++ 17 files changed, 49 insertions(+), 39 deletions(-) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..253bcb76b --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: github-actions + directory: / + schedule: + interval: daily diff --git a/.github/workflows/actions-versions-updater.yml b/.github/workflows/actions-versions-updater.yml index 8dd02f117..b71df10c2 100644 --- a/.github/workflows/actions-versions-updater.yml +++ b/.github/workflows/actions-versions-updater.yml @@ -27,13 +27,13 @@ jobs: github.com:443 - name: Checkout - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: token: ${{ secrets.ACTIONS_VERSION_UPDATER_TOKEN }} persist-credentials: true - name: Run GitHub Actions Version Updater - uses: saadmk11/github-actions-version-updater@v0.8.1 + uses: saadmk11/github-actions-version-updater@64be81ba69383f81f2be476703ea6570c4c8686e # v0.8.1 with: token: ${{ secrets.ACTIONS_VERSION_UPDATER_TOKEN }} committer_email: 'bumpversion[bot]@ouranos.ca' diff --git a/.github/workflows/add-to-project.yml b/.github/workflows/add-to-project.yml index f0cfbc6f0..706de14a4 100644 --- a/.github/workflows/add-to-project.yml +++ b/.github/workflows/add-to-project.yml @@ -24,7 +24,7 @@ jobs: allowed-endpoints: > api.github.com:443 - - uses: actions/add-to-project@v0.5.0 + - uses: actions/add-to-project@31b3f3ccdc584546fc445612dec3f38ff5edb41c # v0.5.0 with: project-url: https://github.com/orgs/Ouranosinc/projects/6 github-token: ${{ secrets.ADD_TO_PROJECT_TOKEN }} diff --git a/.github/workflows/bump-version.yml b/.github/workflows/bump-version.yml index 679216e7d..fce947d77 100644 --- a/.github/workflows/bump-version.yml +++ b/.github/workflows/bump-version.yml @@ -43,10 +43,10 @@ jobs: files.pythonhosted.org:443 github.com:443 pypi.org:443 - - uses: actions/checkout@v4.1.1 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: persist-credentials: false - - uses: actions/setup-python@v5.0.0 + - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 with: python-version: "3.x" - name: Config Commit Bot diff --git a/.github/workflows/cache-cleaner.yml b/.github/workflows/cache-cleaner.yml index 96d1993fb..9dbe50be2 100644 --- a/.github/workflows/cache-cleaner.yml +++ b/.github/workflows/cache-cleaner.yml @@ -23,7 +23,7 @@ jobs: objects.githubusercontent.com:443 - name: 
Check out code - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Cleanup run: | diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 7de9ab54a..b9fe1ac1a 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -49,7 +49,7 @@ jobs: pypi.org:443 uploads.github.com:443 - name: Checkout repository - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL uses: github/codeql-action/init@1245696032ecf7d39f87d54daa406e22ddf769a8 diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index ba457fc32..c63f07bd4 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -25,7 +25,7 @@ jobs: github.com:443 - name: 'Checkout Repository' - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: 'Dependency Review' uses: actions/dependency-review-action@c74b580d73376b7750d3d2a50bfb8adc2c937507 diff --git a/.github/workflows/first_pull_request.yml b/.github/workflows/first_pull_request.yml index 73a474813..610a78baa 100644 --- a/.github/workflows/first_pull_request.yml +++ b/.github/workflows/first_pull_request.yml @@ -24,7 +24,7 @@ jobs: allowed-endpoints: > api.github.com:443 - - uses: actions/github-script@v7.0.1 + - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 with: script: | // Get a list of all issues created by the PR opener diff --git a/.github/workflows/label.yml b/.github/workflows/label.yml index bdec57e9d..964652871 100644 --- a/.github/workflows/label.yml +++ b/.github/workflows/label.yml @@ -30,6 +30,6 @@ jobs: egress-policy: block allowed-endpoints: > api.github.com:443 - - uses: actions/labeler@v5.0.0 + - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0 with: repo-token: "${{ secrets.GITHUB_TOKEN }}" diff --git a/.github/workflows/label_on_approval.yml b/.github/workflows/label_on_approval.yml index 89e5da8e6..af9c8c842 100644 --- a/.github/workflows/label_on_approval.yml +++ b/.github/workflows/label_on_approval.yml @@ -34,7 +34,7 @@ jobs: api.github.com:443 - name: Label Approved - uses: actions/github-script@v7.0.1 + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 with: script: | github.rest.issues.addLabels({ @@ -60,7 +60,7 @@ jobs: with: egress-policy: audit - name: Find comment - uses: peter-evans/find-comment@v2.4.0 + uses: peter-evans/find-comment@a54c31d7fa095754bfef525c0c8e5e5674c4b4b1 # v2.4.0 id: fc with: issue-number: ${{ github.event.pull_request.number }} @@ -71,7 +71,7 @@ jobs: (steps.fc.outputs.comment-id == '') && (!contains(github.event.pull_request.labels.*.name, 'approved')) && (github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name) - uses: peter-evans/create-or-update-comment@v3.1.0 + uses: peter-evans/create-or-update-comment@23ff15729ef2fc348714a3bb66d2f655ca9066f2 # v3.1.0 with: comment-id: ${{ steps.fc.outputs.comment-id }} issue-number: ${{ github.event.pull_request.number }} @@ -82,7 +82,7 @@ jobs: - name: Update comment if: | contains(github.event.pull_request.labels.*.name, 'approved') - uses: peter-evans/create-or-update-comment@v3.1.0 + uses: peter-evans/create-or-update-comment@23ff15729ef2fc348714a3bb66d2f655ca9066f2 # v3.1.0 
with: comment-id: ${{ steps.fc.outputs.comment-id }} issue-number: ${{ github.event.pull_request.number }} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 406014fb5..e711616d9 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -52,9 +52,9 @@ jobs: files.pythonhosted.org:443 github.com:443 pypi.org:443 - - uses: actions/checkout@v4.1.1 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Set up Python${{ matrix.python-version }} - uses: actions/setup-python@v5.0.0 + uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 with: python-version: ${{ matrix.python-version }} - name: Install pylint and tox @@ -88,9 +88,9 @@ jobs: github.com:443 pypi.org:443 raw.githubusercontent.com:443 - - uses: actions/checkout@v4.1.1 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Set up Python${{ matrix.python-version }} - uses: actions/setup-python@v5.0.0 + uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 with: python-version: ${{ matrix.python-version }} - name: Install tox @@ -148,14 +148,14 @@ jobs: ppa.launchpadcontent.net:443 pypi.org:443 raw.githubusercontent.com:443 - - uses: actions/checkout@v4.1.1 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Install Eigen3 if: contains(matrix.tox-env, 'sbck') run: | sudo apt-get update sudo apt-get install libeigen3-dev - name: Set up Python${{ matrix.python-version }} - uses: actions/setup-python@v5.0.0 + uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 with: python-version: ${{ matrix.python-version }} - name: Install tox @@ -203,9 +203,9 @@ jobs: pypi.org:443 raw.githubusercontent.com:443 repo.anaconda.com:443 - - uses: actions/checkout@v4.1.1 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Setup Conda (Micromamba) with Python${{ matrix.python-version }} - uses: mamba-org/setup-micromamba@v1.7.3 + uses: mamba-org/setup-micromamba@e820223f89c8720d6c740ca154a7adf32fcd278a # v1.7.3 with: cache-downloads: true cache-environment: true diff --git a/.github/workflows/publish-mastodon.yml b/.github/workflows/publish-mastodon.yml index c1e0ddf84..1da0f4933 100644 --- a/.github/workflows/publish-mastodon.yml +++ b/.github/workflows/publish-mastodon.yml @@ -30,7 +30,7 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Current Version if: ${{ !github.event.inputs.version-tag }} @@ -63,7 +63,7 @@ jobs: - name: Prepare Message id: render_template - uses: chuhlomin/render-template@v1.9 + uses: chuhlomin/render-template@a473db625a96c98e519d188812dc22bcaf54ffba # v1.9 with: template: .github/publish-mastodon.template.md vars: | @@ -75,7 +75,7 @@ jobs: - name: Send toot to Mastodon if: ${{ github.event.inputs.dry-run != 'true' }} || ${{ github.event_name == 'release' }} - uses: cbrgm/mastodon-github-action@v1.0.3 + uses: cbrgm/mastodon-github-action@d98ab3376f941df14d37d5737961de431c0838c6 # v1.0.3 with: message: "${{ steps.render_template.outputs.result }}${{ env.contributors }}" visibility: "public" diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index 2e6d4b891..16f0e6bc2 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -21,9 +21,9 @@ jobs: uses: step-security/harden-runner@eb238b55efaa70779f274895e782ed17c84f2895 # 
v2.6.1 with: egress-policy: audit - - uses: actions/checkout@v4.1.1 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Set up Python3 - uses: actions/setup-python@v5.0.0 + uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 with: python-version: "3.x" - name: Install packaging libraries @@ -33,4 +33,4 @@ jobs: run: | python -m flit build - name: Publish distribution 📦 to PyPI - uses: pypa/gh-action-pypi-publish@v1.8.11 + uses: pypa/gh-action-pypi-publish@2f6f737ca5f74c637829c0f5c3acd0e29ea5e8bf # v1.8.11 diff --git a/.github/workflows/tag-testpypi.yml b/.github/workflows/tag-testpypi.yml index 910648ed8..249941134 100644 --- a/.github/workflows/tag-testpypi.yml +++ b/.github/workflows/tag-testpypi.yml @@ -21,9 +21,9 @@ jobs: uses: step-security/harden-runner@eb238b55efaa70779f274895e782ed17c84f2895 # v2.6.1 with: egress-policy: audit - - uses: actions/checkout@v4.1.1 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Set up Python3 - uses: actions/setup-python@v5.0.0 + uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 with: python-version: "3.x" - name: Install packaging libraries @@ -33,7 +33,7 @@ jobs: run: | python -m flit build - name: Publish distribution 📦 to Test PyPI - uses: pypa/gh-action-pypi-publish@v1.8.11 + uses: pypa/gh-action-pypi-publish@2f6f737ca5f74c637829c0f5c3acd0e29ea5e8bf # v1.8.11 with: repository-url: https://test.pypi.org/legacy/ skip-existing: true diff --git a/.github/workflows/testdata_version.yml b/.github/workflows/testdata_version.yml index cff8527e0..875da30d0 100644 --- a/.github/workflows/testdata_version.yml +++ b/.github/workflows/testdata_version.yml @@ -28,7 +28,7 @@ jobs: allowed-endpoints: > api.github.com:443 github.com:443 - - uses: actions/checkout@v4.1.1 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Find xclim-testdata Tag and CI Testing Branch run: | XCLIM_TESTDATA_TAG="$( \ @@ -44,7 +44,7 @@ jobs: echo "Latest xclim-testdata tag: ${{ env.XCLIM_TESTDATA_TAG }}" echo "Tag for xclim-testdata in CI: ${{ env.XCLIM_TESTDATA_BRANCH }}" - name: Find Comment - uses: peter-evans/find-comment@v2.4.0 + uses: peter-evans/find-comment@a54c31d7fa095754bfef525c0c8e5e5674c4b4b1 # v2.4.0 id: fc with: issue-number: ${{ github.event.pull_request.number }} @@ -52,13 +52,13 @@ jobs: body-includes: It appears that this Pull Request modifies the `main.yml` workflow. 
- name: Compare Versions if: ${{( env.XCLIM_TESTDATA_TAG != env.XCLIM_TESTDATA_BRANCH )}} - uses: actions/github-script@v7.0.1 + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 with: script: | core.setFailed('Configured `xclim-testdata` tag is not `latest`.') - name: Update Failure Comment if: ${{ failure() }} - uses: peter-evans/create-or-update-comment@v3.1.0 + uses: peter-evans/create-or-update-comment@23ff15729ef2fc348714a3bb66d2f655ca9066f2 # v3.1.0 with: comment-id: ${{ steps.fc.outputs.comment-id }} issue-number: ${{ github.event.pull_request.number }} @@ -74,7 +74,7 @@ jobs: edit-mode: replace - name: Update Success Comment if: ${{ success() }} - uses: peter-evans/create-or-update-comment@v3.1.0 + uses: peter-evans/create-or-update-comment@23ff15729ef2fc348714a3bb66d2f655ca9066f2 # v3.1.0 with: comment-id: ${{ steps.fc.outputs.comment-id }} issue-number: ${{ github.event.pull_request.number }} diff --git a/.github/workflows/upstream.yml b/.github/workflows/upstream.yml index 2c6a39b10..030914543 100644 --- a/.github/workflows/upstream.yml +++ b/.github/workflows/upstream.yml @@ -54,11 +54,11 @@ jobs: pypi.org:443 raw.githubusercontent.com:443 repo.anaconda.com:443 - - uses: actions/checkout@v4.1.1 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: fetch-depth: 0 # Fetch all history for all branches and tags. - name: Setup Conda (Micromamba) with Python${{ matrix.python-version }} - uses: mamba-org/setup-micromamba@v1.7.3 + uses: mamba-org/setup-micromamba@e820223f89c8720d6c740ca154a7adf32fcd278a # v1.7.3 with: cache-downloads: true cache-environment: true @@ -96,6 +96,6 @@ jobs: && steps.status.outcome == 'failure' && github.event_name == 'schedule' && github.repository_owner == 'Ouranosinc' - uses: xarray-contrib/issue-from-pytest-log@v1.2.8 + uses: xarray-contrib/issue-from-pytest-log@138db94bfe4b12ac11fc1aff307ee0835feab403 # v1.2.8 with: log-path: output-${{ matrix.python-version }}-log.jsonl diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2d331d8da..c90e5fe00 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -94,3 +94,7 @@ ci: autoupdate_schedule: monthly skip: [ nbstripout ] submodules: false + - repo: https://github.com/pylint-dev/pylint + rev: v2.17.2 + hooks: + - id: pylint From 3ba08da3a81d3169b88c959b98a94e4e06fecb00 Mon Sep 17 00:00:00 2001 From: Trevor James Smith <10819524+Zeitsperre@users.noreply.github.com> Date: Thu, 18 Jan 2024 11:57:21 -0500 Subject: [PATCH 02/37] Update .pre-commit-config.yaml --- .pre-commit-config.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c90e5fe00..e219da850 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,6 +26,10 @@ repos: rev: v0.23.1 hooks: - id: toml-sort-fix + - repo: https://github.com/pylint-dev/pylint + rev: v2.17.2 + hooks: + - id: pylint - repo: https://github.com/adrienverge/yamllint.git rev: v1.33.0 hooks: @@ -94,7 +98,3 @@ ci: autoupdate_schedule: monthly skip: [ nbstripout ] submodules: false - - repo: https://github.com/pylint-dev/pylint - rev: v2.17.2 - hooks: - - id: pylint From 1b1ada7b89a767a994e527c5faf98bbe36421682 Mon Sep 17 00:00:00 2001 From: Trevor James Smith <10819524+Zeitsperre@users.noreply.github.com> Date: Thu, 18 Jan 2024 12:51:52 -0500 Subject: [PATCH 03/37] convert testdata_version to trigger on pull_request_target --- .github/workflows/testdata_version.yml | 3 ++- 1 file changed, 2 
insertions(+), 1 deletion(-) diff --git a/.github/workflows/testdata_version.yml b/.github/workflows/testdata_version.yml index 875da30d0..abc30abfc 100644 --- a/.github/workflows/testdata_version.yml +++ b/.github/workflows/testdata_version.yml @@ -1,7 +1,8 @@ name: Verify Testing Data on: - pull_request: + # Needed for write permissions of peter-evans/create-or-update-comment + pull_request_target: # It is very important to not perform code checkout/build/testing with pull_request_target types: - opened - reopened From 41f34fb17553a7242a32537b2738de179a747b51 Mon Sep 17 00:00:00 2001 From: Trevor James Smith <10819524+Zeitsperre@users.noreply.github.com> Date: Thu, 18 Jan 2024 14:53:41 -0500 Subject: [PATCH 04/37] configure pylint with toml support, address some small errors --- .pre-commit-config.yaml | 5 +- pylintrc => .pylintrc.toml | 799 ++++++++++++++++-------------------- xclim/cli.py | 6 +- xclim/core/calendar.py | 36 +- xclim/core/indicator.py | 6 +- xclim/core/options.py | 2 +- xclim/core/units.py | 4 +- xclim/core/utils.py | 7 +- xclim/ensembles/_filters.py | 2 +- xclim/sdba/processing.py | 8 +- xclim/sdba/properties.py | 2 +- xclim/testing/utils.py | 7 +- 12 files changed, 400 insertions(+), 484 deletions(-) rename pylintrc => .pylintrc.toml (56%) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e219da850..898b68aa9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,9 +27,10 @@ repos: hooks: - id: toml-sort-fix - repo: https://github.com/pylint-dev/pylint - rev: v2.17.2 + rev: v3.0.3 hooks: - id: pylint + args: [ '--rcfile=.pylintrc.toml' ] - repo: https://github.com/adrienverge/yamllint.git rev: v1.33.0 hooks: @@ -44,7 +45,7 @@ repos: hooks: - id: isort - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.11 + rev: v0.1.13 hooks: - id: ruff - repo: https://github.com/pycqa/flake8 diff --git a/pylintrc b/.pylintrc.toml similarity index 56% rename from pylintrc rename to .pylintrc.toml index cdac8c797..a51464284 100644 --- a/pylintrc +++ b/.pylintrc.toml @@ -1,642 +1,553 @@ -[MAIN] +[tool.pylint.main] +# Analyse import fallback blocks. This can be used to support both Python 2 and 3 +# compatible code, which means that the block might have code that exists only in +# one or another interpreter, leading to false positives when analysed. +# analyse-fallback-blocks = -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no +# Clear in-memory caches upon conclusion of linting. Useful if running pylint in +# a server-like mode. +# clear-cache-post-run = -# Load and enable all available extensions. Use --list-extensions to see a list -# all available extensions. -#enable-all-extensions= - -# In error mode, messages with a category besides ERROR or FATAL are -# suppressed, and no reports are done by default. Error mode is compatible with -# disabling specific errors. -#errors-only= - -# Always return a 0 (non-error) status code, even if lint errors are found. -# This is primarily useful in continuous integration scripts. -#exit-zero= +# Always return a 0 (non-error) status code, even if lint errors are found. This +# is primarily useful in continuous integration scripts. +exit-zero = false # A comma-separated list of package or module names from where C extensions may # be loaded. 
Extensions are loading into the active Python interpreter and may # run arbitrary code. -extension-pkg-allow-list= +# extension-pkg-allow-list = # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. (This is an alternative name to extension-pkg-allow-list # for backward compatibility.) -extension-pkg-whitelist= +# extension-pkg-whitelist = # Return non-zero exit code if any of these messages/categories are detected, # even if score is above --fail-under value. Syntax same as enable. Messages # specified are enabled, while categories only check already-enabled messages. -fail-on= +# fail-on = # Specify a score threshold under which the program will exit with error. -fail-under=10 +fail-under = 10 # Interpret the stdin as a python script, whose filename needs to be passed as # the module_or_package argument. -#from-stdin= +# from-stdin = # Files or directories to be skipped. They should be base names, not paths. -ignore= +ignore = ["CVS"] # Add files or directories matching the regular expressions patterns to the # ignore-list. The regex matches against paths and can be in Posix or Windows -# format. Because '\' represents the directory delimiter on Windows systems, it +# format. Because '\\' represents the directory delimiter on Windows systems, it # can't be used as an escape character. -ignore-paths= - docs, - xclim/testing/tests, +# ignore-paths = -# Files or directories matching the regular expression patterns are skipped. -# The regex matches against base names, not paths. The default value ignores -# Emacs file locks -ignore-patterns=^\.# +# Files or directories matching the regular expression patterns are skipped. The +# regex matches against base names, not paths. The default value ignores Emacs +# file locks +ignore-patterns = ["^\\.#"] -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis). It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= +# List of module names for which member attributes should not be checked (useful +# for modules/projects where namespaces are manipulated during runtime and thus +# existing member attributes cannot be deduced by static analysis). It supports +# qualified module names, as well as Unix pattern matching. +# ignored-modules = # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). -#init-hook= +# init-hook = # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the # number of processors available to use, and will cap the count on Windows to # avoid hangs. -jobs=0 +jobs = 1 -# Control the amount of potential inferred values when inferring a single -# object. This can help the performance when dealing with large functions or -# complex, nested conditions. -limit-inference-results=100 +# Control the amount of potential inferred values when inferring a single object. +# This can help the performance when dealing with large functions or complex, +# nested conditions. +limit-inference-results = 100 # List of plugins (as comma separated values of python module names) to load, # usually to register additional checkers. -load-plugins= +# load-plugins = # Pickle collected data for later comparisons. 
-persistent=yes +persistent = true -# Minimum Python version to use for version dependent checks. Will default to -# the version used to run pylint. -py-version=3.8 +# Minimum Python version to use for version dependent checks. Will default to the +# version used to run pylint. +py-version = "3.8" # Discover python modules and packages in the file system subtree. -recursive=no +# recursive = + +# Add paths to the list of the source roots. Supports globbing patterns. The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +# source-roots = # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages. -suggestion-mode=yes +suggestion-mode = true # Allow loading of arbitrary C extensions. Extensions are imported into the # active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - -# In verbose mode, extra non-checker-related info will be displayed. -#verbose= - - -[REPORTS] - -# Python expression which should return a score less than or equal to 10. You -# have access to the variables 'fatal', 'error', 'warning', 'refactor', -# 'convention', and 'info' which contain the number of messages in each -# category, as well as 'statement' which is the total number of statements -# analyzed. This score is used by the global evaluation report (RP0004). -evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -#output-format= - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, -# UNDEFINED. -confidence=HIGH, - CONTROL_FLOW, - INFERENCE, - INFERENCE_FAILURE, - UNDEFINED - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then re-enable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". 
-disable=arguments-differ, - arguments-out-of-order, - bad-inline-option, - deprecated-pragma, - file-ignored, - invalid-name, - invalid-unary-operand-type, - line-too-long, - locally-disabled, - missing-function-docstring, - missing-module-docstring, - non-ascii-name, - pointless-string-statement, - protected-access, - raw-checker-failed, - suppressed-message, - too-few-public-methods, - too-many-arguments, - too-many-branches, - too-many-lines, - too-many-locals, - too-many-nested-blocks, - too-many-statements, - unspecified-encoding, - unused-argument, - use-symbolic-message-instead, - useless-suppression, - wrong-import-order, - - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[METHOD_ARGS] - -# List of qualified names (i.e., library.method) which require a timeout -# parameter e.g. 'requests.api.get,requests.api.post' -timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when caught. -overgeneral-exceptions=builtins.BaseException, - builtins.Exception - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit,argparse.parse_error - - -[DESIGN] - -# List of regular expressions of class ancestor names to ignore when counting -# public methods (see R0903) -exclude-too-few-public-methods= - -# List of qualified class names to ignore when counting class parents (see -# R0901) -ignored-parents= - -# Maximum number of arguments for function / method. -max-args=5 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Maximum number of boolean expressions in an if statement (see R0916). -max-bool-expr=5 - -# Maximum number of branch for function / method body. -max-branches=12 - -# Maximum number of locals for function / method body. -max-locals=15 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body. -max-returns=6 - -# Maximum number of statements in function / method body. -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - - -[IMPORTS] - -# List of modules that can be imported at any level, not just the top level -# one. -allow-any-import-level= - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=yes - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules= - -# Output a graph (.gv or any supported image format) of external dependencies -# to the given file (report RP0402 must not be disabled). -ext-import-graph= - -# Output a graph (.gv or any supported image format) of all (i.e. internal and -# external) dependencies to the given file (report RP0402 must not be -# disabled). 
-import-graph= - -# Output a graph (.gv or any supported image format) of internal dependencies -# to the given file (report RP0402 must not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - -# Couples of modules and preferred modules, separated by a comma. -preferred-modules= - - -[CLASSES] - -# Warn about protected attribute access inside special methods -check-protected-access-in-special-methods=no - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp, - __post_init__ - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=cls - - -[BASIC] +# unsafe-load-any-extension = +[tool.pylint.basic] # Naming style matching correct argument names. -argument-naming-style=snake_case +argument-naming-style = "snake_case" -# Regular expression matching correct argument names. Overrides argument- -# naming-style. If left empty, argument names will be checked with the set -# naming style. -#argument-rgx= +# Regular expression matching correct argument names. Overrides argument-naming- +# style. If left empty, argument names will be checked with the set naming style. +# argument-rgx = # Naming style matching correct attribute names. -attr-naming-style=snake_case +attr-naming-style = "snake_case" # Regular expression matching correct attribute names. Overrides attr-naming- # style. If left empty, attribute names will be checked with the set naming # style. -#attr-rgx= +# attr-rgx = # Bad variable names which should always be refused, separated by a comma. -bad-names=foo, - bar, - baz, - toto, - tutu, - tata +bad-names = ["foo", "bar", "baz", "toto", "tutu", "tata"] # Bad variable names regexes, separated by a comma. If names match any regex, # they will always be refused -bad-names-rgxs= +# bad-names-rgxs = # Naming style matching correct class attribute names. -class-attribute-naming-style=any +class-attribute-naming-style = "any" # Regular expression matching correct class attribute names. Overrides class- # attribute-naming-style. If left empty, class attribute names will be checked # with the set naming style. -#class-attribute-rgx= +# class-attribute-rgx = # Naming style matching correct class constant names. -class-const-naming-style=UPPER_CASE +class-const-naming-style = "UPPER_CASE" # Regular expression matching correct class constant names. Overrides class- # const-naming-style. If left empty, class constant names will be checked with # the set naming style. -#class-const-rgx= +# class-const-rgx = # Naming style matching correct class names. -class-naming-style=PascalCase +class-naming-style = "PascalCase" -# Regular expression matching correct class names. Overrides class-naming- -# style. If left empty, class names will be checked with the set naming style. -#class-rgx= +# Regular expression matching correct class names. Overrides class-naming-style. +# If left empty, class names will be checked with the set naming style. +# class-rgx = # Naming style matching correct constant names. 
-const-naming-style=UPPER_CASE +const-naming-style = "UPPER_CASE" # Regular expression matching correct constant names. Overrides const-naming- -# style. If left empty, constant names will be checked with the set naming -# style. -#const-rgx= +# style. If left empty, constant names will be checked with the set naming style. +# const-rgx = -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 +# Minimum line length for functions/classes that require docstrings, shorter ones +# are exempt. +docstring-min-length = -1 # Naming style matching correct function names. -function-naming-style=snake_case +function-naming-style = "snake_case" -# Regular expression matching correct function names. Overrides function- -# naming-style. If left empty, function names will be checked with the set -# naming style. -#function-rgx= +# Regular expression matching correct function names. Overrides function-naming- +# style. If left empty, function names will be checked with the set naming style. +# function-rgx = # Good variable names which should always be accepted, separated by a comma. -good-names=i, - j, - k, - _, - da, - ds, - +good-names = ["i", "j", "k", "ex", "Run", "_"] # Good variable names regexes, separated by a comma. If names match any regex, # they will always be accepted -good-names-rgxs= +# good-names-rgxs = # Include a hint for the correct naming format with invalid-name. -include-naming-hint=no +# include-naming-hint = # Naming style matching correct inline iteration names. -inlinevar-naming-style=any +inlinevar-naming-style = "any" # Regular expression matching correct inline iteration names. Overrides # inlinevar-naming-style. If left empty, inline iteration names will be checked # with the set naming style. -#inlinevar-rgx= +# inlinevar-rgx = # Naming style matching correct method names. -method-naming-style=snake_case +method-naming-style = "snake_case" # Regular expression matching correct method names. Overrides method-naming- # style. If left empty, method names will be checked with the set naming style. -#method-rgx= +# method-rgx = # Naming style matching correct module names. -module-naming-style=snake_case +module-naming-style = "snake_case" # Regular expression matching correct module names. Overrides module-naming- # style. If left empty, module names will be checked with the set naming style. -#module-rgx= +# module-rgx = -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= +# Colon-delimited sets of names that determine each other's naming style when the +# name regexes allow several styles. +# name-group = -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ +# Regular expression which should only match function or class names that do not +# require a docstring. +no-docstring-rgx = "^_" # List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -# These decorators are taken in consideration only for invalid-name. -property-classes=abc.abstractproperty +# to this list to register other decorators that produce valid properties. These +# decorators are taken in consideration only for invalid-name. +property-classes = ["abc.abstractproperty"] + +# Regular expression matching correct type alias names. 
If left empty, type alias +# names will be checked with the set naming style. +# typealias-rgx = # Regular expression matching correct type variable names. If left empty, type # variable names will be checked with the set naming style. -#typevar-rgx= +# typevar-rgx = # Naming style matching correct variable names. -variable-naming-style=snake_case +variable-naming-style = "snake_case" -# Regular expression matching correct variable names. Overrides variable- -# naming-style. If left empty, variable names will be checked with the set -# naming style. -#variable-rgx= +# Regular expression matching correct variable names. Overrides variable-naming- +# style. If left empty, variable names will be checked with the set naming style. +# variable-rgx = +[tool.pylint.classes] +# Warn about protected attribute access inside special methods +# check-protected-access-in-special-methods = -[SIMILARITIES] - -# Comments are removed from the similarity computation -ignore-comments=yes +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods = ["__init__", "__new__", "setUp", "asyncSetUp", "__post_init__"] -# Docstrings are removed from the similarity computation -ignore-docstrings=yes +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make", "os._exit"] -# Imports are removed from the similarity computation -ignore-imports=yes +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg = ["cls"] -# Signatures are removed from the similarity computation -ignore-signatures=yes +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg = ["mcs"] -# Minimum lines number of a similarity. -min-similarity-lines=4 +[tool.pylint.design] +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +# exclude-too-few-public-methods = +# List of qualified class names to ignore when counting class parents (see R0901) +# ignored-parents = -[LOGGING] +# Maximum number of arguments for function / method. +max-args = 15 -# The type of string formatting that logging methods do. `old` means using % -# formatting, `new` is for `{}` formatting. -logging-format-style=old +# Maximum number of attributes for a class (see R0902). +max-attributes = 7 -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr = 5 +# Maximum number of branch for function / method body. +max-branches = 30 -[VARIABLES] +# Maximum number of locals for function / method body. +max-locals = 50 -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid defining new builtins when possible. -additional-builtins= +# Maximum number of parents for a class (see R0901). +max-parents = 7 -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes +# Maximum number of public methods for a class (see R0904). +max-public-methods = 20 -# List of names allowed to shadow builtins -allowed-redefined-builtins= +# Maximum number of return / yield for function / method body. +max-returns = 13 -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. 
-callbacks=cb_, - _cb +# Maximum number of statements in function / method body. +max-statements = 100 -# A regular expression matching the name of dummy variables (i.e. expected to -# not be used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ +# Minimum number of public methods for a class (see R0903). +min-public-methods = 2 -# Argument names that match this expression will be ignored. -ignored-argument-names=_.*|^ignored_|^unused_ +[tool.pylint.exceptions] +# Exceptions that will emit a warning when caught. +overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"] -# Tells whether we should check for unused import in __init__ files. -init-import=no +[tool.pylint.format] +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +# expected-line-ending-format = -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines = "^\\s*(# )??$" +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren = 4 -[SPELLING] +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string = " " -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions=4 +# Maximum number of characters on a single line. +max-line-length = 150 -# Spelling dictionary name. Available dictionaries: en (aspell), en_AU -# (aspell), en_CA (aspell), en_GB (aspell), en_US (aspell). -spelling-dict= +# Maximum number of lines in a module. +max-module-lines = 1000 -# List of comma separated words that should be considered directives if they -# appear at the beginning of a comment and should not be checked. -spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +# single-line-class-stmt = -# List of comma separated words that should not be checked. -spelling-ignore-words= +# Allow the body of an if to be on the same line as the test if there is no else. +# single-line-if-stmt = -# A path to a file that contains the private dictionary; one word per line. -spelling-private-dict-file= +[tool.pylint.imports] +# List of modules that can be imported at any level, not just the top level one. +# allow-any-import-level = -# Tells whether to store unknown words to the private dictionary (see the -# --spelling-private-dict-file option) instead of raising a message. -spelling-store-unknown-words=no +# Allow explicit reexports by alias from a package __init__. +# allow-reexport-from-package = +# Allow wildcard imports from modules that define __all__. +# allow-wildcard-with-all = -[FORMAT] +# Deprecated modules which should not be used, separated by a comma. +# deprecated-modules = -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= +# Output a graph (.gv or any supported image format) of external dependencies to +# the given file (report RP0402 must not be disabled). +# ext-import-graph = -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be disabled). 
+# import-graph = -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 +# Output a graph (.gv or any supported image format) of internal dependencies to +# the given file (report RP0402 must not be disabled). +# int-import-graph = -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' +# Force import order to recognize a module as part of the standard compatibility +# libraries. +# known-standard-library = -# Maximum number of characters on a single line. -max-line-length=100 +# Force import order to recognize a module as part of a third party library. +known-third-party = ["enchant"] -# Maximum number of lines in a module. -max-module-lines=1000 +# Couples of modules and preferred modules, separated by a comma. +# preferred-modules = -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no +[tool.pylint.logging] +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style = "old" -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules = ["logging"] + +[tool.pylint."messages control"] +# Only show warnings with the listed confidence levels. Leave empty to show all. +# Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED. +confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"] + +# Disable the message, report, category or checker with the given id(s). You can +# either give multiple identifiers separated by comma (,) or put this option +# multiple times (only on the command line, not in the configuration file where +# it should appear only once). You can also use "--disable=all" to disable +# everything first and then re-enable specific checks. For example, if you want +# to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable = [ + "arguments-differ", + "bad-inline-option", + "deprecated-pragma", + "file-ignored", + "invalid-name", + "invalid-unary-operand-type", + "locally-disabled", + "missing-module-docstring", + "protected-access", + "raw-checker-failed", + "redefined-outer-name", + "superfluous-parens", + "suppressed-message", + "unused-argument", + "use-symbolic-message-instead", + "useless-suppression" +] +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where it +# should appear only once). See also the "--disable" option for examples. +enable = ["c-extension-no-member"] -[MISCELLANEOUS] +[tool.pylint.method_args] +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 'requests.api.get,requests.api.post' +timeout-methods = ["requests.api.delete", "requests.api.get", "requests.api.head", "requests.api.options", "requests.api.patch", "requests.api.post", "requests.api.put", "requests.api.request"] +[tool.pylint.miscellaneous] # List of note tags to take in consideration, separated by a comma. 
-notes=FIXME, - XXX, - TODO +notes = ["FIXME", "XXX", "TODO"] # Regular expression of note tags to take in consideration. -notes-rgx= +# notes-rgx = + +[tool.pylint.refactoring] +# Maximum number of nested blocks for function / method body +max-nested-blocks = 10 + +# Complete name of functions that never returns. When checking for inconsistent- +# return-statements if a never returning function is called then it will be +# considered as an explicit return statement and no message will be printed. +never-returning-functions = ["sys.exit", "argparse.parse_error"] + +[tool.pylint.reports] +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each category, +# as well as 'statement' which is the total number of statements analyzed. This +# score is used by the global evaluation report (RP0004). +evaluation = "max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))" + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +# msg-template = + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +# output-format = + +# Tells whether to display a full report or only the messages. +# reports = + +# Activate the evaluation score. +score = true + +[tool.pylint.similarities] +# Comments are removed from the similarity computation +ignore-comments = true + +# Docstrings are removed from the similarity computation +ignore-docstrings = true + +# Imports are removed from the similarity computation +ignore-imports = true + +# Signatures are removed from the similarity computation +ignore-signatures = true + +# Minimum lines number of a similarity. +min-similarity-lines = 10 + +[tool.pylint.spelling] +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions = 4 + +# Spelling dictionary name. No available dictionaries : You need to install both +# the python package and the system dependency for enchant to work.. +# spelling-dict = + +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives = "fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:" + +# List of comma separated words that should not be checked. +# spelling-ignore-words = +# A path to a file that contains the private dictionary; one word per line. +# spelling-private-dict-file = -[TYPECHECK] +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +# spelling-store-unknown-words = +[tool.pylint.typecheck] # List of decorators that produce context managers, such as # contextlib.contextmanager. Add to this list to register other decorators that # produce valid context managers. -contextmanager-decorators=contextlib.contextmanager +contextmanager-decorators = ["contextlib.contextmanager"] # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E1101 when accessed. Python regular # expressions are accepted. 
-generated-members= +# generated-members = -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes +# Tells whether missing members accessed in mixin class should be ignored. A +# class is considered mixin if its name matches the mixin-class-rgx option. +# Tells whether to warn about missing members when the owner of the attribute is +# inferred to be None. +ignore-none = true # This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes +# checks whenever an opaque object is returned when inferring. The inference can +# return multiple potential results while evaluating a Python object, but some +# branches might not be evaluated, which results in partial inference. In that +# case, it might be useful to still emit no-member and other checks for the rest +# of the inferred objects. +ignore-on-opaque-inference = true # List of symbolic message names to ignore for Mixin members. -ignored-checks-for-mixins=no-member, - not-async-context-manager, - not-context-manager, - attribute-defined-outside-init +ignored-checks-for-mixins = ["no-member", "not-async-context-manager", "not-context-manager", "attribute-defined-outside-init"] # List of class names for which member attributes should not be checked (useful # for classes with dynamically set attributes). This supports the use of # qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace +ignored-classes = ["optparse.Values", "thread._local", "_thread._local", "argparse.Namespace"] -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes +# Show a hint with possible names when a member name was not found. The aspect of +# finding the hint is based on edit distance. +missing-member-hint = true # The minimum edit distance a name should have in order to be considered a # similar match for a missing member name. -missing-member-hint-distance=1 +missing-member-hint-distance = 1 # The total number of similar names that should be taken in consideration when # showing a hint for a missing member. -missing-member-max-choices=1 +missing-member-max-choices = 1 # Regex pattern to define which classes are considered mixins. -mixin-class-rgx=.*[Mm]ixin +mixin-class-rgx = ".*[Mm]ixin" # List of decorators that change the signature of a decorated function. -signature-mutators= +# signature-mutators = + +[tool.pylint.variables] +# List of additional names supposed to be defined in builtins. Remember that you +# should avoid defining new builtins when possible. +# additional-builtins = + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables = true +# List of names allowed to shadow builtins +# allowed-redefined-builtins = + +# List of strings which can identify a callback function by name. A callback name +# must start or end with one of those strings. +callbacks = ["cb_", "_cb"] -[STRING] +# A regular expression matching the name of dummy variables (i.e. expected to not +# be used). 
+dummy-variables-rgx = "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_" -# This flag controls whether inconsistent-quotes generates a warning when the -# character used as a quote delimiter is used inconsistently within a module. -check-quote-consistency=no +# Argument names that match this expression will be ignored. +ignored-argument-names = "_.*|^ignored_|^unused_" -# This flag controls whether the implicit-str-concat should generate a warning -# on implicit string concatenation in sequences defined over several lines. -check-str-concat-over-line-jumps=no +# Tells whether we should check for unused import in __init__ files. +# init-import = + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules = ["six.moves", "past.builtins", "future.builtins", "builtins", "io"] diff --git a/xclim/cli.py b/xclim/cli.py index 42dcd1f63..2a09f01a9 100644 --- a/xclim/cli.py +++ b/xclim/cli.py @@ -283,7 +283,7 @@ def dataflags(ctx, variables, raise_flags, append, dims, freq): @click.option( "-i", "--info", is_flag=True, help="Prints more details for each indicator." ) -def indices(info): +def indices(information): """List all indicators.""" formatter = click.HelpFormatter() formatter.write_heading("Listing all available indicators for computation.") @@ -297,7 +297,7 @@ def indices(info): right += ( " (" + ", ".join([var["var_name"] for var in indcls.cf_attrs]) + ")" ) - if info: + if information: right += "\n" + indcls.abstract rows.append((left, right)) rows.sort(key=lambda row: row[0]) @@ -466,7 +466,7 @@ def cli(ctx, **kwargs): @cli.result_callback() # noqa @click.pass_context -def write_file(ctx, *args, **kwargs): # noqa +def write_file(ctx, *args, **kwargs): # noqa: W0613 """Write the output dataset to file.""" if ctx.obj["output"] is not None: if ctx.obj["verbose"]: diff --git a/xclim/core/calendar.py b/xclim/core/calendar.py index 1e72fcc45..e63d4ac21 100644 --- a/xclim/core/calendar.py +++ b/xclim/core/calendar.py @@ -1598,18 +1598,18 @@ def get_doys(start, end, inclusive): def _month_is_first_period_month(time, freq): """Returns True if the given time is from the first month of freq.""" if isinstance(time, cftime.datetime): - frqM = xr.coding.cftime_offsets.to_offset("MS") + frq_monthly = xr.coding.cftime_offsets.to_offset("MS") frq = xr.coding.cftime_offsets.to_offset(freq) - if frqM.onOffset(time): + if frq_monthly.onOffset(time): return frq.onOffset(time) - return frq.onOffset(frqM.rollback(time)) + return frq.onOffset(frq_monthly.rollback(time)) # Pandas time = pd.Timestamp(time) - frqM = pd.tseries.frequencies.to_offset("MS") + frq_monthly = pd.tseries.frequencies.to_offset("MS") frq = pd.tseries.frequencies.to_offset(freq) - if frqM.is_on_offset(time): + if frq_monthly.is_on_offset(time): return frq.is_on_offset(time) - return frq.is_on_offset(frqM.rollback(time)) + return frq.is_on_offset(frq_monthly.rollback(time)) def stack_periods( @@ -1635,8 +1635,8 @@ def stack_periods( ---------- da : xr.Dataset or xr.DataArray An xarray object with a `time` dimension. - Must have an uniform timestep length. - Output might be strange if this does not use an uniform calendar (noleap, 360_day, all_leap). + Must have a uniform timestep length. + Output might be strange if this does not use a uniform calendar (noleap, 360_day, all_leap). window : int The length of the moving window as a multiple of ``freq``. 
stride : int, optional @@ -1652,7 +1652,7 @@ def stack_periods( freq : str Units of ``window``, ``stride`` and ``min_length``, as a frequency string. Must be larger or equal to the data's sampling frequency. - Note that this function offers an easier interface for non uniform period (like years or months) + Note that this function offers an easier interface for non-uniform period (like years or months) but is much slower than a rolling-construct method. dim : str The new dimension name. @@ -1662,7 +1662,8 @@ def stack_periods( align_days : bool When True (default), an error is raised if the output would have unaligned days across periods. If `freq = 'YS'`, day-of-year alignment is checked and if `freq` is "MS" or "QS", we check day-in-month. - Only uniform-calendar will pass the test for `freq='YS'`. For other frequencies, only the `360_day` calendar will work. + Only uniform-calendar will pass the test for `freq='YS'`. + For other frequencies, only the `360_day` calendar will work. This check is ignored if the sampling rate of the data is coarser than "D". pad_value: Any When some periods are shorter than others, this value is used to pad them at the end. @@ -1677,7 +1678,7 @@ def stack_periods( That coordinate is the same for all periods, depending on the choice of ``window`` and ``freq``, it might make sense. But for unequal periods or non-uniform calendars, it will certainly not. If ``stride`` is a divisor of ``window``, the correct timeseries can be reconstructed with :py:func:`unstack_periods`. - The coordinate of `period` is the first timestep of each windows. + The coordinate of `period` is the first timestep of each window. """ from xclim.core.units import ( # Import in function to avoid cyclical imports ensure_cf_units, @@ -1734,9 +1735,9 @@ def stack_periods( ) periods = [] - longest = 0 + # longest = 0 # Iterate over strides, but recompute the full window for each stride start - for begin, strd_slc in da.resample(time=strd_frq).groups.items(): + for _, strd_slc in da.resample(time=strd_frq).groups.items(): win_resamp = time2.isel(time=slice(strd_slc.start, None)).resample(time=win_frq) # Get slice for first group win_slc = win_resamp._group_indices[0] @@ -1749,7 +1750,7 @@ def stack_periods( open_ended = min_slc.stop is None else: # The end of the group slice is None if no outside-group value was found after the last element - # As we added an extra step to time2, we avoid the case where a group ends exactly on the last element of ds. + # As we added an extra step to time2, we avoid the case where a group ends exactly on the last element of ds open_ended = win_slc.stop is None if open_ended: # Too short, we got to the end @@ -1760,7 +1761,8 @@ def stack_periods( and min_length == window and not _month_is_first_period_month(da.time[0].item(), freq) ): - # For annual or quartely frequencies (which can be anchor-based), if the first time is not in the first month of the first period, + # For annual or quartely frequencies (which can be anchor-based), + # if the first time is not in the first month of the first period, # then the first period is incomplete but by a fractional amount. 
continue periods.append( @@ -1783,7 +1785,7 @@ def stack_periods( m, u = infer_sampling_units(da) lengths = lengths * m lengths.attrs["units"] = ensure_cf_units(u) - # Start points for each periods + remember parameters for unstacking + # Start points for each period and remember parameters for unstacking starts = xr.DataArray( [da.time[slc.start].item() for slc in periods], dims=(dim,), @@ -1873,7 +1875,7 @@ def unstack_periods(da: xr.DataArray | xr.Dataset, dim: str = "period"): f"`unstack_periods` can't find the `{dim}_length` coordinate." ) from err # Get length as number of points - m, u = infer_sampling_units(da.time) + m, _ = infer_sampling_units(da.time) lengths = lengths // m else: # It is acceptable to lose "{dim}_length" if they were all equal diff --git a/xclim/core/indicator.py b/xclim/core/indicator.py index 6590ab9a4..00cc9f414 100644 --- a/xclim/core/indicator.py +++ b/xclim/core/indicator.py @@ -1623,12 +1623,12 @@ def build_indicator_module( ) out = getattr(indicators, name) if reload: - for name, ind in list(out.iter_indicators()): - if name not in objs: + for n, ind in list(out.iter_indicators()): + if n not in objs: # Remove the indicator from the registries and the module del registry[ind._registry_id] # noqa del _indicators_registry[ind.__class__] - del out.__dict__[name] + del out.__dict__[n] else: doc = doc or f"{name.capitalize()} indicators\n" + "=" * (len(name) + 11) try: diff --git a/xclim/core/options.py b/xclim/core/options.py index 1eba36a3f..45eeedacf 100644 --- a/xclim/core/options.py +++ b/xclim/core/options.py @@ -137,7 +137,7 @@ def run_check(*args, **kwargs): return run_check -class set_options: +class set_options: # noqa: C0103 """Set options for xclim in a controlled context. Attributes diff --git a/xclim/core/units.py b/xclim/core/units.py index 1ffc9d567..185f6726a 100644 --- a/xclim/core/units.py +++ b/xclim/core/units.py @@ -1139,10 +1139,10 @@ def dec(func): # Raised when it is not understood, we assume it was a dimensionality try: units.get_dimensionality(dim.replace("dimensionless", "")) - except Exception: + except Exception as e: raise ValueError( f"Relative units for {name} are invalid. Got {dim}. (See stacktrace for more information)." - ) + ) from e @wraps(func) def wrapper(*args, **kwargs): diff --git a/xclim/core/utils.py b/xclim/core/utils.py index 89304b5ce..d582b5be2 100644 --- a/xclim/core/utils.py +++ b/xclim/core/utils.py @@ -136,10 +136,11 @@ def decorator(func): @functools.wraps(func) def wrapper(*args, **kwargs): msg = ( - f"`{func.__name__}` is deprecated{' from version {}'.format(from_version) if from_version else ''} " + f"`{func.__name__}` is deprecated" + f"{' from version {}'.format(from_version) if from_version else ''} " "and will be removed in a future version of xclim" - f"{'. Use `{}` instead'.format(suggested if suggested else '')}. " - f"Please update your scripts accordingly." + f"{'. Use `{}` instead'.format(suggested) if suggested else ''}. " + "Please update your scripts accordingly." 
) warnings.warn( msg, diff --git a/xclim/ensembles/_filters.py b/xclim/ensembles/_filters.py index ef0e8f969..6923173da 100644 --- a/xclim/ensembles/_filters.py +++ b/xclim/ensembles/_filters.py @@ -47,7 +47,7 @@ def _concat_hist(da, **hist): raise ValueError("Too many values in hist scenario.") # Scenario dimension, and name of the historical scenario - ((dim, name),) = hist.items() + ((dim, _),) = hist.items() # Select historical scenario and drop it from the data h = da.sel(**hist).dropna("time", how="all") diff --git a/xclim/sdba/processing.py b/xclim/sdba/processing.py index f6d833ae8..7866956c2 100644 --- a/xclim/sdba/processing.py +++ b/xclim/sdba/processing.py @@ -46,10 +46,10 @@ def adapt_freq( Parameters ---------- - ds : xr.Dataset - With variables : "ref", Target/reference data, usually observed data, and "sim", Simulated data. - dim : str - Dimension name. + ref : xr.Dataset + Target/reference data, usually observed data, with a "time" dimension. + sim : xr.Dataset + Simulated data, with a "time" dimension. group : str or Grouper Grouping information, see base.Grouper thresh : str diff --git a/xclim/sdba/properties.py b/xclim/sdba/properties.py index e729a13ee..94bc10ab1 100644 --- a/xclim/sdba/properties.py +++ b/xclim/sdba/properties.py @@ -1112,7 +1112,7 @@ def _decorrelation_length( corr = _pairwise_spearman(da, dims) - dists, mn, mx = _pairwise_haversine_and_bins( + dists, _, _ = _pairwise_haversine_and_bins( corr.cf["longitude"].values, corr.cf["latitude"].values, transpose=True ) diff --git a/xclim/testing/utils.py b/xclim/testing/utils.py index c15a92054..82b63d631 100644 --- a/xclim/testing/utils.py +++ b/xclim/testing/utils.py @@ -202,7 +202,8 @@ def _get( local_md5 = file_md5_checksum(local_file) try: url = "/".join((github_url, "raw", branch, md5_name.as_posix())) - logger.info(f"Attempting to fetch remote file md5: {md5_name.as_posix()}") + msg = f"Attempting to fetch remote file md5: {md5_name.as_posix()}" + logger.info(msg) urlretrieve(url, md5_file) # nosec with open(md5_file) as f: remote_md5 = f.read() @@ -235,7 +236,7 @@ def _get( local_file.parent.mkdir(exist_ok=True, parents=True) url = "/".join((github_url, "raw", branch, fullname.as_posix())) - logger.info(f"Fetching remote file: {fullname.as_posix()}") + logger.info("Fetching remote file: {}" % fullname.as_posix()) try: urlretrieve(url, local_file) # nosec except HTTPError as e: @@ -256,7 +257,7 @@ def _get( raise FileNotFoundError(msg) from e try: url = "/".join((github_url, "raw", branch, md5_name.as_posix())) - logger.info(f"Fetching remote file md5: {md5_name.as_posix()}") + logger.info("Fetching remote file md5: {}" % md5_name.as_posix()) urlretrieve(url, md5_file) # nosec except (HTTPError, URLError) as e: msg = ( From 47609cb9ac69e1a29f61351cd6afeb82dfaaa01e Mon Sep 17 00:00:00 2001 From: Trevor James Smith <10819524+Zeitsperre@users.noreply.github.com> Date: Thu, 18 Jan 2024 14:58:18 -0500 Subject: [PATCH 05/37] update target config --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e711616d9..201a02778 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -61,7 +61,7 @@ jobs: run: pip install pylint tox~=4.0 - name: Run pylint run: | - python -m pylint --rcfile=pylintrc --disable=import-error --exit-zero xclim + python -m pylint --rcfile=.pylintrc.toml --disable=import-error --exit-zero xclim - name: Run linting suite run: | python -m tox -e lint From 
74f63e9d6f2316618a738ccec872a3410c439b47 Mon Sep 17 00:00:00 2001 From: Trevor James Smith <10819524+Zeitsperre@users.noreply.github.com> Date: Thu, 18 Jan 2024 15:35:14 -0500 Subject: [PATCH 06/37] ignore .pylintrc.toml --- .pre-commit-config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 898b68aa9..bf9366f2d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,6 +26,7 @@ repos: rev: v0.23.1 hooks: - id: toml-sort-fix + exclude: '.pylintrc.toml' - repo: https://github.com/pylint-dev/pylint rev: v3.0.3 hooks: From 91381b21306398ad64e9e9bcec2fd4af109bce47 Mon Sep 17 00:00:00 2001 From: Trevor James Smith <10819524+Zeitsperre@users.noreply.github.com> Date: Thu, 18 Jan 2024 15:39:26 -0500 Subject: [PATCH 07/37] fix logging --- xclim/testing/utils.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/xclim/testing/utils.py b/xclim/testing/utils.py index 82b63d631..f48e3ca90 100644 --- a/xclim/testing/utils.py +++ b/xclim/testing/utils.py @@ -236,7 +236,8 @@ def _get( local_file.parent.mkdir(exist_ok=True, parents=True) url = "/".join((github_url, "raw", branch, fullname.as_posix())) - logger.info("Fetching remote file: {}" % fullname.as_posix()) + msg = f"Fetching remote file: {fullname.as_posix()}" + logger.info(msg) try: urlretrieve(url, local_file) # nosec except HTTPError as e: @@ -257,7 +258,8 @@ def _get( raise FileNotFoundError(msg) from e try: url = "/".join((github_url, "raw", branch, md5_name.as_posix())) - logger.info("Fetching remote file md5: {}" % md5_name.as_posix()) + msg = f"Fetching remote file md5: {format(md5_name.as_posix())}" + logger.info(msg) urlretrieve(url, md5_file) # nosec except (HTTPError, URLError) as e: msg = ( From 529f656b7ec76914a31ecc834c9ff01e57ed4146 Mon Sep 17 00:00:00 2001 From: Trevor James Smith <10819524+Zeitsperre@users.noreply.github.com> Date: Thu, 18 Jan 2024 16:21:22 -0500 Subject: [PATCH 08/37] undo regression --- xclim/cli.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/xclim/cli.py b/xclim/cli.py index 2a09f01a9..d14b8eca3 100644 --- a/xclim/cli.py +++ b/xclim/cli.py @@ -283,7 +283,7 @@ def dataflags(ctx, variables, raise_flags, append, dims, freq): @click.option( "-i", "--info", is_flag=True, help="Prints more details for each indicator." ) -def indices(information): +def indices(info): # noqa """List all indicators.""" formatter = click.HelpFormatter() formatter.write_heading("Listing all available indicators for computation.") @@ -297,7 +297,7 @@ def indices(information): right += ( " (" + ", ".join([var["var_name"] for var in indcls.cf_attrs]) + ")" ) - if information: + if info: right += "\n" + indcls.abstract rows.append((left, right)) rows.sort(key=lambda row: row[0]) From 9d1cd10a9b07c103102b06a2b2abb705c15439fb Mon Sep 17 00:00:00 2001 From: Trevor James Smith <10819524+Zeitsperre@users.noreply.github.com> Date: Thu, 18 Jan 2024 16:31:29 -0500 Subject: [PATCH 09/37] more tuning --- .pylintrc.toml | 4 ++-- xclim/testing/utils.py | 14 +++++++++----- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/.pylintrc.toml b/.pylintrc.toml index a51464284..0c64979e2 100644 --- a/.pylintrc.toml +++ b/.pylintrc.toml @@ -10,7 +10,7 @@ # Always return a 0 (non-error) status code, even if lint errors are found. This # is primarily useful in continuous integration scripts. -exit-zero = false +# exit-zero = false # A comma-separated list of package or module names from where C extensions may # be loaded. 
Extensions are loading into the active Python interpreter and may @@ -298,7 +298,7 @@ indent-string = " " max-line-length = 150 # Maximum number of lines in a module. -max-module-lines = 1000 +max-module-lines = 1500 # Allow the body of a class to be on the same line as the declaration if body # contains single statement. diff --git a/xclim/testing/utils.py b/xclim/testing/utils.py index f48e3ca90..ce1a49ff4 100644 --- a/xclim/testing/utils.py +++ b/xclim/testing/utils.py @@ -205,7 +205,7 @@ def _get( msg = f"Attempting to fetch remote file md5: {md5_name.as_posix()}" logger.info(msg) urlretrieve(url, md5_file) # nosec - with open(md5_file) as f: + with open(md5_file, "r") as f: remote_md5 = f.read() if local_md5.strip() != remote_md5.strip(): local_file.unlink() @@ -272,7 +272,7 @@ def _get( local_md5 = file_md5_checksum(local_file) try: - with open(md5_file) as f: + with open(md5_file, "r") as f: remote_md5 = f.read() if local_md5.strip() != remote_md5.strip(): local_file.unlink() @@ -489,7 +489,7 @@ def publish_release_notes( if not changes_file.exists(): raise FileNotFoundError("Changelog file not found in xclim folder tree.") - with open(changes_file) as hf: + with open(changes_file, "r") as hf: changes = hf.read() if style == "rst": @@ -534,7 +534,9 @@ def publish_release_notes( if not file: return changes if isinstance(file, (Path, os.PathLike)): - file = Path(file).open("w") + with Path(file).open("w") as f: + print(changes, file=f) + return print(changes, file=file) @@ -591,5 +593,7 @@ def show_versions( if not file: return message if isinstance(file, (Path, os.PathLike)): - file = Path(file).open("w") + with Path(file).open("w") as f: + print(message, file=f) + return print(message, file=file) From 195ea4270c5035aa41c35b021485bb571784fbb1 Mon Sep 17 00:00:00 2001 From: Trevor James Smith <10819524+Zeitsperre@users.noreply.github.com> Date: Thu, 18 Jan 2024 16:57:12 -0500 Subject: [PATCH 10/37] reconfigure pylint for pre-commit, adjust formatting and exceptions --- .pre-commit-config.yaml | 2 +- tests/test_indicators.py | 24 ++++-- tests/test_indices.py | 2 +- tests/test_modules.py | 4 +- tests/test_sdba/test_adjustment.py | 5 +- tests/test_sdba/test_base.py | 128 ++++++++++++++++------------- xclim/core/indicator.py | 4 +- xclim/testing/utils.py | 6 +- 8 files changed, 103 insertions(+), 72 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bf9366f2d..9652c6082 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,7 +31,7 @@ repos: rev: v3.0.3 hooks: - id: pylint - args: [ '--rcfile=.pylintrc.toml' ] + args: [ '--rcfile=.pylintrc.toml', '--errors-only', '--disable=import-error' ] - repo: https://github.com/adrienverge/yamllint.git rev: v1.33.0 hooks: diff --git a/tests/test_indicators.py b/tests/test_indicators.py index ee11a86ca..cd15f2618 100644 --- a/tests/test_indicators.py +++ b/tests/test_indicators.py @@ -159,7 +159,11 @@ def test_attrs(tas_series): assert f"xclim version: {__version__}" in txm.attrs["history"] assert txm.name == "tmin5 degC" assert uniIndTemp.standard_name == "{freq} mean temperature" - assert uniIndTemp.cf_attrs[0]["another_attr"] == "With a value." + # fmt: off + assert ( + uniIndTemp.cf_attrs[0]["another_attr"] == "With a value." 
# pylint: disable=unsubscriptable-object + ) + # fmt: on thresh = xr.DataArray( [1], @@ -243,8 +247,14 @@ def test_module(): """Translations are keyed according to the module where the indicators are defined.""" assert atmos.tg_mean.__module__.split(".")[2] == "atmos" # Virtual module also are stored under xclim.indicators - assert xclim.indicators.cf.fg.__module__ == "xclim.indicators.cf" - assert xclim.indicators.icclim.GD4.__module__ == "xclim.indicators.icclim" + # fmt: off + assert ( + xclim.indicators.cf.fg.__module__ == "xclim.indicators.cf" # pylint: disable=no-member + ) + assert ( + xclim.indicators.icclim.GD4.__module__ == "xclim.indicators.icclim" # pylint: disable=no-member + ) + # fmt: on def test_temp_unit_conversion(tas_series): @@ -257,7 +267,9 @@ def test_temp_unit_conversion(tas_series): with pytest.raises(AssertionError): np.testing.assert_array_almost_equal(txk, txc + 273.15) - uniIndTemp.cf_attrs[0]["units"] = "degC" + uniIndTemp.cf_attrs[0][ # noqa; # pylint: disable=unsubscriptable-object + "units" + ] = "degC" txc = uniIndTemp(a, freq="YS") np.testing.assert_array_almost_equal(txk, txc + 273.15) @@ -685,7 +697,7 @@ def test_indicator_from_dict(): # Wrap a multi-output ind d = dict(base="wind_speed_from_vector") - ind = Indicator.from_dict(d, identifier="wsfv", module="test") + Indicator.from_dict(d, identifier="wsfv", module="test") def test_indicator_errors(): @@ -758,7 +770,7 @@ def func(data: xr.DataArray, thresh: str = "0 degC", freq: str = "YS"): # noqa # with pytest.raises(ValueError, match="variable data is missing expected units"): # Daily(**d) - d["parameters"]["thresh"] = {"units": "K"} + d["parameters"]["thresh"] = {"units": "K"} # pylint: disable=function-redefined d["realm"] = "mercury" d["input"] = {"data": "tasmin"} with pytest.raises(AttributeError, match="Indicator's realm must be given as one"): diff --git a/tests/test_indices.py b/tests/test_indices.py index 5a7e0eb5d..d4979341c 100644 --- a/tests/test_indices.py +++ b/tests/test_indices.py @@ -2385,7 +2385,7 @@ def test_simple(self, open_dataset, ind, exp): np.testing.assert_almost_equal(out[0], exp, decimal=4) def test_indice_against_icclim(self, cmip3_day_tas): - from xclim.indicators import icclim # noqa + from xclim.indicators import icclim # noqa; # pylint: disable=no-name-in-module with set_options(cf_compliance="log"): ind = xci.tg_mean(cmip3_day_tas) diff --git a/tests/test_modules.py b/tests/test_modules.py index b6b3abb47..3c06d3a4c 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -28,7 +28,9 @@ def virtual_indicator(request): def test_default_modules_exist(): - from xclim.indicators import anuclim, cf, icclim # noqa + from xclim.indicators import anuclim # noqa; # pylint: disable=no-name-in-module + from xclim.indicators import cf # noqa; # pylint: disable=no-name-in-module + from xclim.indicators import icclim # noqa; # pylint: disable=no-name-in-module assert hasattr(icclim, "TG") diff --git a/tests/test_sdba/test_adjustment.py b/tests/test_sdba/test_adjustment.py index 953bd5ded..30d5f05b0 100644 --- a/tests/test_sdba/test_adjustment.py +++ b/tests/test_sdba/test_adjustment.py @@ -710,7 +710,10 @@ def test_default_grouper_understood(tas_series): class TestSBCKutils: @pytest.mark.slow @pytest.mark.parametrize( - "method", [m for m in dir(adjustment) if m.startswith("SBCK_")] + "method", + [ + m for m in dir(adjustment) if m.startswith("SBCK_") + ], # pylint: disable=no-member ) @pytest.mark.parametrize("use_dask", [True]) # do we gain testing both? 
def test_sbck(self, method, use_dask, random): diff --git a/tests/test_sdba/test_base.py b/tests/test_sdba/test_base.py index ffba45c1f..591780986 100644 --- a/tests/test_sdba/test_base.py +++ b/tests/test_sdba/test_base.py @@ -159,60 +159,74 @@ def normalize_from_precomputed(grpds, dim=None): np.testing.assert_allclose(out, exp, rtol=1e-10) -def test_map_blocks(tas_series): - tas = tas_series(np.arange(366), start="2000-01-01") - tas = tas.expand_dims(lat=[1, 2, 3, 4]).chunk() - - # Test dim parsing - @map_blocks(reduces=["lat"], data=["lon"]) - def func(ds, *, group, lon=None): - assert group.window == 5 - data = ds.tas.rename(lat="lon") - return data.rename("data").to_dataset() - - # Raises on missing coords - with pytest.raises(ValueError, match="This function adds the lon dimension*"): - data = func(xr.Dataset(dict(tas=tas)), group="time.dayofyear", window=5) - - data = func( - xr.Dataset(dict(tas=tas)), group="time.dayofyear", window=5, lon=[1, 2, 3, 4] - ).load() - assert set(data.data.dims) == {"time", "lon"} - - @map_groups(data=[Grouper.PROP]) - def func(ds, *, dim): - assert isinstance(dim, list) - data = ds.tas.mean(dim) - return data.rename("data").to_dataset() - - data = func( - xr.Dataset(dict(tas=tas)), group="time.dayofyear", window=5, add_dims=["lat"] - ).load() - assert set(data.data.dims) == {"dayofyear"} - - @map_groups(data=[Grouper.PROP], main_only=True) - def func(ds, *, dim): - assert isinstance(dim, str) - data = ds.tas.mean(dim) - return data.rename("data").to_dataset() - - # with a scalar aux coord - data = func( - xr.Dataset(dict(tas=tas.isel(lat=0, drop=True)), coords=dict(leftover=1)), - group="time.dayofyear", - ).load() - assert set(data.data.dims) == {"dayofyear"} - assert "leftover" in data - - -def test_map_blocks_error(tas_series): - tas = tas_series(np.arange(366), start="2000-01-01") - tas = tas.expand_dims(lat=[1, 2, 3, 4]).chunk(lat=1) - - # Test dim parsing - @map_blocks(reduces=["lat"], data=[]) - def func(ds, *, group, lon=None): - return ds.tas.rename("data").to_dataset() - - with pytest.raises(ValueError, match="cannot be chunked"): - func(xr.Dataset(dict(tas=tas)), group="time") +class TestMapBlocks: + def test_lat_lon(self, tas_series): + tas = tas_series(np.arange(366), start="2000-01-01") + tas = tas.expand_dims(lat=[1, 2, 3, 4]).chunk() + + # Test dim parsing + @map_blocks(reduces=["lat"], data=["lon"]) + def func(ds, *, group, lon=None): + assert group.window == 5 + d = ds.tas.rename(lat="lon") + return d.rename("data").to_dataset() + + # Raises on missing coords + with pytest.raises(ValueError, match="This function adds the lon dimension*"): + data = func(xr.Dataset(dict(tas=tas)), group="time.dayofyear", window=5) + + data = func( + xr.Dataset(dict(tas=tas)), + group="time.dayofyear", + window=5, + lon=[1, 2, 3, 4], + ).load() + assert set(data.data.dims) == {"time", "lon"} + + def test_grouper_prop(self, tas_series): + tas = tas_series(np.arange(366), start="2000-01-01") + tas = tas.expand_dims(lat=[1, 2, 3, 4]).chunk() + + @map_groups(data=[Grouper.PROP]) + def func(ds, *, dim): + assert isinstance(dim, list) + d = ds.tas.mean(dim) + return d.rename("data").to_dataset() + + data = func( + xr.Dataset(dict(tas=tas)), + group="time.dayofyear", + window=5, + add_dims=["lat"], + ).load() + assert set(data.data.dims) == {"dayofyear"} + + def test_grouper_prop_main_only(self, tas_series): + tas = tas_series(np.arange(366), start="2000-01-01") + tas = tas.expand_dims(lat=[1, 2, 3, 4]).chunk() + + @map_groups(data=[Grouper.PROP], 
main_only=True) + def func(ds, *, dim): + assert isinstance(dim, str) + data = ds.tas.mean(dim) + return data.rename("data").to_dataset() + + # with a scalar aux coord + data = func( + xr.Dataset(dict(tas=tas.isel(lat=0, drop=True)), coords=dict(leftover=1)), + group="time.dayofyear", + ).load() + assert set(data.data.dims) == {"dayofyear"} + assert "leftover" in data + + def test_raises_error(self, tas_series): + tas = tas_series(np.arange(366), start="2000-01-01") + tas = tas.expand_dims(lat=[1, 2, 3, 4]).chunk(lat=1) + + # Test dim parsing + @map_blocks(reduces=["lat"], data=[]) + def func(ds, *, group, lon=None): + return ds.tas.rename("data").to_dataset() + + with pytest.raises(ValueError, match="cannot be chunked"): + func(xr.Dataset(dict(tas=tas)), group="time") diff --git a/xclim/core/indicator.py b/xclim/core/indicator.py index 00cc9f414..b2e7aaa49 100644 --- a/xclim/core/indicator.py +++ b/xclim/core/indicator.py @@ -211,8 +211,8 @@ def update(self, other: dict) -> None: def is_parameter_dict(cls, other: dict) -> bool: """Return whether indicator has a parameter dictionary.""" return set(other.keys()).issubset( - cls.__dataclass_fields__.keys() - ) # pylint disable=no-member + cls.__dataclass_fields__.keys() # pylint disable=no-member + ) def __getitem__(self, key) -> str: """Return an item in retro-compatible fashion.""" diff --git a/xclim/testing/utils.py b/xclim/testing/utils.py index ce1a49ff4..561b8a863 100644 --- a/xclim/testing/utils.py +++ b/xclim/testing/utils.py @@ -205,7 +205,7 @@ def _get( msg = f"Attempting to fetch remote file md5: {md5_name.as_posix()}" logger.info(msg) urlretrieve(url, md5_file) # nosec - with open(md5_file, "r") as f: + with open(md5_file) as f: remote_md5 = f.read() if local_md5.strip() != remote_md5.strip(): local_file.unlink() @@ -272,7 +272,7 @@ def _get( local_md5 = file_md5_checksum(local_file) try: - with open(md5_file, "r") as f: + with open(md5_file) as f: remote_md5 = f.read() if local_md5.strip() != remote_md5.strip(): local_file.unlink() @@ -489,7 +489,7 @@ def publish_release_notes( if not changes_file.exists(): raise FileNotFoundError("Changelog file not found in xclim folder tree.") - with open(changes_file, "r") as hf: + with open(changes_file) as hf: changes = hf.read() if style == "rst": From b039e8a8bb04b2aefaec09ddb641bfbd62550c2b Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Fri, 19 Jan 2024 10:47:47 -0500 Subject: [PATCH 11/37] pylint adjustments --- .pre-commit-config.yaml | 2 +- tests/test_indicators.py | 6 ++++- tests/test_locales.py | 8 ++++++- tests/test_sdba/test_adjustment.py | 6 +++-- tests/test_sdba/test_base.py | 4 ++-- tests/test_utils.py | 4 ++-- xclim/core/indicator.py | 18 +++++++-------- xclim/indices/fire/_ffdi.py | 36 +++++++++++++++++++++++------- xclim/sdba/measures.py | 16 ++++++------- xclim/sdba/nbutils.py | 15 ++++++++----- xclim/sdba/properties.py | 16 ++++++------- xclim/testing/diagnostics.py | 2 +- 12 files changed, 81 insertions(+), 52 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9652c6082..12d9c6d22 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,7 +31,7 @@ repos: rev: v3.0.3 hooks: - id: pylint - args: [ '--rcfile=.pylintrc.toml', '--errors-only', '--disable=import-error' ] + args: [ '--rcfile=.pylintrc.toml', '--errors-only', '--jobs=0', '--disable=import-error' ] - repo: https://github.com/adrienverge/yamllint.git rev: v1.33.0 hooks: diff --git a/tests/test_indicators.py 
b/tests/test_indicators.py index cd15f2618..3f572d40f 100644 --- a/tests/test_indicators.py +++ b/tests/test_indicators.py @@ -776,8 +776,12 @@ def func(data: xr.DataArray, thresh: str = "0 degC", freq: str = "YS"): # noqa with pytest.raises(AttributeError, match="Indicator's realm must be given as one"): Daily(**d) - def func(data: xr.DataArray, thresh: str = "0 degC"): + # fmt: off + def func( # noqa; # pylint: disable=function-redefined + data: xr.DataArray, thresh: str = "0 degC" + ): return data + # fmt: on func.__doc__ = "\n".join(doc[:10] + doc[12:]) d = dict( diff --git a/tests/test_locales.py b/tests/test_locales.py index 4d7e12fdb..441a0252c 100644 --- a/tests/test_locales.py +++ b/tests/test_locales.py @@ -159,7 +159,13 @@ def test_xclim_translations(locale, official_indicators): @pytest.mark.parametrize( - "initeng,expected", [(False, ""), (True, atmos.tg_mean.cf_attrs[0]["long_name"])] + # fmt: off + "initeng,expected", + [ + (False, ""), + (True, atmos.tg_mean.cf_attrs[0]["long_name"]), # pylint: disable=unsubscriptable-object + ], + # fmt: on ) def test_local_dict_generation(initeng, expected): dic = generate_local_dict("tlh", init_english=initeng) diff --git a/tests/test_sdba/test_adjustment.py b/tests/test_sdba/test_adjustment.py index 30d5f05b0..52ff3ae75 100644 --- a/tests/test_sdba/test_adjustment.py +++ b/tests/test_sdba/test_adjustment.py @@ -711,9 +711,11 @@ class TestSBCKutils: @pytest.mark.slow @pytest.mark.parametrize( "method", + # fmt: off [ - m for m in dir(adjustment) if m.startswith("SBCK_") - ], # pylint: disable=no-member + m for m in dir(adjustment) if m.startswith("SBCK_") # pylint: disable=no-member + ], + # fmt: on ) @pytest.mark.parametrize("use_dask", [True]) # do we gain testing both? def test_sbck(self, method, use_dask, random): diff --git a/tests/test_sdba/test_base.py b/tests/test_sdba/test_base.py index 591780986..b597842ca 100644 --- a/tests/test_sdba/test_base.py +++ b/tests/test_sdba/test_base.py @@ -193,7 +193,7 @@ def func(ds, *, dim): d = ds.tas.mean(dim) return d.rename("data").to_dataset() - data = func( + data = func( # pylint: disable=missing-kwoa xr.Dataset(dict(tas=tas)), group="time.dayofyear", window=5, @@ -212,7 +212,7 @@ def func(ds, *, dim): return data.rename("data").to_dataset() # with a scalar aux coord - data = func( + data = func( # pylint: disable=missing-kwoa xr.Dataset(dict(tas=tas.isel(lat=0, drop=True)), coords=dict(leftover=1)), group="time.dayofyear", ).load() diff --git a/tests/test_utils.py b/tests/test_utils.py index 45ec02004..f679cd1d9 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -38,9 +38,9 @@ def func(a, b=1, c=1): assert newf(1) == (1, 2, 2) assert newf.__doc__ == func.__doc__ - def func(a, b=1, c=1, **kws): + def func(a, b=1, c=1, **kws): # pylint: disable=function-redefined """Docstring""" - return (a, b, c) + return a, b, c newf = wrapped_partial(func, suggested=dict(c=2), a=2, b=2) assert list(signature(newf).parameters.keys()) == ["c", "kws"] diff --git a/xclim/core/indicator.py b/xclim/core/indicator.py index b2e7aaa49..f4cb9bc72 100644 --- a/xclim/core/indicator.py +++ b/xclim/core/indicator.py @@ -211,7 +211,7 @@ def update(self, other: dict) -> None: def is_parameter_dict(cls, other: dict) -> bool: """Return whether indicator has a parameter dictionary.""" return set(other.keys()).issubset( - cls.__dataclass_fields__.keys() # pylint disable=no-member + cls.__dataclass_fields__.keys() # noqa; # pylint: disable=no-member ) def __getitem__(self, key) -> str: @@ -1502,15 +1502,13 @@ def 
_preprocess_and_checks(self, das, params): das, params = super()._preprocess_and_checks(das, params) # Check if the period is allowed: - if ( - self.allowed_periods is not None - and parse_offset(params["freq"])[1] not in self.allowed_periods - ): - raise ValueError( - f"Resampling frequency {params['freq']} is not allowed for indicator " - f"{self.identifier} (needs something equivalent to one " - f"of {self.allowed_periods})." - ) + if self.allowed_periods is not None: + if parse_offset(params["freq"])[1] not in self.allowed_periods: + raise ValueError( + f"Resampling frequency {params['freq']} is not allowed for indicator " + f"{self.identifier} (needs something equivalent to one " + f"of {self.allowed_periods})." + ) return das, params diff --git a/xclim/indices/fire/_ffdi.py b/xclim/indices/fire/_ffdi.py index f811b32d4..123b51984 100644 --- a/xclim/indices/fire/_ffdi.py +++ b/xclim/indices/fire/_ffdi.py @@ -239,10 +239,20 @@ def keetch_byram_drought_index( """ def _keetch_byram_drought_index_pass(pr, tasmax, pr_annual, kbdi0): - """Pass inputs on to guvectorized function `_keetch_byram_drought_index`. DO NOT CALL DIRECTLY, use `keetch_byram_drought_index` instead.""" - # This function is actually only required as xr.apply_ufunc will not receive - # a guvectorized function which has the output(s) in its function signature - return _keetch_byram_drought_index(pr, tasmax, pr_annual, kbdi0) + """Pass inputs on to guvectorized function `_keetch_byram_drought_index`. + + This function is actually only required as `xr.apply_ufunc` will not receive + a guvectorized function which has the output(s) in its function signature. + + Warnings + -------- + DO NOT CALL DIRECTLY, use `keetch_byram_drought_index` instead. + """ + # fmt: off + return _keetch_byram_drought_index( # pylint: disable=no-value-for-parameter + pr, tasmax, pr_annual, kbdi0 + ) + # fmt: on pr = convert_units_to(pr, "mm/day", context="hydro") tasmax = convert_units_to(tasmax, "C") @@ -311,10 +321,20 @@ def griffiths_drought_factor( """ def _griffiths_drought_factor_pass(pr, smd, lim): - """Pass inputs on to guvectorized function `_griffiths_drought_factor`. DO NOT CALL DIRECTLY, use `griffiths_drought_factor` instead.""" - # This function is actually only required as xr.apply_ufunc will not receive - # a guvectorized function which has the output(s) in its function signature - return _griffiths_drought_factor(pr, smd, lim) + """Pass inputs on to guvectorized function `_griffiths_drought_factor`. + + This function is actually only required as xr.apply_ufunc will not receive + a guvectorized function which has the output(s) in its function signature. + + Warnings + -------- + DO NOT CALL DIRECTLY, use `griffiths_drought_factor` instead. + """ + # fmt: off + return _griffiths_drought_factor( # pylint: disable=no-value-for-parameter + pr, smd, lim + ) + # fmt: on pr = convert_units_to(pr, "mm/day", context="hydro") smd = convert_units_to(smd, "mm/day") diff --git a/xclim/sdba/measures.py b/xclim/sdba/measures.py index ede225c31..b1fa8a5d0 100644 --- a/xclim/sdba/measures.py +++ b/xclim/sdba/measures.py @@ -120,15 +120,13 @@ def _preprocess_and_checks(self, das, params): if isinstance(params["group"], str): params["group"] = Grouper(params["group"]) - if ( - self.allowed_groups is not None - and params["group"].prop not in self.allowed_groups - ): - raise ValueError( - f"Grouping period {params['group'].prop_name} is not allowed for property " - f"{self.identifier} (needs something in " - f"{list(map(lambda g: '.' 
+ g.replace('group', ''), self.allowed_groups))})." - ) + if self.allowed_groups is not None: + if params["group"].prop not in self.allowed_groups: + raise ValueError( + f"Grouping period {params['group'].prop_name} is not allowed for property " + f"{self.identifier} (needs something in " + f"{list(map(lambda g: '.' + g.replace('group', ''), self.allowed_groups))})." + ) # Convert grouping and check if allowed: sim = das["sim"] diff --git a/xclim/sdba/nbutils.py b/xclim/sdba/nbutils.py index fcd4b5444..6e49933d4 100644 --- a/xclim/sdba/nbutils.py +++ b/xclim/sdba/nbutils.py @@ -23,7 +23,7 @@ def _vecquantiles(arr, rnk, res): res[0] = np.nanquantile(arr, rnk) -def vecquantiles(da, rnk, dim): +def vecquantiles(da: DataArray, rnk: DataArray, dim: str | DataArray.dims) -> DataArray: """For when the quantile (rnk) is different for each point. da and rnk must share all dimensions but dim. @@ -34,7 +34,7 @@ def vecquantiles(da, rnk, dim): da = da.transpose(*rnk.dims, tem) res = DataArray( - _vecquantiles(da.values, rnk.values), + _vecquantiles(da.values, rnk.values), # pylint: disable=no-value-for-parameter dims=rnk.dims, coords=rnk.coords, attrs=da.attrs, @@ -54,7 +54,7 @@ def _quantile(arr, q): return out -def quantile(da, q, dim): +def quantile(da: DataArray, q, dim: str | DataArray.dims) -> DataArray: """Compute the quantiles from a fixed list `q`.""" # We have two cases : # - When all dims are processed : we stack them and use _quantile1d @@ -68,7 +68,7 @@ def quantile(da, q, dim): da = da.stack({tem: dims}) # So we cut in half the definitions to declare in numba - # We still use q as the coords so it corresponds to what was done upstream + # We still use q as the coords, so it corresponds to what was done upstream if not hasattr(q, "dtype") or q.dtype != da.dtype: qc = np.array(q, dtype=da.dtype) else: @@ -90,7 +90,7 @@ def quantile(da, q, dim): # All dims are processed res = DataArray( _quantile(da.values, qc), - dims=("quantiles"), + dims="quantiles", coords={"quantiles": q}, attrs=da.attrs, ) @@ -185,8 +185,11 @@ def _first_and_last_nonnull(arr): @njit def _extrapolate_on_quantiles( + # fmt: off interp, oldx, oldg, oldy, newx, newg, method="constant" -): # noqa + # noqa + # fmt: on +): """Apply extrapolation to the output of interpolation on quantiles with a given grouping. Arguments are the same as _interp_on_quantiles_2D. diff --git a/xclim/sdba/properties.py b/xclim/sdba/properties.py index 94bc10ab1..b5fc41bc0 100644 --- a/xclim/sdba/properties.py +++ b/xclim/sdba/properties.py @@ -77,15 +77,13 @@ def _preprocess_and_checks(self, das, params): if isinstance(params["group"], str): params["group"] = Grouper(params["group"]) - if ( - self.allowed_groups is not None - and params["group"].prop not in self.allowed_groups - ): - raise ValueError( - f"Grouping period {params['group'].prop_name} is not allowed for property " - f"{self.identifier} (needs something in " - f"{map(lambda g: '.' + g.replace('group', ''), self.allowed_groups)})." - ) + if self.allowed_groups is not None: + if params["group"].prop not in self.allowed_groups: + raise ValueError( + f"Grouping period {params['group'].prop_name} is not allowed for property " + f"{self.identifier} (needs something in " + f"{map(lambda g: '.' + g.replace('group', ''), self.allowed_groups)})." 
+ ) return das, params diff --git a/xclim/testing/diagnostics.py b/xclim/testing/diagnostics.py index a7f8a6c21..5338ea137 100644 --- a/xclim/testing/diagnostics.py +++ b/xclim/testing/diagnostics.py @@ -128,7 +128,7 @@ def adapt_freq_graph(): x = series(synth_rainfall(2, 2, wet_freq=0.25, size=n), "pr") # sim y = series(synth_rainfall(2, 2, wet_freq=0.5, size=n), "pr") # ref - xp = adapt_freq(x, y, thresh=0).sim_ad + xp = adapt_freq(x, y, thresh=0).sim_ad # noqa; # pylint: disable=no-member fig, (ax1, ax2) = plt.subplots(2, 1) sx = x.sortby(x) From f19b4343a1cba4a8d1eb1e987d0bf6c225107db2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89ric=20Dupuis?= Date: Fri, 19 Jan 2024 11:13:28 -0500 Subject: [PATCH 12/37] fix freq='D' behaviour and add tests --- tests/test_indices.py | 24 ++++++++++++++++++++++-- xclim/indices/_agro.py | 2 +- xclim/indices/stats.py | 29 ++++++++++++++++------------- 3 files changed, 39 insertions(+), 16 deletions(-) diff --git a/tests/test_indices.py b/tests/test_indices.py index 5a7e0eb5d..8dae0651a 100644 --- a/tests/test_indices.py +++ b/tests/test_indices.py @@ -21,7 +21,7 @@ import xarray as xr from xclim import indices as xci -from xclim.core.calendar import date_range, percentile_doy +from xclim.core.calendar import convert_calendar, date_range, percentile_doy from xclim.core.options import set_options from xclim.core.units import ValidationError, convert_units_to, units @@ -462,7 +462,7 @@ def test_effective_growing_degree_days( # tolerance possible. # Repeated tests with lower tolerance means we want a more precise comparison, so we compare # the current version of XClim with the version where the test was implemented - # TODO : Add tests for SPI_daily. + @pytest.mark.parametrize( "freq, window, dist, method, values, diff_tol", [ @@ -518,12 +518,32 @@ def test_effective_growing_degree_days( [0.683273, 1.51189, 1.61597, 1.03875, 0.72531], 2e-2, ), + ( + "D", + 1, + "gamma", + "APP", + [-0.18618353, 1.44582971, 0.95985043, 0.15779587, -0.37801587], + 2e-2, + ), + ( + "D", + 12, + "gamma", + "APP", + [-0.24417774, -0.11404418, 0.64997039, 1.07670517, 0.6462852], + 2e-2, + ), ], ) def test_standardized_precipitation_index( self, open_dataset, freq, window, dist, method, values, diff_tol ): ds = open_dataset("sdba/CanESM2_1950-2100.nc").isel(location=1) + if freq == "D": + ds = convert_calendar( + ds, "366_day", missing=np.NaN + ) # to compare with ``climate_indices`` pr = ds.pr.sel(time=slice("1998", "2000")) pr_cal = ds.pr.sel(time=slice("1950", "1980")) params = xci.stats.standardized_index_fit_params( diff --git a/xclim/indices/_agro.py b/xclim/indices/_agro.py index a3ac3eebb..7183a75ac 100644 --- a/xclim/indices/_agro.py +++ b/xclim/indices/_agro.py @@ -1238,7 +1238,7 @@ def standardized_precipitation_index( spi = standardized_index(pr, params) spi.attrs = params.attrs - spi.attrs["freq"] = freq or xarray.infer_freq(spi.time) + spi.attrs["freq"] = (freq or xarray.infer_freq(spi.time)) or "undefined" spi.attrs["window"] = window spi.attrs["units"] = "" return spi diff --git a/xclim/indices/stats.py b/xclim/indices/stats.py index 7a91e89bc..c8a9396b4 100644 --- a/xclim/indices/stats.py +++ b/xclim/indices/stats.py @@ -8,7 +8,7 @@ import numpy as np import xarray as xr -from xclim.core.calendar import resample_doy, select_time +from xclim.core.calendar import compare_offsets, resample_doy, select_time from xclim.core.formatting import prefix_attrs, unprefix_attrs, update_history from xclim.core.units import convert_units_to from xclim.core.utils import 
Quantified, uses_dask @@ -625,13 +625,21 @@ def preprocess_standardized_index( # We could allow a more general frequency in this function and move # the constraint {"D", "MS"} in specific indices such as SPI / SPEI. final_freq = freq or xr.infer_freq(da.time) - try: - group = {"D": "time.dayofyear", "MS": "time.month"}[final_freq] - except KeyError(): - raise ValueError( - f"The input (following resampling if applicable) has a frequency `{final_freq}`" - "which is not supported for standardized indices." + if final_freq: + if final_freq == "D": + group = "time.dayofyear" + elif compare_offsets(final_freq, "==", "MS"): + group = "time.month" + else: + raise ValueError( + f"The input (following resampling if applicable) has a frequency `{final_freq}` " + "which is not supported for standardized indices." + ) + else: + warnings.warn( + "No resampling frequency was specified and a frequency for the dataset could not be identified with ``xr.infer_freq``" ) + group = "time.dayofyear" if freq is not None: da = da.resample(time=freq).mean(keep_attrs=True) @@ -732,10 +740,7 @@ def standardized_index_fit_params( "units": "", "offset": offset or "", } - if indexer != {}: - method, args = indexer.popitem() - else: - method, args = "", [] + method, args = ("", []) if indexer == {} else indexer.popitem() params.attrs["time_indexer"] = (method, *args) return params @@ -762,8 +767,6 @@ def standardized_index(da: xr.DataArray, params: xr.DataArray): def reindex_time(da, da_ref): if group == "time.dayofyear": - da = da.rename(day="time").reindex(time=da_ref.time.dt.dayofyear) - da["time"] = da_ref.time da = resample_doy(da, da_ref) elif group == "time.month": da = da.rename(month="time").reindex(time=da_ref.time.dt.month) From e593786ad427ebac310c70d1737bc211e5b12fa0 Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Fri, 19 Jan 2024 11:19:37 -0500 Subject: [PATCH 13/37] disable pylint checks that are incompatible with mapblocks --- xclim/sdba/adjustment.py | 1 + xclim/sdba/processing.py | 1 + xclim/sdba/properties.py | 1 + xclim/testing/diagnostics.py | 2 +- 4 files changed, 4 insertions(+), 1 deletion(-) diff --git a/xclim/sdba/adjustment.py b/xclim/sdba/adjustment.py index c56e19c64..17d0d2068 100644 --- a/xclim/sdba/adjustment.py +++ b/xclim/sdba/adjustment.py @@ -1,3 +1,4 @@ +# pylint: disable=missing-kwoa """ Adjustment Methods ================== diff --git a/xclim/sdba/processing.py b/xclim/sdba/processing.py index 7866956c2..742877ed8 100644 --- a/xclim/sdba/processing.py +++ b/xclim/sdba/processing.py @@ -1,3 +1,4 @@ +# pylint: disable=missing-kwoa """ Pre- and Post-Processing Submodule ================================== diff --git a/xclim/sdba/properties.py b/xclim/sdba/properties.py index b5fc41bc0..694ae9d6c 100644 --- a/xclim/sdba/properties.py +++ b/xclim/sdba/properties.py @@ -1,3 +1,4 @@ +# pylint: disable=missing-kwoa """ Properties Submodule ==================== diff --git a/xclim/testing/diagnostics.py b/xclim/testing/diagnostics.py index 5338ea137..b93ea9c29 100644 --- a/xclim/testing/diagnostics.py +++ b/xclim/testing/diagnostics.py @@ -128,7 +128,7 @@ def adapt_freq_graph(): x = series(synth_rainfall(2, 2, wet_freq=0.25, size=n), "pr") # sim y = series(synth_rainfall(2, 2, wet_freq=0.5, size=n), "pr") # ref - xp = adapt_freq(x, y, thresh=0).sim_ad # noqa; # pylint: disable=no-member + xp = adapt_freq(x, y, thresh=0).sim_ad # pylint: disable=no-member,missing-kwoa fig, (ax1, ax2) = plt.subplots(2, 1) sx = x.sortby(x) From 
0974c52afc7f82ae2a5fb2f5a3d608b5d36e0435 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89ric=20Dupuis?= Date: Fri, 19 Jan 2024 11:20:55 -0500 Subject: [PATCH 14/37] Update CHANGES --- CHANGES.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGES.rst b/CHANGES.rst index 416ae2c8f..98231742c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -28,6 +28,7 @@ Bug fixes ^^^^^^^^^ * Fixed passing ``missing=0`` to ``xclim.core.calendar.convert_calendar``. (:issue:`1562`, :pull:`1563`). * Fix wrong `window` attributes in ``xclim.indices.standardized_precipitation_index``, ``xclim.indices.standardized_precipitation_evapotranspiration_index``. (:issue:`1552` :pull:`1554`). +* Fix the daily case `freq='D'` of ``xclim.stats.preprocess_standardized_index`` (:issue:`1602` :pull:`1607`). * Several spelling mistakes have been corrected within the documentation and codebase. (:pull:`1576`). Internal changes From a8e8bfafcdd3e0a7d44564c5cd2ec2188bff424e Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Fri, 19 Jan 2024 11:45:56 -0500 Subject: [PATCH 15/37] update dependabot.yml configuration, add job concerning workflow changes from forks --- .github/dependabot.yml | 9 ++++ .github/workflows/label_on_approval.yml | 5 +- .github/workflows/testdata_version.yml | 63 +++++++++++++++++++++++-- 3 files changed, 73 insertions(+), 4 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 253bcb76b..826840927 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -4,3 +4,12 @@ updates: directory: / schedule: interval: daily + time: '12:00' + open-pull-requests-limit: 5 + + - package-ecosystem: pip + directory: / + schedule: + interval: daily + time: '12:00' + open-pull-requests-limit: 5 diff --git a/.github/workflows/label_on_approval.yml b/.github/workflows/label_on_approval.yml index af9c8c842..01670046b 100644 --- a/.github/workflows/label_on_approval.yml +++ b/.github/workflows/label_on_approval.yml @@ -58,7 +58,10 @@ jobs: - name: Harden Runner uses: step-security/harden-runner@eb238b55efaa70779f274895e782ed17c84f2895 # v2.6.1 with: - egress-policy: audit + disable-sudo: true + egress-policy: block + allowed-endpoints: > + api.github.com:443 - name: Find comment uses: peter-evans/find-comment@a54c31d7fa095754bfef525c0c8e5e5674c4b4b1 # v2.4.0 id: fc diff --git a/.github/workflows/testdata_version.yml b/.github/workflows/testdata_version.yml index abc30abfc..1cc1f6e77 100644 --- a/.github/workflows/testdata_version.yml +++ b/.github/workflows/testdata_version.yml @@ -1,22 +1,79 @@ -name: Verify Testing Data +name: Verify Testing Data and Workflows on: - # Needed for write permissions of peter-evans/create-or-update-comment - pull_request_target: # It is very important to not perform code checkout/build/testing with pull_request_target + pull_request: types: - opened - reopened - synchronize paths: - .github/workflows/main.yml + pull_request_target: # It is very important to not perform code checkout/build/testing with pull_request_target + paths: + - .github/workflows/*.yml permissions: # added using https://github.com/step-security/secure-repo contents: read jobs: + comment-concerning-workflow-changes: + name: Comment Concerning Workflow Changes + runs-on: ubuntu-latest + if: | + (github.event_name == 'pull_request_target') && + (github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name) + permissions: + pull-requests: write + steps: + - name: Harden Runner + uses: 
step-security/harden-runner@eb238b55efaa70779f274895e782ed17c84f2895 # v2.6.1 + with: + disable-sudo: true + egress-policy: block + allowed-endpoints: > + api.github.com:443 + - name: Find comment + uses: peter-evans/find-comment@a54c31d7fa095754bfef525c0c8e5e5674c4b4b1 # v2.4.0 + id: fc + with: + issue-number: ${{ github.event.pull_request.number }} + comment-author: 'github-actions[bot]' + body-includes: | + This Pull Request modifies GitHub workflows and is coming from a fork. + - name: Create comment + if: | + (steps.fc.outputs.comment-id == '') && + (!contains(github.event.pull_request.labels.*.name, 'approved')) && + (github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name) + uses: peter-evans/create-or-update-comment@23ff15729ef2fc348714a3bb66d2f655ca9066f2 # v3.1.0 + with: + comment-id: ${{ steps.fc.outputs.comment-id }} + issue-number: ${{ github.event.pull_request.number }} + body: | + > **Warning** + > This Pull Request modifies GitHub Workflows and is coming from a fork. + **It is very important for the reviewer to ensure that the workflow changes are appropriate.** + edit-mode: replace + - name: Update comment + if: | + contains(github.event.pull_request.labels.*.name, 'approved') + uses: peter-evans/create-or-update-comment@23ff15729ef2fc348714a3bb66d2f655ca9066f2 # v3.1.0 + with: + comment-id: ${{ steps.fc.outputs.comment-id }} + issue-number: ${{ github.event.pull_request.number }} + body: | + > **Note** + > This Pull Request modifies GitHub Workflows and is coming from a fork. + Changes have been approved by a maintainer. + reactions: | + hooray + edit-mode: append + use-latest-tag: name: Check Latest xclim-testdata Tag runs-on: ubuntu-latest + if: | + (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) permissions: contents: read pull-requests: write From 1d5e7e955e7ec314a0ab4e9228f3b54b02e5833e Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Fri, 19 Jan 2024 12:34:39 -0500 Subject: [PATCH 16/37] security-related changes --- ...tdata_version.yml => testdata-version.yml} | 56 --------------- .github/workflows/workflow-warning.yml | 69 +++++++++++++++++++ 2 files changed, 69 insertions(+), 56 deletions(-) rename .github/workflows/{testdata_version.yml => testdata-version.yml} (61%) create mode 100644 .github/workflows/workflow-warning.yml diff --git a/.github/workflows/testdata_version.yml b/.github/workflows/testdata-version.yml similarity index 61% rename from .github/workflows/testdata_version.yml rename to .github/workflows/testdata-version.yml index 1cc1f6e77..9f2ccff36 100644 --- a/.github/workflows/testdata_version.yml +++ b/.github/workflows/testdata-version.yml @@ -8,67 +8,11 @@ on: - synchronize paths: - .github/workflows/main.yml - pull_request_target: # It is very important to not perform code checkout/build/testing with pull_request_target - paths: - - .github/workflows/*.yml permissions: # added using https://github.com/step-security/secure-repo contents: read jobs: - comment-concerning-workflow-changes: - name: Comment Concerning Workflow Changes - runs-on: ubuntu-latest - if: | - (github.event_name == 'pull_request_target') && - (github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name) - permissions: - pull-requests: write - steps: - - name: Harden Runner - uses: step-security/harden-runner@eb238b55efaa70779f274895e782ed17c84f2895 # v2.6.1 - with: - disable-sudo: true - egress-policy: block 
- allowed-endpoints: > - api.github.com:443 - - name: Find comment - uses: peter-evans/find-comment@a54c31d7fa095754bfef525c0c8e5e5674c4b4b1 # v2.4.0 - id: fc - with: - issue-number: ${{ github.event.pull_request.number }} - comment-author: 'github-actions[bot]' - body-includes: | - This Pull Request modifies GitHub workflows and is coming from a fork. - - name: Create comment - if: | - (steps.fc.outputs.comment-id == '') && - (!contains(github.event.pull_request.labels.*.name, 'approved')) && - (github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name) - uses: peter-evans/create-or-update-comment@23ff15729ef2fc348714a3bb66d2f655ca9066f2 # v3.1.0 - with: - comment-id: ${{ steps.fc.outputs.comment-id }} - issue-number: ${{ github.event.pull_request.number }} - body: | - > **Warning** - > This Pull Request modifies GitHub Workflows and is coming from a fork. - **It is very important for the reviewer to ensure that the workflow changes are appropriate.** - edit-mode: replace - - name: Update comment - if: | - contains(github.event.pull_request.labels.*.name, 'approved') - uses: peter-evans/create-or-update-comment@23ff15729ef2fc348714a3bb66d2f655ca9066f2 # v3.1.0 - with: - comment-id: ${{ steps.fc.outputs.comment-id }} - issue-number: ${{ github.event.pull_request.number }} - body: | - > **Note** - > This Pull Request modifies GitHub Workflows and is coming from a fork. - Changes have been approved by a maintainer. - reactions: | - hooray - edit-mode: append - use-latest-tag: name: Check Latest xclim-testdata Tag runs-on: ubuntu-latest diff --git a/.github/workflows/workflow-warning.yml b/.github/workflows/workflow-warning.yml new file mode 100644 index 000000000..854e382c6 --- /dev/null +++ b/.github/workflows/workflow-warning.yml @@ -0,0 +1,69 @@ +name: Workflow Changes Warnings + +on: + pull_request: + types: + - opened + - reopened + - synchronize + paths: + - .github/workflows/*.yml + +permissions: + contents: read + +jobs: + comment-concerning-workflow-changes: + name: Comment Concerning Workflow Changes + runs-on: ubuntu-latest + # Note: There is always a potential security risk from pull_request_target. + # Do not add actions in here which need a checkout of the repo, and do not use any caching in here. + if: | + (github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name) + permissions: + contents: read + pull-requests: write + steps: + - name: Harden Runner + uses: step-security/harden-runner@eb238b55efaa70779f274895e782ed17c84f2895 # v2.6.1 + with: + disable-sudo: true + egress-policy: block + allowed-endpoints: > + api.github.com:443 + - name: Find comment + uses: peter-evans/find-comment@a54c31d7fa095754bfef525c0c8e5e5674c4b4b1 # v2.4.0 + id: fc + with: + issue-number: ${{ github.event.pull_request.number }} + comment-author: 'github-actions[bot]' + body-includes: | + This Pull Request modifies GitHub workflows and is coming from a fork. + - name: Create comment + if: | + (steps.fc.outputs.comment-id == '') && + (!contains(github.event.pull_request.labels.*.name, 'approved')) && + (github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name) + uses: peter-evans/create-or-update-comment@23ff15729ef2fc348714a3bb66d2f655ca9066f2 # v3.1.0 + with: + comment-id: ${{ steps.fc.outputs.comment-id }} + issue-number: ${{ github.event.pull_request.number }} + body: | + > **Warning** + > This Pull Request modifies GitHub Workflows and is coming from a fork. 
+ **It is very important for the reviewer to ensure that the workflow changes are appropriate.** + edit-mode: replace + - name: Update comment + if: | + contains(github.event.pull_request.labels.*.name, 'approved') + uses: peter-evans/create-or-update-comment@23ff15729ef2fc348714a3bb66d2f655ca9066f2 # v3.1.0 + with: + comment-id: ${{ steps.fc.outputs.comment-id }} + issue-number: ${{ github.event.pull_request.number }} + body: | + > **Note** + > This Pull Request modifies GitHub Workflows and is coming from a fork. + Changes have been approved by a maintainer. + reactions: | + hooray + edit-mode: append From 10291a89811c6d6c732204d180b79fa5365add11 Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Fri, 19 Jan 2024 12:34:55 -0500 Subject: [PATCH 17/37] rename some workflows for consistency --- .../workflows/{first_pull_request.yml => first-pull-request.yml} | 0 .../workflows/{label_on_approval.yml => label-on-approval.yml} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename .github/workflows/{first_pull_request.yml => first-pull-request.yml} (100%) rename .github/workflows/{label_on_approval.yml => label-on-approval.yml} (100%) diff --git a/.github/workflows/first_pull_request.yml b/.github/workflows/first-pull-request.yml similarity index 100% rename from .github/workflows/first_pull_request.yml rename to .github/workflows/first-pull-request.yml diff --git a/.github/workflows/label_on_approval.yml b/.github/workflows/label-on-approval.yml similarity index 100% rename from .github/workflows/label_on_approval.yml rename to .github/workflows/label-on-approval.yml From 80c17422243664df22d80f94862abaa7473158ca Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Fri, 19 Jan 2024 12:37:41 -0500 Subject: [PATCH 18/37] run on pull_request_target --- .github/workflows/workflow-warning.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/workflow-warning.yml b/.github/workflows/workflow-warning.yml index 854e382c6..f4bfb003c 100644 --- a/.github/workflows/workflow-warning.yml +++ b/.github/workflows/workflow-warning.yml @@ -1,7 +1,7 @@ name: Workflow Changes Warnings on: - pull_request: + pull_request_target: types: - opened - reopened From 6a80c0004a5155c936150f4f0047aedccd8a550f Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Fri, 19 Jan 2024 12:53:31 -0500 Subject: [PATCH 19/37] deactivate actions-versions-updater.yml (handled by dependabot now), finishing touches --- .../actions-versions-updater.yml | 0 .github/workflows/add-to-project.yml | 2 +- .github/workflows/bump-version.yml | 2 +- .github/workflows/cache-cleaner.yml | 2 +- .github/workflows/first-pull-request.yml | 2 +- .github/workflows/label-on-approval.yml | 2 +- .github/workflows/label.yml | 8 +++++++- .github/workflows/publish-mastodon.yml | 2 +- .github/workflows/publish-pypi.yml | 2 +- .github/workflows/scorecard.yml | 9 ++------- .github/workflows/tag-testpypi.yml | 2 +- .github/workflows/testdata-version.yml | 4 ++-- .github/workflows/upstream.yml | 2 +- .github/workflows/workflow-warning.yml | 8 ++++---- 14 files changed, 24 insertions(+), 23 deletions(-) rename .github/{workflows => deactivated}/actions-versions-updater.yml (100%) diff --git a/.github/workflows/actions-versions-updater.yml b/.github/deactivated/actions-versions-updater.yml similarity index 100% rename from .github/workflows/actions-versions-updater.yml rename to 
.github/deactivated/actions-versions-updater.yml diff --git a/.github/workflows/add-to-project.yml b/.github/workflows/add-to-project.yml index 706de14a4..62c0f51d6 100644 --- a/.github/workflows/add-to-project.yml +++ b/.github/workflows/add-to-project.yml @@ -5,7 +5,7 @@ on: types: - opened -permissions: # added using https://github.com/step-security/secure-repo +permissions: contents: read jobs: diff --git a/.github/workflows/bump-version.yml b/.github/workflows/bump-version.yml index fce947d77..7bddd3feb 100644 --- a/.github/workflows/bump-version.yml +++ b/.github/workflows/bump-version.yml @@ -23,7 +23,7 @@ on: - tox.ini - xclim/__init__.py -permissions: # added using https://github.com/step-security/secure-repo +permissions: contents: read jobs: diff --git a/.github/workflows/cache-cleaner.yml b/.github/workflows/cache-cleaner.yml index 9dbe50be2..0ce283114 100644 --- a/.github/workflows/cache-cleaner.yml +++ b/.github/workflows/cache-cleaner.yml @@ -5,7 +5,7 @@ on: types: - closed -permissions: # added using https://github.com/step-security/secure-repo +permissions: contents: read jobs: diff --git a/.github/workflows/first-pull-request.yml b/.github/workflows/first-pull-request.yml index 610a78baa..a958bcd33 100644 --- a/.github/workflows/first-pull-request.yml +++ b/.github/workflows/first-pull-request.yml @@ -5,7 +5,7 @@ on: types: - opened -permissions: # added using https://github.com/step-security/secure-repo +permissions: contents: read jobs: diff --git a/.github/workflows/label-on-approval.yml b/.github/workflows/label-on-approval.yml index 01670046b..e6bcfe0db 100644 --- a/.github/workflows/label-on-approval.yml +++ b/.github/workflows/label-on-approval.yml @@ -68,7 +68,7 @@ jobs: with: issue-number: ${{ github.event.pull_request.number }} comment-author: 'github-actions[bot]' - body-includes: This Pull Request is coming from a fork and must be manually tagged `approved` in order to perform additional testing + body-includes: This Pull Request is coming from a fork and must be manually tagged `approved` in order to perform additional testing. - name: Create comment if: | (steps.fc.outputs.comment-id == '') && diff --git a/.github/workflows/label.yml b/.github/workflows/label.yml index 964652871..c1753bc68 100644 --- a/.github/workflows/label.yml +++ b/.github/workflows/label.yml @@ -6,10 +6,15 @@ # https://github.com/actions/labeler/blob/master/README.md name: Labeler -on: [pull_request_target] +on: # Note: potential security risk from this action using pull_request_target. # Do not add actions in here which need a checkout of the repo, and do not use any caching in here. 
# See: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target + pull_request_target: + types: + - opened + - reopened + - synchronize permissions: contents: read @@ -30,6 +35,7 @@ jobs: egress-policy: block allowed-endpoints: > api.github.com:443 + - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0 with: repo-token: "${{ secrets.GITHUB_TOKEN }}" diff --git a/.github/workflows/publish-mastodon.yml b/.github/workflows/publish-mastodon.yml index 1da0f4933..37e14233b 100644 --- a/.github/workflows/publish-mastodon.yml +++ b/.github/workflows/publish-mastodon.yml @@ -15,7 +15,7 @@ on: default: true type: boolean -permissions: # added using https://github.com/step-security/secure-repo +permissions: contents: read jobs: diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index 16f0e6bc2..d34fe5e37 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -5,7 +5,7 @@ on: types: - published -permissions: # added using https://github.com/step-security/secure-repo +permissions: contents: read jobs: diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index c50b2ca22..da3db3bf8 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -12,7 +12,8 @@ on: schedule: - cron: '41 8 * * 4' push: - branches: [ "master" ] + branches: + - master # Declare default permissions as read only. permissions: read-all @@ -26,9 +27,6 @@ jobs: security-events: write # Needed to publish results and get a badge (see publish_results below). id-token: write - # Uncomment the permissions below if installing in a private repository. - # contents: read - # actions: read steps: - name: Harden Runner @@ -56,9 +54,6 @@ jobs: # - Publish results to OpenSSF REST API for easy access by consumers # - Allows the repository to include the Scorecard badge. # - See https://github.com/ossf/scorecard-action#publishing-results. - # For private repositories: - # - `publish_results` will always be set to `false`, regardless - # of the value entered here. publish_results: true # Upload the results as artifacts (optional). 
Commenting out will disable uploads of run results in SARIF diff --git a/.github/workflows/tag-testpypi.yml b/.github/workflows/tag-testpypi.yml index 249941134..734706ebb 100644 --- a/.github/workflows/tag-testpypi.yml +++ b/.github/workflows/tag-testpypi.yml @@ -5,7 +5,7 @@ on: tags: - 'v*' -permissions: # added using https://github.com/step-security/secure-repo +permissions: contents: read jobs: diff --git a/.github/workflows/testdata-version.yml b/.github/workflows/testdata-version.yml index 9f2ccff36..b8a02da4f 100644 --- a/.github/workflows/testdata-version.yml +++ b/.github/workflows/testdata-version.yml @@ -1,4 +1,4 @@ -name: Verify Testing Data and Workflows +name: Verify Testing Data on: pull_request: @@ -9,7 +9,7 @@ on: paths: - .github/workflows/main.yml -permissions: # added using https://github.com/step-security/secure-repo +permissions: contents: read jobs: diff --git a/.github/workflows/upstream.yml b/.github/workflows/upstream.yml index 030914543..5e328380f 100644 --- a/.github/workflows/upstream.yml +++ b/.github/workflows/upstream.yml @@ -16,7 +16,7 @@ concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true -permissions: # added using https://github.com/step-security/secure-repo +permissions: contents: read jobs: diff --git a/.github/workflows/workflow-warning.yml b/.github/workflows/workflow-warning.yml index f4bfb003c..433881bba 100644 --- a/.github/workflows/workflow-warning.yml +++ b/.github/workflows/workflow-warning.yml @@ -1,6 +1,9 @@ name: Workflow Changes Warnings on: + # Note: potential security risk from this action using pull_request_target. + # Do not add actions in here which need a checkout of the repo, and do not use any caching in here. + # See: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target pull_request_target: types: - opened @@ -16,8 +19,6 @@ jobs: comment-concerning-workflow-changes: name: Comment Concerning Workflow Changes runs-on: ubuntu-latest - # Note: There is always a potential security risk from pull_request_target. - # Do not add actions in here which need a checkout of the repo, and do not use any caching in here. if: | (github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name) permissions: @@ -62,8 +63,7 @@ jobs: issue-number: ${{ github.event.pull_request.number }} body: | > **Note** - > This Pull Request modifies GitHub Workflows and is coming from a fork. - Changes have been approved by a maintainer. + > Changes have been approved by a maintainer. 
reactions: | hooray edit-mode: append From a63f9a239e3eb4e15374066a825fc5e6e0ed9856 Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Fri, 19 Jan 2024 13:39:15 -0500 Subject: [PATCH 20/37] small adjustments --- xclim/testing/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xclim/testing/utils.py b/xclim/testing/utils.py index 561b8a863..c4bb93211 100644 --- a/xclim/testing/utils.py +++ b/xclim/testing/utils.py @@ -258,7 +258,7 @@ def _get( raise FileNotFoundError(msg) from e try: url = "/".join((github_url, "raw", branch, md5_name.as_posix())) - msg = f"Fetching remote file md5: {format(md5_name.as_posix())}" + msg = f"Fetching remote file md5: {md5_name.as_posix()}" logger.info(msg) urlretrieve(url, md5_file) # nosec except (HTTPError, URLError) as e: From 5e1d024919086830dfe53871c0afe90e9706bbf1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89ric=20Dupuis?= Date: Fri, 19 Jan 2024 14:08:01 -0500 Subject: [PATCH 21/37] mark SI tests as slow, more daily tests --- tests/test_indices.py | 55 ++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 52 insertions(+), 3 deletions(-) diff --git a/tests/test_indices.py b/tests/test_indices.py index 8dae0651a..0183708f9 100644 --- a/tests/test_indices.py +++ b/tests/test_indices.py @@ -456,13 +456,13 @@ def test_effective_growing_degree_days( np.testing.assert_array_equal(out, np.array([np.NaN, expected])) - # gamma reference results: Obtained with `monocongo/climate_indices` library - # fisk reference results: Obtained with R package `SPEI` + # gamma/APP reference results: Obtained with `monocongo/climate_indices` library + # MS/fisk/ML reference results: Obtained with R package `SPEI` # Using the method `APP` in XClim matches the method from monocongo, hence the very low # tolerance possible. 
# Repeated tests with lower tolerance means we want a more precise comparison, so we compare # the current version of XClim with the version where the test was implemented - + @pytest.mark.slow @pytest.mark.parametrize( "freq, window, dist, method, values, diff_tol", [ @@ -534,6 +534,54 @@ def test_effective_growing_degree_days( [-0.24417774, -0.11404418, 0.64997039, 1.07670517, 0.6462852], 2e-2, ), + ( + "D", + 1, + "gamma", + "ML", + [-0.03577971, 1.30589409, 0.8863447, 0.23906544, -0.05185997], + 2e-2, + ), + ( + "D", + 12, + "gamma", + "ML", + [-0.15846245, -0.04924534, 0.66299367, 1.09938471, 0.66095752], + 2e-2, + ), + ( + "D", + 1, + "fisk", + "APP", + [-1.26216389, 1.03096183, 0.62985354, -0.50335153, -1.32788296], + 2e-2, + ), + ( + "D", + 12, + "fisk", + "APP", + [-0.57109258, -0.40657737, 0.55163493, 0.97381067, 0.55580649], + 2e-2, + ), + ( + "D", + 1, + "fisk", + "ML", + [-0.05562691, 1.30809152, 0.6954986, 0.33018744, -0.50258979], + 2e-2, + ), + ( + "D", + 12, + "fisk", + "ML", + [-0.14151269, -0.01914608, 0.7080277, 1.01510279, 0.6954002], + 2e-2, + ), ], ) def test_standardized_precipitation_index( @@ -565,6 +613,7 @@ def test_standardized_precipitation_index( np.testing.assert_allclose(spi.values, values, rtol=0, atol=diff_tol) # See SPI version + @pytest.mark.slow @pytest.mark.parametrize( "freq, window, dist, method, values, diff_tol", [ From 5643126bc9490657feefd3cdddf270e149efefa6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=89ric=20Dupuis?= Date: Fri, 19 Jan 2024 14:16:26 -0500 Subject: [PATCH 22/37] tests with freq=None --- tests/test_indices.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/tests/test_indices.py b/tests/test_indices.py index 0183708f9..51fea3eac 100644 --- a/tests/test_indices.py +++ b/tests/test_indices.py @@ -582,6 +582,22 @@ def test_effective_growing_degree_days( [-0.14151269, -0.01914608, 0.7080277, 1.01510279, 0.6954002], 2e-2, ), + ( + None, + 1, + "gamma", + "APP", + [-0.18618353, 1.44582971, 0.95985043, 0.15779587, -0.37801587], + 2e-2, + ), + ( + None, + 12, + "gamma", + "APP", + [-0.24417774, -0.11404418, 0.64997039, 1.07670517, 0.6462852], + 2e-2, + ), ], ) def test_standardized_precipitation_index( From ca2ca17da4589c1f320a99e54d2c7e8e675e6f47 Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Fri, 19 Jan 2024 15:32:09 -0500 Subject: [PATCH 23/37] remove some pylint warnings, mark map_groups as modifying call signatures --- .pre-commit-config.yaml | 10 +++++----- .pylintrc.toml | 3 ++- pyproject.toml | 2 +- tests/test_indicators.py | 19 +++++-------------- tests/test_locales.py | 6 ++---- tests/test_sdba/test_base.py | 5 +++-- xclim/cli.py | 4 ++-- xclim/core/options.py | 2 +- xclim/indices/fire/_ffdi.py | 15 ++++----------- xclim/sdba/nbutils.py | 10 +++------- xclim/testing/diagnostics.py | 3 ++- 11 files changed, 30 insertions(+), 49 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 12d9c6d22..bbc5cde4f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,11 +27,6 @@ repos: hooks: - id: toml-sort-fix exclude: '.pylintrc.toml' - - repo: https://github.com/pylint-dev/pylint - rev: v3.0.3 - hooks: - - id: pylint - args: [ '--rcfile=.pylintrc.toml', '--errors-only', '--jobs=0', '--disable=import-error' ] - repo: https://github.com/adrienverge/yamllint.git rev: v1.33.0 hooks: @@ -49,6 +44,11 @@ repos: rev: v0.1.13 hooks: - id: ruff + - repo: https://github.com/pylint-dev/pylint + rev: v3.0.3 + hooks: + - id: pylint + 
args: [ '--rcfile=.pylintrc.toml', '--errors-only', '--jobs=0', '--disable=import-error' ] - repo: https://github.com/pycqa/flake8 rev: 7.0.0 hooks: diff --git a/.pylintrc.toml b/.pylintrc.toml index 0c64979e2..98fceee40 100644 --- a/.pylintrc.toml +++ b/.pylintrc.toml @@ -374,6 +374,7 @@ disable = [ "invalid-unary-operand-type", "locally-disabled", "missing-module-docstring", + "no-member", "protected-access", "raw-checker-failed", "redefined-outer-name", @@ -521,7 +522,7 @@ missing-member-max-choices = 1 mixin-class-rgx = ".*[Mm]ixin" # List of decorators that change the signature of a decorated function. -# signature-mutators = +signature-mutators = ["xclim.sdba.base.map_groups"] [tool.pylint.variables] # List of additional names supposed to be defined in builtins. Remember that you diff --git a/pyproject.toml b/pyproject.toml index 631017ff5..7b822c8ec 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -292,7 +292,7 @@ lines-after-imports = 1 no-lines-before = ["future", "standard-library"] [tool.ruff.mccabe] -max-complexity = 15 +max-complexity = 20 [tool.ruff.per-file-ignores] "docs/*.py" = ["D100", "D101", "D102", "D103"] diff --git a/tests/test_indicators.py b/tests/test_indicators.py index 3f572d40f..6ff98f893 100644 --- a/tests/test_indicators.py +++ b/tests/test_indicators.py @@ -159,11 +159,10 @@ def test_attrs(tas_series): assert f"xclim version: {__version__}" in txm.attrs["history"] assert txm.name == "tmin5 degC" assert uniIndTemp.standard_name == "{freq} mean temperature" - # fmt: off assert ( - uniIndTemp.cf_attrs[0]["another_attr"] == "With a value." # pylint: disable=unsubscriptable-object + uniIndTemp.cf_attrs[0]["another_attr"] # pylint: disable=unsubscriptable-object + == "With a value." ) - # fmt: on thresh = xr.DataArray( [1], @@ -247,14 +246,8 @@ def test_module(): """Translations are keyed according to the module where the indicators are defined.""" assert atmos.tg_mean.__module__.split(".")[2] == "atmos" # Virtual module also are stored under xclim.indicators - # fmt: off - assert ( - xclim.indicators.cf.fg.__module__ == "xclim.indicators.cf" # pylint: disable=no-member - ) - assert ( - xclim.indicators.icclim.GD4.__module__ == "xclim.indicators.icclim" # pylint: disable=no-member - ) - # fmt: on + assert xclim.indicators.cf.fg.__module__ == "xclim.indicators.cf" + assert xclim.indicators.icclim.GD4.__module__ == "xclim.indicators.icclim" def test_temp_unit_conversion(tas_series): @@ -267,9 +260,7 @@ def test_temp_unit_conversion(tas_series): with pytest.raises(AssertionError): np.testing.assert_array_almost_equal(txk, txc + 273.15) - uniIndTemp.cf_attrs[0][ # noqa; # pylint: disable=unsubscriptable-object - "units" - ] = "degC" + uniIndTemp.cf_attrs[0]["units"] = "degC" # pylint: disable=unsubscriptable-object txc = uniIndTemp(a, freq="YS") np.testing.assert_array_almost_equal(txk, txc + 273.15) diff --git a/tests/test_locales.py b/tests/test_locales.py index 441a0252c..6f63977a3 100644 --- a/tests/test_locales.py +++ b/tests/test_locales.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +# pylint: disable=unsubscriptable-object # Tests for `xclim.locales` from __future__ import annotations @@ -159,13 +159,11 @@ def test_xclim_translations(locale, official_indicators): @pytest.mark.parametrize( - # fmt: off "initeng,expected", [ (False, ""), - (True, atmos.tg_mean.cf_attrs[0]["long_name"]), # pylint: disable=unsubscriptable-object + (True, atmos.tg_mean.cf_attrs[0]["long_name"]), ], - # fmt: on ) def test_local_dict_generation(initeng, expected): dic = 
generate_local_dict("tlh", init_english=initeng) diff --git a/tests/test_sdba/test_base.py b/tests/test_sdba/test_base.py index b597842ca..028c06d8e 100644 --- a/tests/test_sdba/test_base.py +++ b/tests/test_sdba/test_base.py @@ -1,3 +1,4 @@ +# pylint: disable=missing-kwoa from __future__ import annotations import jsonpickle @@ -193,7 +194,7 @@ def func(ds, *, dim): d = ds.tas.mean(dim) return d.rename("data").to_dataset() - data = func( # pylint: disable=missing-kwoa + data = func( xr.Dataset(dict(tas=tas)), group="time.dayofyear", window=5, @@ -212,7 +213,7 @@ def func(ds, *, dim): return data.rename("data").to_dataset() # with a scalar aux coord - data = func( # pylint: disable=missing-kwoa + data = func( xr.Dataset(dict(tas=tas.isel(lat=0, drop=True)), coords=dict(leftover=1)), group="time.dayofyear", ).load() diff --git a/xclim/cli.py b/xclim/cli.py index d14b8eca3..601292c6b 100644 --- a/xclim/cli.py +++ b/xclim/cli.py @@ -464,9 +464,9 @@ def cli(ctx, **kwargs): ctx.obj = kwargs -@cli.result_callback() # noqa +@cli.result_callback() @click.pass_context -def write_file(ctx, *args, **kwargs): # noqa: W0613 +def write_file(ctx, *args, **kwargs): """Write the output dataset to file.""" if ctx.obj["output"] is not None: if ctx.obj["verbose"]: diff --git a/xclim/core/options.py b/xclim/core/options.py index 45eeedacf..1eba36a3f 100644 --- a/xclim/core/options.py +++ b/xclim/core/options.py @@ -137,7 +137,7 @@ def run_check(*args, **kwargs): return run_check -class set_options: # noqa: C0103 +class set_options: """Set options for xclim in a controlled context. Attributes diff --git a/xclim/indices/fire/_ffdi.py b/xclim/indices/fire/_ffdi.py index 123b51984..fb6b3c7d3 100644 --- a/xclim/indices/fire/_ffdi.py +++ b/xclim/indices/fire/_ffdi.py @@ -1,3 +1,4 @@ +# pylint: disable=no-value-for-parameter r""" McArthur Forest Fire Danger (Mark 5) System =========================================== @@ -97,7 +98,7 @@ def _keetch_byram_drought_index(p, t, pa, kbdi0, kbdi: float): # pragma: no cov nopython=True, cache=True, ) -def _griffiths_drought_factor(p, smd, lim, df): # pragma: no cover # noqa: C901 +def _griffiths_drought_factor(p, smd, lim, df): """Compute the Griffiths drought factor. Parameters @@ -248,11 +249,7 @@ def _keetch_byram_drought_index_pass(pr, tasmax, pr_annual, kbdi0): -------- DO NOT CALL DIRECTLY, use `keetch_byram_drought_index` instead. """ - # fmt: off - return _keetch_byram_drought_index( # pylint: disable=no-value-for-parameter - pr, tasmax, pr_annual, kbdi0 - ) - # fmt: on + return _keetch_byram_drought_index(pr, tasmax, pr_annual, kbdi0) pr = convert_units_to(pr, "mm/day", context="hydro") tasmax = convert_units_to(tasmax, "C") @@ -330,11 +327,7 @@ def _griffiths_drought_factor_pass(pr, smd, lim): -------- DO NOT CALL DIRECTLY, use `griffiths_drought_factor` instead. 
""" - # fmt: off - return _griffiths_drought_factor( # pylint: disable=no-value-for-parameter - pr, smd, lim - ) - # fmt: on + return _griffiths_drought_factor(pr, smd, lim) pr = convert_units_to(pr, "mm/day", context="hydro") smd = convert_units_to(smd, "mm/day") diff --git a/xclim/sdba/nbutils.py b/xclim/sdba/nbutils.py index 6e49933d4..9fd245d20 100644 --- a/xclim/sdba/nbutils.py +++ b/xclim/sdba/nbutils.py @@ -1,3 +1,4 @@ +# pylint: disable=no-value-for-parameter """ Numba-accelerated Utilities =========================== @@ -34,7 +35,7 @@ def vecquantiles(da: DataArray, rnk: DataArray, dim: str | DataArray.dims) -> Da da = da.transpose(*rnk.dims, tem) res = DataArray( - _vecquantiles(da.values, rnk.values), # pylint: disable=no-value-for-parameter + _vecquantiles(da.values, rnk.values), dims=rnk.dims, coords=rnk.coords, attrs=da.attrs, @@ -184,12 +185,7 @@ def _first_and_last_nonnull(arr): @njit -def _extrapolate_on_quantiles( - # fmt: off - interp, oldx, oldg, oldy, newx, newg, method="constant" - # noqa - # fmt: on -): +def _extrapolate_on_quantiles(interp, oldx, oldg, oldy, newx, newg, method="constant"): """Apply extrapolation to the output of interpolation on quantiles with a given grouping. Arguments are the same as _interp_on_quantiles_2D. diff --git a/xclim/testing/diagnostics.py b/xclim/testing/diagnostics.py index b93ea9c29..8aede8e47 100644 --- a/xclim/testing/diagnostics.py +++ b/xclim/testing/diagnostics.py @@ -1,3 +1,4 @@ +# pylint: disable=no-member,missing-kwoa """ SDBA Diagnostic Testing Module ============================== @@ -128,7 +129,7 @@ def adapt_freq_graph(): x = series(synth_rainfall(2, 2, wet_freq=0.25, size=n), "pr") # sim y = series(synth_rainfall(2, 2, wet_freq=0.5, size=n), "pr") # ref - xp = adapt_freq(x, y, thresh=0).sim_ad # pylint: disable=no-member,missing-kwoa + xp = adapt_freq(x, y, thresh=0).sim_ad # noqa fig, (ax1, ax2) = plt.subplots(2, 1) sx = x.sortby(x) From d7a50dca0882136df74a84526783d2d27702a18f Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Fri, 19 Jan 2024 15:53:51 -0500 Subject: [PATCH 24/37] remove pylint statements --- tests/test_indicators.py | 17 +++++------------ tests/test_sdba/test_adjustment.py | 7 ++----- 2 files changed, 7 insertions(+), 17 deletions(-) diff --git a/tests/test_indicators.py b/tests/test_indicators.py index 6ff98f893..80ee3901f 100644 --- a/tests/test_indicators.py +++ b/tests/test_indicators.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +# pylint: disable=unsubscriptable-object,function-redefined # Tests for the Indicator objects from __future__ import annotations @@ -159,10 +159,7 @@ def test_attrs(tas_series): assert f"xclim version: {__version__}" in txm.attrs["history"] assert txm.name == "tmin5 degC" assert uniIndTemp.standard_name == "{freq} mean temperature" - assert ( - uniIndTemp.cf_attrs[0]["another_attr"] # pylint: disable=unsubscriptable-object - == "With a value." - ) + assert uniIndTemp.cf_attrs[0]["another_attr"] == "With a value." 
thresh = xr.DataArray( [1], @@ -260,7 +257,7 @@ def test_temp_unit_conversion(tas_series): with pytest.raises(AssertionError): np.testing.assert_array_almost_equal(txk, txc + 273.15) - uniIndTemp.cf_attrs[0]["units"] = "degC" # pylint: disable=unsubscriptable-object + uniIndTemp.cf_attrs[0]["units"] = "degC" txc = uniIndTemp(a, freq="YS") np.testing.assert_array_almost_equal(txk, txc + 273.15) @@ -761,18 +758,14 @@ def func(data: xr.DataArray, thresh: str = "0 degC", freq: str = "YS"): # noqa # with pytest.raises(ValueError, match="variable data is missing expected units"): # Daily(**d) - d["parameters"]["thresh"] = {"units": "K"} # pylint: disable=function-redefined + d["parameters"]["thresh"] = {"units": "K"} d["realm"] = "mercury" d["input"] = {"data": "tasmin"} with pytest.raises(AttributeError, match="Indicator's realm must be given as one"): Daily(**d) - # fmt: off - def func( # noqa; # pylint: disable=function-redefined - data: xr.DataArray, thresh: str = "0 degC" - ): + def func(data: xr.DataArray, thresh: str = "0 degC"): return data - # fmt: on func.__doc__ = "\n".join(doc[:10] + doc[12:]) d = dict( diff --git a/tests/test_sdba/test_adjustment.py b/tests/test_sdba/test_adjustment.py index 52ff3ae75..7543ae9d7 100644 --- a/tests/test_sdba/test_adjustment.py +++ b/tests/test_sdba/test_adjustment.py @@ -1,3 +1,4 @@ +# pylint: disable=no-member from __future__ import annotations import numpy as np @@ -711,11 +712,7 @@ class TestSBCKutils: @pytest.mark.slow @pytest.mark.parametrize( "method", - # fmt: off - [ - m for m in dir(adjustment) if m.startswith("SBCK_") # pylint: disable=no-member - ], - # fmt: on + [m for m in dir(adjustment) if m.startswith("SBCK_")], ) @pytest.mark.parametrize("use_dask", [True]) # do we gain testing both? def test_sbck(self, method, use_dask, random): From 90f78fb8e0b0fe8fcb92e373e6bfddb21fd6f8ac Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Fri, 19 Jan 2024 16:06:44 -0500 Subject: [PATCH 25/37] treat xclim.indicators as a module with run-time submodules --- .pylintrc.toml | 2 +- tests/test_indices.py | 2 +- tests/test_modules.py | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.pylintrc.toml b/.pylintrc.toml index 98fceee40..0dccb5086 100644 --- a/.pylintrc.toml +++ b/.pylintrc.toml @@ -53,7 +53,7 @@ ignore-patterns = ["^\\.#"] # for modules/projects where namespaces are manipulated during runtime and thus # existing member attributes cannot be deduced by static analysis). It supports # qualified module names, as well as Unix pattern matching. -# ignored-modules = +ignored-modules = ["xclim.indicators"] # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). 
diff --git a/tests/test_indices.py b/tests/test_indices.py index d4979341c..5a7e0eb5d 100644 --- a/tests/test_indices.py +++ b/tests/test_indices.py @@ -2385,7 +2385,7 @@ def test_simple(self, open_dataset, ind, exp): np.testing.assert_almost_equal(out[0], exp, decimal=4) def test_indice_against_icclim(self, cmip3_day_tas): - from xclim.indicators import icclim # noqa; # pylint: disable=no-name-in-module + from xclim.indicators import icclim # noqa with set_options(cf_compliance="log"): ind = xci.tg_mean(cmip3_day_tas) diff --git a/tests/test_modules.py b/tests/test_modules.py index 3c06d3a4c..1927fa94f 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -28,9 +28,9 @@ def virtual_indicator(request): def test_default_modules_exist(): - from xclim.indicators import anuclim # noqa; # pylint: disable=no-name-in-module - from xclim.indicators import cf # noqa; # pylint: disable=no-name-in-module - from xclim.indicators import icclim # noqa; # pylint: disable=no-name-in-module + from xclim.indicators import anuclim # noqa + from xclim.indicators import cf # noqa + from xclim.indicators import icclim # noqa assert hasattr(icclim, "TG") From 9320f6302b6b7c8ed9247cfd70e74fd6b7aac67e Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Fri, 19 Jan 2024 16:12:47 -0500 Subject: [PATCH 26/37] re-add pragma: no cover --- xclim/indices/fire/_ffdi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xclim/indices/fire/_ffdi.py b/xclim/indices/fire/_ffdi.py index fb6b3c7d3..b986fe2e3 100644 --- a/xclim/indices/fire/_ffdi.py +++ b/xclim/indices/fire/_ffdi.py @@ -98,7 +98,7 @@ def _keetch_byram_drought_index(p, t, pa, kbdi0, kbdi: float): # pragma: no cov nopython=True, cache=True, ) -def _griffiths_drought_factor(p, smd, lim, df): +def _griffiths_drought_factor(p, smd, lim, df): # pragma: no cover """Compute the Griffiths drought factor. Parameters From 380210b68741fed5835d01f5e6d2d1f929f17ef1 Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Fri, 19 Jan 2024 16:15:04 -0500 Subject: [PATCH 27/37] noqa adjustment --- xclim/core/indicator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xclim/core/indicator.py b/xclim/core/indicator.py index f4cb9bc72..1a8c26255 100644 --- a/xclim/core/indicator.py +++ b/xclim/core/indicator.py @@ -211,7 +211,7 @@ def update(self, other: dict) -> None: def is_parameter_dict(cls, other: dict) -> bool: """Return whether indicator has a parameter dictionary.""" return set(other.keys()).issubset( - cls.__dataclass_fields__.keys() # noqa; # pylint: disable=no-member + cls.__dataclass_fields__.keys() # pylint: disable=no-member ) def __getitem__(self, key) -> str: From 65fc23c798992e83a17869f71d0528633061b979 Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Fri, 19 Jan 2024 16:19:36 -0500 Subject: [PATCH 28/37] update CHANGES.rst --- CHANGES.rst | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 416ae2c8f..c80832fcf 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -38,7 +38,12 @@ Internal changes * Updated the CONTRIBUTING.rst directions to showcase the new versioning system. (:issue:`1557`, :pull:`1573`). * The `codespell` library is now a development dependency for the `dev` installation recipe with configurations found within `pyproject.toml`. This is also now a linting step and integrated as a `pre-commit` hook. 
For more information, see the `codespell documentation `_ (:pull:`1576`). * Climate indicators search page now prioritizes the "official" indicators (atmos, land, seaIce and generic), virtual submodules can be added to search through checkbox option. (:issue:`1559`, :pull:`1593`). - +* The OpenSSF StepSecurity bot has contributed some changes to the workflows and pre-commit. (:issue:`1181`, :pull:`1606`): + * Dependabot has been configured to monitor the `xclim` repository for dependency updates. The ``actions-version-updater.yml`` workflow has been deprecated. + * GitHub Actions are now pinned to their commit hashes to prevent unexpected changes in the future. + * A new GitHub Workflow (``workflow-warning.yml``) has been added to warn maintainers when a forked repository has been used to open a Pull Request that modifies GitHub Workflows. + * `pylint` has been configured to provide some overhead checks of the `xclim` codebase as well as run as part of `xclim`'s `pre-commit` hooks. + * Some small adjustments to code organization to address `pylint` errors. v0.47.0 (2023-12-01) -------------------- From fd9121b2de6e3fd287a794ac7ba3b2ca85e1ec41 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Jan 2024 15:17:39 +0000 Subject: [PATCH 29/37] Bump actions/dependency-review-action from 3.1.5 to 4.0.0 Bumps [actions/dependency-review-action](https://github.com/actions/dependency-review-action) from 3.1.5 to 4.0.0. - [Release notes](https://github.com/actions/dependency-review-action/releases) - [Commits](https://github.com/actions/dependency-review-action/compare/c74b580d73376b7750d3d2a50bfb8adc2c937507...4901385134134e04cec5fbe5ddfe3b2c5bd5d976) --- updated-dependencies: - dependency-name: actions/dependency-review-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/dependency-review.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index c63f07bd4..c977388df 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -28,4 +28,4 @@ jobs: uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: 'Dependency Review' - uses: actions/dependency-review-action@c74b580d73376b7750d3d2a50bfb8adc2c937507 + uses: actions/dependency-review-action@4901385134134e04cec5fbe5ddfe3b2c5bd5d976 From 85f2b3460dea8f6f3319de90682eff71840912fb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Jan 2024 15:17:51 +0000 Subject: [PATCH 30/37] Bump actions/upload-artifact from 4.1.0 to 4.2.0 Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 4.1.0 to 4.2.0. - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/1eb3cb2b3e0f29609092a73eb033bb759a334595...694cdabd8bdb0f10b2cea11669e1bf5453eed0a6) --- updated-dependencies: - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- .github/workflows/scorecard.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index da3db3bf8..c0dd56256 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -59,7 +59,7 @@ jobs: # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF # format to the repository Actions tab. - name: "Upload artifact" - uses: actions/upload-artifact@1eb3cb2b3e0f29609092a73eb033bb759a334595 + uses: actions/upload-artifact@694cdabd8bdb0f10b2cea11669e1bf5453eed0a6 with: name: SARIF file path: results.sarif From ba1569347f828fb236d14004aaa9aa81fe406312 Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Mon, 22 Jan 2024 12:29:02 -0500 Subject: [PATCH 31/37] add numba anaconda repository --- .github/workflows/main.yml | 10 ++- environment.yml | 149 +++++++++++++++++++------------------ 2 files changed, 82 insertions(+), 77 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 201a02778..cf8a918de 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -181,8 +181,12 @@ jobs: strategy: matrix: include: - - tox-env: py310 - python-version: "3.10" + - python-version: "3.10" + install-python: "3.10" + - python-version: "3.11" + install-python: "3.11" + - python-version: "3.12" + install-python: "3.12" defaults: run: shell: bash -l {0} @@ -212,7 +216,7 @@ jobs: environment-file: environment.yml create-args: >- conda - python=${{ matrix.python-version }} + python=${{ matrix.install-python }} - name: Conda and Mamba versions run: | conda --version diff --git a/environment.yml b/environment.yml index 04ae0e482..06d5fc29b 100644 --- a/environment.yml +++ b/environment.yml @@ -1,78 +1,79 @@ name: xclim channels: - - conda-forge - - defaults + - numba # Added to gain access to Python3.12-compatible numba release candidates. 
+ - conda-forge + - defaults dependencies: - - python >=3.8 - - astroid - - boltons >=20.1 - - bottleneck >=1.3.1 - - cf_xarray >=0.6.1 - - cftime >=1.4.1 - - Click >=8.1 - - dask >=2.6.0 - - importlib-resources # For Python3.8 - - jsonpickle - - lmoments3 - - numba - - numpy >=1.16 - - pandas >=0.23,<2.2 - - pint >=0.9 - - poppler >=0.67 - - pyyaml - - scikit-learn >=0.21.3 - - scipy >=1.2 - - statsmodels - - xarray >=2022.06.0,<2023.11.0 - - yamale - # Extras - - eofs - - flox + - python >=3.8,<3.13 + - astroid + - boltons >=20.1 + - bottleneck >=1.3.1 + - cf_xarray >=0.6.1 + - cftime >=1.4.1 + - Click >=8.1 + - dask >=2.6.0 + - importlib-resources # For Python3.8 + - jsonpickle + - lmoments3 + - numba + - numpy >=1.16 + - pandas >=0.23,<2.2 + - pint >=0.9 + - poppler >=0.67 + - pyyaml + - scikit-learn >=0.21.3 + - scipy >=1.2 + - statsmodels + - xarray >=2022.06.0,<2023.11.0 + - yamale + # Extras + - eofs + - flox # Testing and development dependencies - - black >=22.12 - - blackdoc - - bump-my-version - - cairosvg - - codespell - - coverage - - distributed >=2.0 - - filelock - - flake8 - - flake8-rst-docstrings - - flit - - h5netcdf - - ipykernel - - ipython - - matplotlib - - mypy - - nbqa - - nbsphinx - - nbval - - nc-time-axis - - netCDF4 >=1.4 - - notebook - - platformdirs - - pooch - - pre-commit - - pybtex - - pylint - - pytest - - pytest-cov - - pytest-socket - - pytest-xdist >=3.2 - - ruff >=0.1.0 - - sphinx - - sphinx-autodoc-typehints - - sphinx-codeautolink - - sphinx-copybutton - - sphinx_rtd_theme >=1.0 - - sphinxcontrib-bibtex - - tokenize-rt - - tox -# - tox-conda # Will be added when a tox@v4.0+ compatible plugin is released. - - xdoctest - - yamllint - - pip - - pip: - - flake8-alphabetize - - sphinxcontrib-svg2pdfconverter + - black >=22.12 + - blackdoc + - bump-my-version + - cairosvg + - codespell + - coverage + - distributed >=2.0 + - filelock + - flake8 + - flake8-rst-docstrings + - flit + - h5netcdf + - ipykernel + - ipython + - matplotlib + - mypy + - nbqa + - nbsphinx + - nbval + - nc-time-axis + - netCDF4 >=1.4 + - notebook + - platformdirs + - pooch + - pre-commit + - pybtex + - pylint + - pytest + - pytest-cov + - pytest-socket + - pytest-xdist >=3.2 + - ruff >=0.1.0 + - sphinx + - sphinx-autodoc-typehints + - sphinx-codeautolink + - sphinx-copybutton + - sphinx-rtd-theme >=1.0 + - sphinxcontrib-bibtex + - tokenize-rt + - tox +# - tox-conda # Will be added when a tox@v4.0+ compatible plugin is released. 
+ - xdoctest + - yamllint + - pip + - pip: + - flake8-alphabetize + - sphinxcontrib-svg2pdfconverter From e461973e37d00c93198cfcc3a2580263249246f7 Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Mon, 22 Jan 2024 12:46:30 -0500 Subject: [PATCH 32/37] support Python3.12 --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 7b822c8ec..855f13943 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,6 +27,7 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: Scientific/Engineering :: Atmospheric Science" ] dynamic = ["description", "version"] From 0b45defee08d7e5dbcdc62d5932020ea0e72bf15 Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Mon, 22 Jan 2024 12:59:30 -0500 Subject: [PATCH 33/37] add Python3.12 Python build to tox and main.yml, conda builds to Python3.9 and Python3.12 --- .github/workflows/main.yml | 13 ++++++------- tox.ini | 4 ++++ 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index cf8a918de..be56aff2a 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -131,6 +131,9 @@ jobs: - tox-env: offline-prefetch python-version: "3.11" markers: -m 'not slow and not requires_internet' + - tox-env: py312 + python-version: "3.12" + markers: -m 'not slow' steps: - name: Harden Runner uses: step-security/harden-runner@eb238b55efaa70779f274895e782ed17c84f2895 # v2.6.1 @@ -172,7 +175,7 @@ jobs: test-conda: needs: lint - name: test-conda-${{ matrix.tox-env }} (Python${{ matrix.python-version }}) + name: test-conda-Python${{ matrix.python-version }} if: | contains(github.event.pull_request.labels.*.name, 'approved') || (github.event.review.state == 'approved') || @@ -181,12 +184,8 @@ jobs: strategy: matrix: include: - - python-version: "3.10" - install-python: "3.10" - - python-version: "3.11" - install-python: "3.11" + - python-version: "3.9" - python-version: "3.12" - install-python: "3.12" defaults: run: shell: bash -l {0} @@ -216,7 +215,7 @@ jobs: environment-file: environment.yml create-args: >- conda - python=${{ matrix.install-python }} + python=${{ matrix.python-version }} - name: Conda and Mamba versions run: | conda --version diff --git a/tox.ini b/tox.ini index 3017f8de9..3d33cdcfc 100644 --- a/tox.ini +++ b/tox.ini @@ -10,6 +10,7 @@ env_list = py39-upstream-doctest py310 py311 + py312 labels = test = py38, py39-upstream-doctest, py310, py311, notebooks_doctests, offline-prefetch requires = @@ -103,6 +104,9 @@ passenv = extras = dev deps = py38: scipy<1.9 + # FIXME: Remove when numba 0.59.0 is released + py312: numba==0.59.0rc1 + py312: llvmlite==0.42.0rc1 coverage: coveralls upstream: -rrequirements_upstream.txt eofs: eofs From 192c17fc8f7caa0f9a5d90ade93284f6389d0e3e Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Mon, 22 Jan 2024 13:17:56 -0500 Subject: [PATCH 34/37] update CHANGES.rst and metadata --- .github/workflows/main.yml | 6 +++--- CHANGES.rst | 1 + pyproject.toml | 3 ++- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index be56aff2a..0d649624c 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -126,14 +126,14 @@ jobs: - tox-env: py311-coverage-sbck 
python-version: "3.11" markers: -m 'not slow' + - tox-env: py312-coverage + python-version: "3.12" + markers: -m 'not slow' - tox-env: notebooks_doctests python-version: "3.10" - tox-env: offline-prefetch python-version: "3.11" markers: -m 'not slow and not requires_internet' - - tox-env: py312 - python-version: "3.12" - markers: -m 'not slow' steps: - name: Harden Runner uses: step-security/harden-runner@eb238b55efaa70779f274895e782ed17c84f2895 # v2.6.1 diff --git a/CHANGES.rst b/CHANGES.rst index c80832fcf..0a6d038e0 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -8,6 +8,7 @@ Contributors to this version: Juliette Lavoie (:user:`juliettelavoie`), Pascal B Announcements ^^^^^^^^^^^^^ +* `xclim` now officially supports Python3.12 (requires `numba>=0.59.0`). (:pull:`1613`). * `xclim` now adheres to the `Semantic Versioning 2.0.0 `_ specification. (:issue:`1556`, :pull:`1569`). * The `xclim` repository now uses `GitHub Discussions `_ to offer help for users, coordinate translation efforts, and support general Q&A for the `xclim` community. The `xclim` `Gitter` room has been deprecated in favour of GitHub Discussions. (:issue:`1571`, :pull:`1572`). diff --git a/pyproject.toml b/pyproject.toml index 855f13943..03f6db874 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,7 +43,8 @@ dependencies = [ "importlib-resources; python_version == '3.8'", "jsonpickle", "lmoments3>=1.0.5", - "numba", + "numba; python_version <= '3.11'", + "numba>=0.59.0; python_version >= '3.12'", "numpy>=1.16", "pandas>=0.23,<2.0; python_version == '3.8'", "pandas>=0.23,<2.2; python_version >= '3.9'", From 27fd516af7c72cd10bf7cbeb1e0c2df512a9c6f4 Mon Sep 17 00:00:00 2001 From: Zeitsperre <10819524+Zeitsperre@users.noreply.github.com> Date: Mon, 22 Jan 2024 13:28:46 -0500 Subject: [PATCH 35/37] reorganize version pinning to allow for fewer breaking changes --- .github/workflows/main.yml | 2 +- pyproject.toml | 3 +-- tox.ini | 9 ++++++--- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 0d649624c..7233985bd 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -126,7 +126,7 @@ jobs: - tox-env: py311-coverage-sbck python-version: "3.11" markers: -m 'not slow' - - tox-env: py312-coverage + - tox-env: py312-coverage-numba python-version: "3.12" markers: -m 'not slow' - tox-env: notebooks_doctests diff --git a/pyproject.toml b/pyproject.toml index 03f6db874..855f13943 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,8 +43,7 @@ dependencies = [ "importlib-resources; python_version == '3.8'", "jsonpickle", "lmoments3>=1.0.5", - "numba; python_version <= '3.11'", - "numba>=0.59.0; python_version >= '3.12'", + "numba", "numpy>=1.16", "pandas>=0.23,<2.0; python_version == '3.8'", "pandas>=0.23,<2.2; python_version >= '3.9'", diff --git a/tox.ini b/tox.ini index 3d33cdcfc..1c0692cf5 100644 --- a/tox.ini +++ b/tox.ini @@ -10,7 +10,7 @@ env_list = py39-upstream-doctest py310 py311 - py312 + py312-numba labels = test = py38, py39-upstream-doctest, py310, py311, notebooks_doctests, offline-prefetch requires = @@ -104,9 +104,12 @@ passenv = extras = dev deps = py38: scipy<1.9 + # FIXME: Remove when Python3.8 is dropped + py38: numba<0.59.0 + py38: llvmlite<0.42.0 # FIXME: Remove when numba 0.59.0 is released - py312: numba==0.59.0rc1 - py312: llvmlite==0.42.0rc1 + numba: numba==0.59.0rc1 + numba: llvmlite==0.42.0rc1 coverage: coveralls upstream: -rrequirements_upstream.txt eofs: eofs From 559f8f11a484086627459000b780e5eb3d89fac5 
Mon Sep 17 00:00:00 2001 From: Trevor James Smith <10819524+Zeitsperre@users.noreply.github.com> Date: Mon, 22 Jan 2024 14:35:14 -0500 Subject: [PATCH 36/37] Update environment.yml --- environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index 06d5fc29b..3ce0cefa9 100644 --- a/environment.yml +++ b/environment.yml @@ -4,7 +4,7 @@ channels: - conda-forge - defaults dependencies: - - python >=3.8,<3.13 + - python >=3.8 - astroid - boltons >=20.1 - bottleneck >=1.3.1 From ffd4aad674c591f88745c770c748da1194c1f954 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 Jan 2024 12:48:36 +0000 Subject: [PATCH 37/37] Bump mamba-org/setup-micromamba from 1.7.3 to 1.8.0 Bumps [mamba-org/setup-micromamba](https://github.com/mamba-org/setup-micromamba) from 1.7.3 to 1.8.0. - [Release notes](https://github.com/mamba-org/setup-micromamba/releases) - [Commits](https://github.com/mamba-org/setup-micromamba/compare/e820223f89c8720d6c740ca154a7adf32fcd278a...8767fb704bd78032e9392f0386bf46950bdd1194) --- updated-dependencies: - dependency-name: mamba-org/setup-micromamba dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/main.yml | 2 +- .github/workflows/upstream.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 7233985bd..d03342841 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -208,7 +208,7 @@ jobs: repo.anaconda.com:443 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Setup Conda (Micromamba) with Python${{ matrix.python-version }} - uses: mamba-org/setup-micromamba@e820223f89c8720d6c740ca154a7adf32fcd278a # v1.7.3 + uses: mamba-org/setup-micromamba@8767fb704bd78032e9392f0386bf46950bdd1194 # v1.8.0 with: cache-downloads: true cache-environment: true diff --git a/.github/workflows/upstream.yml b/.github/workflows/upstream.yml index 5e328380f..7ebf57655 100644 --- a/.github/workflows/upstream.yml +++ b/.github/workflows/upstream.yml @@ -58,7 +58,7 @@ jobs: with: fetch-depth: 0 # Fetch all history for all branches and tags. - name: Setup Conda (Micromamba) with Python${{ matrix.python-version }} - uses: mamba-org/setup-micromamba@e820223f89c8720d6c740ca154a7adf32fcd278a # v1.7.3 + uses: mamba-org/setup-micromamba@8767fb704bd78032e9392f0386bf46950bdd1194 # v1.8.0 with: cache-downloads: true cache-environment: true
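
The reference values parametrized in patches 21 and 22 (gamma/APP from `monocongo/climate_indices`, fisk/ML from the R `SPEI` package) all reduce to the same standardization recipe. Below is a standalone sketch of that recipe, using `scipy` directly rather than xclim's own `standardized_precipitation_index`, with a synthetic gamma-distributed daily series in place of the project's test fixtures and no handling of zero-precipitation probabilities or calibration periods.

    # Sketch of the SPI standardization behind the parametrized reference
    # values: accumulate precipitation over the window, fit a gamma
    # distribution (cf. dist="gamma", method="ML"), then map the fitted CDF
    # through the standard normal quantile function.
    # Assumptions: synthetic data instead of the project's fixtures; zero
    # precipitation and calibration periods are ignored.
    import numpy as np
    from scipy import stats

    rng = np.random.default_rng(0)
    pr = rng.gamma(shape=2.0, scale=3.0, size=3650)  # synthetic daily totals [mm/day]

    window = 12
    acc = np.convolve(pr, np.ones(window), mode="valid")  # rolling accumulation

    shape, loc, scale = stats.gamma.fit(acc, floc=0)  # maximum-likelihood fit
    cdf = stats.gamma.cdf(acc, shape, loc=loc, scale=scale)
    spi = stats.norm.ppf(cdf)  # standardized index values

    print(spi[:5])

The `method="APP"` rows in the same tables differ only in how the distribution parameters are estimated; the standardization step itself is unchanged.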
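
Patch 23 adds `signature-mutators = ["xclim.sdba.base.map_groups"]` to `.pylintrc.toml` because functions decorated by `map_groups` are called with keywords their declared signatures do not show (for example `group=` in `tests/test_sdba/test_base.py`). The following minimal stand-in (not xclim's actual decorator, just an illustrative wrapper with an invented `group` keyword) shows the situation that setting is meant to cover.

    # Minimal stand-in (not xclim's real map_groups) showing why a decorator
    # that changes the call signature must be listed in pylint's
    # `signature-mutators`: callers pass `group=`, which the decorated
    # function never declares, so pylint would otherwise report
    # unexpected-keyword-arg / missing-kwoa at every call site.
    import functools


    def map_groups_like(func):
        @functools.wraps(func)
        def wrapper(ds, *, group, **kwargs):
            # The real decorator resolves the grouping into a dimension name;
            # here it is simply forwarded as `dim`.
            return func(ds, dim=group, **kwargs)

        return wrapper


    @map_groups_like
    def mean_over(ds, *, dim):
        return f"mean of {ds} over {dim}"


    # Accepted at runtime even though `mean_over` only declares `dim=`,
    # which static analysis cannot infer without the signature-mutators hint.
    print(mean_over("tas", group="time.dayofyear"))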