chore(backport): Move to using Ruff for pre-commit (#2282)
* Backport components of:
   - #2124
   - #2238
   - #2240
   - #2273
matthewfeickert authored Aug 16, 2023
1 parent f97b885 commit 0744b42
Showing 8 changed files with 48 additions and 51 deletions.
9 changes: 0 additions & 9 deletions .flake8

This file was deleted.

30 changes: 7 additions & 23 deletions .pre-commit-config.yaml
@@ -26,16 +26,11 @@ repos:
         # exclude generated files
         exclude: ^validation/|\.dtd$|\.xml$

-  - repo: https://github.com/asottile/pyupgrade
-    rev: v3.3.1
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: "v0.0.281"
     hooks:
-      - id: pyupgrade
-        args: ["--py37-plus"]
-
-  - repo: https://github.com/MarcoGorelli/absolufy-imports
-    rev: v0.3.1
-    hooks:
-      - id: absolufy-imports
+      - id: ruff
+        args: ["--fix", "--show-fixes"]

   - repo: https://github.com/psf/black
     rev: 23.3.0
@@ -48,18 +43,6 @@ repos:
       - id: blacken-docs
         additional_dependencies: [black==23.3.0]

-  - repo: https://github.com/asottile/yesqa
-    rev: v1.4.0
-    hooks:
-      - id: yesqa
-
-  - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
-    hooks:
-      - id: flake8
-        args: ["--count", "--statistics"]
-        additional_dependencies: [flake8-encodings==0.5.0.post1]
-
   - repo: https://github.com/pre-commit/mirrors-mypy
     rev: v1.2.0
     # check the oldest and newest supported Pythons
@@ -81,8 +64,9 @@ repos:
   - repo: https://github.com/nbQA-dev/nbQA
     rev: 1.7.0
     hooks:
-      - id: nbqa-pyupgrade
-        additional_dependencies: [pyupgrade==3.3.1]
+      - id: nbqa-ruff
+        additional_dependencies: [ruff==0.0.281]
+        args: ["--extend-ignore=F821,F401,F841,F811"]

   - repo: https://github.com/codespell-project/codespell
     rev: v2.2.4
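The single `ruff` hook above takes over the checks previously split across `pyupgrade`, `absolufy-imports`, `yesqa`, and `flake8`: `--fix` rewrites violations in place and `--show-fixes` prints a summary of what was changed. A rough, hypothetical sketch (not pyhf code) of the pyupgrade-style rewrites its `UP` rules apply:

```python
# Hypothetical example of code that ruff's "UP" rules flag and can autofix.

# Before: UP030 (explicit positional indices in a format string are redundant)
# and UP032 (prefer an f-string over str.format).
def describe(name, value):
    return "{0} = {1}".format(name, value)


# After the hook's `ruff --fix` run, the call would typically end up as:
def describe_fixed(name, value):
    return f"{name} = {value}"
```

The `nbqa-ruff` hook applies the same rules inside notebooks, extending the ignore list with codes (undefined names, unused imports and variables, redefinitions) that are routine in exploratory notebook cells.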
30 changes: 24 additions & 6 deletions pyproject.toml
@@ -225,12 +225,6 @@ filterwarnings = [
     "ignore:module 'sre_constants' is deprecated:DeprecationWarning", # tensorflow v2.12.0+ for Python 3.11+
 ]

-[tool.nbqa.mutate]
-pyupgrade = 1
-
-[tool.nbqa.addopts]
-pyupgrade = ["--py37-plus"]
-
 [tool.mypy]
 files = "src"
 python_version = "3.11"
@@ -281,3 +275,27 @@ module = [
     'pyhf.tensor.pytorch_backend.*',
 ]
 ignore_errors = true
+
+[tool.ruff]
+select = [
+    "E", "F", "W", # flake8
+    "UP", # pyupgrade
+    "RUF", # Ruff-specific
+    "TID", # flake8-tidy-imports
+]
+line-length = 88
+ignore = [
+    "E402",
+    "E501",
+    "RUF001", # String contains ambiguous unicode character
+    "RUF005", # unpack-instead-of-concatenating-to-collection-literal
+]
+src = ["src"]
+typing-modules = ["pyhf.typing"]
+unfixable = [
+    "F841", # Removes unused variables
+]
+flake8-tidy-imports.ban-relative-imports = "all"
+
+[tool.ruff.per-file-ignores]
+"docs/lite/jupyterlite.py" = ["F401", "F704"]
2 changes: 1 addition & 1 deletion src/pyhf/cli/spec.py
@@ -108,7 +108,7 @@ def inspect(workspace, output_file, measurement):
     )

     # summary
-    fmtStr = '{{0: >{0:d}s}} {{1:s}}'.format(maxlen + len('Summary'))
+    fmtStr = '{{: >{:d}s}} {{:s}}'.format(maxlen + len('Summary'))
     click.echo(fmtStr.format(' Summary ', ''))
     click.echo(fmtStr.format('-' * 18, ''))
     fmtStr = f'{{0: >{maxlen:d}s}} {{1:s}}'
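This drops the redundant explicit positional indices from the outer `str.format` call (the rewrite ruff's `UP030`, format-literals, rule performs); auto-numbered fields consume arguments in order, so the final echoed output is unchanged. A standalone check with a made-up width:

```python
# maxlen is hypothetical here; in spec.py it is derived from the workspace.
maxlen = 11

old_tmpl = '{{0: >{0:d}s}} {{1:s}}'.format(maxlen + len('Summary'))  # '{0: >18s} {1:s}'
new_tmpl = '{{: >{:d}s}} {{:s}}'.format(maxlen + len('Summary'))     # '{: >18s} {:s}'

# The templates differ textually but format identically.
assert old_tmpl.format(' Summary ', '') == new_tmpl.format(' Summary ', '')
```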
2 changes: 1 addition & 1 deletion src/pyhf/contrib/utils.py
@@ -115,7 +115,7 @@ def download(archive_url, output_directory, force=False, compress=False):
                 # directory up and then renamed as the name of the
                 # zipfile directory is set at zipfile creation time and
                 # isn't knowable in advance.
-                child_path = [child for child in output_directory.iterdir()][0]
+                child_path = next(iter(output_directory.iterdir()))
                 _tmp_path = output_directory.parent.joinpath(
                     Path(output_directory.name + "__tmp__")
                 )
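Instead of materializing every directory entry into a list just to take the first one, `next(iter(...))` pulls a single entry from the iterator. A minimal sketch with a hypothetical directory layout (in `download()` the directory has just been populated from the archive, so it is non-empty; note that on an empty directory this form raises `StopIteration` rather than `IndexError`):

```python
import tempfile
from pathlib import Path

# Hypothetical directory with a single extracted child, mimicking the
# post-extraction state in download().
output_directory = Path(tempfile.mkdtemp())
(output_directory / "extracted-archive").mkdir()

child_path_old = [child for child in output_directory.iterdir()][0]  # builds a full list
child_path_new = next(iter(output_directory.iterdir()))  # stops at the first entry

assert child_path_old == child_path_new
```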
4 changes: 2 additions & 2 deletions src/pyhf/contrib/viz/brazil.py
@@ -361,9 +361,9 @@ def plot_results(test_pois, tests, test_size=0.05, ax=None, **kwargs):
     handles, labels = ax.get_legend_handles_labels()
     if not no_cls:
         for label_part in ["exp", "pm1", "pm2", "alpha"]:
-            label_idx = [
+            label_idx = next(
                 idx for idx, label in enumerate(labels) if label_part in label
-            ][0]
+            )
             handles.append(handles.pop(label_idx))
             labels.append(labels.pop(label_idx))
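The same list-then-index pattern is replaced here, but on a filtered `enumerate`: `next()` returns the index of the first legend label containing the substring and stops iterating as soon as it finds one. A self-contained sketch with made-up label strings:

```python
# Hypothetical legend labels standing in for ax.get_legend_handles_labels().
labels = ["expected limit", "pm1 band", "pm2 band", "observed", "alpha line"]

label_part = "pm1"
label_idx = next(
    idx for idx, label in enumerate(labels) if label_part in label
)
assert label_idx == 1
```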

16 changes: 10 additions & 6 deletions src/pyhf/workspace.py
@@ -5,14 +5,18 @@
 * the observed data (optional)
 * fit configurations ("measurements")
 """
+from __future__ import annotations
+
+import collections
+import copy
 import logging
+from typing import ClassVar
+
 import jsonpatch
-import copy
-import collections
-from pyhf import exceptions
-from pyhf import schema
-from pyhf.pdf import Model
+
+from pyhf import exceptions, schema
 from pyhf.mixins import _ChannelSummaryMixin
+from pyhf.pdf import Model

 log = logging.getLogger(__name__)

@@ -284,7 +288,7 @@ class Workspace(_ChannelSummaryMixin, dict):
     A JSON-serializable object that is built from an object that follows the :obj:`workspace.json` `schema <https://scikit-hep.org/pyhf/likelihood.html#workspace>`__.
     """

-    valid_joins = ['none', 'outer', 'left outer', 'right outer']
+    valid_joins: ClassVar[list[str]] = ['none', 'outer', 'left outer', 'right outer']

     def __init__(self, spec, validate: bool = True, **config_kwargs):
         """
6 changes: 3 additions & 3 deletions tests/test_workspace.py
@@ -378,7 +378,7 @@ def test_join_items_outer_deep(join_items):
     joined = pyhf.workspace._join_items(
         'outer', left_items, right_items, key='name', deep_merge_key='deep'
     )
-    assert [k['deep'] for k in joined if k['name'] == 'common'][0] == [
+    assert next(k['deep'] for k in joined if k['name'] == 'common') == [
         {'name': 1},
         {'name': 2},
     ]
@@ -389,7 +389,7 @@ def test_join_items_left_outer_deep(join_items):
     joined = pyhf.workspace._join_items(
         'left outer', left_items, right_items, key='name', deep_merge_key='deep'
     )
-    assert [k['deep'] for k in joined if k['name'] == 'common'][0] == [
+    assert next(k['deep'] for k in joined if k['name'] == 'common') == [
         {'name': 1},
         {'name': 2},
     ]
@@ -400,7 +400,7 @@ def test_join_items_right_outer_deep(join_items):
     joined = pyhf.workspace._join_items(
         'right outer', left_items, right_items, key='name', deep_merge_key='deep'
     )
-    assert [k['deep'] for k in joined if k['name'] == 'common'][0] == [
+    assert next(k['deep'] for k in joined if k['name'] == 'common') == [
         {'name': 2},
         {'name': 1},
     ]
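These tests only exercise the matching case, so the rewrite is behavior-preserving there; the edge case worth keeping in mind is that when nothing matches, the generator form raises `StopIteration` where the old list-then-index form raised `IndexError`. A hypothetical pytest-style check of that difference (not part of the pyhf test suite):

```python
import pytest

# Hypothetical joined result with no item named 'common'.
joined = [{'name': 'left_only', 'deep': []}]

with pytest.raises(IndexError):
    [k['deep'] for k in joined if k['name'] == 'common'][0]

with pytest.raises(StopIteration):
    next(k['deep'] for k in joined if k['name'] == 'common')

# A default avoids the exception entirely.
assert next((k['deep'] for k in joined if k['name'] == 'common'), None) is None
```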
