Update pre-commit config, fix new issues
maxnoe committed Feb 26, 2024
1 parent: ff54275 · commit: 4ab0aba
Showing 13 changed files with 163 additions and 84 deletions.
37 changes: 24 additions & 13 deletions .pre-commit-config.yaml
@@ -1,17 +1,28 @@
 repos:
-  # https://pycqa.github.io/isort/docs/configuration/black_compatibility.html#integration-with-pre-commit
-  - repo: https://github.com/pycqa/isort
-    rev: 5.12.0
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.5.0 # Use the ref you want to point at
     hooks:
-      - id: isort
-        args: ["--profile", "black", "--filter-files"]
-  - repo: https://github.com/psf/black
-    rev: 23.7.0
+      - id: trailing-whitespace
+      - id: check-added-large-files
+      - id: check-case-conflict
+      - id: check-merge-conflict
+      - id: end-of-file-fixer
+
+  - repo: https://github.com/codespell-project/codespell
+    rev: v2.2.4
     hooks:
-      - id: black-jupyter
-  # https://black.readthedocs.io/en/stable/guides/using_black_with_other_tools.html?highlight=other%20tools#flake8
-  - repo: https://github.com/PyCQA/flake8
-    rev: 6.1.0
+      - id: codespell
+        additional_dependencies:
+          - tomli
+
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.2.1
     hooks:
-      - id: flake8
-        args: [--max-line-length=88, "--extend-ignore=E203,E712"]
+      - id: ruff
+        args: [ --fix, --show-fixes ]
+      - id: ruff-format
+
+  - repo: https://github.com/scientific-python/cookie
+    rev: "2024.01.24"
+    hooks:
+      - id: sp-repo-review
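
The updated hook set replaces isort, black, and flake8 with ruff and ruff-format, and adds codespell plus the standard pre-commit housekeeping hooks; running `pre-commit run --all-files` after updating applies them to the whole repository. The snippet below is purely illustrative (it is not code from this repository) and sketches the kind of rewrite the new hooks perform: the `I` rule sorts and groups imports, and ruff-format normalises quoting.

```python
# Illustrative only -- a made-up module, not part of this repository.
#
# Before the hooks run (unsorted imports, single quotes):
#
#   import sys
#   import numpy as np
#   import json
#   print('argv: %s' % sys.argv)
#
# After `ruff --fix` (rule "I" sorts and groups imports) and `ruff-format`
# (double quotes, normalised spacing):
import json
import sys

import numpy as np

print(json.dumps({"argv": sys.argv, "pi": float(np.pi)}))
```
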
64 changes: 64 additions & 0 deletions pyproject.toml
@@ -60,3 +60,67 @@ ctapipe_io_zfits = ["resources/*"]

[tool.setuptools_scm]
write_to = 'src/ctapipe_io_zfits/_version.py'

[tool.ruff]
line-length = 88
target-version = "py310"


[tool.ruff.lint]
extend-select = [
"I", # isort
"N", # pep8 naming
"D", # pydocstyle
"NPY", # numpy
"PT", # pytest
"UP", # pyupgrade
"COM", # flake8-commas
"ISC", # implicit string concat rules
"ICN", # import name conventions
"G", # logging
"B", # flake8-bugbear
]
ignore = [
"COM812", # incompatible with ruff format
"ISC001", # incompatible with ruff format
]

[tool.ruff.lint.pydocstyle]
convention = "numpy"

[tool.ruff.lint.per-file-ignores]
# no documentation linting for test files
"**/tests/**" = ["D"]
"**/tests_*.py" = ["D"]
"docs/conf.py" = ["D"]
"**/conftest.py" = ["D"]

[tool.ruff.format]
quote-style = "double"
indent-style = "space"
skip-magic-trailing-comma = false
line-ending = "auto"


[tool.repo-review]
select = ["PY", "PP", "PC"]
ignore = [
"PY007", # we do not use tox for now
"PC140", # mypy is not used
"PC170", # pygrep is not used
"PC180", # prettier is not used
"PC111", # blacken-docs is not used
"PC901", # no CI setup currently for updating pre-commit hooks
]


[tool.pytest.ini_options]
minversion = "7"
testpaths = ["src"]
log_cli_level = "INFO"
xfail_strict = true
# print summary of failed tests, force errors if settings are misspelled
addopts = ["-ra", "--strict-config", "--strict-markers"]
filterwarnings = [
"error",
]
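
The new `[tool.pytest.ini_options]` table is deliberately strict: `filterwarnings = ["error"]` promotes every warning to a test failure, while `--strict-config` and `--strict-markers` reject misspelled settings and unknown markers. The hypothetical test module below (not part of this repository) sketches how that behaves, including the usual per-test escape hatches:

```python
# Hypothetical test module illustrating the effect of filterwarnings = ["error"].
import warnings

import pytest


def legacy():
    """Stand-in for third-party code that still emits a DeprecationWarning."""
    warnings.warn("use new_api() instead", DeprecationWarning, stacklevel=2)
    return 42


def test_fails_under_this_config():
    # With filterwarnings = ["error"], the warning is raised as an exception,
    # so this test fails instead of passing silently.
    assert legacy() == 42


@pytest.mark.filterwarnings("ignore::DeprecationWarning")
def test_opts_out_explicitly():
    # Individual tests can still allow a known warning.
    assert legacy() == 42


def test_asserts_the_warning():
    # Or assert that the warning is emitted, which also satisfies the filter.
    with pytest.warns(DeprecationWarning):
        legacy()
```
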
4 changes: 1 addition & 3 deletions src/ctapipe_io_zfits/__init__.py
@@ -1,6 +1,4 @@
"""
EventSource implementations for protozfits files
"""
"""EventSource implementations for protozfits files."""
from .dl0 import ProtozfitsDL0EventSource, ProtozfitsDL0TelescopeEventSource
from .version import __version__

2 changes: 1 addition & 1 deletion src/ctapipe_io_zfits/_dev_version/__init__.py
@@ -6,4 +6,4 @@

version = get_version(root="../..", relative_to=__file__)
except Exception as e:
raise ImportError(f"setuptools_scm broken or not installed: {e}")
raise ImportError(f"setuptools_scm broken or not installed: {e}") from e
6 changes: 6 additions & 0 deletions src/ctapipe_io_zfits/conftest.py
@@ -35,11 +35,16 @@


def to_anyarray(array):
"""Convert numpy to protobuf AnyArray."""
type_ = DTYPE_TO_ANYARRAY_TYPE[array.dtype.type]
return AnyArray(type=type_, data=array.tobytes())


def evening_of_obs(time, tz):
"""Get the evening an observation started.
Uses noon localtime in ``tz`` as a cutoff.
"""
dt = time.to_datetime(timezone.utc).astimezone(tz)
if dt.hour < 12:
return (dt - timedelta(days=1)).date()
@@ -49,6 +54,7 @@ def evening_of_obs(time, tz):

@pytest.fixture(scope="session")
def acada_base(tmp_path_factory):
"""Base directory of acada data."""
return tmp_path_factory.mktemp("acada_base_")


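
For reference, the noon cutoff described in the new `evening_of_obs` docstring can be sketched as a standalone function. The afternoon branch is an assumption here, since the diff truncates the body, and a plain `datetime` plus a fixed-offset timezone stand in for the astropy `Time` and `tz` arguments used above:

```python
# Standalone sketch of the noon-cutoff logic described in the docstring.
from datetime import datetime, timedelta, timezone


def evening_of_obs(dt_utc, tz):
    """Return the local calendar date of the evening an observation started."""
    dt = dt_utc.astimezone(tz)
    if dt.hour < 12:
        # Before local noon: the observation still belongs to the previous evening.
        return (dt - timedelta(days=1)).date()
    return dt.date()  # assumed continuation of the truncated function


tz = timezone(timedelta(hours=-3))  # stand-in for the observatory's local timezone
# 03:00 UTC is 00:00 local, so it is assigned to the previous evening (2024-02-25).
print(evening_of_obs(datetime(2024, 2, 26, 3, 0, tzinfo=timezone.utc), tz))
```
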
52 changes: 23 additions & 29 deletions src/ctapipe_io_zfits/dl0.py
@@ -1,9 +1,6 @@
"""
DL0 Protozfits EventSource
"""
"""DL0 Protozfits EventSource."""
import logging
from contextlib import ExitStack
from typing import Dict, Tuple

import numpy as np
from ctapipe.containers import (
@@ -47,7 +44,7 @@ def _is_compatible(input_url, extname, allowed_protos):
try:
hdul = stack.enter_context(fits.open(input_url))
except Exception as e:
log.debug(f"Error trying to open input file as fits: {e}")
log.debug("Error trying to open input file as fits: %s", e)
return False

if extname not in hdul:
@@ -70,7 +67,7 @@ def _is_compatible(input_url, extname, allowed_protos):
return False

if proto_class not in allowed_protos:
log.debug(f"Unsupported PBFHEAD: {proto_class} not in {allowed_protos}")
log.debug("Unsupported PBFHEAD: %s not in %s", proto_class, allowed_protos)
return False

return True
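
Both `log.debug` calls in this hunk switch from f-strings to %-style placeholders, which the logging rules enabled in the new ruff configuration (the `G` group, G004) flag. An illustrative comparison, not tied to this module's actual control flow:

```python
# Illustrative only: why the diff moves from f-strings to %-style logging calls.
import logging

log = logging.getLogger("ctapipe_io_zfits.dl0")
err = ValueError("not a fits file")

# f-string: the message is formatted even when DEBUG logging is disabled.
log.debug(f"Error trying to open input file as fits: {err}")

# %-style: formatting is deferred until a handler actually emits the record,
# and the unformatted template is preserved for log aggregation tools.
log.debug("Error trying to open input file as fits: %s", err)
```
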
@@ -150,7 +147,7 @@ class ProtozfitsDL0EventSource(EventSource):
The ``input_url`` must be the subarray trigger file, the source
will then look for the other data files according to the filename and
directory schema layed out in the draft of the ACADA - DPPS ICD.
directory schema laid out in the draft of the ACADA - DPPS ICD.
"""

subarray_id = Integer(default_value=1).tag(config=True)
@@ -203,10 +200,11 @@ def _get_tel_events_directory(self, tel_id):
/ self._date_dirs
)

@classmethod
def is_compatible(cls, input_url):
@staticmethod
def is_compatible(file_path):
"""Return True if the given file can be read by this source."""
return _is_compatible(
input_url,
file_path,
extname="SubarrayEvents",
allowed_protos={"DL0v1.Subarray.Event"},
)
@@ -230,29 +228,30 @@ def _open_telescope_files(self):
)

def close(self):
"""Close underlying files."""
self._exit_stack.__exit__(None, None, None)

def __exit__(self, exc_type, exc_value, traceback):
def __exit__(self, exc_type, exc_value, traceback): # noqa: D105
self._exit_stack.__exit__(exc_type, exc_value, traceback)

@property
def is_simulation(self) -> bool:
def is_simulation(self) -> bool: # noqa: D102
return False

@property
def datalevels(self) -> Tuple[DataLevel]:
def datalevels(self) -> tuple[DataLevel]: # noqa: D102
return (DataLevel.DL0,)

@property
def subarray(self) -> SubarrayDescription:
def subarray(self) -> SubarrayDescription: # noqa: D102
return self._subarray

@property
def observation_blocks(self) -> Dict[int, ObservationBlockContainer]:
def observation_blocks(self) -> dict[int, ObservationBlockContainer]: # noqa: D102
return self._observation_blocks

@property
def scheduling_blocks(self) -> Dict[int, SchedulingBlockContainer]:
def scheduling_blocks(self) -> dict[int, SchedulingBlockContainer]: # noqa: D102
return self._scheduling_blocks

def _generator(self):
@@ -304,7 +303,7 @@ class ProtozfitsDL0TelescopeEventSource(EventSource):
ignore_samples_end = Integer(default_value=0).tag(config=True)

@classmethod
def is_compatible(cls, input_url):
def is_compatible(cls, input_url): # noqa: D102
return _is_compatible(
input_url,
extname="Events",
@@ -338,35 +337,30 @@ def __init__(self, input_url=None, **kwargs):
self.sb_id: SchedulingBlockContainer(sb_id=np.uint64(self.sb_id))
}

def close(self):
def close(self): # noqa: D102
self._exit_stack.__exit__(None, None, None)

def __exit__(self, exc_type, exc_value, traceback):
def __exit__(self, exc_type, exc_value, traceback): # noqa: D105
self._exit_stack.__exit__(exc_type, exc_value, traceback)

@property
def is_simulation(self) -> bool:
"""If data comes from simulations"""
def is_simulation(self) -> bool: # noqa: D102
return False

@property
def datalevels(self) -> Tuple[DataLevel]:
"""Provided data levels"""
def datalevels(self) -> tuple[DataLevel]: # noqa: D102
return (DataLevel.DL0,)

@property
def subarray(self) -> SubarrayDescription:
"""The subarray"""
def subarray(self) -> SubarrayDescription: # noqa: D102
return self._subarray

@property
def observation_blocks(self) -> Dict[int, ObservationBlockContainer]:
"""The observation blocks"""
def observation_blocks(self) -> dict[int, ObservationBlockContainer]: # noqa: D102
return self._observation_blocks

@property
def scheduling_blocks(self) -> Dict[int, SchedulingBlockContainer]:
"""The scheduling blocks"""
def scheduling_blocks(self) -> dict[int, SchedulingBlockContainer]: # noqa: D102
return self._scheduling_blocks

def _fill_event(self, count, zfits_event) -> ArrayEventContainer:
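
The recurring change in the rest of this file is twofold: `typing.Dict`/`typing.Tuple` annotations become the PEP 585 built-in generics, which pyupgrade (the `UP` rules) applies once `target-version = "py310"` is set, and the `# noqa: D102`/`# noqa: D105` markers silence pydocstyle's missing-docstring checks for short property and dunder overrides. A simplified sketch of the typing change, using stand-in classes rather than the real ctapipe containers:

```python
# Simplified sketch (not the repository's actual classes) of the PEP 585 change:
# the `from typing import Dict, Tuple` import disappears entirely.
from enum import Enum


class DataLevel(Enum):
    DL0 = 0


class Block:
    """Stand-in for ObservationBlockContainer."""


class Source:
    # before:  def datalevels(self) -> Tuple[DataLevel]:
    @property
    def datalevels(self) -> tuple[DataLevel]:
        return (DataLevel.DL0,)

    # before:  def observation_blocks(self) -> Dict[int, Block]:
    @property
    def observation_blocks(self) -> dict[int, Block]:
        return {1: Block()}


print(Source().datalevels, Source().observation_blocks)
```
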
13 changes: 7 additions & 6 deletions src/ctapipe_io_zfits/instrument.py
@@ -1,7 +1,7 @@
"""Definitionas of the instrument configuration."""
import json
from functools import cache
from importlib.resources import as_file, files
from typing import Tuple

import astropy.units as u
from astropy.coordinates import EarthLocation
@@ -57,21 +57,21 @@ def _load_array_elements():

@cache
def get_subarrays_by_id():
"""Get a mapping of subarray_id to subarray definition"""
"""Get a mapping of subarray_id to subarray definition."""
data = _load_subarrays()
return {subarray["id"]: subarray for subarray in data["subarrays"]}


@cache
def get_array_elements_by_id():
"""Get a mapping of ae_id to array element definition"""
"""Get a mapping of ae_id to array element definition."""
data = _load_array_elements()
return {ae["id"]: ae for ae in data["array_elements"]}


@cache
def get_array_element_ids(subarray_id: int) -> Tuple[int]:
"""Get array element ids for a given subarray_id"""
def get_array_element_ids(subarray_id: int) -> tuple[int]:
"""Get array element ids for a given subarray_id."""
subarray = get_subarrays_by_id().get(subarray_id)
if subarray_id is None:
raise ValueError(f"Unknown subarray_id: {subarray_id}")
@@ -80,10 +80,11 @@ def get_array_element_ids(subarray_id: int) -> Tuple[int]:


def build_subarray_description(subarray_id):
"""Create a SubarrayDescription from the subarray_id."""
try:
subarray = get_subarrays_by_id()[subarray_id]
except KeyError:
raise ValueError(f"Unknown subarray_id: {subarray_id}")
raise ValueError(f"Unknown subarray_id: {subarray_id}") from None

tel_ids = get_array_element_ids(subarray_id)
array_elements = get_array_elements_by_id()
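
Here the re-raise gains `from None`, the counterpart of the `from e` used earlier in this commit: it suppresses the implicit `KeyError` context so callers only see the clearer `ValueError`. A minimal sketch with made-up data rather than the real subarray definitions:

```python
# Minimal sketch of suppressing exception context with "from None"
# when translating a KeyError into a clearer ValueError (made-up data).
SUBARRAYS = {1: {"name": "full array"}}


def build_subarray_description(subarray_id):
    try:
        return SUBARRAYS[subarray_id]
    except KeyError:
        # "from None" keeps the KeyError out of the displayed traceback,
        # so callers only see the ValueError with the meaningful message.
        raise ValueError(f"Unknown subarray_id: {subarray_id}") from None


try:
    build_subarray_description(99)
except ValueError as err:
    print(err)                      # Unknown subarray_id: 99
    print(err.__cause__)            # None
    print(err.__suppress_context__)  # True: the KeyError is hidden from tracebacks
```
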
(Diffs for the remaining 6 changed files were not loaded.)
