Skip to content

Commit

Permalink
MAINT: Fix linting errors and bump numpy version
Browse files Browse the repository at this point in the history
Increased the minimum NumPy version from 1.18 to 1.20 to allow using the `dtype` argument of `numpy.concatenate`.
  • Loading branch information
derb12 committed Feb 13, 2024
1 parent a5c4791 commit 86fb254
Show file tree
Hide file tree
Showing 14 changed files with 40 additions and 31 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/python-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ jobs:
- name: Install required dependencies
run: |
python -m pip install --upgrade pip
python -m pip install "numpy>=1.18" "scipy>=1.5" pytest
python -m pip install "numpy>=1.20" "scipy>=1.5" pytest
- name: Test with required dependencies
run: pytest .
Expand Down Expand Up @@ -79,7 +79,7 @@ jobs:
- name: Install minimum dependencies
run: |
python -m pip install --upgrade pip
python -m pip install numpy==1.18 scipy==1.5 pytest
python -m pip install numpy==1.20 scipy==1.5 pytest
- name: Test with minimum required dependencies
run: pytest .
Expand Down
2 changes: 1 addition & 1 deletion docs/installation.rst
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ Dependencies

pybaselines requires `Python <https://python.org>`_ version 3.8 or later and the following libraries:

* `NumPy <https://numpy.org>`_ (>= 1.18)
* `NumPy <https://numpy.org>`_ (>= 1.20)
* `SciPy <https://www.scipy.org>`_ (>= 1.5)


Expand Down
4 changes: 2 additions & 2 deletions examples/general/plot_algorithm_convergence.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@
the measured tolerance value at each iteration. The `tol_history` parameter
can be helpful for determining appropriate `max_iter` or `tol` values.
In this example, the convergence of the :meth:`~.Baseline.asls` and :meth:`~.Baseline.aspls` functions
will be compared. asls is a relatively simple calculation that sets its weighting
In this example, the convergence of the :meth:`~.Baseline.asls` and :meth:`~.Baseline.aspls`
functions will be compared. asls is a relatively simple calculation that sets its weighting
each iteration based on whether the current baseline is above or below the input data
at each point. aspls has a much more intricate weighting based on the logistic distribution
of the residuals (data minus baseline); further, aspls also updates an additional
Expand Down
3 changes: 2 additions & 1 deletion examples/whittaker/plot_whittaker_solvers.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,8 @@ def make_data(num_x):

if not _banded_utils._HAS_PENTAPY:
warnings.warn(
'pentapy is not installed so pentapy and scipy-banded timings will be identical'
'pentapy is not installed so pentapy and scipy-banded timings will be identical',
stacklevel=2
)

# equation obtained following similar procedure as `lam` vs data size example
Expand Down
6 changes: 3 additions & 3 deletions pybaselines/classification.py
Original file line number Diff line number Diff line change
Expand Up @@ -1028,9 +1028,9 @@ def _averaged_interp(x, y, mask, interp_half_window=0):
mask_sum = mask.sum()
if not mask_sum: # all points belong to peaks
# will just interpolate between first and last points
warnings.warn('there were no baseline points found', ParameterWarning)
warnings.warn('there were no baseline points found', ParameterWarning, stacklevel=2)
elif mask_sum == mask.shape[0]: # all points belong to baseline
warnings.warn('there were no peak points found', ParameterWarning)
warnings.warn('there were no peak points found', ParameterWarning, stacklevel=2)
return output

peak_starts, peak_ends = _find_peak_segments(mask)
Expand Down Expand Up @@ -1153,7 +1153,7 @@ def _iter_threshold(power, num_std=3.0):
if masked_power.size < 2: # need at least 2 points for std calculation
warnings.warn(
'not enough baseline points found; "num_std" is likely too low',
ParameterWarning
ParameterWarning, stacklevel=2
)
break
mask = power < np.mean(masked_power) + num_std * np.std(masked_power, ddof=1)
Expand Down
2 changes: 1 addition & 1 deletion pybaselines/smooth.py
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,7 @@ def snip(self, data, max_half_window=None, decreasing=False, smooth_half_window=
if half_window > (self._len - 1) // 2:
warnings.warn(
'max_half_window values greater than (len(data) - 1) / 2 have no effect.',
ParameterWarning
ParameterWarning, stacklevel=2
)
half_windows[i] = (self._len - 1) // 2

Expand Down
10 changes: 6 additions & 4 deletions pybaselines/spline.py
Original file line number Diff line number Diff line change
Expand Up @@ -638,7 +638,7 @@ def pspline_airpls(self, data, lam=1e3, num_knots=100, spline_degree=3,
warnings.warn(
('error occurred during fitting, indicating that "tol"'
' is too low, "max_iter" is too high, or "lam" is too high'),
ParameterWarning
ParameterWarning, stacklevel=2
)
i -= 1 # reduce i so that output tol_history indexing is correct
break
Expand All @@ -653,7 +653,7 @@ def pspline_airpls(self, data, lam=1e3, num_knots=100, spline_degree=3,
# point would get a weight of 0, which fails the solver
warnings.warn(
('almost all baseline points are below the data, indicating that "tol"'
' is too low and/or "max_iter" is too high'), ParameterWarning
' is too low and/or "max_iter" is too high'), ParameterWarning, stacklevel=2
)
i -= 1 # reduce i so that output tol_history indexing is correct
break
Expand Down Expand Up @@ -851,7 +851,8 @@ def pspline_drpls(self, data, lam=1e3, eta=0.5, num_knots=100, spline_degree=3,
# checking a scalar is faster; cannot use np.errstate since it is not 100% reliable
warnings.warn(
('nan and/or +/- inf occurred in weighting calculation, likely meaning '
'"tol" is too low and/or "max_iter" is too high'), ParameterWarning
'"tol" is too low and/or "max_iter" is too high'), ParameterWarning,
stacklevel=2
)
break
elif calc_difference < tol:
Expand Down Expand Up @@ -938,7 +939,8 @@ def pspline_iarpls(self, data, lam=1e3, num_knots=100, spline_degree=3, diff_ord
# checking a scalar is faster; cannot use np.errstate since it is not 100% reliable
warnings.warn(
('nan and/or +/- inf occurred in weighting calculation, likely meaning '
'"tol" is too low and/or "max_iter" is too high'), ParameterWarning
'"tol" is too low and/or "max_iter" is too high'), ParameterWarning,
stacklevel=2
)
break
elif calc_difference < tol:
Expand Down
4 changes: 3 additions & 1 deletion pybaselines/two_d/_whittaker_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -608,7 +608,9 @@ def basis(self):
def _calc_dof(self, weights, assume_a='pos'):
if not self._using_svd:
# Could maybe just output a matrix of ones?
raise ValueError('Cannot calculate degrees of freedom when not using eigendecomposition')
raise ValueError(
'Cannot calculate degrees of freedom when not using eigendecomposition'
)
lhs = self._make_btwb(weights)
rhs = lhs.copy()
np.fill_diagonal(lhs, lhs.diagonal() + self.penalty)
Expand Down
8 changes: 5 additions & 3 deletions pybaselines/two_d/spline.py
Original file line number Diff line number Diff line change
Expand Up @@ -557,7 +557,7 @@ def pspline_airpls(self, data, lam=1e3, num_knots=25, spline_degree=3,
warnings.warn(
('error occurred during fitting, indicating that "tol"'
' is too low, "max_iter" is too high, or "lam" is too high'),
ParameterWarning
ParameterWarning, stacklevel=2
)
i -= 1 # reduce i so that output tol_history indexing is correct
break
Expand All @@ -572,7 +572,8 @@ def pspline_airpls(self, data, lam=1e3, num_knots=25, spline_degree=3,
# point would get a weight of 0, which fails the solver
warnings.warn(
('almost all baseline points are below the data, indicating that "tol"'
' is too low and/or "max_iter" is too high'), ParameterWarning
' is too low and/or "max_iter" is too high'), ParameterWarning,
stacklevel=2
)
i -= 1 # reduce i so that output tol_history indexing is correct
break
Expand Down Expand Up @@ -744,7 +745,8 @@ def pspline_iarpls(self, data, lam=1e3, num_knots=25, spline_degree=3, diff_orde
# checking a scalar is faster; cannot use np.errstate since it is not 100% reliable
warnings.warn(
('nan and/or +/- inf occurred in weighting calculation, likely meaning '
'"tol" is too low and/or "max_iter" is too high'), ParameterWarning
'"tol" is too low and/or "max_iter" is too high'), ParameterWarning,
stacklevel=2
)
break
elif calc_difference < tol:
Expand Down
11 changes: 7 additions & 4 deletions pybaselines/whittaker.py
Original file line number Diff line number Diff line change
Expand Up @@ -264,7 +264,7 @@ def airpls(self, data, lam=1e6, diff_order=2, max_iter=50, tol=1e-3, weights=Non
warnings.warn(
('error occurred during fitting, indicating that "tol"'
' is too low, "max_iter" is too high, or "lam" is too high'),
ParameterWarning
ParameterWarning, stacklevel=2
)
i -= 1 # reduce i so that output tol_history indexing is correct
break
Expand All @@ -278,7 +278,8 @@ def airpls(self, data, lam=1e6, diff_order=2, max_iter=50, tol=1e-3, weights=Non
# point would get a weight of 0, which fails the solver
warnings.warn(
('almost all baseline points are below the data, indicating that "tol"'
' is too low and/or "max_iter" is too high'), ParameterWarning
' is too low and/or "max_iter" is too high'), ParameterWarning,
stacklevel=2
)
i -= 1 # reduce i so that output tol_history indexing is correct
break
Expand Down Expand Up @@ -454,7 +455,8 @@ def drpls(self, data, lam=1e5, eta=0.5, max_iter=50, tol=1e-3, weights=None, dif
# checking a scalar is faster; cannot use np.errstate since it is not 100% reliable
warnings.warn(
('nan and/or +/- inf occurred in weighting calculation, likely meaning '
'"tol" is too low and/or "max_iter" is too high'), ParameterWarning
'"tol" is too low and/or "max_iter" is too high'), ParameterWarning,
stacklevel=2
)
break
elif calc_difference < tol:
Expand Down Expand Up @@ -530,7 +532,8 @@ def iarpls(self, data, lam=1e5, diff_order=2, max_iter=50, tol=1e-3, weights=Non
# checking a scalar is faster; cannot use np.errstate since it is not 100% reliable
warnings.warn(
('nan and/or +/- inf occurred in weighting calculation, likely meaning '
'"tol" is too low and/or "max_iter" is too high'), ParameterWarning
'"tol" is too low and/or "max_iter" is too high'), ParameterWarning,
stacklevel=2
)
break
elif calc_difference < tol:
Expand Down
5 changes: 2 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -42,9 +42,8 @@ keywords = [
]
requires-python = ">=3.8"
dependencies = [
# lowest versions supported for python 3.8
"numpy>=1.18",
"scipy>=1.5",
"numpy>=1.20", # lowest version to allow dtype for np.concatenate
"scipy>=1.5", # lowest versions supported for python 3.8
]

[project.urls]
Expand Down
8 changes: 4 additions & 4 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -441,10 +441,10 @@ def test_output(self, additional_keys=None, **kwargs):
# check all entries in output param dictionary
for key in total_keys:
if key not in output[1]:
assert False, f'key "{key}" missing from param dictionary'
raise AssertionError(f'key "{key}" missing from param dictionary')
output[1].pop(key)
if output[1]:
assert False, f'unchecked keys in param dictionary: {output[1]}'
raise AssertionError(f'unchecked keys in param dictionary: {output[1]}')

def test_x_ordering(self, assertion_kwargs=None, **kwargs):
"""Ensures arrays are correctly sorted within the function."""
Expand Down Expand Up @@ -688,10 +688,10 @@ def test_output(self, additional_keys=None, **kwargs):
# check all entries in output param dictionary
for key in total_keys:
if key not in output[1]:
assert False, f'key "{key}" missing from param dictionary'
raise AssertionError(f'key "{key}" missing from param dictionary')
output[1].pop(key)
if output[1]:
assert False, f'unchecked keys in param dictionary: {output[1]}'
raise AssertionError(f'unchecked keys in param dictionary: {output[1]}')

def test_xz_ordering(self, assertion_kwargs=None, **kwargs):
"""Ensures arrays are correctly sorted within the function."""
Expand Down
2 changes: 1 addition & 1 deletion tests/test_spline_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -422,7 +422,7 @@ def test_pspline_tck_none(data_fixture):

assert pspline.coef is None
with pytest.raises(ValueError):
pspline.tck
tck = pspline.tck


def test_pspline_tck_readonly(data_fixture):
Expand Down
2 changes: 1 addition & 1 deletion tests/two_d/test_spline_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -263,7 +263,7 @@ def test_pspline_tck_none(data_fixture2d):

assert pspline.coef is None
with pytest.raises(ValueError):
pspline.tck
tck = pspline.tck


def test_pspline_tck_readonly(data_fixture2d):
Expand Down

0 comments on commit 86fb254

Please sign in to comment.