Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Fix] complexity_coarsegraining(): fix method #892

Merged
merged 12 commits into from
Sep 22, 2023
22 changes: 13 additions & 9 deletions docs/functions/complexity.rst
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,18 @@ Entropy
"""""""""""""""""""
.. autofunction:: neurokit2.complexity.entropy_renyi

*entropy_approximate()*
""""""""""""""""""""""""
.. autofunction:: neurokit2.complexity.entropy_approximate

*entropy_sample()*
""""""""""""""""""""
.. autofunction:: neurokit2.complexity.entropy_sample

*entropy_quadratic()*
"""""""""""""""""""""
.. autofunction:: neurokit2.complexity.entropy_quadratic

*entropy_cumulativeresidual()*
"""""""""""""""""""""""""""""""""
.. autofunction:: neurokit2.complexity.entropy_cumulativeresidual
Expand Down Expand Up @@ -155,14 +167,6 @@ Entropy
"""""""""""""""""""""""
.. autofunction:: neurokit2.complexity.entropy_ofentropy

*entropy_approximate()*
""""""""""""""""""""""""
.. autofunction:: neurokit2.complexity.entropy_approximate

*entropy_sample()*
""""""""""""""""""""
.. autofunction:: neurokit2.complexity.entropy_sample

*entropy_permutation()*
""""""""""""""""""""""""
.. autofunction:: neurokit2.complexity.entropy_permutation
Expand Down Expand Up @@ -274,4 +278,4 @@ Joint/Multivariate
.. .. automodule:: neurokit2.complexity
.. :members:
.. :exclude-members: complexity, complexity_delay, complexity_dimension, complexity_tolerance, complexity_k, fractal_katz, fractal_petrosian, fractal_sevcik, fractal_nld, fractal_psdslope, fractal_higuchi, fractal_correlation, entropy_shannon, entropy_maximum, entropy_differential, entropy_tsallis, entropy_renyi, entropy_cumulativeresidual, entropy_svd, entropy_spectral, entropy_phase, entropy_grid, entropy_attention, entropy_increment, entropy_slope, entropy_symbolicdynamic, entropy_dispersion, entropy_ofentropy, entropy_approximate, entropy_sample, entropy_permutation, entropy_bubble, entropy_range, entropy_fuzzy, entropy_multiscale, entropy_hierarchical, fisher_information, complexity_hjorth, complexity_lempelziv, complexity_relativeroughness, fractal_hurst, complexity_lyapunov, complexity_rqa, fractal_mandelbrot, complexity_simulate, complexity_attractor, complexity_symbolize, complexity_coarsegraining, complexity_ordinalpatterns, recurrence_matrix, entropy_shannon_joint, entropy_rate
.. :exclude-members: complexity, complexity_delay, complexity_dimension, complexity_tolerance, complexity_k, fractal_katz, fractal_petrosian, fractal_sevcik, fractal_nld, fractal_psdslope, fractal_higuchi, fractal_correlation, entropy_shannon, entropy_maximum, entropy_differential, entropy_tsallis, entropy_renyi, entropy_cumulativeresidual, entropy_svd, entropy_spectral, entropy_phase, entropy_grid, entropy_attention, entropy_increment, entropy_slope, entropy_symbolicdynamic, entropy_dispersion, entropy_ofentropy, entropy_approximate, entropy_sample, entropy_permutation, entropy_bubble, entropy_range, entropy_fuzzy, entropy_multiscale, entropy_hierarchical, fisher_information, complexity_hjorth, complexity_lempelziv, complexity_relativeroughness, fractal_hurst, complexity_lyapunov, complexity_rqa, fractal_mandelbrot, complexity_simulate, complexity_attractor, complexity_symbolize, complexity_coarsegraining, complexity_ordinalpatterns, recurrence_matrix, entropy_shannon_joint, entropy_rate, entropy_quadratic
6 changes: 5 additions & 1 deletion neurokit2/complexity/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
from .entropy_permutation import entropy_permutation
from .entropy_phase import entropy_phase
from .entropy_power import entropy_power
from .entropy_quadratic import entropy_quadratic
from .entropy_range import entropy_range
from .entropy_rate import entropy_rate
from .entropy_renyi import entropy_renyi
Expand Down Expand Up @@ -96,7 +97,9 @@
complexity_rcmse = functools.partial(entropy_multiscale, method="RCMSEn")
complexity_fuzzymse = functools.partial(entropy_multiscale, fuzzy=True)
complexity_fuzzycmse = functools.partial(entropy_multiscale, method="CMSEn", fuzzy=True)
complexity_fuzzyrcmse = functools.partial(entropy_multiscale, method="RCMSEn", fuzzy=True)
complexity_fuzzyrcmse = functools.partial(
entropy_multiscale, method="RCMSEn", fuzzy=True
)


complexity_dfa = fractal_dfa
Expand Down Expand Up @@ -175,6 +178,7 @@
"entropy_symbolicdynamic",
"entropy_cumulativeresidual",
"entropy_approximate",
"entropy_quadratic",
"entropy_bubble",
"entropy_coalition",
"entropy_sample",
Expand Down
9 changes: 5 additions & 4 deletions neurokit2/complexity/entropy_approximate.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,12 @@
import pandas as pd

from .optim_complexity_tolerance import _entropy_apen, complexity_tolerance
from .utils import _get_count
from .utils_entropy import _get_count


def entropy_approximate(signal, delay=1, dimension=2, tolerance="sd", corrected=False, **kwargs):
def entropy_approximate(
signal, delay=1, dimension=2, tolerance="sd", corrected=False, **kwargs
):
"""**Approximate entropy (ApEn) and its corrected version (cApEn)**
Approximate entropy is a technique used to quantify the amount of regularity and the
Expand Down Expand Up @@ -95,7 +97,7 @@ def entropy_approximate(signal, delay=1, dimension=2, tolerance="sd", corrected=

# Compute index
if corrected is False:
# ApEn is implemented in 'optim_complexity_tolerance()' to avoid circular imports
# ApEn is implemented in 'utils_entropy.py' to avoid circular imports
# as one of the method for optimizing tolerance relies on ApEn
out, _ = _entropy_apen(signal, delay, dimension, info["Tolerance"], **kwargs)
else:
Expand All @@ -110,7 +112,6 @@ def entropy_approximate(signal, delay=1, dimension=2, tolerance="sd", corrected=


def _entropy_capen(signal, delay, dimension, tolerance, **kwargs):

__, count1, _ = _get_count(
signal,
delay=delay,
Expand Down
39 changes: 28 additions & 11 deletions neurokit2/complexity/entropy_multiscale.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,8 @@
from .entropy_slope import entropy_slope
from .entropy_symbolicdynamic import entropy_symbolicdynamic
from .optim_complexity_tolerance import complexity_tolerance
from .utils import _phi, _phi_divide
from .utils_complexity_coarsegraining import _get_scales, complexity_coarsegraining
from .utils_entropy import _phi, _phi_divide


def entropy_multiscale(
Expand Down Expand Up @@ -249,10 +249,12 @@ def entropy_multiscale(
if "delay" in kwargs:
kwargs.pop("delay")

# Parameters selection
# Default parameters
algorithm = entropy_sample
refined = False
coarsegraining = "nonoverlapping"

# Parameters adjustement for variants
if method in ["MSEn", "SampEn"]:
pass # The default arguments are good
elif method in ["MSApEn", "ApEn", "MSPEn", "PEn", "MSWPEn", "WPEn"]:
Expand Down Expand Up @@ -326,13 +328,9 @@ def entropy_multiscale(
info["Value"] = np.array(
[
_entropy_multiscale(
coarse=complexity_coarsegraining(
signal,
scale=scale,
method=coarsegraining,
show=False,
**kwargs,
),
signal,
scale=scale,
coarsegraining=coarsegraining,
algorithm=algorithm,
dimension=dimension,
tolerance=info["Tolerance"],
Expand Down Expand Up @@ -378,13 +376,32 @@ def _entropy_multiscale_plot(mse, info):
# =============================================================================
# Methods
# =============================================================================
def _entropy_multiscale(coarse, algorithm, dimension, tolerance, refined=False, **kwargs):
def _entropy_multiscale(
signal,
scale,
coarsegraining,
algorithm,
dimension,
tolerance,
refined=False,
**kwargs,
):
"""Wrapper function that works both on 1D and 2D coarse-grained (for composite)"""

# Get coarse-grained signal
coarse = complexity_coarsegraining(signal, scale=scale, method=coarsegraining)

# For 1D coarse-graining
if coarse.ndim == 1:
# Get delay
delay = 1 # If non-overlapping
if coarsegraining in ["rolling", "interpolate"]:
delay = scale

# Compute entropy
return algorithm(
coarse,
delay=1,
delay=delay,
dimension=dimension,
tolerance=tolerance,
**kwargs,
Expand Down
75 changes: 75 additions & 0 deletions neurokit2/complexity/entropy_quadratic.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
# -*- coding: utf-8 -*-
import numpy as np

from .entropy_sample import entropy_sample


def entropy_quadratic(signal, delay=1, dimension=2, tolerance="sd", **kwargs):
    """**Quadratic Sample Entropy (QSE)**

    Compute the quadratic sample entropy (QSE) of a signal. It is essentially a correction of
    SampEn introduced by Lake (2005) defined as:

    .. math::

        QSE = SampEn + ln(2 * tolerance)

    QSE has been described as a more stable measure of entropy than SampEn (Gylling, 2017).

    Parameters
    ----------
    signal : Union[list, np.array, pd.Series]
        The signal (i.e., a time series) in the form of a vector of values.
    delay : int
        Time delay (often denoted *Tau* :math:`\\tau`, sometimes referred to as *lag*) in samples.
        See :func:`complexity_delay` to estimate the optimal value for this parameter.
    dimension : int
        Embedding Dimension (*m*, sometimes referred to as *d* or *order*). See
        :func:`complexity_dimension` to estimate the optimal value for this parameter.
    tolerance : float
        Tolerance (often denoted as *r*), distance to consider two data points as similar. If
        ``"sd"`` (default), will be set to :math:`0.2 * SD_{signal}`. See
        :func:`complexity_tolerance` to estimate the optimal value for this parameter.
    **kwargs : optional
        Other arguments passed to :func:`entropy_sample`.

    See Also
    --------
    entropy_sample

    Returns
    ----------
    qse : float
        The quadratic sample entropy of the single time series.
    info : dict
        A dictionary containing additional information regarding the parameters used
        to compute sample entropy.

    Examples
    ----------
    .. ipython:: python

      import neurokit2 as nk

      signal = nk.signal_simulate(duration=2, frequency=5)

      qsa, parameters = nk.entropy_quadratic(signal, delay=1, dimension=2)
      qsa

    References
    ----------
    * Huselius Gylling, K. (2017). Quadratic sample entropy as a measure of burstiness: A study in
      how well Rényi entropy rate and quadratic sample entropy can capture the presence of spikes in
      time-series data.
    * Lake, D. E. (2005). Renyi entropy measures of heart rate Gaussianity. IEEE Transactions on
      Biomedical Engineering, 53(1), 21-27.

    """
    # Start from the standard sample entropy; `info` carries the tolerance
    # actually used (relevant when tolerance="sd" is resolved to a number).
    sampen, info = entropy_sample(
        signal,
        delay=delay,
        dimension=dimension,
        tolerance=tolerance,
        **kwargs,
    )
    # Lake's (2005) correction: QSE = SampEn + ln(2 * r)
    return sampen + np.log(2 * info["Tolerance"]), info
10 changes: 5 additions & 5 deletions neurokit2/complexity/entropy_sample.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import pandas as pd

from .optim_complexity_tolerance import complexity_tolerance
from .utils import _phi, _phi_divide
from .utils_entropy import _phi, _phi_divide


def entropy_sample(signal, delay=1, dimension=2, tolerance="sd", **kwargs):
Expand Down Expand Up @@ -35,7 +35,7 @@ def entropy_sample(signal, delay=1, dimension=2, tolerance="sd", **kwargs):

See Also
--------
entropy_shannon, entropy_approximate, entropy_fuzzy
entropy_shannon, entropy_approximate, entropy_fuzzy, entropy_quadratic

Returns
----------
Expand Down Expand Up @@ -81,13 +81,13 @@ def entropy_sample(signal, delay=1, dimension=2, tolerance="sd", **kwargs):
}

# Compute phi
phi = _phi(
info["phi"], _ = _phi(
signal,
delay=delay,
dimension=dimension,
tolerance=info["Tolerance"],
approximate=False,
**kwargs
)[0]
)

return _phi_divide(phi), info
return _phi_divide(info["phi"]), info
42 changes: 18 additions & 24 deletions neurokit2/complexity/optim_complexity_tolerance.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@
import sklearn.neighbors

from ..stats import density
from .utils import _phi
from .utils_complexity_embedding import complexity_embedding
from .utils_entropy import _entropy_apen


def complexity_tolerance(
Expand Down Expand Up @@ -224,7 +224,11 @@ def complexity_tolerance(
):
if dimension is None:
raise ValueError("'dimension' cannot be empty for the 'nolds' method.")
r = 0.11604738531196232 * np.std(signal, ddof=1) * (0.5627 * np.log(dimension) + 1.3334)
r = (
0.11604738531196232
* np.std(signal, ddof=1)
* (0.5627 * np.log(dimension) + 1.3334)
)
info = {"Method": "Adjusted 20% SD"}

elif method in ["chon", "chon2009"] and (
Expand Down Expand Up @@ -264,7 +268,9 @@ def complexity_tolerance(
raise ValueError("'dimension' cannot be empty for the 'makowski' method.")
n = len(signal)
r = np.std(signal, ddof=1) * (
0.2811 * (dimension - 1) + 0.0049 * np.log(n) - 0.02 * ((dimension - 1) * np.log(n))
0.2811 * (dimension - 1)
+ 0.0049 * np.log(n)
- 0.02 * ((dimension - 1) * np.log(n))
)

info = {"Method": "Makowski"}
Expand Down Expand Up @@ -292,7 +298,9 @@ def complexity_tolerance(
info.update({"Method": "bin"})

else:
raise ValueError("NeuroKit error: complexity_tolerance(): 'method' not recognized.")
raise ValueError(
"NeuroKit error: complexity_tolerance(): 'method' not recognized."
)

if show is True:
_optimize_tolerance_plot(r, info, method=method, signal=signal)
Expand All @@ -305,10 +313,11 @@ def complexity_tolerance(


def _optimize_tolerance_recurrence(signal, r_range=None, delay=None, dimension=None):

# Optimize missing parameters
if delay is None or dimension is None:
raise ValueError("If method='recurrence', both delay and dimension must be specified.")
raise ValueError(
"If method='recurrence', both delay and dimension must be specified."
)

# Compute distance matrix
emb = complexity_embedding(signal, delay=delay, dimension=dimension)
Expand All @@ -332,10 +341,11 @@ def _optimize_tolerance_recurrence(signal, r_range=None, delay=None, dimension=N


def _optimize_tolerance_maxapen(signal, r_range=None, delay=None, dimension=None):

# Optimize missing parameters
if delay is None or dimension is None:
raise ValueError("If method='maxApEn', both delay and dimension must be specified.")
raise ValueError(
"If method='maxApEn', both delay and dimension must be specified."
)

if r_range is None:
r_range = 40
Expand All @@ -358,20 +368,6 @@ def _optimize_tolerance_maxapen(signal, r_range=None, delay=None, dimension=None
return r_range[np.argmax(apens)], {"Values": r_range, "Scores": np.array(apens)}


def _entropy_apen(signal, delay, dimension, tolerance, **kwargs):
    """Compute approximate entropy (ApEn) from the phi statistics.

    Returns a tuple ``(apen, info)`` where ``apen`` is the absolute difference
    between the two phi values returned by ``_phi`` — presumably the phi
    statistics for embedding dimensions m and m+1, per the standard ApEn
    definition (TODO confirm against ``_phi``'s return convention) — and
    ``info`` is the dictionary of extra information passed through from
    ``_phi``.
    """

    # approximate=True selects the ApEn variant of the phi computation
    # (as opposed to the SampEn variant used elsewhere in this module).
    phi, info = _phi(
        signal,
        delay=delay,
        dimension=dimension,
        tolerance=tolerance,
        approximate=True,
        **kwargs,
    )

    return np.abs(np.subtract(phi[0], phi[1])), info


def _optimize_tolerance_neighbours(signal, r_range=None, delay=None, dimension=None):
if delay is None:
delay = 1
Expand Down Expand Up @@ -400,7 +396,6 @@ def _optimize_tolerance_neighbours(signal, r_range=None, delay=None, dimension=N


def _optimize_tolerance_bin(signal, delay=None, dimension=None):

# Optimize missing parameters
if delay is None or dimension is None:
raise ValueError("If method='bin', both delay and dimension must be specified.")
Expand All @@ -424,7 +419,6 @@ def _optimize_tolerance_bin(signal, delay=None, dimension=None):
# Plotting
# =============================================================================
def _optimize_tolerance_plot(r, info, ax=None, method="maxApEn", signal=None):

if ax is None:
fig, ax = plt.subplots()
else:
Expand Down
Loading