Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/master' into elitherl/workflows
Browse files Browse the repository at this point in the history
  • Loading branch information
pixelifytica committed Nov 27, 2023
2 parents ad11d0c + ec9d5fb commit 18a45b4
Show file tree
Hide file tree
Showing 38 changed files with 1,483 additions and 1,181 deletions.
27 changes: 16 additions & 11 deletions .github/workflows/docs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,33 +29,38 @@ jobs:
with:
python-version: 3.8 # should match value in .readthedocs.yml

- name: Install Poetry
uses: abatilo/[email protected]
with:
poetry-version: 1.4.2

- name: Configure poetry
shell: bash
run: python -m poetry config virtualenvs.in-project true

- name: Set up cache
uses: actions/cache@v2
id: cache
with:
path: .venv
key: venv-pip-${{ runner.os }}-${{ hashFiles('**/poetry.lock', '.github/workflows/docs.yml')}}
key: venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('**/poetry.lock') }}

- name: Ensure cache is healthy
if: steps.cache.outputs.cache-hit == 'true'
shell: bash
run: source .venv/bin/activate &&
(timeout 10s pip --version || (rm -rf .venv && echo "::set-output name=cache-bad::true"))
id: cache-checkup
run: timeout 10s python -m poetry run pip --version || rm -rf .venv

- name: Install dependencies
shell: bash
run: |
python -m poetry install
- name: Install plantuml
shell: bash
run: |
sudo apt-get update
sudo apt-get install -y plantuml
- name: Install dependencies
shell: bash
run: python -m venv .venv &&
source .venv/bin/activate &&
pip install .[docs]
if: ${{ steps.cache.outputs.cache-hit != 'true' || steps.cache-checkup.outputs.cache-bad == 'true' }}

- name: Test docs build successfully
run: source .venv/bin/activate &&
make -C doc html SPHINXOPTS="-W --keep-going"
2 changes: 1 addition & 1 deletion .github/workflows/quality.yml
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ jobs:
- name: Install Poetry
uses: abatilo/[email protected]
with:
poetry-version: 1.1.15
poetry-version: 1.4.2

- name: Configure poetry
shell: bash
Expand Down
7 changes: 5 additions & 2 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ jobs:
- name: Install Poetry
uses: abatilo/[email protected]
with:
poetry-version: 1.1.15
poetry-version: 1.4.2

- name: Configure poetry
shell: bash
Expand Down Expand Up @@ -65,6 +65,9 @@ jobs:
- name: "Upload coverage to Codecov"
if: ${{ always() }}
uses: codecov/codecov-action@v1
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./coverage.xml
fail_ci_if_error: true
verbose: true
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,6 @@ repos:
- flake8-import-order
name: "Check Code Style Using flake8"
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.942 # Use the sha / tag you want to point at
rev: v1.1.1 # Use the sha / tag you want to point at
hooks:
- id: mypy
28 changes: 18 additions & 10 deletions .readthedocs.yml
Original file line number Diff line number Diff line change
@@ -1,16 +1,24 @@
# .readthedocs.yml

version: 2

build:
os: "ubuntu-22.04"
apt_packages:
- plantuml
tools:
python: "3.8"
jobs:
post_create_environment:
# Install poetry
# https://python-poetry.org/docs/#installing-manually
- pip install poetry
# Tell poetry to not use a virtual environment
- poetry config virtualenvs.create false
post_install:
# Install dependencies with 'docs' dependency group
# https://python-poetry.org/docs/managing-dependencies/#dependency-groups
- poetry install --with docs --without dev

sphinx:
configuration: doc/conf.py
fail_on_warning: true

python:
version: 3.8
install:
- path: .
method: pip
extra_requirements:
- docs
system_packages: true
7 changes: 3 additions & 4 deletions indica/converters/abstractconverter.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@
from typing import Optional
from typing import Tuple

import numpy as np
from xarray import DataArray
from xarray import zeros_like

Expand Down Expand Up @@ -295,7 +294,7 @@ def distance(
R, z = cast(Tuple[DataArray, DataArray], self.convert_to_Rz(x1, x2, t))
if isinstance(R, (int, float)) or isinstance(z, (int, float)):
raise ValueError("Arguments x1 and x2 must be xarray DataArray objects.")
spacings = np.sqrt(R.diff(direction) ** 2 + z.diff(direction) ** 2)
spacings = (R.diff(direction) ** 2 + z.diff(direction) ** 2) ** 0.5
result = zeros_like(R.broadcast_like(z))
result[{direction: slice(1, None)}] = spacings.cumsum(direction)
return result
Expand All @@ -304,9 +303,9 @@ def encode(self) -> str:
"""Returns a JSON representation of this object. Should be sufficient
to recreate it identically from scratch (except for the
equilibrium)."""
return ""
raise NotImplementedError

@staticmethod
def decode(json: str) -> "CoordinateTransform":
"""Takes some JSON and decodes it into a CoordinateTransform object."""
pass
raise NotImplementedError
9 changes: 5 additions & 4 deletions indica/converters/line_of_sight.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
"""

from typing import cast
from typing import Optional
from typing import Tuple

import numpy as np
Expand Down Expand Up @@ -205,9 +206,9 @@ def distance(
x = self.x_start + (self.x_end - self.x_start) * x2
y = self.y_start + (self.y_end - self.y_start) * x2
z = self.z_start + (self.z_end - self.z_start) * x2
spacings = np.sqrt(
spacings = (
x.diff(direction) ** 2 + z.diff(direction) ** 2 + y.diff(direction) ** 2
)
) ** 0.5
result = zeros_like(x)
result[{direction: slice(1, None)}] = spacings.cumsum(direction)
return result.values
Expand All @@ -225,7 +226,7 @@ def set_dl(self, dl: float):
)

# Find the number of points
npts = np.ceil(los_length.data / dl).astype(int)
npts = np.ceil(los_length.data / dl).astype(int) # type: ignore

# Set dl, calculate dl
ind = np.linspace(0, 1, npts, dtype=float)
Expand All @@ -237,7 +238,7 @@ def set_dl(self, dl: float):
def assign_flux_transform(self, flux_transform: FluxSurfaceCoordinates):
self.flux_transform = flux_transform

def convert_to_rho(self, t: float = None):
def convert_to_rho(self, t: Optional[float] = None):
self.rho = self.flux_transform.convert_from_Rz(self.R, self.z, t=t)


Expand Down
4 changes: 2 additions & 2 deletions indica/converters/lines_of_sight.py
Original file line number Diff line number Diff line change
Expand Up @@ -225,9 +225,9 @@ def distance(
x = x_s + (x_e - x_s) * x2
y = y_s + (y_e - y_s) * x2
z = z_s + (z_e - z_s) * x2
spacings = np.sqrt(
spacings = (
x.diff(direction) ** 2 + z.diff(direction) ** 2 + y.diff(direction) ** 2
)
) ** 0.5
result = zeros_like(x)
result[{direction: slice(1, None)}] = spacings.cumsum(direction)
return result
Expand Down
4 changes: 3 additions & 1 deletion indica/converters/magnetic.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,9 @@ def func(R: float) -> float:
f"scipy.optimize.root_scalar failed to converge with flag {result.flag}"
)

return apply_ufunc(find_root, x1, x2, t, vectorize=True), x2 + self.z_los
# apply_ufunc vectorize=True does not seem to be working
vfunc = np.vectorize(find_root)
return apply_ufunc(vfunc, x1, x2, t, vectorize=False), x2 + self.z_los

def convert_from_Rz(
self, R: LabeledArray, z: LabeledArray, t: LabeledArray
Expand Down
36 changes: 18 additions & 18 deletions indica/converters/time.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

import numpy as np
from xarray import DataArray
from xarray.core.types import InterpOptions


def strip_provenance(arr: DataArray):
Expand All @@ -19,7 +20,7 @@ def convert_in_time(
tend: float,
frequency: float,
data: DataArray,
method: str = "linear",
method: InterpOptions = "linear",
) -> DataArray:
"""Interpolate or bin (as appropriate) the given data along the time
axis, discarding data before or after the limits.
Expand Down Expand Up @@ -57,7 +58,7 @@ def convert_in_time_dt(
tend: float,
dt: float,
data: DataArray,
method: str = "linear",
method: InterpOptions = "linear",
) -> DataArray:
"""
Interpolate or bin given data along the time axis, discarding data before
Expand Down Expand Up @@ -90,7 +91,7 @@ def convert_in_time_dt(


def interpolate_to_time_labels(
tlabels: np.ndarray, data: DataArray, method: str = "linear"
tlabels: np.ndarray, data: DataArray, method: InterpOptions = "linear"
) -> DataArray:
"""
Interpolate data to sit on the specified time labels.
Expand Down Expand Up @@ -125,6 +126,9 @@ def interpolate_to_time_labels(
)
interpolated.attrs["dropped"] = dropped

if "transform" in data.attrs:
interpolated.attrs["transform"] = data.attrs["transform"]

strip_provenance(interpolated)

return interpolated
Expand Down Expand Up @@ -163,17 +167,14 @@ def bin_to_time_labels(tlabels: np.ndarray, data: DataArray) -> DataArray:

if "error" in data.attrs:
grouped = (
data.attrs["error"]
(data.attrs["error"] ** 2)
.sel(t=slice(tbins[0], tbins[-1]))
.groupby_bins("t", tbins, labels=tlabels)
)
uncertainty = np.sqrt(
grouped.reduce(
lambda x, axis: np.sum(x**2, axis) / np.size(x, axis) ** 2, "t"
)
)
error = np.sqrt(uncertainty**2 + stdev**2)
uncertainty_square = grouped.sum("t") / grouped.count("t")
error = (uncertainty_square + stdev**2) ** 0.5
averaged.attrs["error"] = error.rename(t_bins="t")

if "dropped" in data.attrs:
grouped = (
data.attrs["dropped"]
Expand All @@ -190,14 +191,13 @@ def bin_to_time_labels(tlabels: np.ndarray, data: DataArray) -> DataArray:
.sel(t=slice(tbins[0], tbins[-1]))
.groupby_bins("t", tbins, labels=tlabels)
)
uncertainty = np.sqrt(
grouped.reduce(
lambda x, axis: np.sum(x**2, axis) / np.size(x, axis) ** 2, "t"
)
)
error = np.sqrt(uncertainty**2 + stdev**2)
uncertainty_square = grouped.sum("t") / grouped.count("t")
error = (uncertainty_square + stdev**2) ** 0.5
averaged.attrs["dropped"].attrs["error"] = error.rename(t_bins="t")

if "transform" in data.attrs:
averaged.attrs["transform"] = data.attrs["transform"]

strip_provenance(averaged)

return averaged.rename(t_bins="t")
Expand All @@ -208,7 +208,7 @@ def interpolate_in_time(
tend: float,
frequency: float,
data: DataArray,
method: str = "linear",
method: InterpOptions = "linear",
) -> DataArray:
"""Interpolate the given data along the time axis, discarding data
before or after the limits.
Expand Down Expand Up @@ -245,7 +245,7 @@ def interpolate_in_time_dt(
tend: float,
dt: float,
data: DataArray,
method: str = "linear",
method: InterpOptions = "linear",
) -> DataArray:
"""Interpolate the given data along the time axis, discarding data
before or after the limits.
Expand Down
13 changes: 7 additions & 6 deletions indica/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@
from scipy.interpolate import InterpolatedUnivariateSpline
from scipy.interpolate import RectBivariateSpline
import xarray as xr
from xarray.core.types import InterpOptions
from xarray.core.utils import either_dict_or_kwargs

from . import session
Expand Down Expand Up @@ -159,7 +160,7 @@ def invert_interp(
target: str,
new_dims: Optional[Union[Tuple[str, ...], str]] = None,
coords: Optional[Mapping[Hashable, ArrayLike]] = None,
method: str = "linear",
method: InterpOptions = "linear",
assume_sorted: bool = False,
kwargs: Optional[Mapping[str, Any]] = None,
**coords_kwargs: ArrayLike,
Expand Down Expand Up @@ -279,7 +280,7 @@ def invert_root(
guess: Optional[ArrayLike] = None,
new_dims: Optional[Union[Tuple[str, ...], str]] = None,
coords: Optional[Mapping[Hashable, ArrayLike]] = None,
method: str = "linear",
method: InterpOptions = "linear",
assume_sorted: bool = False,
kwargs: Optional[Mapping[str, Any]] = None,
**coords_kwargs: ArrayLike,
Expand Down Expand Up @@ -787,10 +788,10 @@ def check_datatype(self, data_type: ArrayType) -> Tuple[bool, Optional[str]]:
status
Whether the datatype of this array matches the argument.
message
If ``status == False``, an explaination of why.
If ``status == False``, an explanation of why.
"""
pass
raise NotImplementedError

def _update_prov_for_equilibrium(
self,
Expand Down Expand Up @@ -1043,7 +1044,7 @@ def attach(
(unless the value is the same). This behaviour can be
overridden with the `overwrite` argument.
This function will fail if the specific datatyp for ``array``
This function will fail if the specific datatype for ``array``
differs from that for this Dataset. It will also fail if the
dimensions of ``array`` differ from those of the Dataset.
Expand Down Expand Up @@ -1137,7 +1138,7 @@ def check_datatype(self, datatype: DatasetType) -> Tuple[bool, Optional[str]]:
If ``status == False``, an explanation of why.
"""
pass
raise NotImplementedError

def inclusive_timeslice(self, t_start: float, t_end: float) -> xr.Dataset:
"""
Expand Down
Loading

0 comments on commit 18a45b4

Please sign in to comment.