PyParamGUI MVP #5

Open
wants to merge 10 commits into base: main
Binary file added .DS_Store
Binary file not shown.
4 changes: 2 additions & 2 deletions .cruft.json
@@ -5,8 +5,8 @@
"context": {
"cookiecutter": {
"full_name": "Anmol Bhatia",
"email": "[email protected]",
"github_username": "glotaran",
"email": "[email protected]",
"github_username": "anmolbhatia05",
"project_name": "PyParamGUI",
"project_slug": "pyparamgui",
"project_slug_url": "pyparamgui",
3 changes: 3 additions & 0 deletions .gitignore
@@ -104,3 +104,6 @@ ENV/

# IDE settings
.vscode/

examples/*
!examples/*.ipynb
5 changes: 3 additions & 2 deletions .pre-commit-config.yaml
@@ -74,13 +74,13 @@ repos:
alias: flake8-docs
args:
- "--select=DOC"
- "--extend-ignore=DOC502"
- "--extend-ignore=DOC502,DOC601,DOC603,DOC101,DOC103,DOC201"
- "--color=always"
- "--require-return-section-when-returning-nothing=False"
- "--allow-init-docstring=True"
- "--skip-checking-short-docstrings=False"
name: "flake8 lint docstrings"
exclude: "^(docs/|tests?/)"
exclude: "^(docs/|tests?/|pyparamgui/generator.py)"
additional_dependencies: [pydoclint==0.5.3]

- repo: https://github.com/econchick/interrogate
@@ -97,6 +97,7 @@ repos:
types: [file]
types_or: [python, pyi, markdown, rst, jupyter]
args: [-L nnumber]
exclude: ^examples/

- repo: https://github.com/rhysd/actionlint
rev: "v1.7.1"
6 changes: 1 addition & 5 deletions README.md
@@ -14,11 +14,7 @@

[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)

pyglotaran notebook widgets for teaching parameter estimation examples

## Features

- TODO
A pyglotaran-based Jupyter notebook widget for teaching parameter estimation examples. It can simulate data, visualize it, and create the related model.yml, parameters.csv, and dataset.nc files. It is intended to help students learn the basics of the pyglotaran ecosystem.

## Contributors ✨

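
For orientation, a minimal notebook usage sketch matching the README description above; it assumes only the Widget entry point exported from pyparamgui/__init__.py later in this diff, and that the model.yml, parameters.csv and dataset.nc files are produced through the widget UI.

%env ANYWIDGET_HMR=1           # enable anywidget hot-module reload during development
from pyparamgui import Widget

widget = Widget()              # build the parameter-estimation teaching widget
widget                         # rendering it provides the simulation, visualization and file-export UI
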
Binary file added pyparamgui/.DS_Store
Binary file not shown.
14 changes: 13 additions & 1 deletion pyparamgui/__init__.py
@@ -3,5 +3,17 @@
from __future__ import annotations

__author__ = """Anmol Bhatia"""
__email__ = "[email protected]"
__email__ = "[email protected]"
__version__ = "0.0.1"

from pyparamgui.widget import Widget

__all__ = ["Widget"]
"""
Package Usage:
%env ANYWIDGET_HMR=1
from pyparamgui import Widget

widget = Widget()
widget
"""
1 change: 0 additions & 1 deletion pyparamgui/__main__.py

This file was deleted.

278 changes: 278 additions & 0 deletions pyparamgui/generator.py
@@ -0,0 +1,278 @@
"""The glotaran generator module."""

from __future__ import annotations

from typing import TYPE_CHECKING
from typing import Any
from typing import TypedDict
from typing import cast

from glotaran.builtin.io.yml.utils import write_dict
from glotaran.builtin.megacomplexes.decay import DecayParallelMegacomplex
from glotaran.builtin.megacomplexes.decay import DecaySequentialMegacomplex
from glotaran.builtin.megacomplexes.spectral import SpectralMegacomplex
from glotaran.model import Model

if TYPE_CHECKING:
from collections.abc import Callable


def _generate_decay_model(
*, nr_compartments: int, irf: bool, spectral: bool, decay_type: str
) -> dict[str, Any]:
"""Generate a decay model dictionary.

Parameters
----------
nr_compartments : int
The number of compartments.
irf : bool
Whether to add a gaussian irf.
spectral : bool
Whether to add a spectral model.
decay_type : str
The type of the decay.

Returns
-------
dict[str, Any]
The generated model dictionary.
"""
compartments = [f"species_{i+1}" for i in range(nr_compartments)]
rates = [f"rates.species_{i+1}" for i in range(nr_compartments)]

model: dict[str, Any] = {
"megacomplex": {
f"megacomplex_{decay_type}_decay": {
"type": f"decay-{decay_type}",
"compartments": compartments,
"rates": rates,
},
},
"dataset": {"dataset_1": {"megacomplex": [f"megacomplex_{decay_type}_decay"]}},
}
if spectral:
model["megacomplex"] |= {
"megacomplex_spectral": {
"type": "spectral",
"shape": {
compartment: f"shape_species_{i+1}"
for i, compartment in enumerate(compartments)
},
}
}
model["shape"] = {
f"shape_species_{i+1}": {
"type": "skewed-gaussian",
"amplitude": f"shapes.species_{i+1}.amplitude",
"location": f"shapes.species_{i+1}.location",
"width": f"shapes.species_{i+1}.width",
"skewness": f"shapes.species_{i+1}.skewness",
}
for i in range(nr_compartments)
}
model["dataset"]["dataset_1"] |= {
"global_megacomplex": ["megacomplex_spectral"],
"spectral_axis_inverted": True,
"spectral_axis_scale": 1e7,
}
if irf:
model["dataset"]["dataset_1"] |= {"irf": "gaussian_irf"}
model["irf"] = {
"gaussian_irf": {"type": "gaussian", "center": "irf.center", "width": "irf.width"},
}
return model


def generate_parallel_decay_model(
*, nr_compartments: int = 1, irf: bool = False
) -> dict[str, Any]:
"""Generate a parallel decay model dictionary.

Parameters
----------
nr_compartments : int
The number of compartments.
irf : bool
Whether to add a gaussian irf.

Returns
-------
dict[str, Any]
The generated model dictionary.
"""
return _generate_decay_model(
nr_compartments=nr_compartments, irf=irf, spectral=False, decay_type="parallel"
)


def generate_parallel_spectral_decay_model(
*, nr_compartments: int = 1, irf: bool = False
) -> dict[str, Any]:
"""Generate a parallel spectral decay model dictionary.

Parameters
----------
nr_compartments : int
The number of compartments.
irf : bool
Whether to add a gaussian irf.

Returns
-------
dict[str, Any]
The generated model dictionary.
"""
return _generate_decay_model(
nr_compartments=nr_compartments, irf=irf, spectral=True, decay_type="parallel"
)


def generate_sequential_decay_model(
*, nr_compartments: int = 1, irf: bool = False
) -> dict[str, Any]:
"""Generate a sequential decay model dictionary.

Parameters
----------
nr_compartments : int
The number of compartments.
irf : bool
Whether to add a gaussian irf.

Returns
-------
dict[str, Any]
The generated model dictionary.
"""
return _generate_decay_model(
nr_compartments=nr_compartments, irf=irf, spectral=False, decay_type="sequential"
)


def generate_sequential_spectral_decay_model(
*, nr_compartments: int = 1, irf: bool = False
) -> dict[str, Any]:
"""Generate a sequential spectral decay model dictionary.

Parameters
----------
nr_compartments : int
The number of compartments.
irf : bool
Whether to add a gaussian irf.

Returns
-------
dict[str, Any]
The generated model dictionary.
"""
return _generate_decay_model(
nr_compartments=nr_compartments, irf=irf, spectral=True, decay_type="sequential"
)


generators: dict[str, Callable] = {
"decay_parallel": generate_parallel_decay_model,
"spectral_decay_parallel": generate_parallel_spectral_decay_model,
"decay_sequential": generate_sequential_decay_model,
"spectral_decay_sequential": generate_sequential_spectral_decay_model,
}

available_generators: list[str] = list(generators.keys())


class GeneratorArguments(TypedDict, total=False):
"""Arguments used by ``generate_model`` and ``generate_model``.

Parameters
----------
nr_compartments : int
The number of compartments.
irf : bool
Whether to add a gaussian irf.

See Also
--------
generate_model
generate_model_yml
"""

nr_compartments: int
irf: bool


def generate_model(*, generator_name: str, generator_arguments: GeneratorArguments) -> Model:
"""Generate a model.

Parameters
----------
generator_name : str
The generator to use.
generator_arguments : GeneratorArguments
Arguments for the generator.

Returns
-------
Model
The generated model.

See Also
--------
generate_parallel_decay_model
generate_parallel_spectral_decay_model
generate_sequential_decay_model
generate_sequential_spectral_decay_model

Raises
------
ValueError
Raised when an unknown generator is specified.
"""
if generator_name not in generators:
msg = (
f"Unknown model generator '{generator_name}'. "
f"Known generators are: {list(generators.keys())}"
)
raise ValueError(msg)
model = generators[generator_name](**generator_arguments)
return Model.create_class_from_megacomplexes(
[DecayParallelMegacomplex, DecaySequentialMegacomplex, SpectralMegacomplex]
)(**model)


def generate_model_yml(*, generator_name: str, generator_arguments: GeneratorArguments) -> str:
"""Generate a model as yml string.

Parameters
----------
generator_name : str
The generator to use.
generator_arguments : GeneratorArguments
Arguments for the generator.

Returns
-------
str
The generated model yml string.

See Also
--------
generate_parallel_decay_model
generate_parallel_spectral_decay_model
generate_sequential_decay_model
generate_sequential_spectral_decay_model

Raises
------
ValueError
Raised when an unknown generator is specified.
"""
if generator_name not in generators:
msg = (
f"Unknown model generator '{generator_name}'. "
f"Known generators are: {list(generators.keys())}"
)
raise ValueError(msg)
model = generators[generator_name](**generator_arguments)
return cast(str, write_dict(model))
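
As a usage sketch (not part of this diff), the helpers above can be driven like this; the generator name and argument keys come from the generators dict and the GeneratorArguments TypedDict defined in pyparamgui/generator.py.

# Sketch: YAML for a two-compartment sequential spectral decay model with a gaussian IRF.
from pyparamgui.generator import generate_model, generate_model_yml

args = {"nr_compartments": 2, "irf": True}
model_yml = generate_model_yml(
    generator_name="spectral_decay_sequential",
    generator_arguments=args,
)
print(model_yml)  # YAML with megacomplex, shape, irf and dataset sections

# The same arguments can be turned into a Model object directly:
model = generate_model(
    generator_name="spectral_decay_sequential",
    generator_arguments=args,
)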