diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 00000000..826b56de --- /dev/null +++ b/.coveragerc @@ -0,0 +1,2 @@ +[run] +omit=venv/* diff --git a/.github/actions/setup-nox/action.yml b/.github/actions/setup-nox/action.yml deleted file mode 100644 index 69359dee..00000000 --- a/.github/actions/setup-nox/action.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: Setup Nox -description: 'Prepares all python versions for nox' - -runs: - using: composite - steps: - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.7" - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.8" - - uses: actions/setup-python@v3 - with: - python-version: "pypy-3.9" - - uses: actions/setup-python@v3 - with: - python-version: "2.7" - - uses: actions/setup-python@v3 - with: - python-version: "3.5" - - uses: actions/setup-python@v3 - with: - python-version: "3.6" - - uses: actions/setup-python@v3 - with: - python-version: "3.7" - - uses: actions/setup-python@v3 - with: - python-version: "3.8" - - uses: actions/setup-python@v3 - with: - python-version: "3.9" - - uses: actions/setup-python@v3 - with: - python-version: "3.10" - - name: "Install nox" - run: pipx install nox - shell: bash diff --git a/.github/deadpendency.yaml b/.github/deadpendency.yaml index 72aebf4a..03111d30 100644 --- a/.github/deadpendency.yaml +++ b/.github/deadpendency.yaml @@ -1,37 +1,10 @@ ignore-failures: - python: - - aioserial - - getmac - - backcall - - commonmark - - entrypoints - - future - - ipython-genutils - - pickleshare - - pyasn1 - - pyasn1-modules - - webencodings - - wcwidth + python: + - pyserial additional-deps: python: # name can be included so Deadpendency can load the package details in the registry - - name: aiofiles - repo: Tinche/aiofiles - - name: asyncua repo: FreeOpcUa/opcua-asyncio - - - name: ipywidgets - repo: jupyter-widgets/ipywidgets - - - name: lmfit - repo: lmfit/lmfit-py - - name: lxml repo: lxml/lxml - - - name: nmrglue - repo: jjhelmus/nmrglue - - - 
name: opcua - repo: FreeOpcUa/python-opcua diff --git a/.github/workflows/publish_pypi.yml b/.github/workflows/publish_pypi.yml index 39630b28..8a2a0497 100644 --- a/.github/workflows/publish_pypi.yml +++ b/.github/workflows/publish_pypi.yml @@ -11,26 +11,29 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@master - - name: Set up Python 3.9 - uses: actions/setup-python@v1 + - name: Set up Python 3.10 + uses: actions/setup-python@v4 with: - python-version: 3.9 + python-version: '3.10' - name: Install pypa/build run: | python -m pip install build --user + - name: Build a binary wheel and a source tarball run: | python -m build --sdist --wheel --outdir dist/ . + - name: Publish distribution 📦 to Test PyPI - uses: pypa/gh-action-pypi-publish@master + uses: pypa/gh-action-pypi-publish@release/v1 with: user: dcambie password: ${{ secrets.TEST_PYPI_API_TOKEN }} repository_url: https://test.pypi.org/legacy/ skip_existing: true + - name: Publish distribution 📦 to PyPI - if: startsWith(github.ref, 'refs/tags/v') - uses: pypa/gh-action-pypi-publish@master + if: startsWith(github.ref_name, 'v') + uses: pypa/gh-action-pypi-publish@release/v1 with: user: __token__ password: ${{ secrets.PYPI_API_TOKEN }} @@ -40,9 +43,9 @@ jobs: steps: - uses: actions/checkout@master - name: Publish release 📦 to GitHub - if: startsWith(github.ref, 'refs/tags/v') + if: startsWith(github.ref_name, 'v') uses: elgohr/Github-Release-Action@master env: GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }} with: - args: AutomatedRelease + title: flowchem ${{ github.ref_name }} diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 21f27e7a..df147ce4 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -12,19 +12,25 @@ on: jobs: build: runs-on: ${{ matrix.operating-system }} + timeout-minutes: 30 strategy: matrix: operating-system: - ubuntu-latest - - windows-latest - - macOS-latest +# - windows-latest +# - macOS-latest steps: - name: 
Checkout uses: actions/checkout@v3 - - name: Setup nox - uses: ./.github/actions/setup-nox + - name: Set up Python 3.11 + uses: actions/setup-python@v4 + with: + python-version: '3.11' - - name: Run nox - run: nox + - name: Install flowchem + run: python -m pip install .[dev] + + - name: Run tox + run: tox diff --git a/.gitignore b/.gitignore index e8a71ad5..d64973dc 100644 --- a/.gitignore +++ b/.gitignore @@ -127,4 +127,5 @@ dmypy.json # Pyre type checker .pyre/ -/flowchem/components/devices/Vapourtec/commands.py +/flowchem/devices/Vapourtec/commands.py +/src/flowchem/devices/knauer_hplc_nda.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 453afcb4..92628b1d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,17 +1,44 @@ # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks -fail_fast: true +fail_fast: false repos: - repo: https://github.com/psf/black - rev: 22.1.0 + rev: 22.8.0 hooks: - id: black language_version: python3 + - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.2.0 + rev: v4.3.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - id: check-yaml - id: check-added-large-files + +- repo: https://github.com/asottile/reorder_python_imports + rev: v3.8.2 + hooks: + - id: reorder-python-imports + args: ["--py39-plus", "--application-directories=.:src"] + +- repo: https://github.com/asottile/pyupgrade + rev: v2.38.0 + hooks: + - id: pyupgrade + args: [--py310-plus] + +#- repo: https://gitlab.com/pycqa/flake8 +# rev: 5.0.4 +# hooks: +# - id: flake8 +# additional_dependencies: [ +# 'flake8-bugbear', +## 'flake8-comprehensions', +# 'flake8-deprecated', +## 'flake8-docstrings', +# 'flake8-pep3101', +# 'flake8-string-format', +# ] +# args: ['--count', '--extend-ignore=E501,E203', '--show-source', '--statistics'] diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 00000000..1f3750a0 --- /dev/null +++ b/.readthedocs.yml @@ 
-0,0 +1,37 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the version of Python and other tools you might need +build: + os: ubuntu-20.04 + tools: + python: "3.10" + # You can also specify other tool versions: + # nodejs: "16" + # rust: "1.55" + # golang: "1.17" + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: docs/conf.py + +# If using Sphinx, optionally build your docs in additional formats such as PDF +# formats: +# - pdf + +# Optionally declare the Python requirements required to build your documentation +#python: +# install: +# - requirements: docs/requirements.txt + + +python: + install: + - method: pip + path: . + extra_requirements: + - docs diff --git a/CODE_of_CONDUCT.md b/CODE_of_CONDUCT.md new file mode 100644 index 00000000..35b51670 --- /dev/null +++ b/CODE_of_CONDUCT.md @@ -0,0 +1,132 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, caste, color, religion, or sexual +identity and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. 
+ +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +* Focusing on what is best not just for us as individuals, but for the overall + community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery, and sexual attention or advances of + any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email address, + without their explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official e-mail address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +[INSERT CONTACT METHOD]. +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series of +actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or permanent +ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. 
Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within the +community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.1, available at +[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1]. + +Community Impact Guidelines were inspired by +[Mozilla's code of conduct enforcement ladder][Mozilla CoC]. + +For answers to common questions about this code of conduct, see the FAQ at +[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at +[https://www.contributor-covenant.org/translations][translations]. + +[homepage]: https://www.contributor-covenant.org +[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html +[Mozilla CoC]: https://github.com/mozilla/diversity +[FAQ]: https://www.contributor-covenant.org/faq +[translations]: https://www.contributor-covenant.org/translations diff --git a/LICENSE b/LICENSE index f0f732b1..449cea48 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright 2021 +Copyright 2022 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 6d279030..00000000 --- a/MANIFEST.in +++ /dev/null @@ -1,3 +0,0 @@ -graft flowchem -global-exclude __pycache__ -global-exclude *.py[cod] diff --git a/README.md b/README.md index 
01a62629..62da763c 100644 --- a/README.md +++ b/README.md @@ -4,37 +4,46 @@ Welcome to flowchem ![github-actions](https://github.com/cambiegroup/flowchem/actions/workflows/python-app.yml/badge.svg) [![PyPI version fury.io](https://badge.fury.io/py/flowchem.svg)](https://pypi.org/project/flowchem/) +[![Documentation Status](https://readthedocs.org/projects/flowchem/badge/?version=latest)](https://flowchem.readthedocs.io/en/latest/?badge=latest) [![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square)](http://makeapullrequest.com) [![MIT license](https://img.shields.io/badge/License-MIT-blue.svg)](https://lbesson.mit-license.org/) [![DOI](https://zenodo.org/badge/300656785.svg)](https://zenodo.org/badge/latestdoi/300656785) +[![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa.svg)](CODE_of_CONDUCT.md) -Flowchem is a python library to automated flow chemistry experiments. +Flowchem is a python library to control a variety of instruments commonly found in chemistry labs. -Currently, the following instruments can be controlled via flowchem: +### Overview +Using flowchem is simple. You only need to +1. **Create a configuration file** with the connection parameters for the devices you want to control (see the +[User Guide](https://flowchem.readthedocs.io/en/latest/user_guide.html) for details). +2. **Run `flowchem my_device_config_file.toml`** with the name of your configuration file +3. **Done**! +A web server will be created serving a RESTful API endpoint for each device, directly +usable in browser or programmatically. + +### Supported devices +Currently, the following instruments are supported, but we are open to contributions and the list keeps expanding! 
- Pumps (Knauer P2.1, Harvard Apparatus Elite 11, Hamilton ML600) - Valves (ViciValco and Knauer) - Thermostat (Huber) - Analytical instruments (Magritek Spinsolve benchtop NMR and Mattler Toledo FlowIR) - General purpose sensors-actuators from Phidgets (e.g. 4...20 mA sensor to interface with Swagelok pressure sensors) - -## Table of Contents -* [Installation](#installation) -* [Usage](#usage) -* [License](#license) -* [Questions](#questions) - + - ... [add support for a new device](https://flowchem.readthedocs.io/en/latest/add_new_device_type.html)! ## Installation -```bash -pip install flowchem +Python 3.10 is needed, and it is suggested to install flowchem with pipx. +You can install pipx and flowchem as follows: +```shell +pip install pipx +pipx ensurepath +pipx install flowchem ``` - -## Usage -{WRITE ME} +## Documentation +You can find the documentation online on [flowchem.readthedocs.io](https://flowchem.readthedocs.io/en/latest/). ## License This project is released under the terms of the MIT License. ## Questions -For questions about this project, fell free to open a GitHub issue, or reach out by email at dario.cambie@mpikg.mpg.de. +For questions about this project, feel free to open a GitHub issue, or reach out [by email](mailto:2422614+dcambie@users.noreply.github.com). diff --git a/docs/Elite11.md b/docs/Elite11.md deleted file mode 100644 index d0d26e87..00000000 --- a/docs/Elite11.md +++ /dev/null @@ -1,89 +0,0 @@ -Harvard Apparatus Elite 11 -========================== - -Flowchem implements the Protocol11 syntax to communicate with Elite11 pumps. - -## Connection -The USB type-B port on the back of the pump, once connected to a PC, creates a virtual serial port (drivers are -auto-installed on Windows and not needed on Linux).
- -To identify the serial port to which the pump is connected to you can use the utility function `elite11_finder()` as -follows: - -```pycon ->>> from flowchem.devices.Harvard_Apparatus.Elite11_finder import elite11_finder ->>> elite11_finder() -Looking for pump on COM3... -Looking for pump on COM4... -Found a pump with address 06 on COM4! -Out[5]: {'COM4'} - -``` - - .. note:: -Multiple pumps can be daisy chained on the same serial port provided they all have different address and that the pump -connected to the PC has address 0. See manufacturer documentation for more info. - -### Model type -Note that there are two models of Elite11, an "infuse only" and an "infuse and withdraw" pump. -If you only need infuse capabilities just use: -```python -from flowchem import Elite11InfuseOnly -``` -this will work with both pump models. -On the other hand, if you need withdraw commands you need: -```python -from flowchem import Elite11InfuseWithdraw -``` -whose `initialize()` method will take care of ensuring that the pump supports withdrawing. - -The constructor and all the methods are the same for both `Elite11` pumps, with the exception of the withdrawing commands being -only available in `Elite11InfuseWithdraw`. - - -## Test Connection -Now that you know the serial port your pump is connected to, and the model of your pump, you can instantiate it and test the connection. -```python -from flowchem import HarvardApparatusPumpIO, Elite11InfuseWithdraw -pumpio = HarvardApparatusPumpIO(port='COM4') -pump1 = Elite11InfuseWithdraw(pump_io=pumpio, diameter=10.2, syringe_volume=10, address=0) -pump2 = Elite11InfuseWithdraw(pump_io=pumpio, diameter=10.2, syringe_volume=10, address=1) - -``` -Alternatively, the `from_config()` classmethod can be used to instantiate the pump without the need of creating an -HarvardApparatusPumpIO object (will be done automatically and shared across pumps on the same serial port). 
-```python -from flowchem import Elite11InfuseWithdraw -pump = Elite11InfuseWithdraw.from_config(port="COM4", address=0, diameter="14.5 mm", syringe_volume="10 ml", name="acetone") -# Note that the constructor above is equivalent to the following -pump_config = { - 'port': 'COM4', - 'address': 0, - 'name': "acetone", - 'diameter': "14.6 mm", - 'syringe_volume': "10 ml" -} -pump = Elite11InfuseWithdraw.from_config(**pump_config) -# ... which is what is actually used when a device configuration is provided in yaml format e.g. via graph file. -``` - -## Initialization -The first step after the creation of the pump object is the initialization, via the `initialize()` method, e.g.: -```python -await pump.initialize() -``` -Note that the `initialize()` method returns a coroutine, so it must be called with `await` in order to wait for the pump to be ready. -If you are not familiar with asynchronous syntax in python you can just call it with `asyncio.run()`. -```python -import asyncio -asyncio.run(pump.initialize()) -``` -The initialization is needed to find the pump address if non was provided (the autodetection only works if a single pump is -present on the serial port provided), to set the syringe volume and diameter and to ensure that the pump supports -withdrawing moves if it has been initialized as `Elite11InfuseWithdraw`. - -## Usage -Once you've initialized the pump, you can use all the methods it exposes. See FIXME:add sphinx autodoc link for the API reference. - -## API docs -Autogenerate this diff --git a/docs/IDEAS.md b/docs/IDEAS.md deleted file mode 100644 index 119e3c09..00000000 --- a/docs/IDEAS.md +++ /dev/null @@ -1,4 +0,0 @@ -# Ideas and notes - -### Injection loops -Treat loop as a a kind of sequential syringe pump. Can look up attached devices. need loading and dispensing capability. Both entails valve switching and activating one of 2 pumps. 
diff --git a/docs/ML600.md b/docs/ML600.md deleted file mode 100644 index b8787203..00000000 --- a/docs/ML600.md +++ /dev/null @@ -1,25 +0,0 @@ -Hamilton ML600 -============== - -Flowchem implements the Protocol1/RNO+ syntax to communicate with ML-600 pumps. - -## Table of Contents -* [Connection](#connection) -* [Example](#example) -* [API docs](#API docs) - -## Connection -Serial communication (RS-232) with the pump takes place over a standard serial cable (DB-9 male/female connector). -To identify the serial port to which the pump is connected to you can use the utility script `ML600_finder.py` as follows: - - -Follow the manufacturer instruction - -## Example -lalla - -## MultiPump - - -## API docs -Autogenerate this diff --git a/docs/Principles.md b/docs/Principles.md deleted file mode 100644 index 11f20950..00000000 --- a/docs/Principles.md +++ /dev/null @@ -1,22 +0,0 @@ -Principles: -- No-code platform: device settings via YAML file and method calls via OpenAPI (directly usable in the browser). -- only specify settings in a config file, `pipex` the module and use the OpenAPI (e.g. testing via web-broswer). -- cross-platform HTTP-based API interface for interoperability. (This circumvents the issues with python dependencies and versioning conflicts and allows us to use modern python.) -- it should still be possible to interact with the device object directly, i.e. without the HTTP interface, for power-users. - -Implementation design: -- The end user should not need any knowledge of any implementation detail. Underlying complexity has to be handled internally and hidden to the user. -- Device objects should only raise Exceptions upon instantiation. - - the connection to the device is implicit in the object instantiation - - raising warning is the preferred way to signal errors during execution as it allows the control code to continue w.g. with cleanup - - communication streams are passed to the device constructors (i.e. dependency injection). 
This simplifies testing. -- Each device module should be independent. Code sharing is possible via `flowchem.analysis` (or `flowchem.utils` et simil.) -- Each device module should be accompanied by tests and documentation (at least in form of examples). -- Device objects should use generic `flowchem.exceptions` or sublcasses thereof. - - -Inspired by many packages with similar aims, including (in alphabetical order): -- [Chemios](https://github.com/Chemios/chemios) -- [ChemOS](https://github.com/aspuru-guzik-group/ChemOS) -- [MechWolf](https://github.com/MechWolf/MechWolf) -- [Octopus](https://github.com/richardingham/octopus) diff --git a/docs/conf.py b/docs/conf.py index 1e33c3a0..9d35382a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -3,26 +3,30 @@ # This file only contains a selection of the most common options. For a full # list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html - # -- Path setup -------------------------------------------------------------- - # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
-# -# import os -# import sys -# sys.path.insert(0, os.path.abspath('.')) +import datetime +import os +import sys +from importlib import metadata +sys.path.insert(0, os.path.abspath("../src")) +print(sys.path) -# -- Project information ----------------------------------------------------- +CONF_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__))) +ROOT_DIR = os.path.abspath(os.path.join(CONF_DIR, os.pardir)) -project = "Flowchem" -copyright = "2021, Dario Cambie, Jakob Wolf" -author = "Dario Cambie, Jakob Wolf" -# The full version, including alpha/beta/rc tags -release = "0.0.1" +# -- Project information ----------------------------------------------------- + +# Extract from +project = "flowchem" +YEAR = datetime.date.today().strftime("%Y") +author = "Dario Cambié" +copyright = f"{YEAR}, {author}" +release = metadata.version("flowchem") # -- General configuration --------------------------------------------------- @@ -30,7 +34,31 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = [] +extensions = [ + "myst_parser", + "sphinx.ext.autodoc", + "sphinx.ext.napoleon", + "sphinxcontrib.openapi", + "sphinxcontrib.httpdomain", +] + +source_suffix = [".rst", ".md"] +autodoc_member_order = "bysource" + +myst_enable_extensions = [ + "amsmath", + "colon_fence", + "deflist", + "dollarmath", + "fieldlist", + "html_admonition", + "html_image", + "replacements", + "smartquotes", + "strikethrough", + "substitution", + "tasklist", +] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] @@ -46,9 +74,10 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = "alabaster" +html_theme = "sphinx_rtd_theme" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. 
They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] +html_static_path = [] +# html_static_path = ["_static"] diff --git a/docs/contributing/add_device/add_as_plugin.md b/docs/contributing/add_device/add_as_plugin.md new file mode 100644 index 00000000..c4ee3440 --- /dev/null +++ b/docs/contributing/add_device/add_as_plugin.md @@ -0,0 +1 @@ +# Add new device as external plugin diff --git a/docs/contributing/add_device/add_to_flowchem.md b/docs/contributing/add_device/add_to_flowchem.md new file mode 100644 index 00000000..25bd5237 --- /dev/null +++ b/docs/contributing/add_device/add_to_flowchem.md @@ -0,0 +1,67 @@ +# Add new device to flowchem + +## Code +In the `flowchem.device` subpackage, the device modules are organized in folders by manufacturer. +Since this is the first device from _Weasley & Weasley_ in flowchem, we need to create a new folder under +`/flowchem/devices`. Let's call it `/flowchem/devices/Weasley` to avoid the use of special characters ;) + +In this folder we will write a _module_ (i.e. a python file 🐍) called `ExtendableEar.py` to control our magic device. +We create it piece by piece, but the content of the module will look like this: + +```python +from flowchem.devices.flowchem_device import FlowchemDevice + + +class ExtendeableEar(FlowchemDevice): + """Our virtual Extendable Ear!""" + + def __init__(self): + ... + +``` + + + +```python +from flowchem.devices.flowchem_device import FlowchemDevice + + +class ExtendeableEar(FlowchemDevice): + """Our virtual Extendable Ear!""" + + def __init__(self): + ... + + async def initialize(self): + ... + + async def deploy(self): + ... + + async def listen_for(self, seconds: str): + ... + + async def retract(self): + ... 
+ + + +``` +- instantiable from dict or if not possible with from_config @classmethod + +Finally, to register your new device type with flowchem +add it to `__init__.py` to ensure it is available for import with a statement like +```python +from flowchem.devices import ExtendableEar +``` + + +## Documentation +Write a brief description of the class you created in the /docs/devices/ folder, following the same manufacturer-based hierarchy. +Ideally, the manufacturer's communication manual is added to the docs + +:::{note} +This is an implementation detail that users do not have to care about since the `flowchem.device` submodules will hide +this nested layer via their `__init__.py`. If you do not understand what this means it is not important, and you can +safely skip this note if you follow this guide. +::: diff --git a/docs/contributing/add_device/index.md b/docs/contributing/add_device/index.md new file mode 100644 index 00000000..1f884ae4 --- /dev/null +++ b/docs/contributing/add_device/index.md @@ -0,0 +1,26 @@ +# Add support for new devices + +If you want to add support for a new device-type this is the page for you! +Let's assume you got a new lab device, an _✨Extendable Ear✨_ manufactured by _Weasley & Weasley_. +And of course you want to control it via flowchem. Solid idea!👏 + +You have two possibilities: +* add support directly into flowchem (fork the repo, add device-specific code and +create a pull request) +* add support via a plugin (e.g. a `flowchem-extendable-ear` package) + +In general, devices whose support needs the addition of new dependencies to flowchem are better packaged as plugins, +while generally useful modules are ideally embedded with flowchem. +This is to limit the number of dependencies in `flowchem` while enabling support for devices with more complex needs.
+ +For example, a device only needing serial communication such as a syringe pump is ideal for native support, while the +interface to Spinsolve NMR, that needs an external library for XML parsing, is provided as a plugin. + +```{toctree} +:maxdepth: 2 +:caption: Add device to flowchem + +add_to_flowchem +add_as_plugin + +``` diff --git a/docs/contributing/community.md b/docs/contributing/community.md new file mode 100644 index 00000000..f0e6b087 --- /dev/null +++ b/docs/contributing/community.md @@ -0,0 +1,6 @@ +# Community + +We aim at creating a community around flowchem, by incentivizing the participation from a diverse group of +[contributors](./index.md). +Please read the Contributor Covenant we adopted as Code of Conduct for guidance on how to interact with others in a way +that makes the community thrive. diff --git a/docs/contributing/design_principles.md b/docs/contributing/design_principles.md new file mode 100644 index 00000000..29aa7bef --- /dev/null +++ b/docs/contributing/design_principles.md @@ -0,0 +1,74 @@ +# Software architecture + +## General +### In which order are devices initialized +When `flowchem` is called with a configuration file via CLI, the following happens: +1. The configuration is parsed, and all the hardware device objects are created in the order they appear in the file. +2. Communication is established for all the hardware devices via the `async` method `initialize()`. +3. The components of each hardware object are collected, their routes added to the API server and advertised via mDNS. +4. Flowchem is ready to be used. + +It follows that: +* All the code in components can assume that a valid connection to the hw device is already in place. +* Components can use introspection e.g. to determine if a pump has withdrawing capabilities or not. + +### Why no device orchestration functionalities are included? +Lab automation holds great potential, yet research labs rarely reuse existing code.
+One reason for this is the lack of modularity in the existing lab-automation solutions. While a monolithic approach is faster to implement, it lacks flexibility. + +We designed flowchem to be a solid foundation for other modules to be based on. + +We try to follow the unix philosophy: do one thing, and do it well. Flowchem provides uniform API endpoints for the heterogeneous environment of lab devices. + +### Why Python 3.10? +The recommended use of flowchem is to run it as standalone app to provide homogeneous REST API access to the variegated landscape of lab devices. The direct import of device objects is highly discouraged. +This allows us to use a recent version of Python and to exploit all the newly introduced features. +For example, in the codebase are used the walrus operator (`:=`) and `importlib.metadata` introduced in 3.8, the dict merge with OR operator introduced in 3.9 and the type hints unions with the OR operator introduced in 3.10. We are looking forward to the inclusion of `tomllib` in the stdlib for 3.11 to drop the external dependency on `tomli`. + +### Why FastAPI? +To create the API endpoints we use fastAPI mainly for its simplicity and for the ability to automatically generate openAPI specs from the type hints. +The async aspects were particularly appealing since the communication with lab devices can take relatively long time (especially on slow protocols such as serial communication at 9600 baud) thus impacting the responsiveness of the API even at low requests/second. + +### Why Pint? +Different devices use different units for the same quantities. For example, among pumps, Knauer HPLC pumps use ul/min as base unit, Harvard Apparatus syringe pumps can be set with different units from the nl/h to the ml/s while the Hamilton ML600 pumps have a custom steps-per-stroke parameter that controls the flow rate. 
+Moreover, to offer a uniform experience and prevent errors, the same units should be used by the public API across different devices, yet the orders of magnitude involved are often experiment-specific. + +To solve all of these problems we decided to widely adopt [pint](https://pint.readthedocs.io/en/stable/) to represent any physical quantity. Particularly attractive was the possibility of serializing and de-serializing the quantities to strings with minor losses in precision. This matched our aim of enabling full configurability of device settings via a simple, text-based configuration file. For example, a syringe diameter can be intuitively specified as either "18.2 mm" or "1.82 cm". + +### Repository structure +We follow the so-called "src-layout" i.e. with the source code in a `src` sub-folder. This increasingly popular trend among the Python ecosystem is to ensure that tox (among others) is using the built version of flowchem and not the local folder shadowing the same namespace. Read [this article](https://setuptools.pypa.io/en/latest/userguide/package_discovery.html#src-layout) for more details. + +### CLI application +At its core, flowchem is a command line application that: +1. parses a configuration file +2. connects to the lab devices described and +3. offers access to them via a RESTful API. + +All of this could in theory be achieved without installing anything via pipx run, e.g. +```shell +pipx run flowchem my_device_config.toml +``` + +Principles: +- No-code shim: device settings via YAML file creates OpenAPI endpoints with predictable names based on serial numbers. +- Implements existing interoperable standard for lab IoT to avoid standard proliferation. + +Implementation design: +- intended use via CLI endpoint, installed via `pipx`. +- Ideally, failure in one device should not affect the others. (Catch-all error via starlette middleware SO 61596911) +- to ease debug, add support for auto-reload if settings file is changed. 
(easy, need to trigger reload on changes, via watchfiles) +- only connection specific settings are needed. Device-specific are optional on instantiation even if needed for use. +  - For example, a syringe pump might need syringe diameter and volume before use, but those are device specific parameters and not connection specific, so they are not required in flowchem config. +  - ideally all permanent device specific parameters (not changing during normal use) are received/set in a uniform way and advertised as such (to enable dynamic graphs config via web interface, somehow similar to Magritek protocol options). +- Don't force code-reuse, but allow for easy extension and leave device modules as independent as possible. +- Each device module should be accompanied by tests and documentation/examples. +- Following abstract device ontologies eases abstraction in higher level code. +- Each device MUST have a name, unique per server, that will be the endpoint path. If none is given, one will be generated. +  - If a unique name can be programmatically used after init (e.g. based on SERIAL_NUMBER), then that will also be advertised in the autodiscovery name. +  - This allows dependent libraries to use static names even though they are not yet known at flowchem init. + +Inspired by many packages with similar aims, including (in alphabetical order): +- [Chemios](https://github.com/Chemios/chemios) +- [ChemOS](https://github.com/aspuru-guzik-group/ChemOS) +- [MechWolf](https://github.com/MechWolf/MechWolf) +- [Octopus](https://github.com/richardingham/octopus) diff --git a/docs/contributing/index.md b/docs/contributing/index.md new file mode 100644 index 00000000..e0af5729 --- /dev/null +++ b/docs/contributing/index.md @@ -0,0 +1,112 @@ +# Contribute to flowchem +% part of this page is based on the numpy project one +% See also https://rdflib.readthedocs.io/en/stable/developers.html +% And https://diataxis.fr/how-to-guides/ + +Not a coder? Not a problem! 
Flowchem is multi-faceted, and we can use a lot of help. +These are all activities we’d like to get help with : + + Code maintenance and development + + Developing educational content & narrative documentation + + Writing technical documentation + +The rest of this document discusses working on the flowchem code base and documentation. + +## Development process +1. If you are a first-time contributor: + + * Go to [flowchem gitHub repository](https://github.com/cambiegroup/flowchem) and click the “fork” button to create your own copy of the project. + + * Clone the project to your local computer: + + * `git clone https://github.com/your-username/flowchem.git` + + * Change the directory: + + * cd flowchem + + * Add the upstream repository: + + * git remote add upstream https://github.com/cambiegroup/flowchem.git + + * Now, git `remote -v` will show two remote repositories named: + * `upstream`, which refers to the `flowchem` repository + * `origin`, which refers to your personal fork + +2. Develop your contribution: + + * Pull the latest changes from upstream: + + * `git checkout main` + * `git pull upstream main` + + * Create a branch for the feature you want to work on. Since the branch name will appear in the merge message, use a sensible name. For example, if you intend to add support for a new device type, called ExtendibleEar a good candidate could be ‘add-extendible-ear-support’: + + * `git checkout -b add-extendible-ear-support` + + * Commit locally as you progress (`git add` and `git commit`) Use a properly formatted commit message, write tests that fail before your change and pass afterward, run all the tests locally. Be sure to document any changed behavior in docstrings, keeping to the [Google docstring standard](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html). + +3. 
To submit your contribution: + + * Push your changes back to your fork on GitHub: + + * `git push origin add-extendible-ear-support` + + * Enter your GitHub username and password (repeat contributors or advanced users can remove this step by connecting to GitHub with SSH). + + * Go to GitHub. The new branch will show up with a green Pull Request button. Make sure the title and message are clear, concise, and self-explanatory. Then click the button to submit it. + + * If your commit introduces a new feature or changes functionality, create an issue on the GitHub repo to explain your changes. For bug fixes, documentation updates, etc., this is generally not necessary, though if you do not get any reaction, do feel free to ask for review. + +4. Review process: + + * Reviewers (the other developers and interested community members) will write inline and/or general comments on your Pull Request (PR) to help you improve its implementation, documentation and style. We aim at protecting the main branch from direct commits to ensure all changes are introduced via pull requests that can be reviewed. The review is meant as a friendly conversation from which we all learn and the overall code quality benefits. Please do not let the review discourage you from contributing: its only aim is to improve the quality of the project, not to criticize (we are, after all, very grateful for your contribution!). + + * To update your PR, make your changes on your local repository, commit, run tests, and only if they succeed push to your fork. As soon as those changes are pushed up (to the same branch as before) the PR will update automatically. If you have no idea how to fix the test failures, you may push your changes anyway and ask for help in a PR comment. + + * Various continuous integration (CI) services are triggered after each PR update to build the code, run unit tests, measure code coverage and check coding style of your branch. The CI tests must pass before your PR can be merged. 
If CI fails, you can find out why by clicking on the “failed” icon (red cross) and inspecting the build and test log. To speed up this cycle you can also test your work locally before committing. + + * A PR which has been approved by at least one core team member will be merged in the main branch and will be part of the next release of flowchem. + +5. Document changes + + * If your change introduces support for a new device make sure to add a description for it in the docs and the README. + +6. Cross-referencing issues + + * If the PR solves an issue, you can add the text closes xxxx, where xxxx is the number of the issue. Instead of closes you can use any of the other flavors [gitHub accepts](https://help.github.com/en/articles/closing-issues-using-keywords) such as fix and resolve. + +## Guidelines + +* All code should be documented with docstrings in Google format and comments where appropriate. +* All code should have tests. +* We use [black](https://github.com/psf/black) not to waste time discussing code style details. +* You can install [pre-commit](https://pre-commit.com/) to run black and other linters as part of the pre-commit hooks. See our `.pre-commit-config.yml` for details. The use of linters and import re-ordering is aimed at reducing diff size and merge conflicts in pull requests. + +## Test coverage +To run the tests `pytest` and some pytest plugins are needed. To install the testing-related dependencies for local testing run this command from the root folder: +```shell +pip install .[test] +``` + +## Building docs +The docs are automatically built for each commit at [readthedocs.com](https://readthedocs.org/projects/flowchem/). +To build it locally, Sphinx, myst-parser and other packages are needed. To install the tools to build the docs run this command from the root folder: +```shell +pip install .[docs] +``` + +Then from the docs folder run `make html` to generate html docs in the build directory. 
+ + +```{toctree} +:maxdepth: 2 + +community +design_principles +add_device/index +models/device_models + +``` diff --git a/docs/contributing/models/device_models.md b/docs/contributing/models/device_models.md new file mode 100644 index 00000000..8443f909 --- /dev/null +++ b/docs/contributing/models/device_models.md @@ -0,0 +1,21 @@ +# Models + +Each device object must be subclass of `BaseDevice`, either directly or more likely via one of `BaseDevice` subclasses. +This allows to add features to all device objects ensuring consistency and without repeating code +(i.e. following the [DRY](https://en.wikipedia.org/wiki/Don%27t_repeat_yourself) principle). +For example, the attribute `owl_subclass_of`, defined in `BaseDevice`, allows to specify classes of the Web Ontology +Language. + +The scheme below represents the current device object taxonomy. + +![Flowchem models taxonomy](../../images/flowchem_models.svg) + + +```{toctree} +:maxdepth: 2 + +valves/base_valve +valves/injection_valve +valves/multiposition_valve + +``` diff --git a/docs/contributing/models/valves/base_valve.md b/docs/contributing/models/valves/base_valve.md new file mode 100644 index 00000000..9cc61e3e --- /dev/null +++ b/docs/contributing/models/valves/base_valve.md @@ -0,0 +1,9 @@ +# Base Valve + +```{eval-rst} +.. autoclass:: flowchem.models.valves.base_valve.BaseValve + :show-inheritance: + :members: + :exclude-members: get_router, initialize + :special-members: __init__ +``` diff --git a/docs/contributing/models/valves/injection_valve.md b/docs/contributing/models/valves/injection_valve.md new file mode 100644 index 00000000..a50b6b28 --- /dev/null +++ b/docs/contributing/models/valves/injection_valve.md @@ -0,0 +1,14 @@ +# Injection Valve + +The injection valve model represents any valve with two positions: `LOAD` and `INJECT`. + +The typical example is a 6-ports-2-positions valve commonly used for HPLC sample injection. +Example devices are + +```{eval-rst} +.. 
autoclass:: flowchem.models.valves.injection_valve.InjectionValve + :show-inheritance: + :members: + :exclude-members: get_router, initialize + :special-members: __init__ +``` diff --git a/docs/contributing/models/valves/multiposition_valve.md b/docs/contributing/models/valves/multiposition_valve.md new file mode 100644 index 00000000..d99a0487 --- /dev/null +++ b/docs/contributing/models/valves/multiposition_valve.md @@ -0,0 +1,9 @@ +# Multi-position Valve + +```{eval-rst} +.. autoclass:: flowchem.models.valves.multiposition_valve.MultiPositionValve + :show-inheritance: + :members: + :exclude-members: get_router, initialize + :special-members: __init__ +``` diff --git a/docs/devices/dataapex/api.rst b/docs/devices/dataapex/api.rst new file mode 100644 index 00000000..aeb08929 --- /dev/null +++ b/docs/devices/dataapex/api.rst @@ -0,0 +1 @@ +.. openapi:: ./clarity.yml diff --git a/docs/devices/dataapex/clarity.md b/docs/devices/dataapex/clarity.md new file mode 100644 index 00000000..9a502935 --- /dev/null +++ b/docs/devices/dataapex/clarity.md @@ -0,0 +1,38 @@ +# DataApex Clarity (HPLC software) + +Clarity is a chromatography data software for data acquisition, processing, and instrument control that can be +controlled via a command line interface (CLI) as described on the [manufacturer website](https://www.dataapex.com/documentation/Content/Help/110-technical-specifications/110.020-command-line-parameters/110.020-command-line-parameters.htm?Highlight=command%20line). + +In `flowchem` we provide a device type, named `Clarity`, to control local Clarity instances via HTTP with flowchem API. 
+ + +## Configuration +Configuration sample showing all possible parameters: + +```toml +[device.hplc] # This is the 'device' identifier +type = "Clarity" + +# Optional parameters (default shown) +executable = "C:\\claritychrom\\bin\\claritychrom.exe" +instrument_number = 1 # Specify the instrument to be controlled (if the same Clarity instance has more than one) +startup-time = 20 # Max time necessary to start-up Clarity and connect all the instruments specified in the configuration +startup-method = "startup-method.met" # Method sent to the device upon startup. +cmd_timeout = 3 # Max amount of time (in s) to wait for the execution of claritychrom.exe commands. +user = "admin" # Default user name +password = "" # Empty or option not present for no password +clarity-cfg-file = "" # Configuration file for Clarity, if e.g. LaunchManager is used to save different configurations +``` + +## API methods +Once configured, a flowchem Clarity object will expose the following commands: + +```{eval-rst} +.. include:: api.rst +``` + +## Further information +Only a few of the commands available through Clarity CLI are exposed via flowchem. +It is possible to add support for more commands if necessary, please refer to the +[manufacturer website](https://www.dataapex.com/documentation/Content/Help/110-technical-specifications/110.020-command-line-parameters/110.020-command-line-parameters.htm?Highlight=command%20line) +for a list of all the available options. diff --git a/docs/devices/dataapex/clarity.yml b/docs/devices/dataapex/clarity.yml new file mode 100644 index 00000000..995825a1 --- /dev/null +++ b/docs/devices/dataapex/clarity.yml @@ -0,0 +1,142 @@ +openapi: 3.0.2 +info: + title: Flowchem - devices + description: Flowchem is a python library to control a variety of instruments commonly + found in chemistry labs. 
+ license: + name: MIT License + url: https://opensource.org/licenses/MIT + version: 0.1.0a3 +paths: + /hplc/run: + put: + tags: + - hplc + - hplc + summary: Run + description: 'Run one analysis on the instrument. The sample name has to be + set in advance via sample-name. + + + Note that it takes at least 2 sec until the run actually starts (depending + on instrument configuration). + + While the export of the chromatogram in e.g. ASCII format can be achieved + programmatically via the CLI, the best + + solution is to enable automatic data export for all runs of the HPLC as the + chromatogram will be automatically + + exported as soon as the run is finished.' + operationId: run_hplc_run_put + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + /hplc/method: + put: + tags: + - hplc + - hplc + summary: Set Method + description: 'Sets the HPLC method (i.e. a file with .MET extension) to the + instrument. + + + Make sure to select ''Send Method to Instrument'' option in Method Sending + Options dialog in System Configuration.' + operationId: set_method_hplc_method_put + parameters: + - required: true + schema: + title: Method Name + type: string + name: method_name + in: query + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /hplc/sample-name: + put: + tags: + - hplc + - hplc + summary: Set Sample Name + description: Sets the name of the sample for the next run. 
+ operationId: set_sample_name_hplc_sample_name_put + parameters: + - required: true + schema: + title: Sample Name + type: string + name: sample_name + in: query + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + /hplc/exit: + put: + tags: + - hplc + - hplc + summary: Exit + description: Exit Clarity Chrom. + operationId: exit_hplc_exit_put + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} +components: + schemas: + HTTPValidationError: + title: HTTPValidationError + type: object + properties: + detail: + title: Detail + type: array + items: + $ref: '#/components/schemas/ValidationError' + ValidationError: + title: ValidationError + required: + - loc + - msg + - type + type: object + properties: + loc: + title: Location + type: array + items: + anyOf: + - type: string + - type: integer + msg: + title: Message + type: string + type: + title: Error Type + type: string diff --git a/flowchem/components/devices/Hamilton/Microlab-600-RS-232-Communication-Manual.pdf b/docs/devices/hamilton/Microlab-600-RS-232-Communication-Manual.pdf similarity index 100% rename from flowchem/components/devices/Hamilton/Microlab-600-RS-232-Communication-Manual.pdf rename to docs/devices/hamilton/Microlab-600-RS-232-Communication-Manual.pdf diff --git a/docs/devices/hamilton/api.rst b/docs/devices/hamilton/api.rst new file mode 100644 index 00000000..2e3b5a90 --- /dev/null +++ b/docs/devices/hamilton/api.rst @@ -0,0 +1 @@ +.. 
openapi:: ./ml600.yml diff --git a/docs/devices/hamilton/ml600.md b/docs/devices/hamilton/ml600.md new file mode 100644 index 00000000..e801669a --- /dev/null +++ b/docs/devices/hamilton/ml600.md @@ -0,0 +1,47 @@ +# Hamilton Syringe Pump ML600 + +Hamilton ML600 pumps connected via serial (RS-232) cables are supported in flowchem via the `ML600` device type. +As for all `flowchem` devices, the virtual instrument can be instantiated via a configuration file that generates an +openAPI endpoint. +For a standard (single syringe) pump model, two components will be available: one pump and one valve. +Pumps with multiple syringes are not currently supported. + + +## Configuration +Configuration sample showing all possible parameters: + +```toml +[device.my-ml600-pump] # This is the pump identifier +type = "ML600" +port = "COM1" # This will be /dev/tty* under linux/MacOS +address= 1 # If multiple devices are daisy chained, number in the chain 1=first... +syringe_volume = "1 ml" # If the wrong syringe volume is set, the flow rate will be wrong. +``` + +```{note} Serial connection parameters +Note, further parameters for the serial connections (i.e. those accepted by `serial.Serial`) such as `baudrate`, +`parity`, `stopbits` and `bytesize` can be specified. +However, it should not be necessary as the following values (which are the default for the instrument) are +automatically used: +* baudrate 9600 +* parity even +* stopbits 1 +* bytesize 7 +``` + +## API methods +Once configured, a flowchem ML600 object will expose the following commands: + +```{eval-rst} +.. include:: api.rst +``` + +## Device detection +Lab PCs often have several devices connected via serial ports. +ML600 pumps can be auto-detected via the `flowchem-autodiscover` command-line utility. +After having installed flowchem, run `flowchem-autodiscover` to create a configuration stub with all the devices that +can be auto-detected on your PC. 
+ +## Further information +For further information about connection of the pump to the controlling PC, daisy-chaining via RJ-12 cables etc. +please refer to the [manufacturer manual](./Microlab-600-RS-232-Communication-Manual.pdf). diff --git a/docs/devices/harvardapparatus/api.rst b/docs/devices/harvardapparatus/api.rst new file mode 100644 index 00000000..fd623508 --- /dev/null +++ b/docs/devices/harvardapparatus/api.rst @@ -0,0 +1 @@ +.. openapi:: ./elite11.yml diff --git a/docs/devices/harvardapparatus/elite11.md b/docs/devices/harvardapparatus/elite11.md new file mode 100644 index 00000000..ec88fa73 --- /dev/null +++ b/docs/devices/harvardapparatus/elite11.md @@ -0,0 +1,51 @@ +# Harvard Apparatus Syringe Pump Elite11 + +## Introduction +Harvard-Apparatus Elite11 pumps connected via USB cables (which create a virtual serial port) are supported in flowchem +via the `Elite11` device type. +Depending on the pump model, the component might be capable of infusing/withdrawing or just infusing. +This difference reflects the existence in commerce of both variants, i.e. pumps only capable of infusion and pumps that +support both infusion and withdrawing commands. + +As for all `flowchem` devices, the virtual instrument can be instantiated via a configuration file that generates an +openAPI endpoint. + + +## Configuration +Configuration sample showing all possible parameters: + +```toml +[device.my-elite11-pump] # This is the pump identifier +type = "Elite11" +port = "COM11" # This will be /dev/tty* under linux/MacOS +address = 0 # Only needed for daisy-chaining. The address can be set on the pump, see manufacturer manual. +syringe_diameter = "4.6 mm" +syringe_volume = "1 ml" +baudrate = 115200 # Values between 9,600 and 115,200 can be selected on the pump! (115200 assumed if not specified) +force = 100 # Value percent, use lower force for smaller syringes, see manual. +``` + +```{note} Serial connection parameters +Note, further parameters for the serial connections (i.e. 
those accepted by `serial.Serial`) such as `baudrate`, +`parity`, `stopbits` and `bytesize` can be specified. +However, it should not be necessary as the following values (which are the default for the instrument) are +automatically used: +* baudrate 115200 +``` + +## API methods +Once configured, a flowchem Elite11 object will expose the following commands: + +```{eval-rst} +.. include:: api.rst +``` + +## Device detection +Lab PCs often have several devices connected via serial ports. +Elite11 pumps can be auto-detected via the `flowchem-autodiscover` command-line utility. +After having installed flowchem, run `flowchem-autodiscover` to create a configuration stub with all the devices that +can be auto-detected on your PC. + +## Further information +For further information about connection of the pump to the controlling PC, daisy-chaining via firmware cables etc. +please refer to the [manufacturer manual](./elite11_manual.pdf). diff --git a/flowchem/components/devices/Harvard_Apparatus/11 Elite & 11 Elite Pico Manual - Rev C.pdf b/docs/devices/harvardapparatus/elite11_manual.pdf similarity index 100% rename from flowchem/components/devices/Harvard_Apparatus/11 Elite & 11 Elite Pico Manual - Rev C.pdf rename to docs/devices/harvardapparatus/elite11_manual.pdf diff --git a/docs/devices/huber/api.rst b/docs/devices/huber/api.rst new file mode 100644 index 00000000..fd623508 --- /dev/null +++ b/docs/devices/huber/api.rst @@ -0,0 +1 @@ +.. openapi:: ./elite11.yml diff --git a/docs/devices/huber/chiller.md b/docs/devices/huber/chiller.md new file mode 100644 index 00000000..66de9ae8 --- /dev/null +++ b/docs/devices/huber/chiller.md @@ -0,0 +1,58 @@ +# Huber Chiller +## Introduction +The majority of Huber chillers can be controlled via so-called `PB Commands` over serial communication. 
+A variety of `PB Commands` are supported in `flowchem`, but some of them may be unavailable on specific models, see the +[manufacturer documentation](./pb_commands_handbook.pdf) for more details. + +As for all `flowchem` devices, the virtual instrument can be instantiated via a configuration file that generates an +openAPI endpoint. + + +## Configuration +Configuration sample showing all possible parameters: + +```toml +[device.my-huber-chiller] # This is the chiller identifier +type = "HuberChiller" +port = "COM11" # This will be /dev/tty* under linux/MacOS +min_temp = -100 # Min and max temp can be used to further limit the available temperatures +max_temp = +250 # e.g. for compatibility with the reaction system. +``` + +```{note} Serial connection parameters +Note, further parameters for the serial connections (i.e. those accepted by `serial.Serial`) such as `baudrate`, +`parity`, `stopbits` and `bytesize` can be specified. +However, it should not be necessary as the following values (which are the default for the instrument) are +automatically used: +* baudrate 9600 (with Com.G@te other baud rates are possible) +* parity none +* stopbits 1 +* bytesize 8 +``` + +## API methods +Once configured, a flowchem HuberChiller object will expose the following commands: + +```{eval-rst} +.. include:: api.rst +``` + +## Device detection +Lab PCs often have several devices connected via serial ports. +Huber's chillers can be auto-detected via the `flowchem-autodiscover` command-line utility. +After having installed flowchem, run `flowchem-autodiscover` to create a configuration stub with all the devices that +can be auto-detected on your PC. + +## Further information +For further information please refer to the [manufacturer manual](./pb_commands_handbook.pdf) + +```{note} Serial connection parameters +Note, further parameters for the serial connections (i.e. those accepted by `serial.Serial`) such as `baudrate`, +`parity`, `stopbits` and `bytesize` can be specified. 
+However, it should not be necessary as the following values (which are the default for the instrument) are +automatically used: +* baudrate 9600 +* parity even +* stopbits 1 +* bytesize 7 +``` diff --git a/flowchem/components/devices/Huber/Handbuch_Datenkommunikation_PB_en.pdf b/docs/devices/huber/pb_commands_handbook.pdf similarity index 100% rename from flowchem/components/devices/Huber/Handbuch_Datenkommunikation_PB_en.pdf rename to docs/devices/huber/pb_commands_handbook.pdf diff --git a/docs/devices/knauer/api-pump.rst b/docs/devices/knauer/api-pump.rst new file mode 100644 index 00000000..dba23a36 --- /dev/null +++ b/docs/devices/knauer/api-pump.rst @@ -0,0 +1 @@ +.. openapi:: ./knauer-pump.yml diff --git a/docs/devices/knauer/api-valve.rst b/docs/devices/knauer/api-valve.rst new file mode 100644 index 00000000..7f5429b0 --- /dev/null +++ b/docs/devices/knauer/api-valve.rst @@ -0,0 +1 @@ +.. openapi:: ./knauer-valve.yml diff --git a/docs/devices/knauer/azura_compact.md b/docs/devices/knauer/azura_compact.md new file mode 100644 index 00000000..9f37c298 --- /dev/null +++ b/docs/devices/knauer/azura_compact.md @@ -0,0 +1,40 @@ +# Pump Azura Compact (P 2.1S) +## Introduction +The Knauer Azura Compact pumps can be controlled via flowchem. + +As for all `flowchem` devices, the virtual instrument can be instantiated via a configuration file that generates an +openAPI endpoint. + + +## Connection +Knauer pumps are originally designed to be used with HPLC instruments, so they support ethernet communication. +Moreover, they feature an autodiscover mechanism that makes it possible to automatically find the device IP address +of a device given its (immutable) MAC address. +This enables the use of the valves with dynamic addresses (i.e. with a DHCP server) which simplify the setup procedure. 
+ +## Configuration +Configuration sample showing all possible parameters: + +```toml +[device.my-knauer-pump] # This is the pump identifier +type = "AzuraCompactPump" +ip_address = "192.168.2.1" # Only one of either ip_address or mac_address needs to be provided +mac_address = "00:11:22:33:44:55" # Only one of either ip_address or mac_address needs to be provided +max_pressure = "10 bar" # Optionally, a string with natural language specifying max pressure can be provided +min_pressure = "5 bar" # Optionally, a string with natural language specifying min pressure can be provided +``` + +## API methods +Once configured, a flowchem AzuraCompactPump object will expose the following commands: + +```{eval-rst} +.. include:: api-pump.rst +``` + +## Device detection +Azura Compact pumps can be auto-detected via the `flowchem-autodiscover` command-line utility. +After having installed flowchem, run `flowchem-autodiscover` to create a configuration stub with all the devices that +can be auto-detected on your PC. 
+ +## Further information +For further information please refer to the [manufacturer manual](./pump_p2.1s_instructions.pdf) diff --git a/flowchem/components/devices/Knauer/V6870 _P2.1S_P4.1S_Instructions.pdf b/docs/devices/knauer/pump_p2.1s_instructions.pdf similarity index 100% rename from flowchem/components/devices/Knauer/V6870 _P2.1S_P4.1S_Instructions.pdf rename to docs/devices/knauer/pump_p2.1s_instructions.pdf diff --git a/flowchem/components/devices/Knauer/V6855_AZURA_Valve_Unifier_VU_4.1_Instructions_EN.pdf b/docs/devices/knauer/valve_instructions_en.pdf similarity index 100% rename from flowchem/components/devices/Knauer/V6855_AZURA_Valve_Unifier_VU_4.1_Instructions_EN.pdf rename to docs/devices/knauer/valve_instructions_en.pdf diff --git a/docs/devices/knauer/valves.md b/docs/devices/knauer/valves.md new file mode 100644 index 00000000..0163e8ea --- /dev/null +++ b/docs/devices/knauer/valves.md @@ -0,0 +1,48 @@ +# Knauer Valves +## Introduction +A range of different valve heads can be mounted on the same Knauer actuator, so several type of valves can be controlled +with the same protocol. Both standard 6-port-2-position injection valve and multi-position valves +(with 6, 12 or 16 ports) can be controlled via flowchem. + +As for all `flowchem` devices, the virtual instrument can be instantiated via a configuration file that generates an +openAPI endpoint. + + +## Connection +Knauer valves are originally designed to be used with HPLC instruments, so they support ethernet communication. +Moreover, they feature an autodiscover mechanism that makes it possible to automatically find the device IP address +of a device given its (immutable) MAC address. +This enables the use of the valves with dynamic addresses (i.e. with a DHCP server) which simplify the setup procedure. 
+ + +## Configuration +Configuration sample showing all possible parameters: + +```toml +[device.my-knauer-valve] # This is the valve identifier +type = "KnauerValve" # The actual valve type will be detected automatically +ip_address = "192.168.2.1" # Only one of either ip_address or mac_address needs to be provided +mac_address = "00:11:22:33:44:55" # Only one of either ip_address or mac_address needs to be provided +``` + +## API methods +Once configured, a flowchem Knauer6Port2PositionValve object will expose the following commands: + +```{eval-rst} +.. include:: api-valve.rst +``` + +## Valve positions +The valve position naming follows the general convention of flowchem, depending on the valve type +(see [Base Valve](../../models/valves/base_valve.md)): +* Injection valves have positions named 'load' and 'inject' +* Distribution valves have positions from '1' to 'n' where n is the total number of ports available. + +## Device detection +Knauer Valves can be auto-detected via the `flowchem-autodiscover` command-line utility. +After having installed flowchem, run `flowchem-autodiscover` to create a configuration stub with all the devices that +can be auto-detected on your PC. + + +## Further information +For further information please refer to the [manufacturer manual](./valve_instructions_en.pdf) diff --git a/docs/devices/magritek/api.rst b/docs/devices/magritek/api.rst new file mode 100644 index 00000000..c78f38b3 --- /dev/null +++ b/docs/devices/magritek/api.rst @@ -0,0 +1 @@ +.. openapi:: ./spinsolve.yml diff --git a/docs/devices/magritek/spinsolve.md b/docs/devices/magritek/spinsolve.md new file mode 100644 index 00000000..c278de6d --- /dev/null +++ b/docs/devices/magritek/spinsolve.md @@ -0,0 +1,48 @@ +# Magritek Spinsolve +```{admonition} Additional plugin needed! +:class: attention + +To use Spinsolve devices the external plugin `flowchem-spinsolve` is needed! + +Install it with `python -m pip install flowchem-spinsolve`! 
+``` + +## Introduction +The bench-top NMRs from Magritek are controlled by the proprietary software Spinsolve. +Spinsolve can be controlled remotely via XML over HTTP. + +As for all `flowchem` devices, a Spinsolve virtual instrument can be instantiated via a configuration file that generates an openAPI endpoint. +A peculiarity of controlling the NMR in this way is that the FIDs acquired are stored on +the computer where spinsolve is installed, which may or may not be the same PC where flowchem +is running. +Some utility functions are provided in case you are controlling Spinsolve on a different PC than the one running flowchem, see below for more details. + + +## Configuration +Configuration sample showing all possible parameters: + +```toml +[device.my-benchtop-nmr] # This is the valve identifier +type = "Spinsolve" +host = "127.0.0.1" # IP address of the PC running Spinsolve, 127.0.0.1 for local machine. Only necessary parameter. +port = 13000 # Default spinsolve port +sample_name = "automated-experiment" +solvent = "chloroform-d" +data_folder = "D:\\data2q\\my-experiment" +remote_to_local_mapping = ["D:\\data2q", "\\BSMC-7WP43Y1\\data2q"] +``` + +## API methods +Once configured, a flowchem Spinsolve object will expose the following commands: + +```{eval-rst} +.. include:: api.rst +``` + +## Remote control +When controlling a Spinsolve instance running on a remote PC, it is necessary that the FIDs are saved in a folder that +is accessible from the PC running flowchem as the Spinsolve API does not natively allow for file transfer. +If network drive are used, a location with the same name can be used on both PC. +If that is not the case, a `remote_to_local_mapping` parameter can be used to translate the remote file hierarchy to the +local (flowchem-accessible) one. +Incidentally, this enables the file sharing across PC with different operative system, e.g. if flowchem is running on linux. 
diff --git a/docs/devices/manson/api.rst b/docs/devices/manson/api.rst new file mode 100644 index 00000000..15bc5c88 --- /dev/null +++ b/docs/devices/manson/api.rst @@ -0,0 +1 @@ +.. openapi:: ./manson.yml diff --git a/docs/devices/manson/manson.md b/docs/devices/manson/manson.md new file mode 100644 index 00000000..5d825655 --- /dev/null +++ b/docs/devices/manson/manson.md @@ -0,0 +1,28 @@ +# Manson Laboratory Power Supply + +## Introduction +The following models of Manson lab power supply are supported: "HCS-3102", "HCS-3014", "HCS-3204" and "HCS-3202". +Once connected via USB, they are recognized as a virtual serial port and are supported in `flowchem` via the device type `MansonPowerSupply`. + +As for all `flowchem` devices, the virtual instrument can be instantiated via a configuration file that generates an openAPI endpoint. + + +## Configuration +Configuration sample showing all possible parameters: + +```toml +[device.my-power-supply] # This is the device name +type = "MansonPowerSupply" +port = "COM12" # This will be /dev/tty* under linux/MacOS +``` + +```{note} Serial connection parameters +Note, further parameters for the serial connections (i.e. those accepted by `serial.Serial`) such as `baudrate`, `parity`, `stopbits` and `bytesize` can be specified. +``` + +## API methods +Once configured, a flowchem MansonPowerSupply object will expose the following commands: + +```{eval-rst} +.. include:: api.rst +``` diff --git a/docs/devices/mettler/flowir.md b/docs/devices/mettler/flowir.md new file mode 100644 index 00000000..dd3e427d --- /dev/null +++ b/docs/devices/mettler/flowir.md @@ -0,0 +1,3 @@ +# FlowIR + +A valid iCIR template name must be specified! 
diff --git a/flowchem/core/server/__init__.py b/docs/devices/phidgets/p_sensor.md similarity index 100% rename from flowchem/core/server/__init__.py rename to docs/devices/phidgets/p_sensor.md diff --git a/docs/devices/supported_devices.md b/docs/devices/supported_devices.md new file mode 100644 index 00000000..9195a66d --- /dev/null +++ b/docs/devices/supported_devices.md @@ -0,0 +1,36 @@ +# Devices + +The following devices are currently supported in flowchem: + +| Manufacturer | Device / Model | `flowchem` name | `flowchem` components | Auto-discoverable | +|------------------|--------------------|-------------------|-------------------------------------|:-----------------:| +| DataApex | Clarity | Clarity | HPLCControl | NO | +| Hamilton | ML600 | ML600 | SyringePump, DistributionValve | YES | +| HarvardApparatus | Elite11 | Elite11 | SyringePump | YES | +| Huber | various | HuberChiller | TemperatureControl | YES | +| Knauer | Azura Compact | AzuraCompact | HPLCPump, PressureSensor | YES | +| Knauer | V 2.1S | KnauerValve | InjectionValve or DistributionValve | YES | +| Magritek | Spinsolve | Spinsolve | NMRControl | NO | +| Manson | HCS-3102 family | MansonPowerSupply | PowerSupply | NO | +| Mettler Toledo | iCIR | FlowIR | IRControl | NO | +| Phidgets | VINT | PressureSensor | PressureSensor | NO | +| Vici Valco | Universal Actuator | ViciValve | InjectionValve | NO | + + +```{toctree} +:maxdepth: 1 +:caption: Devices + +dataapex/clarity +hamilton/ml600 +harvardapparatus/elite11 +huber/chiller +knauer/azura_compact +knauer/valves +magritek/spinsolve +manson/manson +mettler/flowir +phidgets/p_sensor +vicivalco/vicivalve + +``` diff --git a/docs/devices/vicivalco/api.rst b/docs/devices/vicivalco/api.rst new file mode 100644 index 00000000..384b3837 --- /dev/null +++ b/docs/devices/vicivalco/api.rst @@ -0,0 +1 @@ +..
openapi:: ./vici.yml diff --git a/docs/devices/vicivalco/universal-actuator.pdf b/docs/devices/vicivalco/universal-actuator.pdf new file mode 100644 index 00000000..e7b88b59 Binary files /dev/null and b/docs/devices/vicivalco/universal-actuator.pdf differ diff --git a/docs/devices/vicivalco/vicivalve.md b/docs/devices/vicivalco/vicivalve.md new file mode 100644 index 00000000..83b473fb --- /dev/null +++ b/docs/devices/vicivalco/vicivalve.md @@ -0,0 +1,55 @@ +# Vici Valco Valves +## Introduction +While different valve heads can be mounted on the same Vici Universal actuator, so far only injection valves are +supported, as they are the most common type. +Support for additional valve types can be trivially added, based on the example of Knauer Valves. + +As for all `flowchem` devices, the virtual instrument can be instantiated via a configuration file that generates an +openAPI endpoint. + + +## Connection +Depending on the device options, Vici valves can be controlled in different ways. +The code here reported assumes serial communication, but can be easily ported to different connection type if necessary. + +## Configuration +Configuration sample showing all possible parameters: + +```toml +[device.my-vici-valve] # This is the valve identifier +type = "ViciValve" +port = "COM11" # This will be /dev/tty* under linux/MacOS +address = 0 # Only needed for daisy-chaining. The address can be set on the pump, see manufacturer manual. +``` + +```{note} Serial connection parameters +Note, further parameters for the serial connections (i.e. those accepted by `serial.Serial`) such as `baudrate`, +`parity`, `stopbits` and `bytesize` can be specified. +However, it should not be necessary as the default for the instrument are automatically used. +``` + +## API methods +Once configured, a flowchem ViciValve object will expose the following commands: + +```{eval-rst} +.. 
include:: api.rst +``` + +## Valve positions +The valve position naming follows the general convention of flowchem (see [Base Valve](../../models/valves/base_valve.md)): +* Injection valves have positions named 'LOAD' and 'INJECT' +* Multiposition valves have positions from '1' to 'n' where n is the total number of ports available. + +## Device detection +Knauer Valves can be auto-detected via the `flowchem-autodiscover` command-line utility. +After having installed flowchem, run `flowchem-autodiscover` to create a configuration stub with all the devices that +can be auto-detected on your PC. + +```{note} Valve types +Note that the actual type of valve cannot be detected automatically, so you will need to replace the generic +`KnauerValve` type in the configuration with one of the valid device types (i.e. one of `Knauer6Port2PositionValve`, +`Knauer6Port6PositionValve`, `Knauer12PortValve` and `Knauer16PortValve`) +``` + +## Further information +For further information please refer to the [manufacturer manual](./universal-actuator.pdf) diff --git a/docs/getting_started.md b/docs/getting_started.md new file mode 100644 index 00000000..eab97a3f --- /dev/null +++ b/docs/getting_started.md @@ -0,0 +1,55 @@ +# Getting started + +Welcome to the new users guide to flowchem! + +Flowchem is a python library to control a variety of instruments commonly found in chemistry labs. + +## Installing flowchem +While the RESTful API created by flowchem can be consumed from different programs and programming languages, flowchem itself is written in the popular open-source language Python.
+ +If you already have Python version 3.10 or above, you can install flowchem with pipx as follows: +```shell +pip install pipx +pipx ensurepath +pipx install flowchem +``` +This is the recommended way because it will: +* install flowchem in a virtualenv, without messing up your system interpreter +* make the `flowchem` command available system-wide, by adding it to the system PATH + +Alternatively, you can install it *normally* via pip with: +```shell +pip install flowchem +``` + +If you don’t have Python yet, you can download it from [python.org](https://www.python.org/downloads/). + +To verify the installation has been completed successfully you can run the `flowchem --version` command. + +## How to use flowchem +Flowchem needs one device configuration file that specifies the connection settings for all the devices to be controlled. +To save time, flowchem can autodetect many of the supported device types and generate a configuration stub. +This is done by running the `flowchem-autodiscover` program from the command line. + +This is the result of running `flowchem-autodiscover` on a PC with FIXME WHAT connected to it. +```shell +``` + +A new file named `blabla` has been created in the current working directory, with the following content: +```toml +``` + +```{note} +While technically `my-devices.toml` is written in the markup language [TOML format](https://en.wikipedia.org/wiki/TOML), +the syntax of this language is intuitive and designed to be as human-editable as possible. +If you follow this guide you will not need to learn anything about the TOML syntax but you can just copy and modify the +example provided. +``` + + +As you can see, a few placeholders in the autogenerated file have to be replaced by actual settings. +For example, bla + + + +If you have comments or suggestions, please don’t hesitate to [reach out](./community.md)!
diff --git a/docs/images/flowchem_models.svg b/docs/images/flowchem_models.svg new file mode 100644 index 00000000..de52be69 --- /dev/null +++ b/docs/images/flowchem_models.svg @@ -0,0 +1,4 @@ + + + +
BaseDevice
BaseDevice
BaseValve
BaseValve
InjectionValve
InjectionValve
MultiPositionValve
MultiPositionValve
Text is not SVG - cannot display
diff --git a/docs/index.md b/docs/index.md index 14f2ae58..a04aca36 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,20 +1,15 @@ -.. Flowchem documentation master file, created by - sphinx-quickstart on Thu Sep 30 12:19:43 2021. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - Welcome to Flowchem's documentation! ==================================== -.. toctree:: - :maxdepth: 2 - :caption: Contents: +Select a topic from the list below, or read the [Getting Started](./getting_started.md) guide. + +```{toctree} +:maxdepth: 2 +getting_started +devices/supported_devices -Indices and tables -================== +contributing/index.md -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` +``` diff --git a/docs/json2yml.py b/docs/json2yml.py new file mode 100644 index 00000000..7993ce22 --- /dev/null +++ b/docs/json2yml.py @@ -0,0 +1,9 @@ +#!/usr/bin/env python +import json +import sys + +import yaml + +print( + yaml.dump(json.load(open(sys.argv[1])), default_flow_style=False, sort_keys=False) +) diff --git a/examples/Hamilton_ML600.py b/examples/Hamilton_ML600.py deleted file mode 100644 index e368bc3d..00000000 --- a/examples/Hamilton_ML600.py +++ /dev/null @@ -1,78 +0,0 @@ -""" Example file for controlling Hamilton ML600 pumps with fllowchem """ -import asyncio -from flowchem import ML600 - -conf_pump1 = { - "port": "COM12", - "address": 1, - "name": "water", - "syringe_volume": 5, -} - -conf_pump2 = { - "port": "COM12", - "address": 2, - "name": "acetone", - "syringe_volume": 5, -} - - -async def example(p1: ML600, p2: ML600): - """Example code for Hamilton ML600 pumps""" - # Initialize pumps. 
- await p1.initialize_pump() - await p2.initialize_pump() - - # We can also run commands on different pumps concurrently - await asyncio.gather(p1.initialize_pump(), p2.initialize_pump()) - - # Let's set the valve position to inlet - await p1.set_valve_position(ML600.ValvePositionName.INPUT) - await p2.set_valve_position(ML600.ValvePositionName.INPUT) - - # Let's change valve positions a couple of time - print(f"Pump 1 valve position is now {await p1.get_valve_position()}") - await p1.set_valve_position(ML600.ValvePositionName.OUTPUT) - print(f"Pump 1 valve position is now {await p1.get_valve_position()}") - await p1.set_valve_position(ML600.ValvePositionName.INPUT) - - # Valve position commands are special because, as default, they return only at the end of the movement. - # You can avoid this by passing wait_for_movement_end=False. - # The reason for this behaviour is that, while it is intuitive the need to wait for a syringe movement, - # awaiting for the end of a brief valve movement is often forgotten. - await p1.set_valve_position( - ML600.ValvePositionName.OUTPUT, wait_for_movement_end=False - ) - print(f"Pump 1 valve position is now {await p1.get_valve_position()}") - await p1.set_valve_position(ML600.ValvePositionName.INPUT) - - # Note that all the speed parameters are intended in seconds for full stroke, i.e. seconds for syringe_volume - await p1.to_volume(target_volume=0, speed=10) - # We suggest to call the class methods with the full keywords and not positionally. 
- # For example this line is a lot less readable: - await p2.to_volume(0, 10) - - # Then we can rapidly fill our syringes - await asyncio.gather( - p1.to_volume(p1.syringe_volume, speed=10), - p2.to_volume(p2.syringe_volume, speed=10), - ) - # And let's wait for the movement to be over - await asyncio.gather(p1.wait_until_idle(), p2.wait_until_idle()) - - # And pump in the outlet port - await p1.set_valve_position(ML600.ValvePositionName.OUTPUT) - await p2.set_valve_position(ML600.ValvePositionName.OUTPUT) - - # If you find the stroke per second not convienent, the utility function ML600.flowrate_to_seconds_per_stroke - # can be used to translate flow rate in seconds per stroke. - speed1 = p1.flowrate_to_seconds_per_stroke(flowrate_in_ml_min=0.5) - speed2 = p1.flowrate_to_seconds_per_stroke(flowrate_in_ml_min=0.75) - await p1.to_volume(target_volume=0, speed=speed1) - await p2.to_volume(target_volume=0, speed=speed2) - - -pump1 = ML600.from_config(conf_pump1) -pump2 = ML600.from_config(conf_pump2) - -asyncio.run(example(pump1, pump2)) diff --git a/examples/Huber_chiller.py b/examples/Huber_chiller.py deleted file mode 100644 index bebd4fdd..00000000 --- a/examples/Huber_chiller.py +++ /dev/null @@ -1,46 +0,0 @@ -import asyncio -import time - -import aioserial -from flowchem import HuberChiller - -chiller = HuberChiller(aioserial.AioSerial(url="COM1")) - - -async def main(): - # Set target temperature - await chiller.set_temperature_setpoint("35 °C") - # Start temperature control - await chiller.start_temperature_control() - # Start recirculation - await chiller.start_circulation() - - for _ in range(6): - int_temp = await chiller.internal_temperature() - process_temp = await chiller.process_temperature() - ret_temp = await chiller.return_temperature() - water_in_temp = await chiller.cooling_water_temp() - water_out_temp = await chiller.cooling_water_temp_outflow() - - print( - "Current temperatures are:\n" - f"\tInternal = {int_temp}\n" - f"\tProcess = 
{process_temp}\n" - f"\tReturn = {ret_temp}\n" - f"\tWater Inlet = {water_in_temp}\n" - f"\tWater Outlet = {water_out_temp}\n" - ) - - time.sleep(10) - - # Stop temperature control - await chiller.stop_temperature_control() - - time.sleep(10) - - # Stop circulation - await chiller.stop_circulation() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/examples/autonomous_reaction_optimization/README.md b/examples/autonomous_reaction_optimization/README.md new file mode 100644 index 00000000..c5727d16 --- /dev/null +++ b/examples/autonomous_reaction_optimization/README.md @@ -0,0 +1 @@ +# Autonomous reaction optimization diff --git a/examples/autonomous_reaction_optimization/_hw_control.py b/examples/autonomous_reaction_optimization/_hw_control.py new file mode 100644 index 00000000..58216a18 --- /dev/null +++ b/examples/autonomous_reaction_optimization/_hw_control.py @@ -0,0 +1,36 @@ +import contextlib + +import requests +from loguru import logger + +HOST = "127.0.0.1" +PORT = 8000 +api_base = f"http://{HOST}:{PORT}" +socl2_endpoint = f"{api_base}/socl2" +hexyldecanoic_endpoint = f"{api_base}/hexyldecanoic" +r4_channel = 0 +r4_endpoint = f"{api_base}/r4-heater/{r4_channel}" +flowir_endpoint = f"{api_base}/flowir" + +__all__ = [ + "socl2_endpoint", + "hexyldecanoic_endpoint", + "r4_endpoint", + "command_session", + "flowir_endpoint", +] + + +def check_for_errors(resp, *args, **kwargs): + resp.raise_for_status() + + +def log_responses(resp, *args, **kwargs): + logger.debug(f"Reply: {resp.text} on {resp.url}") + + +@contextlib.contextmanager +def command_session(): + with requests.Session() as session: + session.hooks["response"] = [log_responses, check_for_errors] + yield session diff --git a/examples/autonomous_reaction_optimization/devices.toml b/examples/autonomous_reaction_optimization/devices.toml new file mode 100644 index 00000000..9711108e --- /dev/null +++ b/examples/autonomous_reaction_optimization/devices.toml @@ -0,0 +1,20 @@ +[device.socl2] 
+type = "Elite11" +port = "COM4" +syringe_diameter = "14.567 mm" +syringe_volume = "10 ml" +baudrate = 115200 + +[device.hexyldecanoic] +type = "AzuraCompact" +ip_address = "192.168.1.119" +max_pressure = "10 bar" + +#[device.r4-heater] +#type = "R4Heater" +#port = "COM1" + +[device.flowir] +type = "IcIR" +url = "opc.tcp://localhost:62552/iCOpcUaServer" +template = "30sec_2days.iCIRTemplate" diff --git a/examples/autonomous_reaction_optimization/limits.in b/examples/autonomous_reaction_optimization/limits.in new file mode 100644 index 00000000..d016cc9d --- /dev/null +++ b/examples/autonomous_reaction_optimization/limits.in @@ -0,0 +1,3 @@ +#Peak Start Stop +sm 1690 1755 +product 1755 1830 diff --git a/examples/autonomous_reaction_optimization/main_loop.py b/examples/autonomous_reaction_optimization/main_loop.py new file mode 100644 index 00000000..43f6ca3f --- /dev/null +++ b/examples/autonomous_reaction_optimization/main_loop.py @@ -0,0 +1,74 @@ +import time + +from _hw_control import * +from gryffin import Gryffin +from loguru import logger +from run_experiment import run_experiment + +from examples.autonomous_reaction_optimization._hw_control import command_session + +logger.add("./xp.log", level="INFO") + +# load config +config = { + "parameters": [ + {"name": "SOCl2_equivalent", "type": "continuous", "low": 1.0, "high": 1.5}, + {"name": "temperature", "type": "continuous", "low": 30, "high": 65}, + {"name": "residence_time", "type": "continuous", "low": 2, "high": 20}, + ], + "objectives": [ + {"name": "product_ratio_IR", "goal": "max"}, + ], +} + +# Initialize gryffin +gryffin = Gryffin(config_dict=config) +observations = [] + +# Initialize hardware +with command_session() as sess: + # Heater to r.t. 
+ sess.put(r4_endpoint + "/temperature", params={"temperature": "21"}) + sess.put(r4_endpoint + "/power-on") + + # Start pumps with low flow rate + sess.put(socl2_endpoint + "/flow-rate", params={"rate": "5 ul/min"}) + sess.put(socl2_endpoint + "/infuse") + + sess.put(hexyldecanoic_endpoint + "/flow-rate", params={"rate": "50 ul/min"}) + sess.put(hexyldecanoic_endpoint + "/infuse") + + # Ensure iCIR is running + assert ( + sess.get(flowir_endpoint + "/is-connected").text == "true" + ), "iCIR app must be open on the control PC" + # If IR is running I just reuse previous experiment. Because cleaning the probe for the BG is slow + status = sess.get(flowir_endpoint + "/probe-status") + if status == " Not running": + # Start acquisition + xp = { + "template": "30sec_2days.iCIRTemplate", + "name": "hexyldecanoic acid chlorination - automated", + } + sess.put(flowir_endpoint + "/experiment/start", params=xp) + + +# Run optimization for MAX_TIME +MAX_TIME = 8 * 60 * 60 +start_time = time.monotonic() + +while time.monotonic() < (start_time + MAX_TIME): + # query gryffin for new conditions_to_test, 1 exploration 1 exploitation (i.e. lambda 1 and -1) + conditions_to_test = gryffin.recommend( + observations=observations, num_batches=1, sampling_strategies=[-1, 1] + ) + + # evaluate the proposed parameters! + for conditions in conditions_to_test: + # Get this from your experiment! 
+ conditions["product_ratio_IR"] = run_experiment(**conditions) + + logger.info(f"Experiment ended: {conditions}") + + observations.extend(conditions_to_test) + logger.info(observations) diff --git a/examples/autonomous_reaction_optimization/plot/plot.py b/examples/autonomous_reaction_optimization/plot/plot.py new file mode 100644 index 00000000..a9d94ebe --- /dev/null +++ b/examples/autonomous_reaction_optimization/plot/plot.py @@ -0,0 +1,34 @@ +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd + +# Time 2.5, 5.0, 7.5, 10, 12.5, 15 +time = np.linspace(25, 150, 6) +time = time / 10 + +# Temp 50, 60, 70, 80, 90 +temp = np.linspace(50, 90, 5) + +# Data +df = pd.DataFrame.from_dict( + { + "2.5": [0, 1, 2, 4, 8], + "5": [2, 4, 8, 16, 32], + "7.5": [4, 1, 2, 4, 8], + "10": [6, 1, 2, 4, 8], + "12.5": [8, 20, 50, 70, 90], + "15": [20, 50, 70, 90, 100], + } +) +df.index.name = "time" +df.columns.name = "temp" + +with plt.xkcd(): + fig, ax = plt.subplots() + plt.pcolormesh(time, temp, np.array(df)) + ax.set_title("Fake data :D") + ax.set_xlabel("Time (min)") + ax.set_ylabel("Temp (C)") + ax.set_xticks([2.5, 5, 7.5, 10, 12.5, 15]) + fig.tight_layout() + plt.show() diff --git a/examples/autonomous_reaction_optimization/run_experiment.py b/examples/autonomous_reaction_optimization/run_experiment.py new file mode 100644 index 00000000..49277af8 --- /dev/null +++ b/examples/autonomous_reaction_optimization/run_experiment.py @@ -0,0 +1,171 @@ +import time + +import numpy as np +import pandas as pd +from _hw_control import * +from loguru import logger +from scipy import integrate + + +def calculate_flow_rates(SOCl2_equivalent: float, residence_time: float): + """ + Calculate pump flow rate based on target residence time and SOCl2 equivalents + + Stream A: hexyldecanoic acid ----| + |----- REACTOR ---- IR ---- waste + Stream B: thionyl chloride ----| + + Args: + SOCl2_equivalent: + residence_time: + + Returns: dict with pump names and flow rate in ml/min + + 
""" + REACTOR_VOLUME = 10 # ml + HEXYLDECANOIC_ACID = 1.374 # Molar + SOCl2 = 13.768 # Molar + + total_flow_rate = REACTOR_VOLUME / residence_time # ml/min + + # Solving a system of 2 equations and 2 unknowns... + return { + "hexyldecanoic": ( + a := (total_flow_rate * SOCl2) + / (HEXYLDECANOIC_ACID * SOCl2_equivalent + SOCl2) + ), + "socl2": total_flow_rate - a, + } + + +def set_parameters(rates: dict, temperature: float): + with command_session() as sess: + sess.put( + socl2_endpoint + "/flow-rate", params={"rate": f"{rates['socl2']} ml/min"} + ) + sess.put( + hexyldecanoic_endpoint + "/flow-rate", + params={"rate": f"{rates['hexyldecanoic']} ml/min"}, + ) + + # Sets heater + heater_data = {"temperature": f"{temperature:.2f} °C"} + sess.put(r4_endpoint + "/temperature", params=heater_data) + + +def wait_stable_temperature(): + """Wait until the ste temperature has been reached.""" + logger.info("Waiting for the reactor temperature to stabilize") + while True: + with command_session() as sess: + r = sess.get(r4_endpoint + "/target-reached") + if r.text == "true": + logger.info("Stable temperature reached!") + break + else: + time.sleep(5) + + +def get_ir_once_stable(): + """Keeps acquiring IR spectra until changes are small, then returns the spectrum.""" + logger.info("Waiting for the IR spectrum to be stable") + with command_session() as sess: + # Wait for first spectrum to be available + while int(sess.get(flowir_endpoint + "/sample-count").text) == 0: + time.sleep(1) + # Get spectrum + previous_spectrum = pd.read_json( + sess.get(flowir_endpoint + "/sample/spectrum-treated").text + ) + previous_spectrum = previous_spectrum.set_index("wavenumber") + # In case the id has changed between requests (highly unlikely) + last_sample_id = int(sess.get(flowir_endpoint + "/sample-count").text) + + while True: + # Wait for a new spectrum + while True: + with command_session() as sess: + current_sample_id = int( + sess.get(flowir_endpoint + "/sample-count").text + ) + if 
current_sample_id > last_sample_id: + break + else: + time.sleep(2) + + with command_session() as sess: + current_spectrum = pd.read_json( + sess.get(flowir_endpoint + "/sample/spectrum-treated").text + ) + current_spectrum = current_spectrum.set_index("wavenumber") + + previous_peaks = integrate_peaks(previous_spectrum) + current_peaks = integrate_peaks(current_spectrum) + + delta_product_ratio = abs(current_peaks["product"] - previous_peaks["product"]) + logger.info(f"Current product ratio is {current_peaks['product']}") + logger.debug(f"Delta product ratio is {delta_product_ratio}") + + if delta_product_ratio < 0.002: # 0.2% error on ratio + logger.info("IR spectrum stable!") + return current_peaks + + previous_spectrum = current_spectrum + last_sample_id = current_sample_id + + +def integrate_peaks(ir_spectrum): + """Integrate areas from `limits.in` in the spectrum provided.""" + # List of peaks to be integrated + peak_list = np.recfromtxt("limits.in", encoding="UTF-8") + + peaks = {} + for name, start, end in peak_list: + # This is a common mistake since wavenumber are plot in reverse order + if start > end: + start, end = end, start + + df_view = ir_spectrum.loc[ + (start <= ir_spectrum.index) & (ir_spectrum.index <= end) + ] + peaks[name] = integrate.trapezoid(df_view["intensity"]) + logger.debug(f"Integral of {name} between {start} and {end} is {peaks[name]}") + + # Normalize integrals + + return {k: v / sum(peaks.values()) for k, v in peaks.items()} + + +def run_experiment( + SOCl2_equivalent: float, temperature: float, residence_time: float +) -> float: + """ + Runs one experiment with the provided conditions + + Args: + SOCl2_equivalent: SOCl2 to substrate ratio + temperature: in Celsius + residence_time: in minutes + + Returns: IR product area / (SM + product areas) + + """ + logger.info( + f"Starting experiment with {SOCl2_equivalent:.2f} eq SOCl2, {temperature:.1f} degC and {residence_time:.2f} min" + ) + # Set stand-by flow-rate first + 
set_parameters({"hexyldecanoic": "0.1 ml/min", "socl2": "10 ul/min"}, temperature) + wait_stable_temperature() + # Set actual flow rate once the set temperature has been reached + pump_flow_rates = calculate_flow_rates(SOCl2_equivalent, residence_time) + set_parameters(pump_flow_rates, temperature) + # Wait 1 residence time + time.sleep(residence_time * 60) + # Start monitoring IR + peaks = get_ir_once_stable() + + return peaks["product"] + + +if __name__ == "__main__": + print(get_ir_once_stable()) diff --git a/examples/k16valve.py b/examples/k16valve.py deleted file mode 100644 index af800ee1..00000000 --- a/examples/k16valve.py +++ /dev/null @@ -1,24 +0,0 @@ -import asyncio - -from flowchem import Knauer16PortValve - -DELAY = 60 * 5 # in sec -START_POSITION = 1 # First position for collection - - -async def main(): - valve = Knauer16PortValve(ip_address="192.168.1.122") - await valve.initialize() - - position = START_POSITION - - while True: - await valve.switch_to_position(str(position)) - await asyncio.sleep(DELAY) - position += 1 - if position > 16: - position = 1 - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/examples/limits.in b/examples/limits.in deleted file mode 100644 index 667ce938..00000000 --- a/examples/limits.in +++ /dev/null @@ -1,6 +0,0 @@ -#Peak Start Stop -SM -59.43 -59.72 -Unk-1 -56.15 -56.50 -product -56.60 -57.05 -Unk-6 -59.11 -59.38 - diff --git a/examples/nmr control + autointegration.py b/examples/nmr control + autointegration.py deleted file mode 100644 index 8ea66a61..00000000 --- a/examples/nmr control + autointegration.py +++ /dev/null @@ -1,122 +0,0 @@ -import asyncio -import glob -import os -from pathlib import Path -import numpy as np -import pandas as pd -import matplotlib.pyplot as plt -import itertools as it -from flowchem.components.devices.Magritek import Spinsolve, NMRSpectrum -from flowchem import Knauer16PortValve - - -Collector_DELAY = 60 * 30 # in sec -START_POSITION = 7 # First position for collection 
- - -async def Collector(): - valve = Knauer16PortValve(ip_address="192.168.1.122") - await valve.initialize() - - position = START_POSITION - - while True: - await valve.switch_to_position(str(position)) - await asyncio.sleep(Collector_DELAY) - position += 1 - if position > 16: - position = 1 - - -NMR_DELAY = 60 * 2 # in sec -counter = it.count() - -# read in the integration limits -peak_list = np.recfromtxt("limits.in", encoding="UTF-8") -NMR_forlder_month = r"C:\Projects\Data\2022\03" - - -async def Analysis(observed_result): - nmr = Spinsolve(host="BSMC-YMEF002121") - - while True: - path = await nmr.run_protocol( - "1D FLUORINE+", - { - "Number": 128, - "AcquisitionTime": 3.2, - "RepetitionTime": 2, - "PulseAngle": 90, - }, - ) - observed_time = (NMR_DELAY / 60 + 4) * next(counter) - if str(path) == ".": - # continue - dir_list_day = os.listdir(NMR_forlder_month) - dir_list_time = os.listdir(NMR_forlder_month / Path(dir_list_day[-1])) - path = NMR_forlder_month / Path(dir_list_day[-1]) / Path(dir_list_time[-1]) - print(path) - - # else: - peak_normalized_list = peak_aquire_process(path) - observed_result = observed_result.append( - pd.DataFrame( - peak_normalized_list, - index=["SM", "product", "side-P"], - columns=[observed_time], - ).T - ) - - # result - print(observed_result) - # save - observed_result.to_csv( - r"W:\BS-FlowChemistry\People\Wei-Hsin\Spinsolve\export_dataframe_0317_03.csv", - header=True, - ) - plt.figure() - observed_result.plot() - plt.legend(loc="best") - plt.savefig( - r"W:\BS-FlowChemistry\People\Wei-Hsin\Spinsolve\export_plot_0317_03.png" - ) - - await asyncio.sleep(NMR_DELAY) - - -def peak_aquire_process(path): - spectrum = NMRSpectrum(path) - spectrum.process() - - peak_sum_list = [] - - # loop over the integration limits - for name, start, end in peak_list: - min = spectrum.uc(start, "ppm") - max = spectrum.uc(end, "ppm") - if min > max: - min, max = max, min - # extract the peak - peak = spectrum.processed_data[min : max + 1] - 
peak_sum_list.append(peak.sum()) - - # peak normalization - y = sum(peak_sum_list) - peak_normalized_list = [i / y for i in peak_sum_list] - return peak_normalized_list - - -async def main(): - observed_time = 0 - observed_result = pd.DataFrame( - [1, 0, 0], index=["SM", "product", "side-P"], columns=[observed_time] - ).T - await asyncio.wait([Collector(), Analysis(observed_result)]) - # await asyncio.gather([Collector(),Analysis(observed_result)]) - - # await Analysis(observed_result) - # await Collector() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/examples/nmr/devices.toml b/examples/nmr/devices.toml new file mode 100644 index 00000000..30816fe4 --- /dev/null +++ b/examples/nmr/devices.toml @@ -0,0 +1,15 @@ + + + +[device.pump-b90e33] +type = "AzuraCompact" +ip_address = "192.168.1.119" # MAC address during discovery: 00:80:a3:b9:0e:33 +# max_pressure = "XX bar" +# min_pressure = "XX bar" +# +#[device.my-benchtop-nmr] # This is the valve identifier +#type = "Spinsolve" +#host = "127.0.0.1" # IP address of the PC running Spinsolve, 127.0.0.1 for local machine. Only necessary parameter. 
+#port = 13000 +#sample_name = "automated-experiment" +#solvent = "chloroform-d" diff --git a/examples/phidget.py b/examples/phidget.py deleted file mode 100644 index adbe8523..00000000 --- a/examples/phidget.py +++ /dev/null @@ -1,16 +0,0 @@ -import time -import matplotlib.pyplot as plt -from flowchem import PressureSensor - -p_sens = PressureSensor( - pressure_range=("0 bar", "25 bar"), vint_serial_number=627768, vint_channel=0 -) - -start_time = time.time() -x = [] -y = [] -while True: - x.append(time.time() - start_time) - y.append(p_sens.read_pressure()) - plt.scatter(x, y) - plt.show() diff --git a/flowchem/__init__.py b/flowchem/__init__.py deleted file mode 100644 index b130447c..00000000 --- a/flowchem/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# isort: skip_file -from .components import * -from .core import * -from .assemblies import * - -__version__ = "0.0.5" diff --git a/flowchem/assemblies/LTF_reactors/LTF_HTM_ST_3_1.pdf b/flowchem/assemblies/LTF_reactors/LTF_HTM_ST_3_1.pdf deleted file mode 100644 index 13cca67b..00000000 Binary files a/flowchem/assemblies/LTF_reactors/LTF_HTM_ST_3_1.pdf and /dev/null differ diff --git a/flowchem/assemblies/LTF_reactors/LTF_reactors.py b/flowchem/assemblies/LTF_reactors/LTF_reactors.py deleted file mode 100644 index e256b2e3..00000000 --- a/flowchem/assemblies/LTF_reactors/LTF_reactors.py +++ /dev/null @@ -1,57 +0,0 @@ -""" LTF reactors """ -from typing import Optional - -from flowchem.assemblies import Assembly -from flowchem.components.stdlib import Channel, YMixer - - -class LTF_HTM_ST_3_1(Assembly): - """An LTF HTM ST 3 1 reactor.""" - - def _validate(self, dry_run): - return True - - def __init__(self, name: Optional[str] = None): - super().__init__(name=name) - self.port = {"INLET_1", "INLET_2", "QUENCHER", "OUTLET"} - - inlet1 = Channel( - name="INLET_1", length="10 mm", volume="8 ul", material="glass" - ) - inlet2 = Channel( - name="INLET_2", length="10 mm", volume="8 ul", material="glass" - ) - mixer_inlet = 
YMixer() - reactor1 = Channel( - name="REACTOR", length="60 mm", volume="58 ul", material="glass" - ) - quencher = Channel( - name="QUENCHER", length="15 mm", volume="10 ul", material="glass" - ) - mixer_quencher = YMixer() - reactor2 = Channel( - name="REACTOR2", length="40 mm", volume="46 ul", material="glass" - ) - outlet = Channel( - name="OUTLET", length="10 mm", volume="28 ul", material="glass" - ) - - self.nodes = [ - inlet1, - inlet2, - mixer_inlet, - reactor1, - quencher, - mixer_quencher, - reactor2, - outlet, - ] - self.edges = [ - (inlet1, mixer_inlet), - (inlet2, mixer_inlet), - (mixer_inlet, reactor1), - (reactor1, mixer_quencher), - (quencher, mixer_quencher), - (mixer_quencher, reactor2), - (reactor2, outlet), - ] diff --git a/flowchem/assemblies/LTF_reactors/__init__.py b/flowchem/assemblies/LTF_reactors/__init__.py deleted file mode 100644 index dd78ba48..00000000 --- a/flowchem/assemblies/LTF_reactors/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .LTF_reactors import LTF_HTM_ST_3_1 diff --git a/flowchem/assemblies/README.md b/flowchem/assemblies/README.md deleted file mode 100644 index 3b280bc1..00000000 --- a/flowchem/assemblies/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# flowchem/assemblies - -Pre-defined reactor assembly, i.e. sub-graphs representing hardware that is logically composed by several non-separable components e.g. in a chip reactors. 
- -* LTF_reactors diff --git a/flowchem/assemblies/__init__.py b/flowchem/assemblies/__init__.py deleted file mode 100644 index f8e98cc3..00000000 --- a/flowchem/assemblies/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .assembly import Assembly -from .LTF_reactors import LTF_HTM_ST_3_1 - -__all__ = ["Assembly", "LTF_HTM_ST_3_1"] diff --git a/flowchem/assemblies/assembly.py b/flowchem/assemblies/assembly.py deleted file mode 100644 index ed57db93..00000000 --- a/flowchem/assemblies/assembly.py +++ /dev/null @@ -1,85 +0,0 @@ -from typing import TYPE_CHECKING, Sequence, Tuple - -from flowchem.components.properties import Component, MultiportComponentMixin - -if TYPE_CHECKING: - from flowchem.core.graph import DeviceGraph - - -class Assembly(MultiportComponentMixin, Component): - """A class representing a collection of components.""" - - nodes: Sequence[Component] - edges: Sequence[Tuple[Component, Component]] - - def _subcomponent_by_name(self, name: str) -> Component: - """Returns a component in nodes by its name.""" - for node in self.nodes: - if node.name == name: - return node - raise ValueError(f"No component named {name} in {self}") - - def explode(self, graph: "DeviceGraph"): - """ - Explode the assembly into its components in the provided graph. - The graph must already include the assembly as a node with all the connections defined. - """ - - assert self in graph.graph.nodes, "Assembly must be in the graph to explode it." - - # Convert edges to self into edges to self's components. - for from_component, to_component, attributes in graph.graph.in_edges( - self, data=True - ): - # If port attribute is unspecified, the connection is assumed to all the assembly's subcomponents. - # This should only happen for logical connections (e.g. temp control). 
- if attributes["to_port"] is None: - for subcomponent in self.nodes: - graph.graph.add_edge(from_component, subcomponent) - continue - - # New destination is the component with name matching the edge port on the assembly - new_to_component = self._subcomponent_by_name(attributes["to_port"]) - - # Update edge - just add a new one, the old one will be implicitly removed with graph.remove_node(self) - graph.add_connection( - origin=from_component, - destination=new_to_component, - origin_port=attributes.get("from_port", None), - ) - - for from_component, to_component, attributes in graph.graph.out_edges( - self, data=True - ): - assert ( - from_component is self - ), "Getting the edges pointing from the assembly." - - # New origin is the component with name matching the edge port on the assembly - new_from_component = self._subcomponent_by_name(attributes["from_port"]) - - # Update edge - just add a new one, the old one will be implicitly removed with graph.remove_node(self) - graph.add_connection( - origin=new_from_component, - destination=to_component, - destination_port=attributes.get("to_port", None), - ) - - # Updates component names. Ensures unique names in the graph. (Note: do not update those earlier: see above!) - for component in self.nodes: - component.name = f"{self.name}_{component.name}" - - # Remove assembly from graph (this also removes all edges) - graph.graph.remove_node(self) - - # Add nodes to graph - graph.add_device(self.nodes) - # Add edges to graph - for edge in self.edges: - graph.add_connection(edge[0], edge[1]) - - def _validate(self, dry_run): - """Components are valid for dry runs, but not for real runs.""" - raise NotImplementedError( - "Assembly object should be expanded into their components before run." 
- ) diff --git a/flowchem/cli.py b/flowchem/cli.py deleted file mode 100644 index 0f3953f0..00000000 --- a/flowchem/cli.py +++ /dev/null @@ -1,6 +0,0 @@ -""" Shell script executor """ - - -def main(): - """Main function""" - print("Here I should parse configuration and start server") diff --git a/flowchem/components/README.md b/flowchem/components/README.md deleted file mode 100644 index 0b9093a0..00000000 --- a/flowchem/components/README.md +++ /dev/null @@ -1,15 +0,0 @@ -# flowchem/components/devices - -This folder contains all the components that appear in a device graph. - -For real device graph, only components from **assemblies**, **stdlib** and **devices** should be used. - -This folder includes: -* Simple modular components such as mixers and tubing in [stdlib](stdlib/README.md) - -* actual hardware devices in [devices](devices/README.md) - -* Abstract base classes defining the properties that actual device component can implement in [properties](properties/README.md) - -* dummy object for testing purposes in [dummy](dummy/README.md) -(assemblies/README.md) diff --git a/flowchem/components/__init__.py b/flowchem/components/__init__.py deleted file mode 100644 index 31029ff7..00000000 --- a/flowchem/components/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# isort: skip_file -from . import properties, stdlib, dummy -from .devices import * diff --git a/flowchem/components/devices/Hamilton/ML600.py b/flowchem/components/devices/Hamilton/ML600.py deleted file mode 100644 index c539effc..00000000 --- a/flowchem/components/devices/Hamilton/ML600.py +++ /dev/null @@ -1,810 +0,0 @@ -""" -This module is used to control Hamilton ML600 syringe pump via the protocol1/RNO+. 
-""" - -from __future__ import annotations - -import string -import time -import warnings -from dataclasses import dataclass -from enum import IntEnum -from typing import TYPE_CHECKING, Optional, Set - -import aioserial -from loguru import logger - -from flowchem.components.stdlib import Pump -from flowchem.exceptions import DeviceError, InvalidConfiguration -from flowchem.units import flowchem_ureg - -if TYPE_CHECKING: - import pint - - -@dataclass -class Protocol1CommandTemplate: - """Class representing a pump command and its expected reply, but without target pump number""" - - command: str - optional_parameter: str = "" - execute_command: bool = True - - def to_pump( - self, address: int, command_value: str = "", argument_value: str = "" - ) -> Protocol1Command: - """Returns a Protocol11Command by adding to the template pump address and command arguments""" - return Protocol1Command( - target_pump_num=address, - command=self.command, - optional_parameter=self.optional_parameter, - command_value=command_value, - argument_value=argument_value, - execute_command=self.execute_command, - ) - - -@dataclass -class Protocol1Command(Protocol1CommandTemplate): - """Class representing a pump command and its expected reply""" - - PUMP_ADDRESS = dict(enumerate(string.ascii_lowercase[:16], start=1)) - # i.e. PUMP_ADDRESS = {1: 'a', 2: 'b', 3: 'c', 4: 'd', ..., 16: 'p'} - # Note ':' is used for broadcast within the daisy chain. 
- - target_pump_num: int = 1 - command_value: Optional[str] = None - argument_value: Optional[str] = None - - def compile(self) -> bytes: - """Create actual command byte by prepending pump address to command and appending executing command.""" - assert self.target_pump_num in range(1, 17) - if not self.command_value: - self.command_value = "" - - compiled_command = ( - f"{self.PUMP_ADDRESS[self.target_pump_num]}" - f"{self.command}{self.command_value}" - ) - - if self.argument_value: - compiled_command += f"{self.optional_parameter}{self.argument_value}" - # Add execution flag at the end - if self.execute_command is True: - compiled_command += "R" - - return (compiled_command + "\r").encode("ascii") - - -class HamiltonPumpIO: - """Setup with serial parameters, low level IO""" - - ACKNOWLEDGE = chr(6) - NEGATIVE_ACKNOWLEDGE = chr(21) - DEFAULT_CONFIG = { - "timeout": 0.1, - "baudrate": 9600, - "parity": aioserial.PARITY_EVEN, - "stopbits": aioserial.STOPBITS_ONE, - "bytesize": aioserial.SEVENBITS, - } - - def __init__(self, aio_port: aioserial.Serial): - """ - Initialize communication on the serial port where the pumps are located and initialize them - Args: - aio_port: aioserial.Serial() object - """ - self._serial = aio_port - - # These will be set by `HamiltonPumpIO.initialize()` - self._initialized = False - self.num_pump_connected: Optional[int] = None - - @classmethod - def from_config(cls, config): - """Create HamiltonPumpIO from config.""" - # Merge default settings, including serial, with provided ones. 
- configuration = dict(HamiltonPumpIO.DEFAULT_CONFIG, **config) - - try: - serial_object = aioserial.AioSerial(**configuration) - except aioserial.SerialException as serial_exception: - raise InvalidConfiguration( - f"Cannot connect to the pump on the port <{configuration.get('port')}>" - ) from serial_exception - - return cls(serial_object) - - async def initialize(self, hw_initialization: bool = True): - """ - Ensure connection with pump + initialize - - Args: - hw_initialization: Whether each pump has to be initialized. Note that this might be undesired! - """ - # This has to be run after each power cycle to assign addresses to pumps - self.num_pump_connected = await self._assign_pump_address() - if hw_initialization: - await self._hw_init() - self._initialized = True - - async def _assign_pump_address(self) -> int: - """ - To be run on init, auto assign addresses to pumps based on their position in the daisy chain. - A custom command syntax with no addresses is used here so read and write has been rewritten - """ - try: - await self._write_async("1a\r".encode("ascii")) - except aioserial.SerialException as e: - raise InvalidConfiguration from e - - reply = await self._read_reply_async() - if not reply or reply[:1] != "1": - - raise InvalidConfiguration(f"No pump found on {self._serial.port}") - # reply[1:2] should be the address of the last pump. However, this does not work reliably. - # So here we enumerate the pumps explicitly instead - last_pump = 0 - for pump_num, address in Protocol1Command.PUMP_ADDRESS.items(): - await self._write_async(f"{address}UR\r".encode("ascii")) - if "NV01" in await self._read_reply_async(): - last_pump = pump_num - else: - break - logger.debug(f"Found {last_pump} pumps on {self._serial.port}!") - return int(last_pump) - - async def _hw_init(self): - """Send to all pumps the HW initialization command (i.e. 
homing)""" - await self._write_async(b":XR\r") # Broadcast: initialize + execute - # Note: no need to consume reply here because there is none (since we are using broadcast) - - async def _write_async(self, command: bytes): - """Writes a command to the pump""" - if not self._initialized: - raise DeviceError( - "Pump not initialized!\n" - "Have you called `initialize()` after object creation?" - ) - await self._serial.write_async(command) - logger.debug(f"Command {repr(command)} sent!") - - async def _read_reply_async(self) -> str: - """Reads the pump reply from serial communication""" - reply_string = await self._serial.readline_async() - logger.debug(f"Reply received: {reply_string}") - return reply_string.decode("ascii") - - @staticmethod - def parse_response(response: str) -> str: - """Split a received line in its components: success, reply""" - status = response[:1] - assert status in ( - HamiltonPumpIO.ACKNOWLEDGE, - HamiltonPumpIO.NEGATIVE_ACKNOWLEDGE, - ), "Invalid status reply!" - - if status == HamiltonPumpIO.ACKNOWLEDGE: - logger.debug("Positive acknowledge received") - else: - logger.warning("Negative acknowledge received") - warnings.warn( - "Negative acknowledge reply received from pump: check command validity!" - ) - - return response[1:].rstrip() - - def reset_buffer(self): - """Reset input buffer before reading from serial. In theory not necessary if all replies are consumed...""" - self._serial.reset_input_buffer() - - async def write_and_read_reply_async(self, command: Protocol1Command) -> str: - """Main HamiltonPumpIO method. - Sends a command to the pump, read the replies and returns it, optionally parsed""" - self.reset_buffer() - await self._write_async(command.compile()) - response = await self._read_reply_async() - - if not response: - raise InvalidConfiguration( - f"No response received from pump, check pump address! 
" - f"(Currently set to {command.target_pump_num})" - ) - - return self.parse_response(response) - - @property - def name(self) -> str: - """This is used to provide a nice-looking default name to pumps based on their serial connection.""" - try: - return self._serial.name - except AttributeError: - return "" - - -class ML600(Pump): - """ML600 implementation according to docs. Tested on 61501-01 (single syringe). - - From docs: - To determine the volume dispensed per step the total syringe volume is divided by - 48,000 steps. All Hamilton instrument syringes are designed with a 60 mm stroke - length and the Microlab 600 is designed to move 60 mm in 48,000 steps. For - example to dispense 9 mL from a 10 mL syringe you would determine the number of - steps by multiplying 48000 steps (9 mL/10 mL) to get 43,200 steps. - """ - - # This class variable is used for daisy chains (i.e. multiple pumps on the same serial connection). Details below. - _io_instances: Set[HamiltonPumpIO] = set() - # The mutable object (a set) as class variable creates a shared state across all the instances. - # When several pumps are daisy-chained on the same serial port, they need to all access the same Serial object, - # because access to the serial port is exclusive by definition (also locking there ensure thread safe operations). - # FYI it is a borg idiom https://www.oreilly.com/library/view/python-cookbook/0596001673/ch05s23.html - - class ValvePositionName(IntEnum): - """Maps valve position to the corresponding number""" - - POSITION_1 = 1 - # POSITION_2 = 2 - POSITION_3 = 3 - INPUT = 9 # 9 is default inlet, i.e. 1 - OUTPUT = 10 # 10 is default outlet, i.e. 3 - WASH = 11 # 11 is default wash, i.e. undefined - - # Only Hamilton syringes are compatible w/ the ML600, and they come on a limited set of sizes. 
(Values in ml) - VALID_SYRINGE_VOLUME = { - 0.01, - 0.025, - 0.05, - 0.1, - 0.25, - 0.5, - 1.0, - 2.5, - 5.0, - 10.0, - 25.0, - 50.0, - } - - def __init__( - self, - pump_io: HamiltonPumpIO, - syringe_volume: str, - address: int = 1, - name: Optional[str] = None, - ): - """ - Default constructor, needs an HamiltonPumpIO object. See from_config() class method for config-based init. - - Args: - pump_io: An HamiltonPumpIO w/ serial connection to the daisy chain w/ target pump. - syringe_volume: Volume of the syringe used, either a Quantity or number in ml. - address: number of pump in array, 1 for first one, auto-assigned on init based on position. - name: 'cause naming stuff is important. - """ - super().__init__(name) - # HamiltonPumpIO - self.pump_io = pump_io - ML600._io_instances.add(self.pump_io) # See above for details. - - # Pump address is the pump sequence number if in chain. Count starts at 1, default. - self.address = int(address) - - # The pump name is used for logs and error messages. - self.name = f"Pump {self.pump_io.name}:{address}" if name is None else name - - # Syringe pumps only perform linear movement, and the volume displaced is function of the syringe loaded. 
- try: - self.syringe_volume = flowchem_ureg(syringe_volume) - except AttributeError as attribute_error: - raise InvalidConfiguration( - f"{self.__class__.__name__}:{self.name} " - f"Syringe volume must be a string parsable as pint.Quantity!\n" - f"It is now a {type(syringe_volume)}: {syringe_volume} " - ) from attribute_error - - if self.syringe_volume.m_as("ml") not in ML600.VALID_SYRINGE_VOLUME: - raise InvalidConfiguration( - f"The specified syringe volume ({syringe_volume}) does not seem to be valid!\n" - f"The volume in ml has to be one of {ML600.VALID_SYRINGE_VOLUME}" - ) - - self._steps_per_ml = flowchem_ureg.Quantity( - f"{48000 / self.syringe_volume} step/ml" - ) - self._offset_steps = 100 # Steps added to each absolute move command, to decrease wear and tear at volume = 0 - self._max_vol = ( - (48000 - self._offset_steps) * flowchem_ureg.step / self._steps_per_ml - ) - - @classmethod - def from_config(cls, **config): - """This class method is used to create instances via config file by the server for HTTP interface.""" - # Many pump can be present on the same serial port with different addresses. - # This shared list of HamiltonPumpIO objects allow shared state in a borg-inspired way, avoiding singletons - # This is only relevant to programmatic instantiation, i.e. when from_config() is called per each pump from a - # config file, as it is the case in the HTTP server. - # HamiltonPump_IO() manually instantiated are not accounted for. 
- pumpio = None - for obj in ML600._io_instances: - # noinspection PyProtectedMember - if obj._serial.port == config.get("port"): - pumpio = obj - break - - # If not existing serial object are available for the port provided, create a new one - if pumpio is None: - # Remove ML600-specific keys to only have HamiltonPumpIO's kwargs - config_for_pumpio = { - k: v - for k, v in config.items() - if k not in ("syringe_volume", "address", "name") - } - pumpio = HamiltonPumpIO.from_config(config_for_pumpio) - - return cls( - pumpio, - syringe_volume=config.get("syringe_volume"), - address=config.get("address"), - name=config.get("name"), - ) - - async def initialize(self, hw_init=False, init_speed: str = "200 sec / stroke"): - """Must be called after init before anything else.""" - # Test connectivity by querying the pump's firmware version - fw_cmd = Protocol1CommandTemplate(command="U").to_pump(self.address) - firmware_version = await self.pump_io.write_and_read_reply_async(fw_cmd) - logger.info( - f"Connected to Hamilton ML600 {self.name} - FW version: {firmware_version}!" - ) - - if hw_init: - await self.initialize_pump(speed=init_speed) - - async def send_command_and_read_reply( - self, - command_template: Protocol1CommandTemplate, - command_value="", - argument_value="", - ) -> str: - """Sends a command based on its template by adding pump address and parameters, returns reply""" - return await self.pump_io.write_and_read_reply_async( - command_template.to_pump(self.address, command_value, argument_value) - ) - - def _validate_speed(self, speed_value: Optional[str]) -> str: - """Given a speed (seconds/stroke) returns a valid value for it, and a warning if out of bounds.""" - - # Validated speeds are used as command argument, with empty string being the default for None - if speed_value is None: - return "" - - speed = flowchem_ureg(speed_value) - - # Alert if out of bounds but don't raise exceptions, according to general philosophy. 
- # Target flow rate too high - if speed < flowchem_ureg("2 sec/stroke"): - speed = flowchem_ureg("2 sec/stroke") - warnings.warn( - f"Desired speed ({speed}) is unachievable!" - f"Set to {self._seconds_per_stroke_to_flowrate(speed)}" - f"Wrong units? A bigger syringe is needed?" - ) - - # Target flow rate too low - if speed > flowchem_ureg("3692 sec/stroke"): - speed = flowchem_ureg("3692 sec/stroke") - warnings.warn( - f"Desired speed ({speed}) is unachievable!" - f"Set to {self._seconds_per_stroke_to_flowrate(speed)}" - f"Wrong units? A smaller syringe is needed?" - ) - - return str(round(speed.m_as("sec / stroke"))) - - async def initialize_pump(self, speed: Optional[str] = None): - """ - Initialize both syringe and valve - speed: 2-3692 in seconds/stroke - """ - init_cmd = Protocol1CommandTemplate(command="X", optional_parameter="S") - return await self.send_command_and_read_reply( - init_cmd, argument_value=self._validate_speed(speed) - ) - - async def initialize_valve(self): - """Initialize valve only""" - return await self.send_command_and_read_reply( - Protocol1CommandTemplate(command="LX") - ) - - async def initialize_syringe(self, speed: Optional[str] = None): - """ - Initialize syringe only - speed: 2-3692 in seconds/stroke - """ - init_syringe_cmd = Protocol1CommandTemplate( - command="X1", optional_parameter="S" - ) - return await self.send_command_and_read_reply( - init_syringe_cmd, argument_value=self._validate_speed(speed) - ) - - def flowrate_to_seconds_per_stroke(self, flowrate: str): - """ - Convert flow rates to steps per seconds - - To determine the volume dispensed per step the total syringe volume is divided by - 48,000 steps. All Hamilton instrument syringes are designed with a 60 mm stroke - length and the Microlab 600 is designed to move 60 mm in 48,000 steps. For - example to dispense 9 mL from a 10 mL syringe you would determine the number of - steps by multiplying 48000 steps (9 mL/10 mL) to get 43,200 steps. 
- """ - flowrate = flowchem_ureg(flowrate) - flowrate_in_steps_sec = flowrate * self._steps_per_ml - seconds_per_stroke = (1 / flowrate_in_steps_sec).to("second/stroke") - - return self._validate_speed(str(seconds_per_stroke)) - - def _seconds_per_stroke_to_flowrate( - self, second_per_stroke: pint.Quantity - ) -> float: - """The inverse of flowrate_to_seconds_per_stroke(). Only internal use.""" - flowrate = 1 / (second_per_stroke * self._steps_per_ml) - return flowrate.to("ml/min") - - def _volume_to_step_position(self, volume_w_units: str) -> int: - """Converts a volume to a step position.""" - # noinspection PyArgumentEqualDefault - volume = flowchem_ureg(volume_w_units) - steps = volume * self._steps_per_ml - return round(steps.m_as("steps")) + self._offset_steps - - async def _to_step_position(self, position: int, speed: str = ""): - """Absolute move to step position.""" - abs_move_cmd = Protocol1CommandTemplate(command="M", optional_parameter="S") - return await self.send_command_and_read_reply( - abs_move_cmd, str(position), self._validate_speed(speed) - ) - - async def get_current_volume(self) -> str: - """Return current syringe position in ml.""" - syringe_pos = await self.send_command_and_read_reply( - Protocol1CommandTemplate(command="YQP") - ) - current_steps = (int(syringe_pos) - self._offset_steps) * flowchem_ureg.step - return str(current_steps / self._steps_per_ml) - - async def to_volume(self, target_volume: str, speed: str = ""): - """Absolute move to volume provided.""" - await self._to_step_position( - self._volume_to_step_position(target_volume), speed - ) - logger.debug(f"Pump {self.name} set to volume {target_volume} at speed {speed}") - - async def pause(self): - """Pause any running command.""" - return await self.send_command_and_read_reply( - Protocol1CommandTemplate(command="K", execute_command=False) - ) - - async def resume(self): - """Resume any paused command.""" - return await self.send_command_and_read_reply( - 
Protocol1CommandTemplate(command="$", execute_command=False) - ) - - async def stop(self): - """Stops and abort any running command.""" - await self.pause() - return await self.send_command_and_read_reply( - Protocol1CommandTemplate(command="V", execute_command=False) - ) - - async def wait_until_idle(self): - """Returns when no more commands are present in the pump buffer.""" - logger.debug(f"ML600 pump {self.name} wait until idle...") - while self.is_busy: - time.sleep(0.1) - logger.debug(f"...ML600 pump {self.name} idle now!") - - async def version(self) -> str: - """Returns the current firmware version reported by the pump.""" - return await self.send_command_and_read_reply( - Protocol1CommandTemplate(command="U") - ) - - async def is_idle(self) -> bool: - """Checks if the pump is idle (actually check if the last command has ended).""" - return ( - await self.send_command_and_read_reply( - Protocol1CommandTemplate(command="F") - ) - == "Y" - ) - - async def is_busy(self) -> bool: - """Pump is not idle.""" - return not await self.is_idle() - - async def get_valve_position(self) -> ValvePositionName: - """Represent the position of the valve: getter returns Enum, setter needs Enum.""" - valve_pos = await self.send_command_and_read_reply( - Protocol1CommandTemplate(command="LQP") - ) - return ML600.ValvePositionName(int(valve_pos)) - - async def set_valve_position( - self, target_position: ValvePositionName, wait_for_movement_end: bool = True - ): - """Set valve position. wait_for_movement_end is defaulted to True as it is a common mistake not to wait...""" - valve_by_name_cw = Protocol1CommandTemplate(command="LP0") - await self.send_command_and_read_reply( - valve_by_name_cw, command_value=str(int(target_position)) - ) - logger.debug(f"{self.name} valve position set to {target_position.name}") - if wait_for_movement_end: - await self.wait_until_idle() - - async def get_return_steps(self) -> int: - """Return steps' getter. 
Applied to the end of a downward syringe movement to removes mechanical slack.""" - steps = await self.send_command_and_read_reply( - Protocol1CommandTemplate(command="YQN") - ) - return int(steps) - - async def set_return_steps(self, target_steps: int): - """Return steps' setter. Applied to the end of a downward syringe movement to removes mechanical slack.""" - set_return_steps_cmd = Protocol1CommandTemplate(command="YSN") - await self.send_command_and_read_reply( - set_return_steps_cmd, command_value=str(int(target_steps)) - ) - - async def pickup( - self, - volume: str, - from_valve: ValvePositionName, - flowrate: str = "1 ml/min", - wait: bool = False, - ): - """Get volume from valve specified at given flowrate.""" - cur_vol = flowchem_ureg(await self.get_current_volume()) - if (cur_vol + volume) > self._max_vol: - warnings.warn( - f"Cannot withdraw {volume} given the current syringe position {cur_vol} and a " - f"syringe volume of {self.syringe_volume}" - ) - return - - # Valve to position specified - await self.set_valve_position(from_valve) - # Move up to target volume - await self.to_volume( - str(cur_vol + volume), - speed=self.flowrate_to_seconds_per_stroke(flowrate), - ) - - if wait: - await self.wait_until_idle() - - async def deliver( - self, - volume: str, - to_valve: ValvePositionName, - flowrate: str, - wait: bool = False, - ): - """Delivers volume to valve specified at given flow rate.""" - cur_vol = flowchem_ureg(await self.get_current_volume()) - if volume > cur_vol: - warnings.warn( - f"Cannot deliver {volume} given the current syringe position {cur_vol}!" 
- ) - return - - # Valve to position specified - await self.set_valve_position(to_valve) - # Move up to target volume - await self.to_volume( - str(cur_vol - volume), - speed=self.flowrate_to_seconds_per_stroke(flowrate), - ) - - if wait: - await self.wait_until_idle() - - async def transfer( - self, - volume: str, - from_valve: ValvePositionName, - to_valve: ValvePositionName, - flowrate_in: str = "1 ml/min", - flowrate_out: str = "1 ml/min", - wait: bool = False, - ): - """Move liquid from place to place.""" - await self.pickup(volume, from_valve, flowrate_in, wait=True) - await self.deliver(volume, to_valve, flowrate_out, wait=wait) - - def get_router(self): - """Creates an APIRouter for this object.""" - from fastapi import APIRouter - - router = APIRouter() - router.add_api_route("/firmware-version", self.version, methods=["GET"]) - router.add_api_route("/initialize/pump", self.initialize_pump, methods=["PUT"]) - router.add_api_route( - "/initialize/valve", self.initialize_valve, methods=["PUT"] - ) - router.add_api_route( - "/initialize/syringe", self.initialize_syringe, methods=["PUT"] - ) - router.add_api_route("/pause", self.pause, methods=["PUT"]) - router.add_api_route("/resume", self.resume, methods=["PUT"]) - router.add_api_route("/resume", self.resume, methods=["PUT"]) - router.add_api_route("/stop", self.stop, methods=["PUT"]) - router.add_api_route("/version", self.stop, methods=["PUT"]) - router.add_api_route("/is-idle", self.is_idle, methods=["GET"]) - router.add_api_route("/is-busy", self.is_busy, methods=["GET"]) - router.add_api_route( - "/valve/position", self.get_valve_position, methods=["GET"] - ) - router.add_api_route( - "/valve/position", self.set_valve_position, methods=["PUT"] - ) - router.add_api_route( - "/syringe/volume", self.get_current_volume, methods=["GET"] - ) - router.add_api_route("/syringe/volume", self.to_volume, methods=["PUT"]) - router.add_api_route( - "/syringe/return-steps", self.get_return_steps, methods=["GET"] - ) - 
router.add_api_route( - "/syringe/return-steps", self.set_return_steps, methods=["PUT"] - ) - router.add_api_route("/pickup", self.pickup, methods=["PUT"]) - router.add_api_route("/deliver", self.deliver, methods=["PUT"]) - # router.add_api_route("/transfer", self.transfer, methods=["PUT"]) # Might go in timeout - - return router - - -# class TwoPumpAssembly(Thread): -# """ -# Thread to control two pumps and have them generating a continuous flow. -# Note that the pumps should not be accessed directly when used in a TwoPumpAssembly! -# -# Notes: this needs to start a thread owned by the instance to control the pumps. -# The async version of this being possibly simpler w/ tasks and callback :) -# """ -# -# def __init__( -# self, pump1: ML600, pump2: ML600, target_flowrate: str, init_seconds: int = 10 -# ): -# super(TwoPumpAssembly, self).__init__() -# self._p1 = pump1 -# self._p2 = pump2 -# self.daemon = True -# self.cancelled = threading.Event() -# self._flowrate = ensure_quantity(target_flowrate, "ml/min") -# logger = logging.getLogger(__name__).getChild("TwoPumpAssembly") -# # How many seconds per stroke for first filling? application dependent, as fast as possible, but not too much. -# self.init_secs = init_seconds -# -# # While in principle possible, using syringes of different volumes is discouraged, hence... -# assert ( -# pump1.syringe_volume == pump2.syringe_volume -# ), "Syringes w/ equal volume are needed for continuous flow!" -# -# async def initialize(self): -# """ Initialize multi-pump """ -# await self._p1.initialize() -# await self._p2.initialize() -# -# @property -# def flowrate(self): -# """ Returns/sets flowrate. """ -# return self._flowrate -# -# @flowrate.setter -# def flowrate(self, target_flowrate): -# if target_flowrate == 0: -# warnings.warn( -# "Cannot set flowrate to 0! Pump stopped instead, restart previous flowrate with resume!" 
-# ) -# self.cancel() -# else: -# self._flowrate = target_flowrate -# -# # This will stop current movement, make wait_for_both_pumps() return and move on w/ updated speed -# self._p1.stop() -# self._p2.stop() -# -# async def wait_for_both_pumps(self): -# """ Custom waiting method to wait a shorter time than normal (for better sync) """ -# while await self._p1.is_busy() or await self._p2.is_busy(): -# await asyncio.sleep(0.01) # 10ms sounds reasonable to me -# logger.debug("Both pumps are ready!") -# -# def _speed(self): -# speed = self._p1.flowrate_to_seconds_per_stroke(self._flowrate) -# logger.debug(f"Speed calculated as {speed}") -# return speed -# -# async def execute_stroke( -# self, pump_full: ML600, pump_empty: ML600, speed_s_per_stroke: int -# ): -# """ Perform a cycle (1 syringe stroke) in the continuous-operation mode. See also run(). """ -# # Logic is a bit complex here to ensure pause-less pumping -# # This needs the pump that withdraws to move faster than the pumping one. no way around. -# -# # First start pumping with the full syringe already prepared -# pump_full.to_volume(0, speed=speed_s_per_stroke) -# logger.debug("Pumping...") -# # Then start refilling the empty one -# pump_empty.set_valve_position(pump_empty.ValvePositionName.INPUT) -# # And do that fast so that we finish refill before the pumping is over -# pump_empty.to_volume(pump_empty.syringe_volume, speed=speed_s_per_stroke - 5) -# pump_empty.wait_until_idle() -# # This allows us to set the right pump position on the pump that was empty (not full and ready for next cycle) -# pump_empty.set_valve_position(pump_empty.ValvePositionName.OUTPUT) -# pump_full.wait_until_idle() -# -# def run(self): -# """Overloaded Thread.run, runs the update -# method once per every 10 milliseconds.""" -# # First initialize with init_secs speed... 
-# self._p1.to_volume(self._p1.syringe_volume, speed=self.init_secs) -# self._p1.wait_until_idle() -# self._p1.valve_position = self._p1.ValvePositionName.OUTPUT -# logger.info("Pumps initialized for continuous pumping!") -# -# while True: -# while not self.cancelled.is_set(): -# self.execute_stroke( -# self._p1, self._p2, speed_s_per_stroke=self._speed() -# ) -# self.execute_stroke( -# self._p2, self._p1, speed_s_per_stroke=self._speed() -# ) -# -# def cancel(self): -# """ Cancel continuous-pumping assembly """ -# self.cancelled.set() -# self._p1.stop() -# self._p2.stop() -# -# def resume(self): -# """ Resume continuous-pumping assembly """ -# self.cancelled.clear() -# -# def stop_and_return_solution_to_container(self): -# """ Let´s not waste our precious stock solutions ;) """ -# self.cancel() -# logger.info( -# "Returning the solution currently loaded in the syringes back to the inlet.\n" -# "Make sure the container is not removed yet!" -# ) -# # Valve to input -# self._p1.valve_position = self._p1.ValvePositionName.INPUT -# self._p2.valve_position = self._p2.ValvePositionName.INPUT -# self.wait_for_both_pumps() -# # Volume to 0 with the init speed (supposedly safe for this application) -# self._p1.to_volume(0, speed=self.init_secs) -# self._p2.to_volume(0, speed=self.init_secs) -# self.wait_for_both_pumps() -# logger.info("Pump flushing completed!") - - -if __name__ == "__main__": - import asyncio - - conf = { - "port": "COM12", - "address": 1, - "name": "test1", - "syringe_volume": 5, - } - pump1 = ML600.from_config(**conf) - asyncio.run(pump1.initialize_pump()) diff --git a/flowchem/components/devices/Hamilton/ML600_finder.py b/flowchem/components/devices/Hamilton/ML600_finder.py deleted file mode 100644 index 14c6a181..00000000 --- a/flowchem/components/devices/Hamilton/ML600_finder.py +++ /dev/null @@ -1,41 +0,0 @@ -""" -This module is used to discover the serial address of any ML600 connected to the PC. 
-""" -import asyncio - -import aioserial -import serial.tools.list_ports -from loguru import logger - -from flowchem.components.devices.Hamilton.ML600 import ( - HamiltonPumpIO, - InvalidConfiguration, -) - - -def ml600_finder(): - """Try to initialize an ML600 on every available COM port.""" - port_available = [comport.device for comport in serial.tools.list_ports.comports()] - - # Ports connected to an ML600-looking device - valid_ports = set() - - for serial_port in port_available: - try: - print(f"Looking for pump on {serial_port}...") - link = HamiltonPumpIO(aioserial.AioSerial(url=serial_port, timeout=0.1)) - asyncio.run(link.initialize()) - logger.info(f"{link.num_pump_connected} pump(s) found on <{serial_port}>") - valid_ports.add(serial_port) - except InvalidConfiguration: - logger.debug(f"No pump found on {serial_port}") - - return valid_ports - - -if __name__ == "__main__": - ml600_pumps = ml600_finder() - if len(ml600_pumps) > 0: - print(f"The following serial port are connected to ML600: {ml600_pumps}") - else: - print("No ML600 pump found") diff --git a/flowchem/components/devices/Hamilton/__init__.py b/flowchem/components/devices/Hamilton/__init__.py deleted file mode 100644 index d11be9e4..00000000 --- a/flowchem/components/devices/Hamilton/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -""" Hamilton devices """ -from .ML600 import ML600, HamiltonPumpIO - -__all__ = ["ML600", "HamiltonPumpIO"] diff --git a/flowchem/components/devices/Harvard_Apparatus/Elite11_finder.py b/flowchem/components/devices/Harvard_Apparatus/Elite11_finder.py deleted file mode 100644 index 5f974877..00000000 --- a/flowchem/components/devices/Harvard_Apparatus/Elite11_finder.py +++ /dev/null @@ -1,45 +0,0 @@ -""" -This module is used to discover the serial address of any ML600 connected to the PC. 
-""" -import serial.tools.list_ports -from loguru import logger - -from flowchem.components.devices.Harvard_Apparatus.HA_elite11 import ( - HarvardApparatusPumpIO, -) -from flowchem.exceptions import InvalidConfiguration - - -# noinspection PyProtectedMember -def elite11_finder(): - """Try to initialize an Elite11 on every available COM port.""" - port_available = [comport.device for comport in serial.tools.list_ports.comports()] - - # Ports connected to an elite11-looking device - valid_ports = set() - - for serial_port in port_available: - try: - print(f"Looking for pump on {serial_port}...") - link = HarvardApparatusPumpIO(port=serial_port) - link._serial.write("\r\n".encode("ascii")) - if link._serial.readline() == b"\n": - valid_ports.add(serial_port) - logger.info(f"Pump found on <{serial_port}>") - pump = link._serial.readline().decode("ascii") - logger.info(f"Pump address is {pump[0:2]}!") - print(f"Found a pump with address {pump[0:2]} on {serial_port}!") - else: - logger.debug(f"No pump found on {serial_port}") - except InvalidConfiguration: - pass - - return valid_ports - - -if __name__ == "__main__": - elite11_pumps = elite11_finder() - if len(elite11_pumps) > 0: - print(f"The following serial port are connected to Elite11: {elite11_pumps}") - else: - print("No Elite11 pump found") diff --git a/flowchem/components/devices/Harvard_Apparatus/HA_elite11.py b/flowchem/components/devices/Harvard_Apparatus/HA_elite11.py deleted file mode 100644 index 2c60f4b2..00000000 --- a/flowchem/components/devices/Harvard_Apparatus/HA_elite11.py +++ /dev/null @@ -1,863 +0,0 @@ -""" -This module is used to control Harvard Apparatus Elite 11 syringe pump via the 11 protocol. 
-""" - -from __future__ import annotations - -import asyncio -import warnings -from dataclasses import dataclass -from enum import Enum -from typing import List, Optional, Set, Tuple - -import aioserial -from loguru import logger -from pydantic import BaseModel - -from flowchem.components.stdlib import Pump -from flowchem.exceptions import DeviceError, InvalidConfiguration -from flowchem.units import flowchem_ureg - - -def _parse_version(version_text: str) -> Tuple[int, int, int]: - """Extract semver from Elite11 version string, e.g. '11 ELITE I/W Single 3.0.4""" - - numbers = version_text.split(" ")[-1] - version_digits = numbers.split(".") - return int(version_digits[0]), int(version_digits[1]), int(version_digits[2]) - - -class PumpInfo(BaseModel): - """Detailed pump info.""" - - pump_type: str - pump_description: str - infuse_only: bool - - @classmethod - def parse_pumpstring(cls, metrics_text: List[str]): - """Parse pump response string into model.""" - pump_type, pump_description, infuse_only = "", "", True - for line in metrics_text: - if line.startswith("Pump type "): - pump_type = line[9:].strip() - elif line.startswith("Pump type string"): - pump_description = line[16:].strip() - elif line.startswith("Direction"): - if "withdraw" in line: - infuse_only = False - else: - infuse_only = True - return cls( - pump_type=pump_type, - pump_description=pump_description, - infuse_only=infuse_only, - ) - - -@dataclass -class Protocol11Command: - """Class representing a pump command and its expected reply""" - - command_string: str - target_pump_address: int - command_argument: str - - def compile(self) -> str: - """ - Create actual command byte by prepending pump address to command. 
- """ - assert 0 <= self.target_pump_address < 99 - return ( - str(self.target_pump_address) - + self.command_string - + " " - + self.command_argument - + "\r\n" - ) - - -class PumpStatus(Enum): - """Possible pump statuses, as defined by the reply prompt.""" - - IDLE = ":" - INFUSING = ">" - WITHDRAWING = "<" - TARGET_REACHED = "T" - STALLED = "*" - - -class HarvardApparatusPumpIO: - """Setup with serial parameters, low level IO""" - - DEFAULT_CONFIG = {"timeout": 0.1, "baudrate": 115200} - - # noinspection PyPep8 - def __init__(self, port: str, **kwargs): - # Merge default settings, including serial, with provided ones. - configuration = dict(HarvardApparatusPumpIO.DEFAULT_CONFIG, **kwargs) - - try: - self._serial = aioserial.AioSerial(port, **configuration) - except aioserial.SerialException as serial_exception: - logger.error(f"Cannot connect to the Pump on the port <{port}>") - raise InvalidConfiguration( - f"Cannot connect to the Pump on the port <{port}>" - ) from serial_exception - - async def _write(self, command: Protocol11Command): - """Writes a command to the pump""" - command_msg = command.compile() - try: - await self._serial.write_async(command_msg.encode("ascii")) - except aioserial.SerialException as serial_exception: - raise InvalidConfiguration from serial_exception - logger.debug(f"Sent {repr(command_msg)}!") - - async def _read_reply(self) -> List[str]: - """Reads the pump reply from serial communication""" - reply_string = [] - - for line in await self._serial.readlines_async(): - reply_string.append(line.decode("ascii").strip()) - logger.debug(f"Received {repr(line)}!") - - # First line is usually empty, but some prompts such as T* actually leak into this line sometimes. 
- reply_string.pop(0) - - # remove empty strings from reply_string - reply_string = [x for x in reply_string if x] - - return reply_string - - @staticmethod - def parse_response_line(line: str) -> Tuple[int, PumpStatus, str]: - """Split a received line in its components: address, prompt and reply body""" - assert len(line) >= 3 - pump_address = int(line[0:2]) - status = PumpStatus(line[2:3]) - - # Target reached is the only two-character status - if status is PumpStatus.TARGET_REACHED: - return pump_address, status, line[4:] - return pump_address, status, line[3:] - - @staticmethod - def parse_response( - response: List[str], - ) -> Tuple[List[int], List[PumpStatus], List[str]]: - """Aggregates address prompt and reply body from all the reply lines and return them.""" - parsed_lines = list(map(HarvardApparatusPumpIO.parse_response_line, response)) - # noinspection PyTypeChecker - return zip(*parsed_lines) # type: ignore - - @staticmethod - def check_for_errors(last_response_line, command_sent): - """Further response parsing, checks for error messages""" - if "Command error" in last_response_line: - raise DeviceError( - f"The command {command_sent} is invalid for pump {command_sent.target_pump_address}!" - f"[Reply: {last_response_line}]" - ) - if "Unknown command" in last_response_line: - raise DeviceError( - f"The command {command_sent} is unknown to pump {command_sent.target_pump_address}!" - f"[Maybe a withdraw command has been used with an infuse only pump?]" - f"[Reply: {last_response_line}]" - ) - if "Argument error" in last_response_line: - raise DeviceError( - f"The command {command_sent} to pump {command_sent.target_pump_address} has an " - f"invalid argument [Reply: {last_response_line}]" - ) - if "Out of range" in last_response_line: - raise DeviceError( - f"The command {command_sent} to pump {command_sent.target_pump_address} has an " - f"argument out of range! 
[Reply: {last_response_line}]" - ) - - def reset_buffer(self): - """Reset input buffer before reading from serial. In theory not necessary if all replies are consumed...""" - try: - self._serial.reset_input_buffer() - except aioserial.PortNotOpenError as port_not_open_error: - raise InvalidConfiguration from port_not_open_error - - async def write_and_read_reply( - self, command: Protocol11Command, return_parsed: bool = True - ) -> List[str]: - """Main PumpIO method. Sends a command to the pump, read the replies and returns it, optionally parsed. - - If unparsed reply is a List[str] with raw replies. - If parsed reply is a List[str] w/ reply body (address and prompt removed from each line)""" - self.reset_buffer() - await self._write(command) - response = await self._read_reply() - - if not response: - raise InvalidConfiguration( - f"No response received from pump, check pump address! " - f"(Currently set to {command.target_pump_address})" - ) - - # Parse reply - ( - pump_address, - return_status, - parsed_response, - ) = HarvardApparatusPumpIO.parse_response(response) - - # Ensures that all the replies came from the target pump (this should always be the case) - assert all(address == command.target_pump_address for address in pump_address) - - # Ensure no stall is present (this might happen, so let's raise an Exception w/ diagnostic text) - if PumpStatus.STALLED in return_status: - raise DeviceError("Pump stalled! 
Press display on pump to clear error :(") - - HarvardApparatusPumpIO.check_for_errors( - last_response_line=response[-1], command_sent=command - ) - - return parsed_response if return_parsed else response - - @property - def name(self) -> Optional[str]: - """This is used to provide a nice-looking default name to pumps based on their serial connection.""" - try: - return self._serial.name - except AttributeError: - return None - - def autodetermine_address(self) -> int: - """Autodetermine pump address based on response received.""" - self._serial.write("\r\n".encode("ascii")) - self._serial.readline() - prompt = self._serial.readline() - valid_status = [status.value for status in PumpStatus] - address = 0 if prompt[0:2].decode() in valid_status else int(prompt[0:2]) - logger.debug(f"Address autodetected as {address}") - return address - - -# noinspection SpellCheckingInspection -class Elite11Commands: - - """Holds the commands and arguments. Nota bene: Pump needs to be in Quick Start mode, which can be achieved from - the display interface""" - - # collected commands - # Methods can be programmed onto the pump and their execution remotely triggered. - # No support is provided to such feature as "explicit is better than implicit", i.e. the same result can be obtained - # with a sequence of Elite11Commands, with the advantage of ensuring code reproducibility (i.e. 
no specific - # configuration is needed on the pump side) - # - # Other methods not included: dim display, usb echo, footswitch, poll, version (verbose ver), input, - # output (if pin state high or low) and time commands - - EMPTY_MESSAGE = " " - VERSION = "VER" - - # RUN commands (no parameters, start movement in same direction/reverse direction/infuse/withdraw respectively) - RUN = "run" - REVERSE_RUN = "rrun" - INFUSE = "irun" - WITHDRAW = "wrun" - - # STOP movement - STOP = "stp" - - # Max applied force (in percent) - FORCE = "FORCE" - - # Syringe diameter - DIAMETER = "diameter" - - METRICS = "metrics" - CURRENT_MOVING_RATE = "crate" - - # RAMP Ramping commands (infuse or withdraw) - # setter: iramp [{start rate} {start units} {end rate} {end units} {ramp time in seconds}] - INFUSE_RAMP = "iramp" - GET_WITHDRAW_RAMP = "wramp" - - # RATE - # returns or set rate irate [max | min | lim | {rate} {rate units}] - INFUSE_RATE = "irate" - INFUSE_RATE_LIMITS = "irate lim" - WITHDRAW_RATE = "wrate" - WITHDRAW_RATE_LIMITS = "wrate lim" - - # VOLUME - SYRINGE_VOLUME = "svolume" - INFUSED_VOLUME = "ivolume" - WITHDRAWN_VOLUME = "wvolume" - TARGET_VOLUME = "tvolume" - - # CLEAR VOLUME - CLEAR_INFUSED_VOLUME = "civolume" - CLEAR_WITHDRAWN_VOLUME = "cwvolume" - CLEAR_INFUSED_WITHDRAWN_VOLUME = "cvolume" - CLEAR_TARGET_VOLUME = "ctvolume" - - -# noinspection PyProtectedMember -class Elite11InfuseOnly(Pump): - """ - Controls Harvard Apparatus Elite11 syringe pumps. - - The same protocol (Protocol11) can be used on other HA pumps, but is untested. - Several pumps can be daisy-chained on the same serial connection, if so address 0 must be the first one. - Read the manufacturer manual for more details. - """ - - # This class variable is used for daisy chains (i.e. multiple pumps on the same serial connection). Details below. - _io_instances: Set[HarvardApparatusPumpIO] = set() - # The mutable object (a set) as class variable creates a shared state across all the instances. 
- # When several pumps are daisy-chained on the same serial port, they need to all access the same Serial object, - # because access to the serial port is exclusive by definition (also locking there ensure thread safe operations). - # FYI it is a borg idiom https://www.oreilly.com/library/view/python-cookbook/0596001673/ch05s23.html - - metadata = { - "author": [ - { - "first_name": "Jakob", - "last_name": "Wolf", - "email": "jakob.wolf@mpikg.mpg.de", - "institution": "Max Planck Institute of Colloids and Interfaces", - "github_username": "JB-Wolf", - }, - { - "first_name": "Dario", - "last_name": "Cambie", - "email": "dario.cambie@mpikg.mpg.de", - "institution": "Max Planck Institute of Colloids and Interfaces", - "github_username": "dcambie", - }, - ], - "stability": "beta", - "supported": True, - } - - def __init__( - self, - pump_io: HarvardApparatusPumpIO, - diameter: str, - syringe_volume: str, - address: Optional[int] = None, - name: Optional[str] = None, - ): - """Query model and version number of firmware to check pump is - OK. Responds with a load of stuff, but the last three characters - are the prompt XXY, where XX is the address and Y is pump status. - The status can be one of the three: [":", ">" "<"] respectively - when stopped, running forwards (pumping), or backwards (withdrawing). - The prompt is used to confirm that the address is correct. - This acts as a check to see that the pump is connected and working.""" - - self.name = f"Pump {pump_io.name}:{address}" if name is None else name - super().__init__(name) - - self.pump_io = pump_io - Elite11InfuseOnly._io_instances.add(self.pump_io) # See above for details. - - self.address: int = address if address is not None else None # type: ignore - self._version = None # Set in initialize - - # diameter and syringe volume - these will be set in initialize() - check values here though. 
- if diameter is None: - raise InvalidConfiguration( - "Please provide the syringe diameter explicitly!\nThis prevents errors :)" - ) - self._diameter = diameter - - if syringe_volume is None: - raise InvalidConfiguration( - "Please provide the syringe volume explicitly!\nThis prevents errors :)" - ) - self._syringe_volume = syringe_volume - - @classmethod - def from_config( - cls, - port: str, - diameter: str, - syringe_volume: str, - address: int = None, - name: str = None, - **serial_kwargs, - ): - """Programmatic instantiation from configuration - - Many pump can be present on the same serial port with different addresses. - This shared list of PumpIO objects allow shared state in a borg-inspired way, avoiding singletons - This is only relevant to programmatic instantiation, i.e. when from_config() is called per each pump from a - config file, as it is the case in the HTTP server. - Pump_IO() manually instantiated are not accounted for. - """ - pumpio = None - for obj in Elite11InfuseOnly._io_instances: - if obj._serial.port == port: - pumpio = obj - break - - # If not existing serial object are available for the port provided, create a new one - if pumpio is None: - pumpio = HarvardApparatusPumpIO(port, **serial_kwargs) - - return cls( - pumpio, - address=address, - name=name, - diameter=diameter, - syringe_volume=syringe_volume, - ) - - async def initialize(self): - """Ensure a valid connection with the pump has been established and sets parameters.""" - # Autodetect address if none provided - print(f"THE ASDDRESS is {self.address=}") - if self.address is None: - self.address = self.pump_io.autodetermine_address() - - try: - await self.stop() - except IndexError as index_e: - raise InvalidConfiguration( - f"Check pump address! 
Currently {self.address=}" - ) from index_e - - await self.set_syringe_diameter(self._diameter) - await self.set_syringe_volume(self._syringe_volume) - - logger.info( - f"Connected to pump '{self.name}' on port {self.pump_io.name}:{self.address}!" - ) - - # makes sure that a 'clean' pump is initialized. - self._version = _parse_version(await self.version()) - - if self._version[0] >= 3: - await self.clear_volumes() - - async def _send_command_and_read_reply( - self, command: str, parameter="", parse=True - ) -> str: - """Sends a command based on its template and return the corresponding reply as str""" - - cmd = Protocol11Command( - command_string=command, - target_pump_address=self.address, - command_argument=parameter, - ) - reply = await self.pump_io.write_and_read_reply(cmd, return_parsed=parse) - return reply[0] - - async def _send_command_and_read_reply_multiline( - self, command: str, parameter="", parse=True - ) -> List[str]: - """Sends a command based on its template and return the corresponding reply as str""" - - cmd = Protocol11Command( - command_string=command, - target_pump_address=self.address, - command_argument=parameter, - ) - return await self.pump_io.write_and_read_reply(cmd, return_parsed=parse) - - async def _bound_rate_to_pump_limits(self, rate: str) -> float: - """Bound the rate provided to pump's limit. These are function of the syringe diameter. 
- - NOTE: Infusion and withdraw limits are equal!""" - # Get current pump limits (those are function of the syringe diameter) - limits_raw = await self._send_command_and_read_reply( - Elite11Commands.INFUSE_RATE_LIMITS - ) - - # Lower limit usually expressed in nl/min so unit-aware quantities are needed - lower_limit, upper_limit = map(flowchem_ureg, limits_raw.split(" to ")) - - # Also add units to the provided rate - set_rate = flowchem_ureg(rate) - - # Bound rate to acceptance range - if set_rate < lower_limit: - warnings.warn( - f"The requested rate {rate} is lower than the minimum possible ({lower_limit})!" - f"Setting rate to {lower_limit} instead!" - ) - set_rate = lower_limit - - if set_rate > upper_limit: - warnings.warn( - f"The requested rate {rate} is higher than the maximum possible ({upper_limit})!" - f"Setting rate to {upper_limit} instead!" - ) - set_rate = upper_limit - - return set_rate.to("ml/min").magnitude - - async def version(self) -> str: - """Returns the current firmware version reported by the pump""" - return await self._send_command_and_read_reply( - Elite11Commands.VERSION - ) # '11 ELITE I/W Single 3.0.4 - - async def get_status(self) -> PumpStatus: - """Empty message to trigger a new reply and evaluate connection and pump current status via reply prompt""" - status = await self._send_command_and_read_reply( - Elite11Commands.EMPTY_MESSAGE, parse=False - ) - return PumpStatus(status[2:3]) - - async def is_moving(self) -> bool: - """Evaluate prompt for current status, i.e. moving or not""" - prompt = await self.get_status() - return prompt in (PumpStatus.INFUSING, PumpStatus.WITHDRAWING) - - async def is_idle(self) -> bool: - """Returns true if idle.""" - return not await self.is_moving() - - async def get_syringe_volume(self) -> str: - """Returns the syringe volume as str w/ units.""" - return await self._send_command_and_read_reply( - Elite11Commands.SYRINGE_VOLUME - ) # e.g. 
'100 ml' - - async def set_syringe_volume(self, volume_w_units: str = None): - """Sets the syringe volume in ml. - - :param volume_w_units: the volume of the syringe. - """ - volume = flowchem_ureg(volume_w_units) - await self._send_command_and_read_reply( - Elite11Commands.SYRINGE_VOLUME, parameter=f"{volume.m_as('ml'):.15f} m" - ) - - async def run(self): - """Activates pump, runs in the previously set direction.""" - - if await self.is_moving(): - warnings.warn("Cannot start pump: already moving!") - return - - await self._send_command_and_read_reply(Elite11Commands.RUN) - logger.info("Pump movement started! (direction unspecified)") - - async def infuse_run(self): - """Activates pump, runs in infuse mode.""" - if await self.is_moving(): - warnings.warn("Cannot start pump: already moving!") - return - - await self._send_command_and_read_reply(Elite11Commands.INFUSE) - logger.info("Pump movement started in infuse direction!") - - async def stop(self): - """stops pump""" - await self._send_command_and_read_reply(Elite11Commands.STOP) - logger.info("Pump stopped") - - async def wait_until_idle(self): - """Wait until the pump is no more moving""" - while await self.is_moving(): - await asyncio.sleep(0.05) - - async def get_infusion_rate(self) -> str: - """Returns the infusion rate as str w/ units""" - return await self._send_command_and_read_reply( - Elite11Commands.INFUSE_RATE - ) # e.g. 
'0.2 ml/min' - - async def set_infusion_rate(self, rate: str): - """Sets the infusion rate""" - set_rate = await self._bound_rate_to_pump_limits(rate=rate) - await self._send_command_and_read_reply( - Elite11Commands.INFUSE_RATE, parameter=f"{set_rate:.10f} m/m" - ) - - async def get_infused_volume(self) -> str: - """Return infused volume as string w/ units""" - return await self._send_command_and_read_reply(Elite11Commands.INFUSED_VOLUME) - - async def clear_infused_volume(self): - """Reset the pump infused volume counter to 0""" - if self._version[0] < 3: - warnings.warn("Command not supported by pump, update firmware!") - return - await self._send_command_and_read_reply(Elite11Commands.CLEAR_INFUSED_VOLUME) - - async def clear_volumes(self): - """Set all pump volumes to 0""" - await self.set_target_volume("0 ml") - await self.clear_infused_volume() - - async def get_force(self): - """ - Pump force, in percentage. - Manufacturer suggested values are: - stainless steel: 100% - plastic syringes: 50% if volume <= 5 ml else 100% - glass/glass: 30% if volume <= 20 ml else 50% - glass/plastic: 30% if volume <= 250 ul, 50% if volume <= 5ml else 100% - """ - percent = await self._send_command_and_read_reply(Elite11Commands.FORCE) - return int(percent[:-1]) - - async def set_force(self, force_percent: float): - """Sets the pump force, see `Elite11.get_force()` for suggested values.""" - await self._send_command_and_read_reply( - Elite11Commands.FORCE, parameter=str(int(force_percent)) - ) - - async def get_syringe_diameter(self) -> str: - """Syringe diameter in mm. This can be set in the interval 1 mm to 33 mm""" - return await self._send_command_and_read_reply(Elite11Commands.DIAMETER) - - async def set_syringe_diameter(self, diameter_w_units: str): - """ - Set syringe diameter. 
This can be set in the interval 1 mm to 33 mm - """ - diameter = flowchem_ureg(diameter_w_units) - if not 1 * flowchem_ureg.mm <= diameter <= 33 * flowchem_ureg.mm: - warnings.warn( - f"Diameter provided ({diameter}) is not valid, ignored! [Accepted range: 1-33 mm]" - ) - return - - await self._send_command_and_read_reply( - Elite11Commands.DIAMETER, parameter=f"{diameter.to('mm').magnitude:.4f} mm" - ) - - async def get_current_flowrate(self) -> str: - """ - If pump moves, this returns the current moving rate. If not running empty string. - :return: current moving rate - """ - if await self.is_moving(): - return await self._send_command_and_read_reply( - Elite11Commands.CURRENT_MOVING_RATE - ) - warnings.warn("Pump is not moving, cannot provide moving rate!") - return "" - - async def get_target_volume(self) -> str: - """Returns target volume or a falsy empty string if not set.""" - - target_vol = await self._send_command_and_read_reply( - Elite11Commands.TARGET_VOLUME - ) - if "Target volume not set" in target_vol: - return "" - return target_vol - - async def set_target_volume(self, volume: str): - """ - Sets target volume in ml. If the volume is set to 0, the target is cleared. - """ - target_volume = flowchem_ureg(volume) - if target_volume.magnitude == 0: - await self._send_command_and_read_reply(Elite11Commands.CLEAR_TARGET_VOLUME) - else: - set_vol = await self._send_command_and_read_reply( - Elite11Commands.TARGET_VOLUME, - parameter=f"{target_volume.m_as('ml')} m", - ) - if "Argument error" in set_vol: - warnings.warn( - f"Cannot set target volume of {target_volume} with a " - f"{self.get_syringe_volume()} syringe!" - ) - - async def pump_info(self) -> PumpInfo: - """Returns much info - - e.g. 
- ('Pump type Pump 11', - 'Pump type string 11 ELITE I/W Single', - 'Display type Sharp', - 'Steps per rev 400', - 'Gear ratio 1:1', - 'Pulley ratio 2.4:1', - 'Lead screw 24 threads per inch', - 'Microstepping 16 microsteps per step', - 'Low speed limit 27 seconds', - 'High speed limit 26 microseconds', - 'Motor polarity Reverse', - 'Min syringe size 0.1 mm', - 'Max syringe size 33 mm', - 'Min raw force % 20%', - 'Max raw force % 80%', - 'Encoder 100 lines', - 'Direction Infuse/withdraw', - 'Programmable Yes', - 'Limit switches No', - 'Command set None', '') - """ - parsed_multiline_response = await self._send_command_and_read_reply_multiline( - Elite11Commands.METRICS - ) - return PumpInfo.parse_pumpstring(parsed_multiline_response) - - def get_router(self): - """Creates an APIRouter for this object.""" - from fastapi import APIRouter - - router = APIRouter() - router.add_api_route( - "/parameters/syringe-volume", self.get_syringe_volume, methods=["GET"] - ) - router.add_api_route( - "/parameters/syringe-volume", self.set_syringe_volume, methods=["PUT"] - ) - router.add_api_route("/parameters/force", self.get_force, methods=["PUT"]) - router.add_api_route("/parameters/force", self.set_force, methods=["PUT"]) - router.add_api_route("/run", self.run, methods=["PUT"]) - router.add_api_route("/run/infuse", self.infuse_run, methods=["PUT"]) - router.add_api_route("/stop", self.stop, methods=["PUT"]) - router.add_api_route("/infusion-rate", self.get_infusion_rate, methods=["GET"]) - router.add_api_route("/infusion-rate", self.set_infusion_rate, methods=["PUT"]) - router.add_api_route("/info/version", self.version, methods=["GET"]) - router.add_api_route( - "/info/status", self.get_status, methods=["GET"], response_model=PumpStatus - ) - router.add_api_route("/info/is-moving", self.is_moving, methods=["GET"]) - router.add_api_route( - "/info/current-flowrate", self.get_current_flowrate, methods=["GET"] - ) - router.add_api_route( - "/info/infused-volume", 
self.get_infused_volume, methods=["GET"] - ) - router.add_api_route( - "/info/reset-infused-volume", self.clear_infused_volume, methods=["PUT"] - ) - router.add_api_route("/info/reset-all", self.clear_volumes, methods=["GET"]) - - return router - - -# noinspection PyProtectedMember -class Elite11InfuseWithdraw(Elite11InfuseOnly): - """ - Controls Harvard Apparatus Elite11 syringe pumps - INFUSE AND WITHDRAW. - """ - - def __init__( - self, - pump_io: HarvardApparatusPumpIO, - diameter: str, - syringe_volume: str, - address: Optional[int] = None, - name: Optional[str] = None, - ): - """Query model and version number of firmware to check pump is - OK. Responds with a load of stuff, but the last three characters - are the prompt XXY, where XX is the address and Y is pump status. - The status can be one of the three: [":", ">" "<"] respectively - when stopped, running forwards (pumping), or backwards (withdrawing). - The prompt is used to confirm that the address is correct. - This acts as a check to see that the pump is connected and working.""" - - super().__init__(pump_io, diameter, syringe_volume, address, name) - - async def initialize(self): - """Ensure a valid connection with the pump has been established and sets parameters.""" - await super().initialize() - - # Additionally, ensure pump support withdrawing upon initialization - pump_info = await self.pump_info() - assert not pump_info.infuse_only - - async def inverse_run(self): - """Activates pump, runs opposite to previously set direction.""" - if await self.is_moving(): - warnings.warn("Cannot start pump: already moving!") - return - - await self._send_command_and_read_reply(Elite11Commands.REVERSE_RUN) - logger.info("Pump movement started in reverse direction!") - - async def withdraw_run(self): - """Activates pump, runs in withdraw mode.""" - if await self.is_moving(): - warnings.warn("Cannot start pump: already moving!") - return - - await self._send_command_and_read_reply(Elite11Commands.WITHDRAW) - - 
logger.info("Pump movement started in withdraw direction!") - - async def get_withdraw_rate(self) -> str: - """Returns the infusion rate as a string w/ units""" - return await self._send_command_and_read_reply(Elite11Commands.WITHDRAW_RATE) - - async def set_withdraw_rate(self, rate: str): - """Sets the infusion rate""" - set_rate = await self._bound_rate_to_pump_limits(rate=rate) - await self._send_command_and_read_reply( - Elite11Commands.WITHDRAW_RATE, parameter=f"{set_rate} m/m" - ) - - async def get_withdrawn_volume(self) -> str: - """Returns the withdrawn volume from the last clear_*_volume() command, according to the pump""" - return await self._send_command_and_read_reply(Elite11Commands.WITHDRAWN_VOLUME) - - async def clear_withdrawn_volume(self): - """Reset the pump withdrawn volume counter to 0""" - await self._send_command_and_read_reply(Elite11Commands.CLEAR_WITHDRAWN_VOLUME) - - async def clear_volumes(self): - """Set all pump volumes to 0""" - await self.set_target_volume("0 ml") - await self.clear_infused_volume() - await self.clear_withdrawn_volume() - - def get_router(self): - router = super().get_router() - # Creates an APIRouter for this object. 
- router.add_api_route("/run/inverse", self.inverse_run, methods=["PUT"]) - router.add_api_route("/run/withdraw", self.withdraw_run, methods=["PUT"]) - router.add_api_route("/withdraw-rate", self.get_withdraw_rate, methods=["GET"]) - router.add_api_route("/withdraw-rate", self.set_withdraw_rate, methods=["PUT"]) - router.add_api_route( - "/info/withdrawn-volume", self.get_withdrawn_volume, methods=["GET"] - ) - router.add_api_route( - "/info/reset-withdrawn", self.clear_withdrawn_volume, methods=["PUT"] - ) - - return router - - async def __aenter__(self): - await self.initialize() - return self - - async def __aexit__(self, exc_type, exc_value, traceback): - await self.stop() - - async def _update(self): - """Actuates flow rate changes.""" - if self.rate == 0: - await self.stop() - else: - await self.set_infusion_rate(str(self.rate)) - await self.infuse_run() - - -if __name__ == "__main__": - pump = Elite11InfuseOnly.from_config( - port="COM4", syringe_volume="10 ml", diameter="10 mm" - ) - - async def main(): - """Test function""" - await pump.initialize() - # assert await pump.get_infused_volume() == 0 - await pump.set_syringe_diameter("30 mm") - await pump.set_infusion_rate("0.1 ml/min") - await pump.set_target_volume("0.05 ml") - await pump.infuse_run() - await asyncio.sleep(2) - await pump.pump_info() - - asyncio.run(main()) diff --git a/flowchem/components/devices/Harvard_Apparatus/__init__.py b/flowchem/components/devices/Harvard_Apparatus/__init__.py deleted file mode 100644 index 800fdbf0..00000000 --- a/flowchem/components/devices/Harvard_Apparatus/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -""" Harvard Apparatus devices """ -from .HA_elite11 import Elite11InfuseOnly, Elite11InfuseWithdraw, HarvardApparatusPumpIO - -__all__ = ["Elite11InfuseOnly", "Elite11InfuseWithdraw", "HarvardApparatusPumpIO"] diff --git a/flowchem/components/devices/Huber/__init__.py b/flowchem/components/devices/Huber/__init__.py deleted file mode 100644 index 6d99cc2e..00000000 --- 
a/flowchem/components/devices/Huber/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -""" Huber devices """ -from .huberchiller import HuberChiller, PBCommand - -__all__ = ["HuberChiller", "PBCommand"] diff --git a/flowchem/components/devices/Huber/huberchiller.py b/flowchem/components/devices/Huber/huberchiller.py deleted file mode 100644 index c2f8beb0..00000000 --- a/flowchem/components/devices/Huber/huberchiller.py +++ /dev/null @@ -1,552 +0,0 @@ -""" -Driver for Huber chillers. -""" -import asyncio -import warnings -from dataclasses import dataclass -from typing import Dict, List, Optional - -import aioserial -import pint -from loguru import logger - -from flowchem.components.properties import TempControl -from flowchem.exceptions import DeviceError, InvalidConfiguration -from flowchem.units import flowchem_ureg - - -@dataclass -class PBCommand: - """Class representing a PBCommand""" - - command: str - - def to_chiller(self) -> bytes: - """Validate and encode to bytes array to be transmitted.""" - self.validate() - return self.command.encode("ascii") - - def validate(self): - """Check command structure to be compliant with PB format""" - if len(self.command) == 8: - self.command += "\r\n" - # 10 characters - assert len(self.command) == 10 - # Starts with { - assert self.command[0] == "{" - # M for master (commands) S for slave (replies). - assert self.command[1] in ("M", "S") - # Address, i.e. the desired function. Hex encoded. 
- assert 0 <= int(self.command[2:4], 16) < 256 - # Value - assert self.command[4:8] == "****" or 0 <= int(self.command[4:8], 16) <= 65536 - # EOL - assert self.command[8:10] == "\r\n" - - @property - def data(self) -> str: - """Data portion of PBCommand.""" - return self.command[4:8] - - def parse_temperature(self) -> str: - """Parse a device temp from hex string to celsius float [two's complement 16-bit signed hex, see manual]""" - temp = ( - (int(self.data, 16) - 65536) / 100 - if int(self.data, 16) > 32767 - else (int(self.data, 16)) / 100 - ) - # -151 used for invalid temperatures - if temp == -151: - return "" - return str(flowchem_ureg(f"{temp} °C")) - - def parse_integer(self) -> int: - """Parse a device reply from hexadecimal string to base 10 integers.""" - return int(self.data, 16) - - def parse_rpm(self) -> str: - """Parse a device reply from hexadecimal string to rpm.""" - return str(flowchem_ureg(f"{self.parse_integer()} rpm")) - - def parse_bits(self) -> List[bool]: - """Parse a device reply from hexadecimal string to 16 constituting bits.""" - bits = f"{int(self.data, 16):016b}" - return [bool(int(x)) for x in bits] - - def parse_boolean(self): - """Parse a device reply from hexadecimal string (0x0000 or 0x0001) to boolean.""" - return self.parse_integer() == 1 - - def parse_status1(self) -> Dict[str, bool]: - """Parse response to status1 command and returns dict""" - bits = self.parse_bits() - return dict( - temp_ctl_is_process=bits[0], - circulation_active=bits[1], - refrigerator_on=bits[2], - temp_is_process=bits[3], - circulating_pump=bits[4], - cooling_power_available=bits[5], - tkeylock=bits[6], - is_pid_auto=bits[7], - error=bits[8], - warning=bits[9], - int_temp_mode=bits[10], - ext_temp_mode=bits[11], - dv_e_grade=bits[12], - power_failure=bits[13], - freeze_protection=bits[14], - ) - - def parse_status2(self) -> Dict[str, bool]: - """Parse response to status2 command and returns dict. 
See manufacturer docs for more info""" - bits = self.parse_bits() - return dict( - controller_is_external=bits[0], - drip_tray_full=bits[5], - venting_active=bits[7], - venting_successful=bits[8], - venting_monitored=bits[9], - ) - - -class HuberChiller(TempControl): - """ - Control class for Huber chillers. - """ - - def __init__(self, aio: aioserial.AioSerial, name=None): - super().__init__(name) - self._serial = aio - - @classmethod - def from_config(cls, port, name=None, **serial_kwargs): - """ - Create instance from config dict. Used by server to initialize obj from config. - - Only required parameter is 'port'. Optional 'loop' + others (see AioSerial()) - """ - try: - serial_object = aioserial.AioSerial(port, **serial_kwargs) - except aioserial.SerialException as serial_exception: - raise InvalidConfiguration( - f"Cannot connect to the HuberChiller on the port <{port}>" - ) from serial_exception - - return cls(serial_object, name) - - async def initialize(self): - """Ensure the connection w/ device is working.""" - serial_num = await self.serial_number() - if serial_num == 0: - raise DeviceError("No reply received from Huber Chiller!") - logger.debug(f"Connected with Huber Chiller S/N {serial_num}") - - async def send_command_and_read_reply(self, command: str) -> str: - """Sends a command to the chiller and reads the reply. - - :param command: string to be transmitted - :return: reply received - """ - # Send command. Using PBCommand ensure command validation, see PBCommand.to_chiller() - pb_command = PBCommand(command.upper()) - await self._serial.write_async(pb_command.to_chiller()) - logger.debug(f"Command {command[0:8]} sent to chiller!") - - # Receive reply and return it after decoding - try: - reply = await asyncio.wait_for(self._serial.readline_async(), 1) - except asyncio.TimeoutError: - warnings.warn( - "No reply received. Likely the command is not supported by the hardware!" 
- ) - logger.error("No reply received") - return command.replace("M", "S").replace( - "****", "0000" - ) # Fake reply to keep going - - logger.debug(f"Reply {reply[0:8].decode('ascii')} received") - return reply.decode("ascii") - - async def get_temperature_setpoint(self) -> str: - """Returns the set point used by temperature controller. Internal if not probe, otherwise process temp.""" - reply = await self.send_command_and_read_reply("{M00****") - return PBCommand(reply).parse_temperature() - - async def set_temperature_setpoint(self, temp: str): - """Set the set point used by temperature controller. Internal if not probe, otherwise process temp.""" - min_t = flowchem_ureg(await self.min_setpoint()) - max_t = flowchem_ureg(await self.max_setpoint()) - temp = flowchem_ureg(temp) - - if temp > max_t: - temp = max_t - warnings.warn( - f"Temperature requested {temp} is out of range [{min_t} - {max_t}] for HuberChiller {self}!" - f"Setting to {max_t} instead." - ) - if temp < min_t: - temp = min_t - warnings.warn( - f"Temperature requested {temp} is out of range [{min_t} - {max_t}] for HuberChiller {self}!" - f"Setting to {min_t} instead." - ) - - await self.send_command_and_read_reply("{M00" + self._temp_to_string(temp)) - - async def internal_temperature(self) -> str: - """Returns internal temp (bath temperature).""" - reply = await self.send_command_and_read_reply("{M01****") - return PBCommand(reply).parse_temperature() - - async def process_temperature(self) -> str: - """Returns the current process temperature. 
If not T probe, the device returns -151, here parsed as None.""" - reply = await self.send_command_and_read_reply("{M3A****") - return PBCommand(reply).parse_temperature() - - async def return_temperature(self) -> str: - """Returns the temp of the thermal fluid flowing back to the device.""" - reply = await self.send_command_and_read_reply("{M02****") - return PBCommand(reply).parse_temperature() - - async def pump_pressure(self) -> str: - """Return pump pressure in mbar (note that you probably want barg, i.e. to remove 1 bar)""" - reply = await self.send_command_and_read_reply("{M03****") - pressure = PBCommand(reply).parse_integer() - return str(flowchem_ureg(f"{pressure} mbar")) - - async def current_power(self) -> str: - """Returns the current power in Watts (negative for cooling, positive for heating).""" - reply = await self.send_command_and_read_reply("{M04****") - power = PBCommand(reply).parse_integer() - return str(flowchem_ureg(f"{power} watt")) - - async def status(self) -> Dict[str, bool]: - """Returns the info contained in vstatus1 as dict.""" - reply = await self.send_command_and_read_reply("{M0A****") - return PBCommand(reply).parse_status1() - - async def status2(self) -> Dict[str, bool]: - """Returns the info contained in vstatus2 as dict.""" - reply = await self.send_command_and_read_reply("{M3C****") - return PBCommand(reply).parse_status2() - - async def is_temperature_control_active(self) -> bool: - """Returns whether temperature control is active or not.""" - reply = await self.send_command_and_read_reply("{M14****") - return PBCommand(reply).parse_boolean() - - async def start_temperature_control(self): - """Starts temperature control, i.e. start operation.""" - await self.send_command_and_read_reply("{M140001") - - async def stop_temperature_control(self): - """Stops temperature control, i.e. 
stop operation.""" - await self.send_command_and_read_reply("{M140000") - - async def is_circulation_active(self) -> bool: - """Returns whether temperature control is active or not.""" - reply = await self.send_command_and_read_reply("{M16****") - return PBCommand(reply).parse_boolean() - - async def start_circulation(self): - """Starts circulation pump.""" - await self.send_command_and_read_reply("{M160001") - - async def stop_circulation(self): - """Stops circulation pump.""" - await self.send_command_and_read_reply("{M160000") - - async def pump_speed(self) -> str: - """Returns current circulation pump speed (in rpm).""" - reply = await self.send_command_and_read_reply("{M26****") - return PBCommand(reply).parse_rpm() - - async def pump_speed_setpoint(self) -> str: - """Returns the set point of the circulation pump speed (in rpm).""" - reply = await self.send_command_and_read_reply("{M48****") - return PBCommand(reply).parse_rpm() - - async def set_pump_speed(self, rpm: str): - """Set the pump speed, in rpm. 
See device display for range.""" - parsed_rpm = flowchem_ureg(rpm) - await self.send_command_and_read_reply( - "{M48" + self._int_to_string(parsed_rpm.m_as("rpm")) - ) - - async def cooling_water_temp(self) -> str: - """Returns the cooling water inlet temperature (in Celsius).""" - reply = await self.send_command_and_read_reply("{M2C****") - return PBCommand(reply).parse_temperature() - - async def cooling_water_pressure(self) -> Optional[float]: - """Returns the cooling water inlet pressure (in mbar).""" - reply = await self.send_command_and_read_reply("{M2D****") - if pressure := PBCommand(reply).parse_integer() == 64536: - return None - return pressure - - async def cooling_water_temp_outflow(self) -> str: - """Returns the cooling water outlet temperature (in Celsius).""" - reply = await self.send_command_and_read_reply("{M4C****") - return PBCommand(reply).parse_temperature() - - async def min_setpoint(self) -> str: - """Returns the minimum accepted value for the temperature setpoint (in Celsius).""" - reply = await self.send_command_and_read_reply("{M30****") - return PBCommand(reply).parse_temperature() - - async def max_setpoint(self) -> str: - """Returns the maximum accepted value for the temperature setpoint (in Celsius).""" - reply = await self.send_command_and_read_reply("{M31****") - return PBCommand(reply).parse_temperature() - - async def alarm_max_internal_temp(self) -> str: - """Returns the max internal temp before the alarm is triggered and a fault generated.""" - reply = await self.send_command_and_read_reply("{M51****") - return PBCommand(reply).parse_temperature() - - async def set_alarm_max_internal_temp(self, temp: str): - """Sets the max internal temp before the alarm is triggered and a fault generated.""" - temp = flowchem_ureg(temp) - await self.send_command_and_read_reply("{M51" + self._temp_to_string(temp)) - - async def alarm_min_internal_temp(self) -> str: - """Returns the min internal temp before the alarm is triggered and a fault 
generated.""" - reply = await self.send_command_and_read_reply("{M52****") - return PBCommand(reply).parse_temperature() - - async def set_alarm_min_internal_temp(self, temp: str): - """Sets the min internal temp before the alarm is triggered and a fault generated.""" - temp = flowchem_ureg(temp) - await self.send_command_and_read_reply("{M52" + self._temp_to_string(temp)) - - async def alarm_max_process_temp(self) -> str: - """Returns the max process temp before the alarm is triggered and a fault generated.""" - reply = await self.send_command_and_read_reply("{M53****") - return PBCommand(reply).parse_temperature() - - async def set_alarm_max_process_temp(self, temp: str): - """Sets the max process temp before the alarm is triggered and a fault generated.""" - temp = flowchem_ureg(temp) - await self.send_command_and_read_reply("{M53" + self._temp_to_string(temp)) - - async def alarm_min_process_temp(self) -> str: - """Returns the min process temp before the alarm is triggered and a fault generated.""" - reply = await self.send_command_and_read_reply("{M54****") - return PBCommand(reply).parse_temperature() - - async def set_alarm_min_process_temp(self, temp: str): - """Sets the min process temp before the alarm is triggered and a fault generated.""" - temp = flowchem_ureg(temp) - await self.send_command_and_read_reply("{M54" + self._temp_to_string(temp)) - - async def set_ramp_duration(self, ramp_time: str): - """Sets the duration (in seconds) of a ramp to the temperature set by a later call to ramp_to_temperature.""" - parsed_time = flowchem_ureg(ramp_time) - await self.send_command_and_read_reply( - "{M59" + self._int_to_string(parsed_time.m_as("s")) - ) - - async def ramp_to_temperature(self, temperature: str): - """Sets the duration (in seconds) of a ramp to the temperature set by a later call to start_ramp().""" - temp = flowchem_ureg(temperature) - await self.send_command_and_read_reply("{M5A" + self._temp_to_string(temp)) - - async def is_venting(self) -> 
bool: - """Whether the chiller is venting or not.""" - reply = await self.send_command_and_read_reply("{M6F****") - return PBCommand(reply).parse_boolean() - - async def start_venting(self): - """Starts venting. ONLY USE DURING SETUP! READ THE MANUAL!""" - await self.send_command_and_read_reply("{M6F0001") - - async def stop_venting(self): - """Stops venting.""" - await self.send_command_and_read_reply("{M6F0000") - - async def is_draining(self) -> bool: - """Whether the chiller is venting or not.""" - reply = await self.send_command_and_read_reply("{M70****") - return PBCommand(reply).parse_boolean() - - async def start_draining(self): - """Starts venting. ONLY USE DURING SHUT DOWN! READ THE MANUAL!""" - await self.send_command_and_read_reply("{M700001") - - async def stop_draining(self): - """Stops venting.""" - await self.send_command_and_read_reply("{M700000") - - async def serial_number(self) -> int: - """GGet serial number.""" - serial1 = await self.send_command_and_read_reply("{M1B****") - serial2 = await self.send_command_and_read_reply("{M1C****") - pb1, pb2 = PBCommand(serial1), PBCommand(serial2) - return int(pb1.data + pb2.data, 16) - - async def wait_for_temperature_simple(self) -> None: - """Returns as soon as the target temperature range has been reached, or timeout.""" - raise NotImplementedError - - async def wait_for_temperature_stable(self) -> None: - """Returns when the target temperature range has been maintained for X seconds, or timeout.""" - raise NotImplementedError - - @staticmethod - def _temp_to_string(temp: pint.Quantity) -> str: - """From temperature to string for command. f^-1 of PCommand.parse_temperature.""" - min_temp = flowchem_ureg("-151 °C") - max_temp = flowchem_ureg("327 °C") - if not isinstance(temp, pint.Quantity): - logger.warning( - f"Implicit assumption that the temperature provided [{temp}] is in Celsius. Add units pls!" 
- ) - temp = flowchem_ureg(f"{temp} °C") - assert min_temp <= temp <= max_temp - # Hexadecimal two's complement - return f"{int(temp.m_as('°C') * 100) & 65535:04X}" - - @staticmethod - def _int_to_string(number: int) -> str: - """From temperature to string for command. f^-1 of PCommand.parse_integer.""" - return f"{number:04X}" - - async def __aenter__(self): - await self.initialize() - await self.set_temperature_setpoint(temp="20 °C") - await self.set_temperature_setpoint(temp="20 °C") - await self.start_temperature_control() - await self.start_circulation() - return self - - async def __aexit__(self, exc_type, exc_value, traceback): - await self.set_temperature_setpoint("20 °C") - - # Wait until close to room temperature before turning off chiller - while flowchem_ureg.parse_expression( - await self.process_temperature() - ) > flowchem_ureg.parse_expression("40 °C"): - await asyncio.sleep(5) - - # Actually turn off chiller - await self.stop_circulation() - await self.stop_temperature_control() - - async def _update(self): - await self.set_temperature_setpoint(self.temp) - - def get_router(self): - """Creates an APIRouter for this HuberChiller instance.""" - # Local import to allow direct use of HuberChiller w/o fastapi installed - from fastapi import APIRouter - - router = APIRouter() - router.add_api_route( - "/temperature/set-point", self.get_temperature_setpoint, methods=["GET"] - ) - router.add_api_route( - "/temperature/set-point", self.set_temperature_setpoint, methods=["PUT"] - ) - router.add_api_route( - "/temperature/set-point/min", self.min_setpoint, methods=["GET"] - ) - router.add_api_route( - "/temperature/set-point/max", self.max_setpoint, methods=["GET"] - ) - router.add_api_route( - "/temperature/process", self.process_temperature, methods=["GET"] - ) - router.add_api_route( - "/temperature/internal", self.internal_temperature, methods=["GET"] - ) - router.add_api_route( - "/temperature/return", self.return_temperature, methods=["GET"] - ) - 
router.add_api_route("/power-exchanged", self.current_power, methods=["GET"]) - router.add_api_route("/status", self.status, methods=["GET"]) - router.add_api_route("/status2", self.status2, methods=["GET"]) - router.add_api_route("/pump/speed", self.pump_speed, methods=["GET"]) - router.add_api_route( - "/temperature-control", self.is_temperature_control_active, methods=["GET"] - ) - router.add_api_route( - "/temperature-control/start", - self.start_temperature_control, - methods=["GET"], - ) - router.add_api_route( - "/temperature-control/stop", self.stop_temperature_control, methods=["GET"] - ) - router.add_api_route( - "/pump/circulation", self.is_circulation_active, methods=["GET"] - ) - router.add_api_route( - "/pump/circulation/start", self.start_circulation, methods=["GET"] - ) - router.add_api_route( - "/pump/circulation/stop", self.stop_circulation, methods=["GET"] - ) - router.add_api_route("/pump/pressure", self.pump_pressure, methods=["GET"]) - router.add_api_route("/pump/speed", self.pump_speed, methods=["GET"]) - router.add_api_route( - "/pump/speed/set-point", self.pump_speed_setpoint, methods=["GET"] - ) - router.add_api_route( - "/pump/speed/set-point", self.set_pump_speed, methods=["PUT"] - ) - router.add_api_route( - "/cooling-water/temperature-inlet", self.cooling_water_temp, methods=["GET"] - ) - router.add_api_route( - "/cooling-water/temperature-outlet", - self.cooling_water_temp_outflow, - methods=["GET"], - ) - router.add_api_route( - "/cooling-water/pressure", self.cooling_water_pressure, methods=["GET"] - ) - router.add_api_route( - "/alarm/process/min-temp", self.alarm_min_process_temp, methods=["GET"] - ) - router.add_api_route( - "/alarm/process/max-temp", self.alarm_max_process_temp, methods=["GET"] - ) - router.add_api_route( - "/alarm/process/min-temp", self.set_alarm_min_process_temp, methods=["PUT"] - ) - router.add_api_route( - "/alarm/process/max-temp", self.set_alarm_min_process_temp, methods=["PUT"] - ) - 
router.add_api_route( - "/alarm/internal/min-temp", self.alarm_min_internal_temp, methods=["GET"] - ) - router.add_api_route( - "/alarm/internal/max-temp", self.alarm_max_internal_temp, methods=["GET"] - ) - router.add_api_route( - "/alarm/internal/min-temp", - self.set_alarm_min_internal_temp, - methods=["PUT"], - ) - router.add_api_route( - "/alarm/internal/max-temp", - self.set_alarm_min_internal_temp, - methods=["PUT"], - ) - router.add_api_route("/venting/is_venting", self.is_venting, methods=["GET"]) - router.add_api_route("/venting/start", self.start_venting, methods=["GET"]) - router.add_api_route("/venting/stop", self.stop_venting, methods=["GET"]) - router.add_api_route("/draining/is_venting", self.is_draining, methods=["GET"]) - router.add_api_route("/draining/start", self.start_draining, methods=["GET"]) - router.add_api_route("/draining/stop", self.stop_draining, methods=["GET"]) - router.add_api_route("/serial_number", self.serial_number, methods=["GET"]) - - return router - - -if __name__ == "__main__": - chiller = HuberChiller(aioserial.AioSerial(port="COM8")) - status = asyncio.run(chiller.status()) - print(status) diff --git a/flowchem/components/devices/Knauer/HPLC_control.py b/flowchem/components/devices/Knauer/HPLC_control.py deleted file mode 100644 index 9573ebfe..00000000 --- a/flowchem/components/devices/Knauer/HPLC_control.py +++ /dev/null @@ -1,261 +0,0 @@ -# This could become a mess... -# what needs to be done is switch the lamps on, which works over serial. 
-# the rest is just sending commands to the console, possibly also to another machine - -# https://www.dataapex.com/documentation/Content/Help/110-technical-specifications/110.020-command-line-parameters/110.020-command-line-parameters.htm?Highlight=command%20line - -import socket -import subprocess -from pathlib import Path -from threading import Thread -from time import sleep -from typing import Union - -import tenacity - -from flowchem.exceptions import InvalidConfiguration - -try: - # noinspection PyUnresolvedReferences - from flowchem.components.devices.Knauer.Knauer_HPLC_NDA import Lamp_Command - - HAS_KNAUER_COMMANDS = True -except ModuleNotFoundError: - HAS_KNAUER_COMMANDS = False - raise ModuleNotFoundError("You need to get the NDA communication from Knauer.") - -# Todo should have a command constructor dataclass, would be more neat. For now, will do without to get it running asap - -# TODO Very weird, when starting from synthesis, fractioning valve is blocked. no idea why, it's ip is not used. - - -class ClarityInterface: - def __init__( - self, - remote: bool = False, - host: str = None, - port: int = None, - path_to_executable: str = None, - instrument_number: int = 1, - ): - if not HAS_KNAUER_COMMANDS: - raise InvalidConfiguration( - "Knauer Lamps unusable: no Knauer Commands available.\n" - "Contact your distributor to get the serial API documentation." 
- ) - # just determine path to executable, and open socket if for remote usage - self.remote = remote - self.instrument = instrument_number - self.path_to_executable = path_to_executable - if self.remote: - self.interface = MessageSender(host, port) - self.command_executor = self.interface.open_socket_and_send - else: - self.command_executor = ClarityExecutioner.execute_command # type:ignore - - # TODO would have to have some way to fail - @classmethod - def from_config(cls, config_dict: dict): - try: - pass - except: - pass - - # if remote execute everything on other PC, else on this - # Todo doesn't make sense here, done other way - def execute_command(self, command_string): - if self.remote: - self.command_executor(command_string) - else: - self.command_executor(command_string, self.path_to_executable) - - # bit displaced convenience function to switch on the lamps of hplc detector. - # TODO remove if published - def switch_lamp_on(self, address="192.168.10.111", port=10001): - """ - Has to be performed BEFORE starting clarity, otherwise sockets get blocked - Args: - address: - port: - - Returns: - - """ - - # send the respective two commands and check return. 
Send to socket - message_sender = MessageSender(address, port) - message_sender.open_socket_and_send(Lamp_Command.deut_lamp_on) - sleep(1) - message_sender.open_socket_and_send(Lamp_Command.hal_lamp_on) - sleep(15) - - # define relevant strings - def open_clarity_chrom( - self, user: str, config_file: str, password: str = None, start_method: str = "" - ): - """ - start_method: supply the path to the method to start with, this is important for a soft column start - config file: if you want to start with specific instrumment configuration, specify location of config file here - """ - if not password: - self.execute_command( - f"i={self.instrument} cfg={config_file} u={user} {start_method}" - ) - else: - self.execute_command( - f"i={self.instrument} cfg={config_file} u={user} p={password} {start_method}" - ) - sleep(20) - - # TODO should be OS agnostic - def slow_flowrate_ramp(self, path: str, method_list: tuple = ()): - """ - path: path where the methods are located - method list - """ - for current_method in method_list: - self.execute_command(f"i={self.instrument} {path}\\{current_method}") - # not very elegant, but sending and setting method takes at least 10 seconds, only has to run during platform startup and can't see more elegant way how to do that - sleep(20) - - def load_file(self, path_to_file: str): - """has to be done to open project, then method. Take care to select 'Send Method to Instrument' option in Method - Sending Options dialog in System Configuration.""" - self.execute_command(f"i={self.instrument} {path_to_file}") - sleep(10) - - def set_sample_name(self, sample_name): - """Sets the sample name for the next single run""" - self.execute_command(f"i={self.instrument} set_sample_name={sample_name}") - sleep(1) - - def run(self): - """Runs the instrument. Care should be taken to activate automatic data export on HPLC. (can be done via command, - but that only makes it more complicated). 
Takes at least 2 sec until run starts""" - self.execute_command(f"run={self.instrument}") - - def exit(self): - """Exit Clarity Chrom""" - self.execute_command("exit") - sleep(10) - - -class MessageSender: - def __init__(self, host, port): - self.host = host - self.port = port - - # encode('utf-8') - - @tenacity.retry( - stop=tenacity.stop_after_attempt(5), wait=tenacity.wait_fixed(2), reraise=True - ) - def open_socket_and_send(self, message: str): - s = socket.socket() - s.connect((self.host, self.port)) - s.sendall(message.encode("utf-8")) - s.close() - - -class ClarityExecutioner: - """This needs to run on the computer having claritychrom installed, except for one uses the same PC. However, - going via socket and localhost would also work, but seems a bit cumbersome. - open up server socket. Everything coming in will be prepended with claritychrom.exe (if it is not already)""" - - command_prepend = "claritychrom.exe" - - def __init__(self, port, allowed_client="192.168.10.20", host_ip="192.168.10.11"): - self.port = port - self.allowed_client = allowed_client - self.host_ip = host_ip - # think that should also go in thread, otherwise blocks - self.server_socket = self.open_server() - self.executioner = Thread(target=self.get_commands_and_execute, daemon=False) - print("a") - self.executioner.start() - print("b") - - def open_server(self): - s = socket.socket() - s.bind((self.host_ip, self.port)) - s.listen(5) - return s - - def accept_new_connection(self): - client_socket, address = self.server_socket.accept() - if not address[0] == self.allowed_client: - client_socket.close() - print(f"nice try {client_socket, address}") - else: - # if below code is executed, that means the sender is connected - print(f"[+] {address} is connected.") - # in unicode - request = client_socket.recv(1024).decode("utf-8") - client_socket.close() - print(request) - return request - - # TODO: instrument number has to go into command execution - def execute_command( - self, - command: 
str, - folder_of_executable: Union[Path, str] = r"C:\claritychrom\bin\\", - ): - prefix = "claritychrom.exe" - # sanitize input a bit - if command.split(" ")[0] != prefix: - command = folder_of_executable + prefix + " " + command # type:ignore - print(command) - try: - x = subprocess - x.run(command, shell=True, capture_output=False, timeout=3) - except subprocess.TimeoutExpired: - print("Damn, Subprocess") - - def get_commands_and_execute(self): - while True: - request = self.accept_new_connection() - self.execute_command(request) - sleep(1) - print("listening") - - -###TODO: also dsk or k for opening with specific desktop could be helpful-. -# TODO Export results can be specified -> exports result, rewrite to a nicer interface - -if __name__ == "__main__": - computer_w_Clarity = False - if computer_w_Clarity: - analyser = ClarityExecutioner(10014) - else: - commander = ClarityInterface( - remote=True, host="192.168.10.11", port=10014, instrument_number=2 - ) - commander.exit() - commander.switch_lamp_on() # address and port hardcoded - commander.open_clarity_chrom( - "admin", - config_file=r"C:\ClarityChrom\Cfg\automated_exp.cfg ", - start_method=r"D:\Data2q\sugar-optimizer\autostartup_analysis\autostartup_005_Sugar-c18_shortened.MET", - ) - commander.slow_flowrate_ramp( - r"D:\Data2q\sugar-optimizer\autostartup_analysis", - method_list=( - "autostartup_005_Sugar-c18_shortened.MET", - "autostartup_01_Sugar-c18_shortened.MET", - "autostartup_015_Sugar-c18_shortened.MET", - "autostartup_02_Sugar-c18_shortened.MET", - "autostartup_025_Sugar-c18_shortened.MET", - "autostartup_03_Sugar-c18_shortened.MET", - "autostartup_035_Sugar-c18_shortened.MET", - "autostartup_04_Sugar-c18_shortened.MET", - "autostartup_045_Sugar-c18_shortened.MET", - "autostartup_05_Sugar-c18_shortened.MET", - ), - ) - commander.load_file( - r"D:\Data2q\sugar-optimizer\autostartup_analysis\auto_Sugar-c18_shortened.MET" - ) - # commander.load_file("opendedicatedproject") # open a project for 
measurements - commander.set_sample_name("test123") - commander.run() diff --git a/flowchem/components/devices/Knauer/Knauer_autodiscover.py b/flowchem/components/devices/Knauer/Knauer_autodiscover.py deleted file mode 100644 index e533aa82..00000000 --- a/flowchem/components/devices/Knauer/Knauer_autodiscover.py +++ /dev/null @@ -1,122 +0,0 @@ -""" Autodiscover Knauer devices on network """ -import asyncio -import queue -import socket -import sys -import time -from threading import Thread -from typing import Dict, Text, Tuple, Union - -from getmac import getmac -from loguru import logger - -Address = Tuple[str, int] - - -class BroadcastProtocol(asyncio.DatagramProtocol): - """From https://gist.github.com/yluthu/4f785d4546057b49b56c""" - - def __init__(self, target: Address, response_queue: queue.Queue): - self.target = target - self.loop = asyncio.get_event_loop() - self._queue = response_queue - - def connection_made(self, transport: asyncio.transports.DatagramTransport): # type: ignore - """Called upon connection.""" - sock = transport.get_extra_info("socket") # type: socket.socket - sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) # sets to broadcast - transport.sendto(b"\x00\x01\x00\xf6", self.target) - - def datagram_received(self, data: Union[bytes, Text], addr: Address): - """Called on data received""" - logger.trace(f"Received data from {addr}") - self._queue.put(addr[0]) - - -async def get_device_type(ip_address: str) -> str: - """Returns either 'Pump', 'Valve' or 'Unknown'""" - fut = asyncio.open_connection(host=ip_address, port=10001) - try: - reader, writer = await asyncio.wait_for(fut, timeout=3) - except ConnectionError: - return "ConnectionError" - except asyncio.TimeoutError: - if ip_address == "192.168.1.2": - return "TimeoutError - Nice FlowIR that you have :D" - return "TimeoutError" - - # Test Pump - writer.write("HEADTYPE:?\n\r".encode()) - reply = await reader.readuntil(separator=b"\r") - if reply.startswith(b"HEADTYPE"): - 
logger.debug(f"Device {ip_address} is a Pump") - return "Pump" - - # Test Valve - writer.write("T:?\n\r".encode()) - reply = await reader.readuntil(separator=b"\r") - if reply.startswith(b"VALVE"): - logger.debug(f"Device {ip_address} is a Valve") - return "Valve" - - return "Unknown" - - -def autodiscover_knauer(source_ip: str = "") -> Dict[str, str]: - """ - Automatically find Knauer ethernet device on the network and returns the IP associated to each MAC address. - Note that the MAC is the key here as it is the parameter used in configuration files. - Knauer devices only support DHCP so static IPs are not an option. - - - Args: - source_ip: source IP for autodiscover (only relevant if multiple network interfaces are available!) - Returns: - List of tuples (IP, MAC, device_type), one per device replying to autodiscover - """ - - # Define source IP resolving local hostname. - if not source_ip: - hostname = socket.gethostname() - source_ip = socket.gethostbyname(hostname) - - loop = asyncio.get_event_loop() - device_q: queue.Queue = queue.Queue() - coro = loop.create_datagram_endpoint( - lambda: BroadcastProtocol(("255.255.255.255", 30718), response_queue=device_q), - local_addr=(source_ip, 28688), - ) - loop.run_until_complete(coro) - thread = Thread(target=loop.run_forever) - thread.start() - time.sleep(2) - loop.call_soon_threadsafe(loop.stop) # here - thread.join() - - device_list = [] - for _ in range(40): - try: - device_list.append(device_q.get_nowait()) - except queue.Empty: - break - - device_info = dict() - for device_ip in device_list: - # MAC address - mac = getmac.get_mac_address(ip=device_ip) - device_info[mac] = device_ip - return device_info - - -if __name__ == "__main__": - # This is a bug of asyncio on Windows :| - if sys.platform == "win32": - asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) - - # Autodiscover devices (dict mac as index, IP as value) - devices = autodiscover_knauer() - - for mac_address, ip in 
devices.items(): - # Device Type - device_type = asyncio.run(get_device_type(ip)) - print(f"MAC: {mac_address} IP: {ip} DEVICE_TYPE: {device_type}") diff --git a/flowchem/components/devices/Knauer/__init__.py b/flowchem/components/devices/Knauer/__init__.py deleted file mode 100644 index 60640814..00000000 --- a/flowchem/components/devices/Knauer/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -""" Knauer devices """ -from .AzuraCompactPump import AzuraCompactPump -from .Knauer_autodiscover import autodiscover_knauer -from .KnauerValve import ( - Knauer6Port2PositionValve, - Knauer6Port6PositionValve, - Knauer12PortValve, - Knauer16PortValve, -) - -__all__ = [ - "AzuraCompactPump", - "Knauer6Port2PositionValve", - "Knauer6Port6PositionValve", - "Knauer12PortValve", - "Knauer16PortValve", -] diff --git a/flowchem/components/devices/Magritek/__init__.py b/flowchem/components/devices/Magritek/__init__.py deleted file mode 100644 index 5da083a4..00000000 --- a/flowchem/components/devices/Magritek/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -""" Magritek devices """ -from .nmrspectrum import NMRSpectrum -from .spinsolve import Spinsolve - -__all__ = ["NMRSpectrum", "Spinsolve"] diff --git a/flowchem/components/devices/Magritek/nmrspectrum.py b/flowchem/components/devices/Magritek/nmrspectrum.py deleted file mode 100644 index 879716d1..00000000 --- a/flowchem/components/devices/Magritek/nmrspectrum.py +++ /dev/null @@ -1,67 +0,0 @@ -""" NMR-spectrum object represents an NMR spectrum. 
""" -import time -from pathlib import Path - -import matplotlib.pyplot as plt -import nmrglue as ng - - -class NMRSpectrum: - """General spectrum object, instantiated from Spinsolve folder w/ experimental results.""" - - def __init__(self, location: Path): - jcamp_file = location / "nmr_fid.dx" - if not jcamp_file.exists(): - print("File nmr_fid.dx not existing, waiting 2 sec just in case...") - time.sleep(2) - self.dic, self.raw_data = ng.spinsolve.read(dir=location.as_posix()) - self.processed_data = None - - @property - def uc(self): - """ - - Returns: - - """ - data = self.processed_data if self.processed_data is not None else self.raw_data - return ng.spinsolve.make_uc(self.dic, data) - - def process(self): - """Basic spectrum processing. Application-specific processing suggested.""" - # Zerofill - self.processed_data = ng.proc_base.zf_auto( - ng.proc_base.zf_double(self.raw_data, 1) - ) - - # FT - self.processed_data = ng.proc_base.fft(self.processed_data) - - # Phasing - try: - # Try to extract phase info from JCAMP-DX file... 
- ph0 = float(self.dic["dx"]["$PHC0"].pop()) - ph1 = float(self.dic["dx"]["$PHC1"].pop()) - self.processed_data = ng.proc_base.ps(self.processed_data, ph0, ph1, True) - except KeyError: - # Authophase needed - no info on phase from nmrglue - self.processed_data = ng.proc_autophase.autops( - self.processed_data, - "acme", - disp=False, - ) - - # Delete imaginary - self.processed_data = ng.proc_base.di(self.processed_data) - - def plot(self, ppm_range=(8, 0)): - """Returns spectrum as matplotlib figure""" - if self.processed_data is None: - self.process() - - fig = plt.figure() - axes = fig.add_subplot(111) - axes.plot(self.uc.ppm_scale(), self.processed_data) - - plt.xlim(ppm_range) # plot as we are used to, from positive to negative - return fig diff --git a/flowchem/components/devices/Magritek/parser.py b/flowchem/components/devices/Magritek/parser.py deleted file mode 100644 index bc68573c..00000000 --- a/flowchem/components/devices/Magritek/parser.py +++ /dev/null @@ -1,86 +0,0 @@ -""" Functions related to instrument reply parsing """ - -import warnings -from enum import Enum - -from lxml import etree - - -class StatusNotification(Enum): - """ - Represent the type of the status notification - """ - - STARTED = 1 # received, starting protocol - RUNNING = 2 # All good, received, protocol is running - STOPPING = 3 # Abort called, waiting for current scan end - FINISHING = ( - 4 # Upon , means also processing/saving data is over - ERROR = 6 # If an error occurs - UNKNOWN = 7 - - -# def extract_error(xml_message: etree._Element) -> str: -# """ -# Search for an error tag in the XML tree provided. -# If an error is found returns its error message, empty string if no errors are present. -# """ -# error = xml_message.find(".//Error") -# return error.get("error") if error is not None else "" - - -def parse_status_notification(xml_message: etree.Element): - """ - Parse a status notification reply. 
- """ - status = xml_message.find(".//StatusNotification") - - # No status notification found - if status is None: - warnings.warn( - "Parse status notification called on a message with no StatusNotification tags!" - ) - return None - - # StatusNotification child can be (w/ submsg). , or - child = status[0] - - if child.tag == "State": - return parse_state(child) - - if child.tag == "Progress": - return StatusNotification.RUNNING, None - - if child.tag == "Completed": - return StatusNotification.COMPLETED, None - - if child.tag == "Error": - return StatusNotification.ERROR, None - - warnings.warn("Could not detect StatusNotification state!") - return StatusNotification.UNKNOWN, None - - -def parse_state(xml_message: etree.Element): - """Parse state message""" - status_type = StatusNotification.UNKNOWN - - # Parse status - status = xml_message.get("status") - if status == "Running": - status_type = StatusNotification.STARTED - elif status == "Ready": - status_type = StatusNotification.FINISHING - elif status == "Stopping": - status_type = StatusNotification.STOPPING - else: - warnings.warn(f"Unidentified notification status: {status}") - - # Full path is only available on experiment, so often this string is empty - remote_folder = xml_message.get("dataFolder") - - if remote_folder: - return status_type, remote_folder - return status_type, None diff --git a/flowchem/components/devices/Magritek/reader.py b/flowchem/components/devices/Magritek/reader.py deleted file mode 100644 index 7e1f8798..00000000 --- a/flowchem/components/devices/Magritek/reader.py +++ /dev/null @@ -1,116 +0,0 @@ -""" Connection reader + XML parser for Spinsolve replies """ - -import queue -import time -import warnings -from typing import List, Optional - -from lxml import etree - -from flowchem.components.devices.Magritek.utils import get_my_docs_path - - -class Reader: - """ - This class is responsible for collecting and parsing replies from the spectrometer. 
- It does not contain functionality to handle I/O. - """ - - def __init__(self, reply_queue: queue.Queue, xml_schema=None): - self._queue = reply_queue - - if xml_schema is None: - # This is the default location upon Spinsolve installation. However, remote control can be from remote ;) - my_docs = get_my_docs_path() - try: - self.schema = etree.XMLSchema( - file=str(my_docs / "Magritek" / "Spinsolve" / "RemoteControl.xsd") - ) - except etree.XMLSchemaParseError: # i.e. not found - self.schema = None - else: - self.schema = xml_schema - - self.parser = etree.XMLParser() - self._replies: List[etree.Element] = [] - self._rcv_buffer = b"" - - def wait_for_reply(self, reply_type="", timeout=1): - """ - Awaits for a reply of type reply_type or up to timeout - """ - reply = self.get_next_reply(reply_type) - - # If already available just return - if reply is not None: - return reply - - # This is ugly, but usually unnecessary as replies are received immediately. - # Only relevant if controlling remote devices over connections with significant latency - start_time = time.time() - while reply is None and time.time() < (start_time + timeout): - reply = self.get_next_reply(reply_type) - time.sleep(0.1) - - if reply is None: - raise RuntimeError("No reply received from device!") - - return reply - - def get_next_reply(self, reply_type=""): - """ - Returns the next reply of given type in self._replies. - """ - self.fetch_replies() - - valid_replies = [ - reply for reply in self._replies if reply[0].tag.endswith(reply_type) - ] - - if len(valid_replies) > 0: - first_valid_reply = valid_replies[0] - self._replies.remove(first_valid_reply) - return first_valid_reply - - def clear_replies(self, reply_type=""): - """Remove old replies.""" - # Shortcut if none provided... 
- if not reply_type: - self._replies.clear() - - # Otherwise, check type - for reply in self._replies: - if reply[0].tag.endswith(reply_type): - self._replies.remove(reply) - - def fetch_replies(self): - """ - Fetch the unprocessed chunks from the queue and adds them to the reception buffer - """ - while not self._queue.empty(): - # From queue only complete replies thanks to readuntil(b"") - tree = self.parse_tree(self._queue.get()) - self._queue.task_done() - - if tree: - self._replies.append(tree) - - if tree and self.schema: - self.validate_tree(tree) - - def parse_tree(self, tree_string) -> Optional[etree.Element]: - """Parse an XML reply tree, add it to the replies and validate it (if the schema is available).""" - - try: - return etree.fromstring(tree_string, self.parser) - except etree.XMLSyntaxError: - warnings.warn(f"Cannot parse response XML {tree_string}") - return None - - def validate_tree(self, tree: etree.Element): - """Validate the XML tree against the schema.""" - - try: - self.schema.validate(tree) - except etree.XMLSyntaxError as syntax_error: - warnings.warn(f"Invalid XML received! 
[Validation error: {syntax_error}]") diff --git a/flowchem/components/devices/Magritek/spinsolve.py b/flowchem/components/devices/Magritek/spinsolve.py deleted file mode 100644 index b98c5088..00000000 --- a/flowchem/components/devices/Magritek/spinsolve.py +++ /dev/null @@ -1,395 +0,0 @@ -""" Spinsolve module """ -import asyncio -import pprint as pp -import queue -import threading -import warnings -from pathlib import Path -from typing import Optional, Union - -from loguru import logger -from lxml import etree -from packaging import version -from unsync import unsync - -from flowchem.components.devices.Magritek.msg_maker import ( - create_message, - create_protocol_message, - get_request, - set_attribute, - set_data_folder, - set_user_data, -) -from flowchem.components.devices.Magritek.parser import ( - StatusNotification, - parse_status_notification, -) -from flowchem.components.devices.Magritek.reader import Reader -from flowchem.components.properties import ActiveComponent - - -@unsync -async def get_streams_for_connection(host: str, port: str): - """ - Given a target (host, port) returns the corresponding asyncio streams (I/O). - """ - try: - read, write = await asyncio.open_connection(host, port) - except Exception as e: - raise ConnectionError(f"Error connecting to {host}:{port} -- {e}") from e - return read, write - - -class Spinsolve(ActiveComponent): - """ - Spinsolve class, gives access to the spectrometer remote control API - """ - - def __init__( - self, - host="127.0.0.1", - **kwargs, - ): - """ - Constructor, actuates the connection upon instantiation. 
- - kwargs are: name, xml_schema - """ - - super().__init__(kwargs.get("name")) - # IOs - self._io_reader, self._io_writer = get_streams_for_connection( - host, kwargs.get("port", "13000") - ).result() - - # Queue needed for thread-safe operation, the reader is in a different thread - self._replies: queue.Queue = queue.Queue() - self._reader = Reader(self._replies, kwargs.get("xml_schema", None)) - threading.Thread(target=self.connenction_listener_thread, daemon=True).start() - - # Check if the instrument is connected - hw_info = self.hw_request() - - # If not raise ConnectionError - if hw_info.find(".//ConnectedToHardware").text != "true": - raise ConnectionError( - "The spectrometer is not connected to the control PC running Spinsolve software!" - ) - - # If connected parse and log instrument info - self.software_version = hw_info.find(".//SpinsolveSoftware").text - self.hardware_type = hw_info.find(".//SpinsolveType").text - logger.debug( - f"Connected with Spinsolve model {self.hardware_type}, SW version: {self.software_version}" - ) - - # Load available protocols - self.protocols_option = self.request_available_protocols() - - # Set experimental variable - self.data_folder = kwargs.get("data_folder") - - # An optional mapping between remote and local folder location can be used for remote use - self._folder_mapper = kwargs.get("remote_to_local_mapping") - # Ensure mapper validity before starting. 
This will save you time later ;) - if self._folder_mapper is not None: - assert self._folder_mapper(self.data_folder) is not None - - # Sets default sample, solvent value and user data - self.sample = kwargs.get("sample_name", "FlowChem Experiment") - self.solvent = kwargs.get("solvent", "Chloroform?") - self.user_data = dict(control_software="flowchem") - - # Finally, check version - if version.parse(self.software_version) < version.parse("1.18.1.3062"): - warnings.warn( - f"Spinsolve version {self.software_version} is older than the reference (1.18.1.3062)" - ) - - def connenction_listener_thread(self): - """Thread that listens to the connection and parses the reply""" - self.connection_listener().result() - - @unsync - async def connection_listener(self): - """ - Listen for replies and puts them in the queue - """ - while True: - try: - chunk = await self._io_reader.readuntil(b"") - except asyncio.CancelledError: - break - self._replies.put(chunk) - - @unsync - async def _transmit(self, message: bytes): - """ - Sends the message to the spectrometer - """ - self._io_writer.write(message) - await self._io_writer.drain() - - @property - def solvent(self) -> str: - """Get current solvent""" - # Send request - self.send_message(get_request("Solvent")) - - # Get reply - reply = self._read_reply(reply_type="GetResponse") - - # Parse and return - return reply.find(".//Solvent").text - - @solvent.setter - def solvent(self, solvent: str): - """Sets the solvent""" - self.send_message(set_attribute("Solvent", solvent)) - - @property - def sample(self) -> str: - """Get current solvent (appears in acqu.par)""" - # Send request - self.send_message(get_request("Sample")) - - # Get reply - reply = self._read_reply(reply_type="GetResponse") - - # Parse and return - return reply.find(".//Sample").text - - @sample.setter - def sample(self, sample: str): - """Sets the sample name (appears in acqu.par)""" - self.send_message(set_attribute("Sample", sample)) - - @property - def 
data_folder(self): - """Get current data_folder location""" - return self._data_folder # No command available to directly query Spinsolve :( - - @data_folder.setter - def data_folder(self, location: str): - """Sets the location provided as data folder. optionally, with typeThese are included in acq.par""" - if location is not None: - self._data_folder = location - self.send_message(set_data_folder(location)) - - @property - def user_data(self) -> dict: - """Create a get user data request and parse result""" - # Send request - self.send_message(get_request("UserData")) - - # Get reply - reply = self._read_reply(reply_type="GetResponse") - - # Parse and return - return { - data_item.get("key"): data_item.get("value") - for data_item in reply.findall(".//Data") - } - - @user_data.setter - def user_data(self, data_to_be_set: dict): - """Sets the user data proewqvided in the dict. These are included in acq.par""" - self.send_message(set_user_data(data_to_be_set)) - - def _read_reply(self, reply_type="", timeout=5): - """Looks in the received replies for one of type reply_type""" - # Get reply of reply_type from the reader object that holds the StreamReader - reply = self._reader.wait_for_reply(reply_type=reply_type, timeout=timeout) - logger.debug(f"Got a reply from spectrometer: {etree.tostring(reply)}") - - return reply - - async def _async_read_reply(self, *args): - loop = asyncio.get_event_loop() - return await loop.run_in_executor(None, self._read_reply, *args) - - def send_message(self, root: etree.Element): - """ - Sends the tree connected XML etree.Element provided - """ - # Turn the etree.Element provided into an ElementTree - tree = etree.ElementTree(root) - - # Export to string with declaration - message = etree.tostring(tree, xml_declaration=True, encoding="utf-8") - - # Transmit - logger.debug(f"Transmitting request to spectrometer: {message}") - self._transmit(message).result() - - def hw_request(self): - """ - Sends an HW request to the spectrometer, 
receive the reply and returns it - """ - self.send_message(create_message("HardwareRequest")) - # Larger timeout than usual as this is the first request sent to the spectrometer - # When the PC/Spinsolve is in standby, the reply to the first req is slower than usual - return self._read_reply(reply_type="HardwareResponse", timeout=15) - - def request_available_protocols(self) -> dict: - """ - Get a list of available protocol on the current spectrometer - """ - # Request available protocols - self.send_message(create_message("AvailableProtocolOptionsRequest")) - # Get reply - tree = self._read_reply(reply_type="AvailableProtocolOptionsResponse") - - # Parse reply and construct the dict with protocols available - protocols = {} - for element in tree.findall(".//Protocol"): - protocol_name = element.get("protocol") - protocols[protocol_name] = { - option.get("name"): [value.text for value in option.findall("Value")] - for option in element.findall("Option") - } - - return protocols - - async def run_protocol( - self, protocol_name, protocol_options=None - ) -> Optional[Union[str, Path]]: - """ - Runs a protocol - - Returns true if the protocol is started correctly, false otherwise. 
- """ - # All protocol names are UPPERCASE, so force upper here to avoid case issues - protocol_name = protocol_name.upper() - if not self.is_protocol_available(protocol_name): - warnings.warn( - f"The protocol requested '{protocol_name}' is not available on the spectrometer!\n" - f"Valid options are: {pp.pformat(sorted(self.protocols_option.keys()))}" - ) - return None - - # Validate protocol options (check values and remove invalid ones, with warning) - valid_protocol_options = self._validate_protocol_request( - protocol_name, protocol_options - ) - - # Start protocol - self._reader.clear_replies() - self.send_message( - create_protocol_message(protocol_name, valid_protocol_options) - ) - - # Follow status notifications and finally get location of remote data - remote_data_folder = await self.check_notifications() - logger.info(f"Protocol over - remote data folder is {remote_data_folder}") - - # If a folder mapper is present use it to translate the location - if self._folder_mapper: - return self._folder_mapper(remote_data_folder) - return remote_data_folder - - async def check_notifications(self) -> Path: - """ - Read all the StatusNotification and returns the dataFolder - """ - remote_folder = Path() - while True: - # Get all StatusNotification - status_update = await self._async_read_reply("StatusNotification", 6000) - - # Parse them - status, folder = parse_status_notification(status_update) - logger.debug(f"Status update: Status is {status} and data folder={folder}") - - # When I get a finishing response end protocol and return the data folder! 
- if status is StatusNotification.FINISHING: - remote_folder = Path(folder) - break - - if status is StatusNotification.ERROR: - # Usually device busy - warnings.warn("Error detected on running protocol -- aborting.") - self.abort() # Abort running experiment - break - - return remote_folder - - def abort(self): - """Abort current running command""" - self.send_message(create_message("Abort")) - - def is_protocol_available(self, desired_protocol): - """Check if the desired protocol is available on the current instrument""" - return desired_protocol in self.protocols_option - - def _validate_protocol_request(self, protocol_name, protocol_options) -> dict: - """Ensures the protocol names, option name and option values are valid.""" - # Valid option for protocol - valid_options = self.protocols_option.get(protocol_name) - if valid_options is None or protocol_options is None: - return {} - - # For each option, check if valid. If not, remove it, raise warning and continue - for option_name, option_value in list(protocol_options.items()): - if option_name not in valid_options: - protocol_options.pop(option_name) - warnings.warn( - f"Invalid option {option_name} for protocol {protocol_name} -- DROPPED!" - ) - continue - - # Get valid option values (list of them or empty list if not a multiple choice) - valid_values = valid_options[option_name] - - # If there is no list of valid options accept anything - if not valid_values: - continue - # otherwise validate the value as well - elif str(option_value) not in valid_values: - protocol_options.pop(option_name) - warnings.warn( - f"Invalid value {option_value} for option {option_name} in protocol {protocol_name}" - f" -- DROPPED!" 
- ) - - # Returns the dict with only valid options/value pairs - return protocol_options - - # def shim(self, shim_type="CheckShim") -> Tuple[float, float]: - # """ Perform one of the standard shimming routine {CheckShim | QuickShim | PowerShim} """ - # # Check shim type - # if shim_type not in self.STD_SHIM_REQUEST: - # warnings.warn(f"Invalid shimming protocol: {shim_type} not in {self.STD_SHIM_REQUEST}. Assumed CheckShim") - # shim_type = "CheckShim" - # - # # Submit request - # self.send_message(create_message(shim_type+"Request")) - # - # # Wait for reply - # response_tag = shim_type + "Response" - # wait_time = { - # "CheckShim": 180, - # "QuickShim": 600, - # "PowerShim": 3600, - # } - # reply = self._read_reply(reply_type=response_tag, timeout=wait_time[shim_type]) - # - # # Check for errors - # error = reply.find(f".//{response_tag}").get("error") - # if error: - # warnings.warn(f"Error occurred during shimming: {error}") - # return None, None - # - # # Return LineWidth and BaseWidth - # return float(reply.find(".//LineWidth").text), float(reply.find(".//BaseWidth").text) - - def shim(self): - """Performs a shim on sample""" - raise NotImplementedError("Use run protocol with a shimming protocol instead!") - - -if __name__ == "__main__": - hostname = "BSMC-YMEF002121" - - nmr: Spinsolve = Spinsolve(host=hostname) - print(nmr.sample) diff --git a/flowchem/components/devices/Manson/__init__.py b/flowchem/components/devices/Manson/__init__.py deleted file mode 100644 index c76af147..00000000 --- a/flowchem/components/devices/Manson/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -""" Manson devices """ -from .manson import MansonPowerSupply - -__all__ = ["MansonPowerSupply"] diff --git a/flowchem/components/devices/MettlerToledo/FlowIR.py b/flowchem/components/devices/MettlerToledo/FlowIR.py deleted file mode 100644 index 7baddb7f..00000000 --- a/flowchem/components/devices/MettlerToledo/FlowIR.py +++ /dev/null @@ -1,255 +0,0 @@ -""" Async implementation of FlowIR 
""" - -import asyncio -import datetime -import warnings -from typing import List, Optional - -from asyncua import Client, ua -from loguru import logger - -from flowchem.components.devices.MettlerToledo.iCIR_common import ( - IRSpectrum, - ProbeInfo, - iCIR_spectrometer, -) -from flowchem.components.properties import ActiveComponent -from flowchem.exceptions import DeviceError - - -class FlowIR(iCIR_spectrometer, ActiveComponent): - """ - Object to interact with the iCIR software controlling the FlowIR and ReactIR. - """ - - counter = 0 - - def __init__(self, url: str = None, name: str = None): - """ - Initiate connection with OPC UA server. - Intended to be used as context-manager! - """ - ActiveComponent.__init__(self, name) - - if name is None: - self.name = f"FlowIR_{self.counter}" - else: - self.name = name - - # Default (local) url if none provided - if url is None: - url = "opc.tcp://localhost:62552/iCOpcUaServer" - - self.opcua = Client(url) - self.version = None - - async def initialize(self): - """Initialize and check connection""" - try: - await self.opcua.connect() - except asyncio.TimeoutError as timeout_error: - raise DeviceError( - f"Could not connect to FlowIR on {self.opcua.server_url}!" - ) from timeout_error - await self.check_version() - logger.debug("FlowIR initialized!") - - def is_local(self): - """Returns true if the server is on the same machine running the python code.""" - return any( - x in self.opcua.aio_obj.server_url.netloc - for x in ("localhost", "127.0.0.1") - ) - - async def check_version(self): - """Check if iCIR is installed and open and if the version is supported.""" - try: - self.version = await self.opcua.get_node( - self.SOFTWARE_VERSION - ).get_value() # "7.1.91.0" - if self.version not in self._supported_versions: - warnings.warn( - f"The current version of iCIR [self.version] has not been tested!" 
- f"Pleas use one of the supported versions: {self._supported_versions}" - ) - except ua.UaStatusCodeError as error: # iCIR app closed - raise DeviceError( - "iCIR app not installed or closed or no instrument available!" - ) from error - - # noinspection PyPep8Naming - async def is_iCIR_connected(self) -> bool: - """Check connection with instrument""" - return await self.opcua.get_node(self.CONNECTION_STATUS).get_value() - - async def probe_info(self) -> ProbeInfo: - """Return FlowIR probe information""" - probe_info = await self.opcua.get_node(self.PROBE_DESCRIPTION).get_value() - return self.parse_probe_info(probe_info) - - async def probe_status(self): - """Returns current probe status""" - return await self.opcua.get_node(self.PROBE_STATUS).get_value() - - async def is_running(self) -> bool: - """Is the probe currently measuring?""" - return await self.probe_status() == "Running" - - async def last_sample_time(self) -> datetime.datetime: - """Returns date/time of latest scan""" - return await self.opcua.get_node(self.LAST_SAMPLE_TIME).get_value() - - async def sample_count(self) -> Optional[int]: - """Sample count (integer autoincrement) watch for changes to ensure latest spectrum is recent""" - return await self.opcua.get_node(self.SAMPLE_COUNT).get_value() - - @staticmethod - async def _wavenumber_from_spectrum_node(node) -> List[float]: - """Gets the X-axis value of a spectrum. This is necessary as they change e.g. 
with resolution.""" - node_property = await node.get_properties() - x_axis = await node_property[0].get_value() - return x_axis.AxisSteps - - @staticmethod - async def spectrum_from_node(node) -> IRSpectrum: - """Given a Spectrum node returns it as IRSpectrum""" - try: - intensity = await node.get_value() - wavenumber = await FlowIR._wavenumber_from_spectrum_node(node) - return IRSpectrum(wavenumber=wavenumber, intensity=intensity) - - except ua.uaerrors.BadOutOfService: - return IRSpectrum(wavenumber=[], intensity=[]) - - async def last_spectrum_treated(self) -> IRSpectrum: - """Returns an IRSpectrum element for the last acquisition""" - return await FlowIR.spectrum_from_node( - self.opcua.get_node(self.SPECTRA_TREATED) - ) - - async def last_spectrum_raw(self) -> IRSpectrum: - """RAW result latest scan""" - return await FlowIR.spectrum_from_node(self.opcua.get_node(self.SPECTRA_RAW)) - - async def last_spectrum_background(self) -> IRSpectrum: - """RAW result latest scan""" - return await FlowIR.spectrum_from_node( - self.opcua.get_node(self.SPECTRA_BACKGROUND) - ) - - async def start_experiment( - self, template: str, name: str = "Unnamed flowchem exp." - ): - """Starts an experiment on iCIR - - Args: - template: name of the experiment template, should be in the right folder on the PC running iCIR - name: experiment name. - """ - template = FlowIR._normalize_template_name(template) - if self.is_local() and FlowIR.is_template_name_valid(template) is False: - raise DeviceError( - f"Cannot start template {template}: name not valid! Check if is in: " - r"C:\ProgramData\METTLER TOLEDO\iC OPC UA Server\1.2\Templates" - ) - if await self.is_running(): - warnings.warn( - "I was asked to start an experiment while a current experiment is already running!" - "I will have to stop that first! 
Sorry for that :)" - ) - # Stop running experiment and wait for the spectrometer to be ready - await self.stop_experiment() - await self.wait_until_idle() - - start_xp_nodeid = self.opcua.get_node(self.START_EXPERIMENT).nodeid - method_parent = self.opcua.get_node(self.METHODS) - try: - # Collect_bg does not seem to work in automation, set to false and do not expose in start_experiment()! - collect_bg = False - await method_parent.call_method(start_xp_nodeid, name, template, collect_bg) - except ua.uaerrors.Bad as error: - raise DeviceError( - "The experiment could not be started!\n" - "Check iCIR status and close any open experiment." - ) from error - logger.info(f"FlowIR experiment {name} started with template {template}!") - - async def stop_experiment(self): - """Stops the experiment currently running (it does not imply instrument is then idle, wait for scan end)""" - method_parent = self.opcua.get_node(self.METHODS) - stop_nodeid = self.opcua.get_node(self.STOP_EXPERIMENT).nodeid - await method_parent.call_method(stop_nodeid) - - async def wait_until_idle(self): - """Waits until no experiment is running.""" - while await self.is_running(): - await asyncio.sleep(0.2) - - def get_router(self): - """Creates an APIRouter for this HuberChiller instance.""" - from fastapi import APIRouter - - router = APIRouter() - router.add_api_route("/is-connected", self.is_iCIR_connected, methods=["GET"]) - router.add_api_route("/is-running", self.is_running, methods=["GET"]) - router.add_api_route("/probe/info", self.is_iCIR_connected, methods=["GET"]) - router.add_api_route("/probe/status", self.is_iCIR_connected, methods=["GET"]) - router.add_api_route( - "/sample/last-acquisition-time", self.last_sample_time, methods=["GET"] - ) - router.add_api_route( - "/sample/spectrum/last-treated", self.last_spectrum_treated, methods=["GET"] - ) - router.add_api_route( - "/sample/spectrum/last-raw", self.last_spectrum_raw, methods=["GET"] - ) - router.add_api_route( - 
"/sample/spectrum/last-background", - self.last_spectrum_background, - methods=["GET"], - ) - router.add_api_route( - "/experiment/start", self.start_experiment, methods=["PUT"] - ) - router.add_api_route("/experiment/stop", self.stop_experiment, methods=["GET"]) - - return router - - -if __name__ == "__main__": - ... - # async def main(): - # opcua_client = Client( - # url=FlowIR.iC_OPCUA_DEFAULT_SERVER_ADDRESS.replace("localhost", "BSMC-YMEF002121") - # ) - # - # async with FlowIR(opcua_client) as ir_spectrometer: - # await ir_spectrometer.check_version() - # - # if await ir_spectrometer.is_iCIR_connected(): - # print("FlowIR connected!") - # else: - # raise ConnectionError("FlowIR not connected :(") - # - # template_name = "15_sec_integration.iCIRTemplate" - # await ir_spectrometer.start_experiment( - # name="reaction_monitoring", template=template_name - # ) - # - # spectrum = await ir_spectrometer.last_spectrum_treated() - # while len(spectrum.intensity) == 0: - # spectrum = await ir_spectrometer.last_spectrum_treated() - # - # for x in range(3): - # spectra_count = await ir_spectrometer.sample_count() - # - # while await ir_spectrometer.sample_count() == spectra_count: - # await asyncio.sleep(1) - # - # print("New spectrum!") - # spectrum = await ir_spectrometer.last_spectrum_treated() - # print(spectrum) - # - # await ir_spectrometer.stop_experiment() - # - # asyncio.run(main()) diff --git a/flowchem/components/devices/MettlerToledo/__init__.py b/flowchem/components/devices/MettlerToledo/__init__.py deleted file mode 100644 index e2d9e8d3..00000000 --- a/flowchem/components/devices/MettlerToledo/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -""" MettlerToledo devices """ -from .FlowIR import FlowIR - -__all__ = ["FlowIR"] diff --git a/flowchem/components/devices/MettlerToledo/iCIR_common.py b/flowchem/components/devices/MettlerToledo/iCIR_common.py deleted file mode 100644 index 8911d3e5..00000000 --- a/flowchem/components/devices/MettlerToledo/iCIR_common.py 
+++ /dev/null @@ -1,124 +0,0 @@ -""" Common iCIR code. """ -import warnings -from pathlib import Path -from typing import List, TypedDict - -from pydantic import BaseModel - - -class ProbeInfo(TypedDict): - """Dictionary returned from iCIR with probe info.""" - - spectrometer: str - spectrometer_SN: str - probe_SN: str - detector: str - apodization: str - ip_address: str - probe_type: str - sampling_interval: str - resolution: str - scan_option: str - gain: str - - -# noinspection PyPep8Naming -class iCIR_spectrometer: - """Common code between sync and async implementations""" - - iC_OPCUA_DEFAULT_SERVER_ADDRESS = "opc.tcp://localhost:62552/iCOpcUaServer" - _supported_versions = {"7.1.91.0"} - SOFTWARE_VERSION = "ns=2;s=Local.iCIR.SoftwareVersion" - CONNECTION_STATUS = "ns=2;s=Local.iCIR.ConnectionStatus" - PROBE_DESCRIPTION = "ns=2;s=Local.iCIR.Probe1.ProbeDescription" - PROBE_STATUS = "ns=2;s=Local.iCIR.Probe1.ProbeStatus" - LAST_SAMPLE_TIME = "ns=2;s=Local.iCIR.Probe1.LastSampleTime" - SAMPLE_COUNT = "ns=2;s=Local.iCIR.Probe1.SampleCount" - SPECTRA_TREATED = "ns=2;s=Local.iCIR.Probe1.SpectraTreated" - SPECTRA_RAW = "ns=2;s=Local.iCIR.Probe1.SpectraRaw" - SPECTRA_BACKGROUND = "ns=2;s=Local.iCIR.Probe1.SpectraBackground" - START_EXPERIMENT = "ns=2;s=Local.iCIR.Probe1.Methods.Start Experiment" - STOP_EXPERIMENT = "ns=2;s=Local.iCIR.Probe1.Methods.Stop" - METHODS = "ns=2;s=Local.iCIR.Probe1.Methods" - - @staticmethod - def _normalize_template_name(template_name) -> str: - """Adds .iCIRTemplate extension from string if not already present""" - return ( - template_name - if template_name.endswith(".iCIRTemplate") - else template_name + ".iCIRTemplate" - ) - - @staticmethod - def is_template_name_valid(template_name: str) -> bool: - """ - From Mettler Toledo docs: - You can use the Start method to create and run a new experiment in one of the iC analytical applications - (i.e. iC IR, iC FBRM, iC Vision, iC Raman). 
Note that you must provide the name of an existing experiment - template file that can be used as a basis for the new experiment. - The template file must be located in a specific folder on the iC OPC UA Server computer. - This is usually C:\\ProgramData\\METTLER TOLEDO\\iC OPC UA Server\\1.2\\Templates. - """ - - template_directory = Path( - r"C:\ProgramData\METTLER TOLEDO\iC OPC UA Server\1.2\Templates" - ) - if not template_directory.exists() or not template_directory.is_dir(): - warnings.warn("iCIR template folder not found on the local PC!") - return False - - # Ensures the name has been provided with no extension (common mistake) - template_name = iCIR_spectrometer._normalize_template_name(template_name) - for existing_template in template_directory.glob("*.iCIRTemplate"): - if existing_template.name == template_name: - return True - return False - - @staticmethod - def parse_probe_info(probe_info_reply: str) -> ProbeInfo: - """Convert the device reply into a ProbeInfo dictionary - - Example probe_info_reply reply is: - 'FlowIR; SN: 2989; Detector: DTGS; Apodization: HappGenzel; IP Address: 192.168.1.2; - Probe: DiComp (Diamond); SN: 14570173; Interface: FlowIR™ Sensor; Sampling: 4000 to 650 cm-1; - Resolution: 8; Scan option: AutoSelect; Gain: 232;' - """ - fields = probe_info_reply.split(";") - probe_info = { - "spectrometer": fields[0], - "spectrometer_SN": fields[1].split(": ")[1], - "probe_SN": fields[6].split(": ")[1], - } - - # Use aliases, i.e. 
translate API names (left) to dict key (right) - translate_attributes = { - "Detector": "detector", - "Apodization": "apodization", - "IP Address": "ip_address", - "Probe": "probe_type", - "Sampling": "sampling_interval", - "Resolution": "resolution", - "Scan option": "scan_option", - "Gain": "gain", - } - for element in fields: - if ":" in element: - piece = element.split(":") - if piece[0].strip() in translate_attributes: - probe_info[translate_attributes[piece[0].strip()]] = piece[ - 1 - ].strip() - - return probe_info # type: ignore - - -class IRSpectrum(BaseModel): - """ - IR spectrum class. - Consider rampy for advance features (baseline fit, etc.) - See e.g. https://github.com/charlesll/rampy/blob/master/examples/baseline_fit.ipynb - """ - - wavenumber: List[float] - intensity: List[float] diff --git a/flowchem/components/devices/Phidgets/__init__.py b/flowchem/components/devices/Phidgets/__init__.py deleted file mode 100644 index e6ca8fc3..00000000 --- a/flowchem/components/devices/Phidgets/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -""" Phidget-based devices """ -from .phidget import PressureSensor - -__all__ = ["PressureSensor"] diff --git a/flowchem/components/devices/README.md b/flowchem/components/devices/README.md deleted file mode 100644 index 43c2b9ca..00000000 --- a/flowchem/components/devices/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# flowchem/components - -This folder contains all the components that appear in a device graph. - -Those include the following: -* Abstract base classes defining the properties that actual device component can implement in [properties](properties/README.md) -* Simple modular components such as mixers and tubing in [stdlib](stdlib/README.md) -* actual hardware devices in [devices](devices/README.md) -* dummy object for testing purposes in [dummy](dummy/README.md) -* pre-defined reactor assembly, i.e. sub-graphs representing hardware that is logically composed by several non-separable components e.g. 
in a chip reactor in [reactors](reactors/README.md) diff --git a/flowchem/components/devices/Vapourtec/R4_heater.py b/flowchem/components/devices/Vapourtec/R4_heater.py deleted file mode 100644 index 2fc913fb..00000000 --- a/flowchem/components/devices/Vapourtec/R4_heater.py +++ /dev/null @@ -1,142 +0,0 @@ -""" Control module for the Vapourtec R4 heater """ -import time -from typing import Optional - -import aioserial -from loguru import logger - -from flowchem.components.properties import ActiveComponent -from flowchem.exceptions import InvalidConfiguration -from flowchem.units import flowchem_ureg - -try: - from flowchem.components.devices.Vapourtec.commands import ( - R4Command, - VapourtecCommand, - ) - - HAS_VAPOURTEC_COMMANDS = True -except ImportError as e: - HAS_VAPOURTEC_COMMANDS = False - raise PermissionError( - "Cannot redistribute Vapourtec commands... Contact Vapourtec to get them!" - ) from e - - -class R4Heater(ActiveComponent): - """R4 reactor heater control class.""" - - DEFAULT_CONFIG = { - "timeout": 0.1, - "baudrate": 19200, - "parity": aioserial.PARITY_NONE, - "stopbits": aioserial.STOPBITS_ONE, - "bytesize": aioserial.EIGHTBITS, - } - """ Virtual control of the Vapourtec R4 heating module. """ - - def __init__(self, name: Optional[str] = None, **config): - super().__init__(name) - if not HAS_VAPOURTEC_COMMANDS: - raise InvalidConfiguration( - "R4Heater unusable: no Vapourtec Commands available.\n" - "Contact your distributor to get the serial API documentation." - ) - - # Merge default settings, including serial, with provided ones. 
- configuration = dict(R4Heater.DEFAULT_CONFIG, **config) - try: - self._serial = aioserial.AioSerial(**configuration) - except aioserial.SerialException as ex: - raise InvalidConfiguration( - f"Cannot connect to the R4Heater on the port <{config.get('port')}>" - ) from ex - - async def _write(self, command: str): - """Writes a command to the pump""" - cmd = command + "\r\n" - await self._serial.write_async(cmd.encode("ascii")) - logger.debug(f"Sent command: {repr(command)}") - - async def _read_reply(self) -> str: - """Reads the pump reply from serial communication""" - reply_string = await self._serial.readline_async() - logger.debug(f"Reply received: {reply_string.decode('ascii')}") - return reply_string.decode("ascii") - - async def write_and_read_reply(self, command: R4Command) -> str: - """Main HamiltonPumpIO method. - Sends a command to the pump, read the replies and returns it, optionally parsed""" - self._serial.reset_input_buffer() - await self._write(command.compile()) - response = await self._read_reply() - - if not response: - raise InvalidConfiguration("No response received from heating module!") - - return response.rstrip() - - async def wait_for_target_temp(self, channel: int): - """Waits until the target channel has reached the desired temperature and is stable""" - t_stable = False - failure = 0 - while not t_stable: - try: - ret_code = await self.write_and_read_reply( - VapourtecCommand.TEMP.set_argument(str(channel)) - ) - except InvalidConfiguration as ex: - ret_code = "N" - failure += 1 - if failure > 3: - raise ex - else: - failure = 0 - - if ret_code[:1] == "S": - logger.debug(f"Target temperature reached on channel {channel}!") - t_stable = True - else: - time.sleep(1) - - async def set_temperature( - self, channel, target_temperature: str, wait: bool = False - ): - """Set temperature and optionally waits for S""" - set_command = getattr(VapourtecCommand, f"SET_CH{channel}_TEMP") - - set_temperature = flowchem_ureg(target_temperature) - # 
Float not accepted, must cast to int - await self.write_and_read_reply( - set_command.set_argument(round(set_temperature.m_as("°C"))) - ) - # Set temperature implies channel on - await self.write_and_read_reply(VapourtecCommand.CH_ON.set_argument(channel)) - - if wait: - await self.wait_for_target_temp(channel) - - def get_router(self): - """Creates an APIRouter for this object.""" - from fastapi import APIRouter - - router = APIRouter() - router.add_api_route("/temperature/set", self.set_temperature, methods=["PUT"]) - - return router - - -if __name__ == "__main__": - import asyncio - - heat = R4Heater(port="COM11") - - async def main(): - """test function""" - # noinspection PyArgumentEqualDefault - await heat.set_temperature(0, "30 °C", wait=False) - print("not waiting - default behaviour.") - await heat.set_temperature(0, "30 °C", wait=True) - print("actually I waited") - - asyncio.run(main()) diff --git a/flowchem/components/devices/Vapourtec/__init__.py b/flowchem/components/devices/Vapourtec/__init__.py deleted file mode 100644 index 35f4ce97..00000000 --- a/flowchem/components/devices/Vapourtec/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -""" Vapourtec devices """ -try: - from .R4_heater import R4Heater - - __all__ = ["R4Heater"] -except PermissionError: - print("Vapourtec devices disabled - no command description found.") diff --git a/flowchem/components/devices/ViciValco/ViciValco_Actuator.py b/flowchem/components/devices/ViciValco/ViciValco_Actuator.py deleted file mode 100644 index 424df098..00000000 --- a/flowchem/components/devices/ViciValco/ViciValco_Actuator.py +++ /dev/null @@ -1,383 +0,0 @@ -""" -This module is used to control Vici Valco Universal Electronic Actuators. 
-""" - -from __future__ import annotations - -from dataclasses import dataclass -from typing import Optional, Set - -import aioserial -from loguru import logger - -from flowchem.components.properties import InjectionValve -from flowchem.exceptions import ActuationError, DeviceError, InvalidConfiguration - - -@dataclass -class ViciProtocolCommandTemplate: - """Class representing a valve command and its expected reply, but without target valve number""" - - command: str - optional_parameter: str = "" - - def to_valve( - self, address: int, command_value: str = "", argument_value: str = "" - ) -> ViciProtocolCommand: - """Returns a Protocol11Command by adding to the template valve address and command arguments""" - return ViciProtocolCommand( - target_valve_num=address, - command=self.command, - optional_parameter=self.optional_parameter, - command_value=command_value, - argument_value=argument_value, - ) - - -@dataclass -class ViciProtocolCommand(ViciProtocolCommandTemplate): - """Class representing a valve command and its expected reply""" - - target_valve_num: Optional[int] = 1 - command_value: Optional[str] = None - argument_value: Optional[str] = None - - def compile(self) -> bytes: - """Create actual command byte by prepending valve address to command and appending executing command.""" - - assert self.target_valve_num in range(0, 11) - if not self.command_value: - self.command_value = "" - - compiled_command = ( - f"{self.target_valve_num}" f"{self.command}{self.command_value}" - ) - - if self.argument_value: - compiled_command += f"{self.optional_parameter}{self.argument_value}" - - return (compiled_command + "\r").encode("ascii") - - -class ViciValcoValveIO: - """Setup with serial parameters, low level IO""" - - DEFAULT_CONFIG = { - "timeout": 0.5, - "baudrate": 9600, - "parity": aioserial.PARITY_NONE, - "stopbits": aioserial.STOPBITS_ONE, - "bytesize": aioserial.EIGHTBITS, - } - - def __init__(self, aio_port: aioserial.Serial): - """ - Initialize 
communication on the serial port where the valves are located and initialize them - Args: - aio_port: aioserial.Serial() object - """ - - self._serial = aio_port - - # These will be set in initialize - self.num_valve_connected = 0 - self._initialized = False - - @classmethod - def from_config(cls, port, **serial_kwargs): - """Create ViciValcoValveIO from config.""" - # Merge default serial settings with provided ones. - configuration = dict(ViciValcoValveIO.DEFAULT_CONFIG, **serial_kwargs) - - try: - serial_object = aioserial.AioSerial(port, **configuration) - except aioserial.SerialException as serial_exception: - raise InvalidConfiguration( - f"Cannot connect to the valve on the port <{port}>" - ) from serial_exception - - return cls(serial_object) - - async def initialize(self, hw_initialization: bool = True): - """Ensure connection + initialize.""" - # This has to be run after each power cycle to assign addresses to valves - self.num_valve_connected = await self.detect_valve_address() - - if hw_initialization: - self._hw_init() - - self._initialized = True - - async def detect_valve_address(self) -> int: - """Detects number of valves connected.""" - try: - await self._serial.write_async("*ID\r".encode("ascii")) - except aioserial.SerialException as serial_error: - raise InvalidConfiguration from serial_error - - reply = self._serial.readlines() - n_valves = len(reply) - if n_valves == 0: - raise InvalidConfiguration(f"No valve found on {self._serial.port}") - - logger.debug(f"Found {len(reply)} valves on {self._serial.port}!") - return len(reply) - - def _hw_init(self): - """Send to all valves the HW initialization command (i.e. 
homing)""" - self._serial.write("*HM\r".encode("ascii")) # Broadcast: initialize + execute - # Note: no need to consume reply here because there is none (since we are using broadcast) - - async def _write(self, command: bytes): - """Writes a command to the valve""" - if not self._initialized: - raise DeviceError( - "Valve not initialized!\n" - "Have you called `initialize()` after object creation?" - ) - await self._serial.write_async(command) - logger.debug(f"Command {repr(command)} sent!") - - async def _read_reply(self, lines) -> str: - """Reads the valve reply from serial communication""" - reply_string = "" - for _ in range(lines): - line = await self._serial.readline_async() - reply_string += line.decode("ascii") - - logger.debug(f"Reply received: {reply_string}") - return reply_string - - def reset_buffer(self): - """Reset input buffer before reading from serial. In theory not necessary if all replies are consumed...""" - self._serial.reset_input_buffer() - - async def write_and_read_reply(self, command: ViciProtocolCommand, lines) -> str: - """Main ViciValcoValveIO method. - Sends a command to the valve, read the replies and returns it, optionally parsed""" - self.reset_buffer() - await self._write(command.compile()) - - if not lines: - return "" - - response = await self._read_reply(lines) - - if not response: - raise InvalidConfiguration( - f"No response received from valve, check valve address! " - f"(Currently set to {command.target_valve_num})" - ) - return response.rstrip() - - @property - def name(self) -> str: - """This is used to provide a nice-looking default name to valve based on its serial connection.""" - try: - return self._serial.name - except AttributeError: - return "" - - -class ViciValco(InjectionValve): - """ " """ - - # This class variable is used for daisy chains (i.e. multiple valves on the same serial connection). Details below. 
- _io_instances: Set[ViciValcoValveIO] = set() - # The mutable object (a set) as class variable creates a shared state across all the instances. - # When several valves are daisy-chained on the same serial port, they need to all access the same Serial object, - # because access to the serial port is exclusive by definition (also locking there ensure thread safe operations). - # FYI it is a borg idiom https://www.oreilly.com/library/view/python-cookbook/0596001673/ch05s23.html - - valve_position_name = {"load": 1, "inject": 2} - - def __init__(self, valve_io: ViciValcoValveIO, address: int = 0, name: str = None): - """ - Default constructor, needs an ViciValcoValveIO object. See from_config() class method for config-based init. - Args: - valve_io: An ViciValcoValveIO w/ serial connection to the daisy chain w/ target valve. - address: number of valve in array, 1 for first one, auto-assigned on init based on position. - name: 'cause naming stuff is important. - """ - - # ViciValcoValveIO - self.valve_io = valve_io - ViciValco._io_instances.add(self.valve_io) # See above for details. - - # The valve name is used for logs and error messages. - self.name = f"Valve {self.valve_io.name}:{address}" if name is None else name - - super().__init__(name) - - # valve address is the valve sequence number if in chain. Count starts at 1, default. - self.address = int(address) - - @classmethod - def from_config(cls, port: str, address: int, name: str = None, **serial_kwargs): - """This class method is used to create instances via config file by the server for HTTP interface.""" - # Many valve can be present on the same serial port with different addresses. - # This shared list of ViciValcoValveIO objects allow shared state in a borg-inspired way, avoiding singletons - # This is only relevant to programmatic instantiation, i.e. when from_config() is called per each valve from a - # config file, as it is the case in the HTTP server. 
- # ViciValcoValve_IO() manually instantiated are not accounted for. - valveio = None - for obj in ViciValco._io_instances: - if obj._serial.port == port: - valveio = obj - break - - # If not existing serial object are available for the port provided, create a new one - if valveio is None: - valveio = ViciValcoValveIO.from_config(port, **serial_kwargs) - - return cls(valveio, address=address, name=name) - - async def initialize(self): - """Must be called after init before anything else.""" - # Test connectivity by querying the valve's firmware version - fw_cmd = ViciProtocolCommandTemplate(command="VR").to_valve(self.address) - firmware_version = await self.valve_io.write_and_read_reply(fw_cmd, lines=5) - logger.info( - f"Connected to Vici Valve {self.name} - FW version: {firmware_version}!" - ) - - async def send_command_and_read_reply( - self, - command_template: ViciProtocolCommandTemplate, - command_value="", - argument_value="", - lines=1, - ) -> str: - """Sends a command based on its template by adding valve address and parameters, returns reply""" - return await self.valve_io.write_and_read_reply( - command_template.to_valve(self.address, command_value, argument_value), - lines, - ) - - async def learn_valve_positions(self) -> None: - """Initialize valve only, there is no reply -> lines = 0""" - await self.send_command_and_read_reply( - ViciProtocolCommandTemplate(command="LRN"), lines=0 - ) - - async def initialize_valve(self) -> None: - """Initialize valve only: Move to Home position""" - await self.send_command_and_read_reply( - ViciProtocolCommandTemplate(command="HM"), lines=0 - ) - # seems necessary to make sure move is finished - await self.get_valve_position() - - async def version(self) -> str: - """Returns the current firmware version reported by the valve.""" - - return await self.send_command_and_read_reply( - ViciProtocolCommandTemplate(command="VR"), lines=5 - ) - - async def get_valve_position(self) -> int: - """Represent the position of the 
valve: getter returns Enum, setter needs Enum.""" - valve_pos = await self.send_command_and_read_reply( - ViciProtocolCommandTemplate(command="CP") - ) - return ViciValco.valve_position_name[valve_pos[-1]] - - async def set_valve_position(self, target_position: int): - """Set valve position. Switches really quick and doesn't reply, so waiting does not make sense.""" - valve_by_name_cw = ViciProtocolCommandTemplate(command="GO") - await self.send_command_and_read_reply( - valve_by_name_cw, command_value=str(target_position), lines=0 - ) - logger.debug(f"{self.name} valve position set to {target_position}") - new_position = await self.get_valve_position() - if not new_position == target_position: - raise ActuationError - - async def __aenter__(self): - await self.initialize() - return self - - async def __aexit__(self, exc_type, exc_value, traceback): - pass - - async def _update(self): - """Used in automation.""" - await self.set_valve_position(self.setting) - - def get_router(self): - """Creates an APIRouter for this object.""" - from fastapi import APIRouter - - router = APIRouter() - router.add_api_route("/firmware-version", self.version, methods=["GET"]) - router.add_api_route( - "/initialize/valve", self.initialize_valve, methods=["PUT"] - ) - router.add_api_route( - "/valve/position", self.get_valve_position, methods=["GET"] - ) - router.add_api_route( - "/valve/position", self.set_valve_position, methods=["PUT"] - ) - - return router - - -if __name__ == "__main__": - import asyncio - - valve1 = ViciValco.from_config(port="COM13", address=0, name="test1") - - asyncio.run(valve1.initialize_valve()) - - asyncio.run(valve1.set_valve_position(2)) - - asyncio.run(valve1.set_valve_position(1)) - - -# Control Command List for reference, don't see much of a point to implement all these, -# especially since most don't return anything -# -# GO[nn] - Move to nn position -> None -# -# HM - Move to the first Position -> None -# -# CW[nn] - Move Clockwise to nn Position -> 
-# -# CC[nn] - Move Counter Clockwise to nn Position -> -# -# TO - Toggle Position to Oposite -> -# -# TT - Timed Toggle -# -# DT[nnnnn] - Set Delay time for TT Command -# -# CP - Returns Current Position -> [ADDRESS]CP[A|B] -# -# AM[n] - Sets the Actuator Mode [1] Two Position With Stops, -> [ADDRESS]AM[1|2|3] -# -# [2] Two Position Without Stops, [3] Multi Position -# -# SB[nnnnn] - Set the Baud Rate to nnnnn -> [ADDRESS]SB[BAUDRATE:int] -# -# NP[nn] - Set the Number of Positions to nn -> 0E2 NP Invalid -# -# SM[n] - Set the Direction [F]orward, [R]everse, [A]uto 0E2 SM Invalid -# -# LRN - Learn Stops Location -> None -# -# CNT[nnnnn] - Set Cycle Counter -> 0CNT10254 -# -# VR[n] - Firmware Version [] Main [1] Display [2] Interface -> 0Dec 15 2011 \n 015:02:20 \n 0UA_MAIN_CT -# -# ID[nn] - Set Device ID nn=(0-9, A-Z) -> 0ID0 -# -# [n]ID* - Reset ID to none n=Current ID -# -# IFM[n] - Interface Mode [0] No Response [1] limited response -> 0IFM0 -# -# [2] Extended Response -# -# LG[n] - Legacy Response Mode [0] Off [1] On -> 0LG0 -# -# /? - Displays This List diff --git a/flowchem/components/devices/ViciValco/__init__.py b/flowchem/components/devices/ViciValco/__init__.py deleted file mode 100644 index cd5b2340..00000000 --- a/flowchem/components/devices/ViciValco/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -""" ViciValco devices """ -from .ViciValco_Actuator import ViciValco - -__all__ = ["ViciValco"] diff --git a/flowchem/components/devices/__init__.py b/flowchem/components/devices/__init__.py deleted file mode 100644 index ef0a5a28..00000000 --- a/flowchem/components/devices/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -""" HW device, organized by manufacturer. 
""" -from .Hamilton import * -from .Harvard_Apparatus import * -from .Huber import * -from .Knauer import * -from .Magritek import * -from .Manson import * -from .MettlerToledo import * -from .Phidgets import * -from .Vapourtec import * -from .ViciValco import * diff --git a/flowchem/components/dummy/__init__.py b/flowchem/components/dummy/__init__.py deleted file mode 100644 index b0e8de0f..00000000 --- a/flowchem/components/dummy/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# isort: skip_file -from .dummy import Dummy -from .dummy_pump import DummyPump -from .dummy_sensor import DummySensor -from .dummy_tempcontrol import DummyTempControl -from .dummy_valve import DummyValve -from .broken_dummy_component import BrokenDummyComponent -from .broken_dummy_sensor import BrokenDummySensor diff --git a/flowchem/components/dummy/broken_dummy_component.py b/flowchem/components/dummy/broken_dummy_component.py deleted file mode 100644 index b3641eb3..00000000 --- a/flowchem/components/dummy/broken_dummy_component.py +++ /dev/null @@ -1,27 +0,0 @@ -from flowchem.components.dummy import Dummy - - -class BrokenDummyComponent(Dummy): - """ - A fake component, used internally for testing. Its `_update()` method always returns `True` during a dry run but always returns `False` in a real run. - - ::: danger - Using this component during real protocol execution will result in a failure. - ::: - - Arguments: - - `name`: The name of the component. - - Attributes: - - `active`: Whether the component is active. This doesn't actually mean anything. 
- """ - - def __init__(self, name=None): - super().__init__(name=name) - - async def _update(self) -> None: - if self.active: - raise RuntimeError - - def _validate(self, dry_run): - return True diff --git a/flowchem/components/dummy/broken_dummy_sensor.py b/flowchem/components/dummy/broken_dummy_sensor.py deleted file mode 100644 index 986a3710..00000000 --- a/flowchem/components/dummy/broken_dummy_sensor.py +++ /dev/null @@ -1,29 +0,0 @@ -from typing import Optional - -from flowchem.components.properties import Sensor - - -class BrokenDummySensor(Sensor): - """ - A dummy sensor returning the number of times it has been read. Fails after a few reads, raising a `RuntimeError`. - - ::: danger - Using this component during real protocol execution will result in a failure. - ::: - - Attributes: - - `name` (str, optional): The name of the Sensor. - - `rate` (Quantity): Data collection rate in Hz. A rate of 0 Hz corresponds to the sensor being off. - """ - - def __init__(self, name: Optional[str] = None): - super().__init__(name=name) - self._unit = "Dimensionless" - self.counter = 0 - - async def _read(self) -> int: - """Collect the data.""" - self.counter += 1 - if self.counter > 15 and self.rate: - raise RuntimeError("Unsurprisingly, the broken sensor is broken.") - return self.counter diff --git a/flowchem/components/dummy/dummy.py b/flowchem/components/dummy/dummy.py deleted file mode 100644 index 2942e81d..00000000 --- a/flowchem/components/dummy/dummy.py +++ /dev/null @@ -1,32 +0,0 @@ -from typing import Optional - -from flowchem.components.properties import ActiveComponent - - -class Dummy(ActiveComponent): - """ - A fake component, used internally for testing. - - ::: warning - This component *can* be used in a non-dry run protocol execution. It does not do anything. - ::: - - Arguments: - - `name`: The component's name. - - Attributes: - - `name`: The component's name. - - `active`: Whether the component is active. This doesn't actually mean anything. 
- """ - - def __init__(self, name: Optional[str] = None): - super().__init__(name=name) - self.active = False - - self._base_state = dict(active=False) - - async def _update(self) -> None: - if self.active: - print("Active!") - else: - print("Inactive.") diff --git a/flowchem/components/dummy/dummy_pump.py b/flowchem/components/dummy/dummy_pump.py deleted file mode 100644 index 6d4e26c5..00000000 --- a/flowchem/components/dummy/dummy_pump.py +++ /dev/null @@ -1,28 +0,0 @@ -from typing import Optional - -from loguru import logger - -from flowchem.components.stdlib import Pump - - -class DummyPump(Pump): - """ - A fake pumping device whose primary feature is that it moves fluid, used internally for testing. - - ::: warning - Users should not instantiate a `DummyPump` for use in a `Protocol` because it is not an actual lab instrument. - ::: - - Arguments: - - `name`: The name of the pump. - - Attributes: - - `name` (`str`): The name of the pump. - - `rate` (`pint.Quantity`): The flow rate of the pump. Must be of the dimensionality of volume/time. - """ - - def __init__(self, name: Optional[str] = None): - super().__init__(name=name) - - async def _update(self): - logger.trace(f"Set {self} rate to {self.rate}") diff --git a/flowchem/components/dummy/dummy_sensor.py b/flowchem/components/dummy/dummy_sensor.py deleted file mode 100644 index 97246233..00000000 --- a/flowchem/components/dummy/dummy_sensor.py +++ /dev/null @@ -1,27 +0,0 @@ -import random -from typing import Optional - -from flowchem.components.properties import Sensor - - -class DummySensor(Sensor): - """A dummy sensor returning the number of times it has been read. - - ::: danger - Don't use this in a real apparatus! It doesn't return real data. - ::: - - Attributes: - - `name`: The component's name. - - `rate`: Data collection rate in Hz as a `pint.Quantity`. A rate of 0 Hz corresponds to the sensor being off. 
- """ - - def __init__(self, name: Optional[str] = None): - super().__init__(name=name) - self._unit = "Dimensionless" - self.counter = 0.0 - - async def _read(self) -> float: - """Collect the data.""" - self.counter += (random.random() * 2) - 1 - return self.counter diff --git a/flowchem/components/dummy/dummy_tempcontrol.py b/flowchem/components/dummy/dummy_tempcontrol.py deleted file mode 100644 index ddc79c67..00000000 --- a/flowchem/components/dummy/dummy_tempcontrol.py +++ /dev/null @@ -1,25 +0,0 @@ -from typing import Optional - -from loguru import logger - -from flowchem.components.properties import TempControl - - -class DummyTempControl(TempControl): - """ - A fake temp controller, used internally for testing. - - ::: danger - This component can be used in a real protocol, although it doesn't actually exist. - ::: - - Arguments: - - `port`: The available port names. - - `name`: The name of the valve. - """ - - def __init__(self, name: Optional[str] = None): - super().__init__(name=name) - - async def _update(self) -> None: - logger.trace(f"Current temperature for {self.name} is {self.temp}") diff --git a/flowchem/components/dummy/dummy_valve.py b/flowchem/components/dummy/dummy_valve.py deleted file mode 100644 index 3bfacebe..00000000 --- a/flowchem/components/dummy/dummy_valve.py +++ /dev/null @@ -1,27 +0,0 @@ -from typing import Optional - -from loguru import logger - -from flowchem.components.properties import Valve - - -class DummyValve(Valve): - """ - A fake valve, used internally for testing. - - ::: danger - This component can be used in a real protocol, although it doesn't actually exist. - ::: - - Arguments: - - `port`: The available port names. - - `name`: The name of the valve. 
- """ - - def __init__(self, name: Optional[str] = None, port: set = None): - if port is None: - port = {"position_1", "position_2", "position_3"} - super().__init__(name=name, port=port) - - async def _update(self) -> None: - logger.trace(f"Switching {self.name} to port {self.setting}") diff --git a/flowchem/components/properties/__init__.py b/flowchem/components/properties/__init__.py deleted file mode 100644 index 58370e4a..00000000 --- a/flowchem/components/properties/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# isort: skip_file -from .base_component import Component -from .active_component import ActiveComponent -from .mapped_component import MultiportComponentMixin -from .injection_valve import InjectionValve -from .passive_component import PassiveComponent -from .passivemixer import PassiveMixer -from .sensor import Sensor -from .tempcontrol import TempControl -from .valve import Valve diff --git a/flowchem/components/properties/active_component.py b/flowchem/components/properties/active_component.py deleted file mode 100644 index 7f8a46ad..00000000 --- a/flowchem/components/properties/active_component.py +++ /dev/null @@ -1,164 +0,0 @@ -import asyncio -import math -import warnings -from typing import Any, Dict, List, MutableMapping, Optional - -from loguru import logger - -from flowchem.components.properties import Component -from flowchem.units import flowchem_ureg - - -class ActiveComponent(Component): - """ - A connected, controllable component. - - All components being manipulated in a `Protocol` must be of type `ActiveComponent`. - - ::: tip - Users should not directly instantiate an `ActiveComponent` because it is an abstract base class, not a functioning laboratory instrument. - ::: - - Arguments: - - `name`: The name of the component. - - Attributes: - - `name`: The name of the component. 
- - """ - - _id_counter = 0 - - def __init__(self, name: Optional[str] = None): - super().__init__(name=name) - self._base_state: Dict[str, Any] = NotImplemented - """ - A placeholder for the base state of the component. - All subclasses of `ActiveComponent` must have this attribute. - The dict must have values which can be parsed into compatible units of the object's other attributes, if applicable. - At the end of a protocol and when not under explicit control by the user, the component will return to this state. - """ - - def _update_from_params(self, params: dict) -> None: - """ - Updates the attributes of the object from a dict. - - Arguments: - - `params`: A dict whose keys are the strings of attribute names and values are the new values of the attribute. - """ - for key, value in params.items(): - if isinstance(getattr(self, key), flowchem_ureg.Quantity): - setattr(self, key, flowchem_ureg.parse_expression(value)) - else: - setattr(self, key, value) - - async def _update(self): - raise NotImplementedError(f"Implement an _update() method for {repr(self)}.") - - def _validate(self, dry_run: bool) -> None: - """ - Checks if a component's class is valid. - - Arguments: - - `dry_run`: Whether this is a validation check for a dry run. Ignores the actual executability of the component. - - Returns: - - Whether the component is valid or not. - """ - - logger.debug(f"Validating {self.name}...") - - # base_state method must return a dict - if not isinstance(self._base_state, dict): - raise ValueError("_base_state is not a dict") - - # the base_state dict must not be empty - if not self._base_state: - raise ValueError("_base_state dict must not be empty") - - # validate the base_state dict - for k, v in self._base_state.items(): - if not hasattr(self, k): - raise ValueError( - f"base_state sets {k} for {repr(self)} but {k} is not an attribute of {repr(self)}. 
" - f"Valid attributes are {self.__dict__}" - ) - - # dimensionality check between _base_state units and attributes - if isinstance(self.__dict__[k], flowchem_ureg.Quantity): - # figure out the dimensions we're comparing - expected_dim = flowchem_ureg.parse_expression(v).dimensionality - actual_dim = self.__dict__[k].dimensionality - - if expected_dim != actual_dim: - raise ValueError( - f"Invalid dimensionality in _base_state for {repr(self)}. " - f"Got {flowchem_ureg.parse_expression(v).dimensionality} for {k}, " - f"expected {self.__dict__[k].dimensionality}" - ) - - # if not dimensional, do type matching - elif not isinstance(self.__dict__[k], type(v)): - raise ValueError( - f"Bad type matching for {k} in _base_state dict. " - f"Should be {type(self.__dict__[k])} but is {type(v)}." - ) - - # once we've checked everything, it should be good - if not dry_run: - self._update_from_params(self._base_state) - logger.trace(f"Attempting to call _update() for {repr(self)}.") - asyncio.run(self._validate_update()) - - logger.debug(f"{repr(self)} is valid") - - async def _validate_update(self): - async with self: - res = await self._update() - if res is not None: - raise ValueError(f"Received return value {res} from update.") - - def validate_procedures(self, procedures: List[MutableMapping]) -> None: - """Given all the procedures the component is involved in, checks them.""" - # skip validation if no procedure is given - if not procedures: - warnings.warn( - f"{self} is an active component but was not used in this protocol." - " If this is intentional, ignore this warning." - ) - return - - # check for conflicting continuous procedures - procedures_without_time = [ - x for x in procedures if x["start"] is None and x["stop"] is None - ] - if len(procedures_without_time) > 1: - raise RuntimeError( - f"{self} cannot have two procedures for the entire duration of the protocol. 
" - "If each procedure defines a different attribute to be set for the entire duration, " - "combine them into one call to add(). Otherwise, reduce ambiguity by defining start " - "and stop times for each procedure. " - ) - - # Unlike mw, avoid inferring stop time for procedures. - # Procedures will become atomic in XDL steps, - # avoiding multiple procedures per component per step. - for procedure in procedures: - assert procedure["start"] is not None - assert procedure["stop"] is not None - - # For now, we still have to check for conflicting procedures - for i, procedure in enumerate(procedures): - try: - # the start time of the next procedure - next_start = procedures[i + 1]["start"] - except IndexError: # Last one - continue - - # check for overlapping procedures - if next_start < procedure["stop"] and not math.isclose( - next_start, procedure["stop"] - ): - msg = "Cannot have two overlapping procedures. " - msg += f"{procedure} and {procedures[i + 1]} conflict" - raise RuntimeError(msg) diff --git a/flowchem/components/properties/base_component.py b/flowchem/components/properties/base_component.py deleted file mode 100644 index 3769012a..00000000 --- a/flowchem/components/properties/base_component.py +++ /dev/null @@ -1,44 +0,0 @@ -""" All devices should inherit from this class. """ -from typing import Optional - - -class Component: - """ - One of the individual, irreducible parts of a flow chemistry setup. - - All components in an `Apparatus` must be of type `Component`. - However, it is unlikely that a user will directly instantiate a `Component`. - - Arguments: - - `name`: The name of the component. - - Attributes: - - `name`: The name of the component. 
- """ - - _id_counter = 0 - - def __init__(self, name: Optional[str] = None): - # name the object, either sequentially or with a given name - if name is None: - self.name = self.__class__.__name__ + "_" + str(self.__class__._id_counter) - self.__class__._id_counter += 1 - else: - self.name = str(name) - - def __repr__(self): - return f"<{self.__class__.__name__} {self.name}>" - - def __str__(self): - return f"{self.__class__.__name__} {self.name}" - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc_value, traceback): - pass - - def _validate(self, dry_run): - """Components are valid for dry runs, but not for real runs.""" - if not dry_run: - raise RuntimeError diff --git a/flowchem/components/properties/injection_valve.py b/flowchem/components/properties/injection_valve.py deleted file mode 100644 index 57dd64fc..00000000 --- a/flowchem/components/properties/injection_valve.py +++ /dev/null @@ -1,27 +0,0 @@ -""" Represent a generic injection valve. """ -from abc import ABC -from typing import Optional - -from flowchem.components.properties import ActiveComponent, MultiportComponentMixin - - -class InjectionValve(MultiportComponentMixin, ActiveComponent, ABC): - """ - A generic injection valve, i.e. a valve with positions 'inject' and 'load'. - """ - - def __init__( - self, - name: Optional[str] = None, - ): - # For injection valves, the positions are 'load' and 'inject' - self.position = {"inject", "load"} - self.setting = "load" - - # Call Valve init - super().__init__(name=name) - - # Ensure base state is loading. 
- self._base_state = {"setting": "load"} - - # TODO add injection loop volume diff --git a/flowchem/components/properties/mapped_component.py b/flowchem/components/properties/mapped_component.py deleted file mode 100644 index c9d39ed3..00000000 --- a/flowchem/components/properties/mapped_component.py +++ /dev/null @@ -1,38 +0,0 @@ -from typing import Optional, Set, Union - -from flowchem.components.properties import Component - - -class MultiportComponentMixin(Component): - """ - A Mixin to be added to component with more than 1 inlet or outlet port. - - All components with multimple ports should derive from this to ensure proper validation of the port positions. - - The port names have to be provided as follows (e.g. w/ a set, ensuring unique names): - - self.port = {'position_name_1', 'position_name_2'} - where 'position_name_1' can be an int (e.g. multipos. valves) or a string (e.g. 'inject', 'load') for 2-pos valves. - The port names specified will be used as edge attributes in the graph (attrs. from_position and to_position). - - A Mixin is used to prefer composition to inheritance, see en.wiki:Composition_over_inheritance. - For more details on Mixins in Python, Fluent Python chapter 14 or Effective Python item 41 (links are free for MPG): - - https://learning.oreilly.com/library/view/fluent-python-2nd/9781492056348/ch14.html#idm45517018812200 - - https://learning.oreilly.com/library/view/effective-python-90/9780134854717/ch05.xhtml#item41 - - - Arguments: - - `name`: The name of the component. - - """ - - def __init__(self, name: Optional[str] = None): - super().__init__(name=name) - self.port: Set[Union[str, int]] = set() - - def _validate(self, dry_run): - if not self.port: - raise ValueError(f"{self} requires a mapping, None provided.") - assert any( - [c is not None for c in self.port] - ), f"{self} has no mapped components. Please check the mapping." 
- return super()._validate(dry_run) diff --git a/flowchem/components/properties/passive_component.py b/flowchem/components/properties/passive_component.py deleted file mode 100644 index bd4d1dae..00000000 --- a/flowchem/components/properties/passive_component.py +++ /dev/null @@ -1,8 +0,0 @@ -""" All devices should inherit from this class. """ -from flowchem.components.properties import Component - - -class PassiveComponent(Component): - """A non-connected, non-controllable, passive component.""" - - _id_counter = 0 diff --git a/flowchem/components/properties/passivemixer.py b/flowchem/components/properties/passivemixer.py deleted file mode 100644 index 805f8a90..00000000 --- a/flowchem/components/properties/passivemixer.py +++ /dev/null @@ -1,12 +0,0 @@ -from typing import Optional - -from flowchem.components.properties import PassiveComponent - - -class PassiveMixer(PassiveComponent): - """ - A generic mixer (essentially an alias of PassiveComponent). - """ - - def __init__(self, name: Optional[str] = None): - super().__init__(name=name) diff --git a/flowchem/components/properties/sensor.py b/flowchem/components/properties/sensor.py deleted file mode 100644 index 751c253d..00000000 --- a/flowchem/components/properties/sensor.py +++ /dev/null @@ -1,85 +0,0 @@ -from __future__ import annotations - -import asyncio -import time -from typing import TYPE_CHECKING, AsyncGenerator, Optional -from warnings import warn - -from loguru import logger - -from flowchem.components.properties import ActiveComponent -from flowchem.units import flowchem_ureg - -if TYPE_CHECKING: - from flowchem import Experiment - - -class Sensor(ActiveComponent): - """ - A generic sensor. - - Attributes: - - `name`: The name of the Sensor. - - `rate`: Data collection rate in Hz as a `pint.Quantity`. A rate of 0 Hz corresponds to the sensor being off. 
- """ - - def __init__(self, name: Optional[str] = None): - super().__init__(name=name) - self.rate = flowchem_ureg.parse_expression("0 Hz") - self._unit: str = "" - self._base_state = {"rate": "0 Hz"} - - async def _read(self): - """ - Collects the data. - In the generic `Sensor` implementation, this raises a `NotImplementedError`. - Subclasses of `Sensor` should implement their own version of this method. - """ - raise NotImplementedError - - async def _monitor( - self, experiment: "Experiment", dry_run: bool = False - ) -> AsyncGenerator: - """ - If data collection is off and needs to be turned on, turn it on. - If data collection is on and needs to be turned off, turn off and return data. - """ - while not experiment._end_loop: # type: ignore - # if the sensor is off, hand control back over - if not self.rate: - await asyncio.sleep(0) - continue - - if not dry_run: - yield {"data": await self._read(), "timestamp": time.time()} - else: - yield {"data": "simulated read", "timestamp": time.time()} - - # then wait for the sensor's next read - if self.rate: - await asyncio.sleep(1 / self.rate.m_as("Hz")) - - logger.debug(f"Monitor loop for {self} has completed.") - - async def _validate_read(self): - async with self: - logger.trace("Context entered") - res = await self._read() - if not res: - warn( - "Sensor reads should probably return data. " - f"Currently, {self}._read() does not return anything." 
- ) - - def _validate(self, dry_run: bool) -> None: - logger.debug(f"Performing sensor specific checks for {self}...") - if not dry_run: - logger.trace("Executing Sensor-specific checks...") - logger.trace("Entering context...") - asyncio.run(self._validate_read()) - logger.trace("Performing general component checks...") - super()._validate(dry_run=dry_run) - - async def _update(self) -> None: - # sensors don't have an update method; they implement read - pass diff --git a/flowchem/components/properties/tempcontrol.py b/flowchem/components/properties/tempcontrol.py deleted file mode 100644 index 0a9cff09..00000000 --- a/flowchem/components/properties/tempcontrol.py +++ /dev/null @@ -1,28 +0,0 @@ -from abc import ABC -from typing import Optional - -from flowchem.components.properties import ActiveComponent -from flowchem.units import flowchem_ureg - - -class TempControl(ActiveComponent, ABC): - """ - A generic temperature controller. - - Arguments: - - `internal_tubing`: The `Tube` inside the temperature controller. - - `name`: The component's name. - - Attributes: - - `active`: Whether the temperature controller is active. - - `name`: The name of the Sensor. - - `temp`: The temperature setting as a `pint.Quantity`. - """ - - def __init__(self, name: Optional[str] = None): - super().__init__(name=name) - - self.temp = flowchem_ureg.parse_expression("0 degC") - self.active = False - - self._base_state = dict(temp="0 degC", active=False) diff --git a/flowchem/components/properties/valve.py b/flowchem/components/properties/valve.py deleted file mode 100644 index c42d0250..00000000 --- a/flowchem/components/properties/valve.py +++ /dev/null @@ -1,35 +0,0 @@ -from abc import ABC -from typing import Any, Dict, Optional, Set, Union - -from flowchem.components.properties import ActiveComponent, MultiportComponentMixin - - -class Valve(MultiportComponentMixin, ActiveComponent, ABC): - """ - A generic valve. - - Arguments: - - `port`: The port numbers. 
- - `name`: The name of the valve. - """ - - def __init__( - self, - port: Set[Union[int, str]], - name: Optional[str] = None, - ): - super().__init__(name=name) - - self.port = port - # Base state is first port or 1 if no port is provided - if port: - self.setting = next(iter(port)) - else: - self.setting = 1 - - self._base_state: Dict[str, Any] = {"setting": 1} - - def _validate(self, dry_run): - if not self.port: - raise ValueError(f"The port names for valve {self} are not valid.") - return super()._validate(dry_run) diff --git a/flowchem/components/stdlib/__init__.py b/flowchem/components/stdlib/__init__.py deleted file mode 100644 index b48d1ffe..00000000 --- a/flowchem/components/stdlib/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -from .channel import Channel -from .cross_mixer import CrossMixer -from .pump import Pump -from .t_mixer import TMixer -from .tube import Tube -from .vessel import Vessel -from .vessel_chemicals import VesselChemicals -from .y_mixer import YMixer diff --git a/flowchem/components/stdlib/channel.py b/flowchem/components/stdlib/channel.py deleted file mode 100644 index 16fad59f..00000000 --- a/flowchem/components/stdlib/channel.py +++ /dev/null @@ -1,30 +0,0 @@ -from flowchem.components.properties import Component -from flowchem.units import flowchem_ureg - - -class Channel(Component): - """ - A reaction channel. - - Arguments: - - `length`: The length of the channel as a str. - - `volume`: The channel volume as a str. - - `material`: The material around the channel. - - """ - - _id_counter = 0 - - def __init__(self, length: str, volume: str, material: str, name: str = None): - """ - See the `Tube` attributes for a description of the arguments. 
- """ - self.length = flowchem_ureg.parse_expression(length) - self.volume = flowchem_ureg.parse_expression(volume) - - self.material = material - - super().__init__(name) - - def __repr__(self): - return f"Channel of length {self.length} and volume {self.volume}" diff --git a/flowchem/components/stdlib/cross_mixer.py b/flowchem/components/stdlib/cross_mixer.py deleted file mode 100644 index 2da2313e..00000000 --- a/flowchem/components/stdlib/cross_mixer.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Optional - -from flowchem.components.properties import PassiveMixer - - -class CrossMixer(PassiveMixer): - """ - A cross mixer. - - This is an alias of `Component`. - - Arguments: - - `name`: The name of the mixer. - - Attributes: - - `name`: The name of the mixer. - """ - - def __init__(self, name: Optional[str] = None): - super().__init__(name=name) diff --git a/flowchem/components/stdlib/pump.py b/flowchem/components/stdlib/pump.py deleted file mode 100644 index 9df38660..00000000 --- a/flowchem/components/stdlib/pump.py +++ /dev/null @@ -1,23 +0,0 @@ -from abc import ABC -from typing import Optional - -from flowchem.components.properties import ActiveComponent -from flowchem.units import flowchem_ureg - - -class Pump(ActiveComponent, ABC): - """ - A generic pumping device whose primary feature is that it moves fluid. - - Arguments: - - `name`: The name of the pump. - - Attributes: - - `name`: The name of the pump. - - `rate`: The flow rate of the pump as a `pint.Quantity`. Must be of the dimensionality of volume/time. 
- """ - - def __init__(self, name: Optional[str] = None): - super().__init__(name=name) - self.rate = flowchem_ureg.parse_expression("0 ml/min") - self._base_state = dict(rate="0 mL/min") diff --git a/flowchem/components/stdlib/t_mixer.py b/flowchem/components/stdlib/t_mixer.py deleted file mode 100644 index 82a92796..00000000 --- a/flowchem/components/stdlib/t_mixer.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Optional - -from flowchem.components.properties import PassiveMixer - - -class TMixer(PassiveMixer): - """ - A T mixer. - - This is an alias of `Component`. - - Arguments: - - `name`: The name of the mixer. - - Attributes: - - `name`: The name of the mixer. - """ - - def __init__(self, name: Optional[str] = None): - super().__init__(name=name) diff --git a/flowchem/components/stdlib/tube.py b/flowchem/components/stdlib/tube.py deleted file mode 100644 index 898b935d..00000000 --- a/flowchem/components/stdlib/tube.py +++ /dev/null @@ -1,72 +0,0 @@ -from math import pi - -from loguru import logger - -from flowchem.components.properties import Component -from flowchem.units import flowchem_ureg - - -class Tube(Component): - """ - A tube. - - Arguments: - - `length`: The length of the tube as a str. - - `ID`: The inner diameter of the tube as a str. - - `OD`: The outer diameter of the tube as a str. - - `material`: The material of the tube. - - Attributes: - - `ID`: The inner diameter of the tube, converted to a `pint.Quantity`. - - `length`: The length of the tube, converted to a `pint.Quantity`. - - `material`: The material of the tube. - - `OD`: The outer diameter of the tube, converted to a `pint.Quantity`. - - `volume`: The tube volume, as determined from the length and inner diameter, converted to a `pint.Quantity`. - - Raises: - - ValueError: When the outer diameter is less than the inner diameter of the tube. 
- """ - - tube_counter = 0 - - def __init__(self, length: str, ID: str, OD: str, material: str): - """ - See the `Tube` attributes for a description of the arguments. - - ::: tip Note - The arguments to __init__ are `str`s, not `pint.Quantity`s. - ::: - """ - self.length = flowchem_ureg.parse_expression(length) - self.ID = flowchem_ureg.parse_expression(ID) - self.OD = flowchem_ureg.parse_expression(OD) - - # check to make sure units are valid - for measurement in [self.length, self.ID, self.OD]: - if measurement.dimensionality != flowchem_ureg.mm.dimensionality: - logger.exception("Invalid units for tube length, ID, or OD") - raise ValueError( - f"{measurement.units} is an invalid unit of measurement for length." - ) - - # ensure diameters are valid - if self.OD <= self.ID: - logger.exception("Invalid tube dimensions") - raise ValueError( - f"Outer diameter {OD} must be greater than inner diameter {ID}" - ) - if self.length < self.OD or self.length < self.ID: - logger.warning( - f"Tube length ({self.length}) is less than diameter." - "Make sure that this is not an error." - ) - - self.material = material - self.volume = pi * ((self.ID / 2) ** 2) * self.length - - Tube.tube_counter += 1 - self.name = f"Tube_{Tube.tube_counter}" - super(Tube, self).__init__(name=self.name) - - def __repr__(self): - return f"Tube of length {self.length}, ID {self.ID}, OD {self.OD}" diff --git a/flowchem/components/stdlib/vessel.py b/flowchem/components/stdlib/vessel.py deleted file mode 100644 index e7ea2018..00000000 --- a/flowchem/components/stdlib/vessel.py +++ /dev/null @@ -1,32 +0,0 @@ -""" A vessel, optionally with info on the chemical input contained. """ -from typing import Optional - -from ord_schema.proto.reaction_pb2 import ReactionInput - -from flowchem.components.properties import Component - - -class Vessel(Component): - """ - A generic vessel. - - Arguments: - - `description`: The contents of the vessel. 
- - `name`: The name of the vessel, if different from the description. - - Attributes: - - `description`: The contents of the vessel. - - `name`: The name of the vessel, if different from the description. - """ - - def __init__(self, description: Optional[str] = None, name: Optional[str] = None): - super().__init__(name=name) - self.description = description - self.chemical = None - - def _validate(self, dry_run): - # If chemical info are provided, they should be ReactionInput - if self.chemical is not None: - assert isinstance( - self.chemical, ReactionInput - ), "Vessel have a ReactionInput" diff --git a/flowchem/components/stdlib/vessel_chemicals.py b/flowchem/components/stdlib/vessel_chemicals.py deleted file mode 100644 index 395c2aba..00000000 --- a/flowchem/components/stdlib/vessel_chemicals.py +++ /dev/null @@ -1,40 +0,0 @@ -from typing import Optional - -from ord_schema.proto.reaction_pb2 import ReactionInput - -from flowchem.components.stdlib import Vessel - - -class VesselChemicals(Vessel): - """ - A Vessel with additional properties for chemical identity - """ - - metadata = { - "author": [ - { - "first_name": "Dario", - "last_name": "Cambie", - "email": "dario.cambie@mpikg.mpg.de", - "institution": "Max Planck Institute of Colloids and Interfaces", - "github_username": "dcambie", - } - ], - "stability": "beta", - "supported": True, - } - - def __init__( - self, - description: Optional[str] = None, - name: Optional[str] = None, - ): - super().__init__(name=name, description=description) - self.chemical = None - - def _validate(self, dry_run): - super(VesselChemicals, self)._validate(dry_run) - - assert isinstance( - self.chemical, ReactionInput - ), "VesselChemicals have a ReactionInput" diff --git a/flowchem/components/stdlib/y_mixer.py b/flowchem/components/stdlib/y_mixer.py deleted file mode 100644 index 987997a7..00000000 --- a/flowchem/components/stdlib/y_mixer.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import Optional - -from 
flowchem.components.properties import PassiveMixer - - -class YMixer(PassiveMixer): - """ - A Y mixer. - - This is an alias of `Component`. - - Arguments: - - `name`: The name of the mixer. - - Attributes: - - See arguments. - """ - - def __init__(self, name: Optional[str] = None): - super().__init__(name=name) diff --git a/flowchem/core/__init__.py b/flowchem/core/__init__.py deleted file mode 100644 index 6dd1382e..00000000 --- a/flowchem/core/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .execute import Datapoint -from .experiment import Experiment -from .graph import DeviceGraph -from .protocol import Protocol diff --git a/flowchem/core/execute.py b/flowchem/core/execute.py deleted file mode 100644 index ccab326d..00000000 --- a/flowchem/core/execute.py +++ /dev/null @@ -1,309 +0,0 @@ -from __future__ import annotations - -import asyncio -import time -import traceback -from collections import namedtuple -from contextlib import AsyncExitStack -from copy import deepcopy -from time import asctime, localtime -from typing import TYPE_CHECKING, Dict, Iterable, List, Union - -from loguru import logger - -from flowchem.components.properties import ActiveComponent, Sensor -from flowchem.exceptions import ProtocolCancelled - -if TYPE_CHECKING: - from flowchem import Experiment - - -Datapoint = namedtuple("Datapoint", ["data", "timestamp", "experiment_elapsed_time"]) - - -async def handle_exception(tasks_to_cancel): - """Called upon exception in main loop.""" - logger.error("Protocol execution is stopping NOW!") - for task in tasks_to_cancel: - task.cancel() - await asyncio.sleep(5) - - -async def main(experiment: "Experiment", dry_run: Union[bool, int], strict: bool): - """ - The function that actually does the execution of the protocol. - - Arguments: - - `experiment`: The experiment to execute. - - `dry_run`: Whether to simulate the experiment or actually perform it. If an integer greater than zero, the dry run will execute at that many times speed. 
- - `strict`: Whether to stop execution upon any errors. - """ - - logger.info("Using Flowchem ⚗️👩‍👨🧪") - logger.info("Performing final launch status check...") - - # Run protocol - try: - # To programmatically enter many context manager (one per component) AsyncExitStack is used - async with AsyncExitStack() as stack: - # Enter async context manager of each component. This initializes connections to hardware. - if not dry_run: - components = [ - await stack.enter_async_context(compo) - for compo in experiment._compiled_protocol.keys() # type:ignore - ] - else: - components = list(experiment._compiled_protocol.keys()) # type:ignore - - tasks = [] - - # For each component get the relevant coroutines - for component in components: - # Find out when each component's monitoring should end - procedures: Iterable = experiment._compiled_protocol[ - component - ] # type:ignore - end_times: List[float] = [p["time"] for p in procedures] - end_time: float = max(end_times) # we only want the last end time - logger.trace(f"Calculated end time for {component} as {end_time}s") - - tasks.extend( - [ - wait_and_execute_procedure( - procedure=procedure, - component=component, - experiment=experiment, - dry_run=dry_run, - strict=strict, - ) - for procedure in experiment._compiled_protocol[ - component - ] # type:ignore - ] - ) - logger.trace(f"Task list generated for {component}.") - - # for sensors, add the monitor task - if isinstance(component, Sensor): - logger.trace(f"Creating sensor monitoring task for {component}") - tasks.append(_monitor(component, experiment, bool(dry_run), strict)) - logger.debug(f"{component} is GO!") - - logger.debug("All components are GO!") - - # Add a task to monitor the stop button - tasks.append(check_if_cancelled(experiment)) - tasks.append(pause_handler(experiment, components)) - tasks.append(end_loop(experiment)) - logger.debug("All tasks are GO!") - - # Add a reminder about FF - if type(dry_run) == int: - logger.info(f"Simulating at {dry_run}x 
speed...") - - # begin the experiment - logger.info("All checks passed. Experiment is GO!") - experiment.is_executing = True - experiment.start_time = time.time() - - # convert to local time for the start message - _local_time = asctime(localtime(experiment.start_time)) - start_msg = f"{experiment} started at {_local_time}." - - logger.success(start_msg) - - try: - # FIXME the list tasks actually contains coroutines, not tasks. A rename would be nice. - task_list = [asyncio.create_task(coro) for coro in tasks] - await asyncio.gather(*task_list) - - except ProtocolCancelled: - logger.error("Stop button pressed.") - await handle_exception(tasks) - logger.critical(f"{experiment} finished by STOP button.") - - except (RuntimeError, Exception) as e: - logger.error(f"Got {repr(e)}. Full traceback is logged at trace level.") - await handle_exception(tasks) - logger.critical(f"{experiment} finished by exception.") - - else: - logger.success(f"{experiment} finished successfully.") - - finally: - # when this code block is reached, the tasks will have either all completed or - # an exception has occurred. - experiment.end_time = time.time() - - # when this code block is reached, the tasks will have completed or have been cancelled. - _local_time = asctime(localtime(experiment.end_time)) - end_msg = f"{experiment} completed at {_local_time}." 
- - # Stop all the sensors and exit the read loops - logger.debug("Resetting all components") - - # reset object - for component in list( - experiment._compiled_protocol.keys() - ): # type:ignore - # reset object - logger.debug(f"Resetting {component} to base state") - component._update_from_params(component._base_state) - - await asyncio.sleep(1) - - # we only reach this line if things went well - logger.info(end_msg) - finally: - - # set some protocol metadata - experiment.was_executed = True # type:ignore - # after E.was_executed=True, we THEN log that we're cleaning up so it's shown - # in the cleanup category, not with a time in EET - logger.info("Experimentation is over. Cleaning up...") - experiment.is_executing = False # type:ignore - - if experiment._bound_logger is not None: # type:ignore - logger.trace("Deactivating logging to Jupyter notebook widget...") - logger.remove(experiment._bound_logger) # type:ignore - - -async def wait_and_execute_procedure( - procedure, - component: ActiveComponent, - experiment: "Experiment", - dry_run: Union[bool, int], - strict: bool, -): - - # wait for the right moment - params = procedure["params"] - await wait(procedure["time"], experiment, f"Set {component} to {params}") - - # NOTE: this doesn't actually call the _update() method - component._update_from_params(params) - logger.trace(f"{component} object state updated to reflect new params.") - - if dry_run: - logger.info(f"Simulating: {params} on {component} at {procedure['time']}s") - else: - logger.info(f"Executing: {params} on {component} at {procedure['time']}s") - try: - await component._update() # NOTE: This does! 
- except Exception as e: - level = "ERROR" if strict else "WARNING" - logger.log(level, f"Failed to update {component}!") - logger.trace(traceback.format_exc()) - if strict: - raise RuntimeError(str(e)) - - record = { - "timestamp": time.time(), - "params": params, - "type": "executed_procedure" if not dry_run else "simulated_procedure", - "component": component, - "experiment_elapsed_time": time.time() - experiment.start_time, - } - - experiment.executed_procedures.append(record) - - -async def _monitor( - sensor: Sensor, experiment: "Experiment", dry_run: bool, strict: bool -): - logger.debug(f"Started monitoring {sensor.name}") - try: - async for result in sensor._monitor(dry_run=dry_run, experiment=experiment): - await experiment._update( - device=sensor.name, - datapoint=Datapoint( - data=result["data"], - timestamp=result["timestamp"], - experiment_elapsed_time=result["timestamp"] - experiment.start_time, - ), - ) - logger.debug(f"Stopped monitoring {sensor}") - except Exception as e: - logger.log("ERROR" if strict else "WARNING", f"Failed to read {sensor}!") - logger.trace(traceback.format_exc()) - if strict: - raise RuntimeError(str(e)) - - -async def end_loop(experiment: "Experiment"): - await wait( - experiment.protocol._inferred_duration, experiment, "End loop" - ) # type:ignore - experiment._end_loop = True # type:ignore - - -async def check_if_cancelled(experiment: "Experiment") -> None: - while not experiment._end_loop: # type:ignore - if experiment.cancelled: # type:ignore - raise ProtocolCancelled("protocol cancelled") - await asyncio.sleep(0) - - -async def pause_handler( - experiment: "Experiment", components: List[ActiveComponent] -) -> None: - was_paused = False - states: Dict[ActiveComponent, dict] = {} - # this is either the planned duration of the experiment or cancellation - while not experiment._end_loop: # type:ignore - - # we need to pause - if experiment.paused and not was_paused: - was_paused = True - for component in components: - 
logger.debug(f"Pausing {component}.") - states[component] = deepcopy(component.__dict__) - component._update_from_params(component._base_state) - await component._update() - logger.debug("All components set to base states.") - logger.trace(f"Saved states are {states}.") - - # we are paused but the button was hit, so we need to resume - elif not experiment.paused and was_paused: - logger.trace(f"Previous states: {states}") - for component in components: - for k, v in states[component].items(): - setattr(component, k, v) - await component._update() - logger.debug(f"Reset {component} to {states[component]}.") - was_paused = False - states = {} - logger.debug("All components reset to state before pause.") - - await asyncio.sleep(0) - - -async def wait(duration: float, experiment: "Experiment", name: str): - """A pause-aware version of asyncio.sleep""" - if type(experiment.dry_run) == int: - duration /= experiment.dry_run - await asyncio.sleep(duration) - logger.trace(f"<{name}> Just woke up from {duration}s nap") - - while True: - # if, at the end of sleeping, the experiment is paused, wait for it to resume - while experiment.paused: - await asyncio.sleep(0.1) - assert not experiment.paused - - # figure out how long we've been paused for - eet_offset = experiment._total_paused_duration - # and where in the experimental plan we are - assert isinstance(experiment.start_time, float) # make the type checker happy - eet = time.time() - experiment.start_time - eet_offset - - # do the logging thing - logger.trace(f"Expected End Time is {eet}") - logger.trace(f"<{name}> was supposed to execute after {duration}s") - - if (duration - eet) > 0: - logger.trace(f"Waiting {duration - eet} more seconds") - await asyncio.sleep(duration - eet) - else: - logger.trace(f"It's go time for <{name}>!") - break diff --git a/flowchem/core/experiment.py b/flowchem/core/experiment.py deleted file mode 100644 index d1999d72..00000000 --- a/flowchem/core/experiment.py +++ /dev/null @@ -1,421 +0,0 
@@ -from __future__ import annotations - -import asyncio -import json -import os -import time -from hashlib import blake2b -from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union -from warnings import warn - -import aiofiles -import ipywidgets as widgets -from bokeh.io import output_notebook, push_notebook, show -from bokeh.plotting import figure -from bokeh.resources import INLINE -from IPython import get_ipython -from IPython.display import display -from loguru import logger - -from flowchem.components.properties import ActiveComponent, Sensor -from flowchem.core.execute import main - -if TYPE_CHECKING: - from flowchem import Datapoint, Protocol - - -class Experiment(object): - """ - Experiments contain all data from execution of a protocol. - - Arguments: - - `protocol`: The protocol for which the experiment was conducted. - - `compiled_protocol`: The results of `protocol._compile()`. - - `verbosity`: See `Protocol.execute` for a description of the verbosity options. - - `dry_run`: Whether the experiment is a dry run and, if so, by what factor it is sped up by. - - Attributes: - - `graph`: The DeviceGraph upon which the experiment is conducted. - - `cancelled`: Whether the experiment is cancelled. - - `compiled_protocol`: The results of `protocol._compile()`. - - `data`: A list of `Datapoint` namedtuples from the experiment's sensors. - - `dry_run`: Whether the experiment is a dry run and, if so, by what factor it is sped up by. - - `end_time`: The Unix time of the experiment's end. - - `executed_procedures`: A list of the procedures that were executed during the experiment. - - `experiment_id`: The experiment's ID. By default, of the form `YYYY_MM_DD_HH_MM_SS_HASH`, where HASH is the 3-bytes hexadecimal blake2b hash of the protocol's YAML. - - `paused`: Whether the experiment is currently paused. - - `protocol`: The protocol for which the experiment was conducted. - - `start_time`: The Unix time of the experiment's is. 
- """ - - def __init__(self, protocol: "Protocol"): - # args - self.graph = protocol.graph - self.protocol: Protocol = protocol - - # computed values - self.experiment_id: Optional[str] = None - - # default values - self.dry_run: Union[bool, int] - self.start_time: float # hasn't started until main() is called - self.created_time = ( - time.time() - ) # when the object was created (might be != from start_time) - self.end_time: float - self.data: Dict[str, List[Datapoint]] = {} - self.cancelled = False - self.was_executed = False - self.executed_procedures: List[ - Dict[str, Union[float, Dict[str, Any], str, ActiveComponent]] - ] = [] - - # internal values (unstable!) - _local_time = time.localtime(self.created_time) - self._created_time_local: str = time.strftime("%Y_%m_%d_%H_%M_%S", _local_time) - self._charts = {} # type: ignore - self._graphs_shown = False - self._sensors = self.graph[Sensor] - self._sensors.reverse() - self._device_name_to_unit = {c.name: c._unit for c in self._sensors} - self._sensor_names: List[str] = [s.name for s in self._sensors] - self._bound_logger = None - self._plot_height = 300 - self._is_executing = False - self._paused = False - self._pause_times: List[Dict[str, float]] = [] - self._end_loop = False # when to stop monitoring the buttons - self._file_logger_id: Optional[int] = None - self._log_file: Optional[Path] = None - self._data_file: Optional[Path] = None - self._transformed_data: Dict[str, Dict[str, List[Datapoint]]] = { - s: {"datapoints": [], "timestamps": []} for s in self._sensor_names - } - - def __str__(self): - return f"Experiment {self.experiment_id}" - - def __repr__(self): - return f"" - - async def _update(self, device: str, datapoint): - - # If a chart has been registered to the device, update it. 
- if device not in self.data: - self.data[device] = [] - self.data[device].append(datapoint) - - if self._data_file is not None: - line = json.dumps( - { - "device": device, - "timestamp": datapoint.timestamp, - "experiment_elapsed_time": datapoint.experiment_elapsed_time, - "data": datapoint.data, - "unit": self.protocol.graph[device]._unit, - } - ) - async with aiofiles.open(self._data_file, "a+") as f: - await f.write(line + "\n") - - if get_ipython() is None: - return - - if not self._graphs_shown: - logger.debug("Graphs not shown. Initializing...") - for sensor, output in self._sensor_outputs.items(): # type: ignore - logger.trace(f"Initializing graph for {sensor}") - - # bind the height of the graph to the selected plot height - output.layout.height = f"{self._plot_height}px" - - with output: - # create the figure object - p = figure( - title=f"{sensor} data", - plot_height=self._plot_height, - plot_width=600, - ) - r = p.line( - source=self._transformed_data[sensor.name], - x="timestamps", - y="datapoints", - color="#2222aa", - line_width=3, - ) - p.xaxis.axis_label = "Experiment elapsed time (seconds)" - p.yaxis.axis_label = self._device_name_to_unit[sensor.name] - - # since we're in the with-statement, this will show up in the accordion - output_notebook(resources=INLINE, hide_banner=True) - target = show(p, notebook_handle=True) - - # save the target and plot for later updating - self._charts[sensor.name] = (target, r) - logger.trace(f"Sucessfully initialized graph for {sensor.name}") - logger.trace("All graphs successfully initialized") - self._graphs_shown = True - - if device in self._transformed_data: - target, r = self._charts[device] - self._transformed_data[device]["datapoints"].append(datapoint.data) - self._transformed_data[device]["timestamps"].append( - datapoint.experiment_elapsed_time - ) - r.data_source.data["datapoints"] = self._transformed_data[device][ - "datapoints" - ] - r.data_source.data["timestamps"] = self._transformed_data[device][ 
- "timestamps" - ] - push_notebook(handle=target) - - def _on_stop_clicked(self, b): - logger.debug("Stop button pressed.") - self.cancelled = True - - def _on_pause_clicked(self, b): - self.paused = not self.paused - - @property - def _total_paused_duration(self) -> float: - """Calculate the total amount of time the experiment was paused for.""" - duration = 0.0 - for pause in self._pause_times: - if "stop" in pause: - duration += pause["stop"] - pause["start"] - return duration - - def get_confirmation(self): - """Ensure user input is present before starting procedure.""" - confirmation = input("Execute? [y/N]: ").lower() - if not confirmation or confirmation[0] != "y": - logger.critical("Aborting execution...") - raise RuntimeError("Execution aborted by user.") - - def _execute( - self, - dry_run: Union[bool, int], - verbosity: str, - confirm: bool, - strict: bool, - log_file: Union[str, bool, os.PathLike, None], - log_file_verbosity: Optional[str], - log_file_compression: Optional[str], - data_file: Union[str, bool, os.PathLike, None], - ): - self.dry_run = dry_run - - # make the user confirm if it's the real deal - if not self.dry_run and not confirm: - self.get_confirmation() - - self._compiled_protocol = self.protocol._compile(dry_run=bool(dry_run)) - - # now that we're ready to start, create the time and ID attributes - protocol_hash: str = blake2b( - str(self.protocol.yaml()).encode(), digest_size=3 - ).hexdigest() - self.experiment_id = f"{self._created_time_local}_{protocol_hash}" - - # handle logging to a file, not None nor False - if log_file: - # No location provided: automatically log to the app directory - if log_file is True: - app_path = Path("~/.flowchem").expanduser() - app_path.mkdir(exist_ok=True) - log_file_location = app_path / Path(self.experiment_id + ".log.jsonl") - elif isinstance(log_file, (str, os.PathLike)): - log_file_location = Path(log_file) - else: - raise TypeError( - f"Invalid type {type(data_file)} for data file." 
- "Expected str or a pathlib.Path object." - ) - - # Configure a logger to file - self._file_logger_id = logger.add( - log_file_location, - level=verbosity.upper() - if log_file_verbosity is None - else log_file_verbosity.upper(), - compression=log_file_compression, - serialize=True, - enqueue=True, - ) - logger.trace(f"File logger ID is {self._file_logger_id}") - - # determine the log file's path - if log_file_compression is not None: - # No location provided: automatically log to the app directory - self._log_file = log_file_location.with_suffix( - log_file_location.suffix + "." + log_file_compression - ) - else: - self._log_file = log_file_location - - if data_file: - # automatically log to app directory - if data_file is True: - app_path = Path("~/.flowchem").expanduser() - app_path.mkdir(exist_ok=True) - self._data_file = app_path / Path(self.experiment_id + ".data.jsonl") - elif isinstance(data_file, (str, os.PathLike)): - self._data_file = Path(data_file) - else: - raise TypeError( - f"Invalid type {type(data_file)} for data file." - "Expected str or a pathlib.Path object." - ) - - if get_ipython(): - self._display(verbosity=verbosity.upper(), strict=strict) - asyncio.ensure_future(main(experiment=self, dry_run=dry_run, strict=strict)) - else: - asyncio.run(main(experiment=self, dry_run=dry_run, strict=strict)) - - def _display(self, verbosity: str, strict: bool): - - # create pause button - self._pause_button = widgets.Button(description="Pause", icon="pause") - self._pause_button.on_click(self._on_pause_clicked) - - # create a stop button - self._stop_button = widgets.Button( - description="Stop", button_style="danger", icon="stop" - ) - self._stop_button.on_click(self._on_stop_clicked) - - # create a nice, pretty HTML string wth the metadata - metadata = "
    " - for k, v in { - "Graph": self.graph.name, - "Protocol": self.protocol.name, - "Description": self.protocol.description, - "Start time": time.ctime(self.created_time), - "Expected completion": time.ctime( - self.created_time + self.protocol._inferred_duration - ), - "Procedure count": sum([len(x) for x in self._compiled_protocol.values()]), - "Abort on error": strict, - "Log file": self._log_file.absolute() if self._log_file else None, - "Data file": self._data_file.absolute() if self._data_file else None, - }.items(): - if not v: - continue - metadata += f"
  • {k}: {v}
  • " - metadata += "
" - - # create the output tab widget with a log tab - self._tab = widgets.Tab() - self._log_widget = widgets.Output() - self._tab.children = (self._log_widget, widgets.HTML(value=metadata)) - self._tab.set_title(0, "Log") - self._tab.set_title(1, "Metadata") - - if self._sensors: - self._sensor_outputs = {s: widgets.Output() for s in self._sensors} - - self._accordion = widgets.Accordion( - children=tuple(self._sensor_outputs.values()) - ) - self._tab.children = tuple(list(self._tab.children) + [self._accordion]) - self._tab.set_title(2, "Sensors") - - # we know that the accordion will line up with the dict since dict order - # is preserved in Python 3.7+ - for i, sensor in enumerate(self._sensor_outputs): - self._accordion.set_title(i, sensor.name) - - # decide whether to show a pause button - buttons = [self._stop_button] - if type(self.dry_run) != int: - buttons.insert(0, self._pause_button) - - self._output_widget = widgets.VBox( - [ - widgets.HTML(value=f"

Experiment {self.experiment_id}

"), - widgets.HBox(buttons), - self._tab, - ] - ) - - def _log(x): - with self._log_widget: # the log - # .xxx in floats - pad_length = len(str(int(self.protocol._inferred_duration))) + 4 - # we don't (cleanup) to look weird, so pad to at least its length - pad_length = max((pad_length, len("cleanup"))) - - if self.is_executing and not self.was_executed: - elapsed_time = f"{time.time() - self.start_time:0{pad_length}.3f}" - print(f"({elapsed_time}) {x.rstrip()}") - elif self.was_executed: - print(f"({'cleanup'.center(pad_length)}) {x.rstrip()}") - else: - print(f"({'setup'.center(pad_length)}) " + x.rstrip()) - - # don't enqueue since it breaks the graphing - self._bound_logger = logger.add( - lambda x: _log(x), - level=verbosity, - colorize=True, - format="{level.icon} {message}", - ) # type: ignore - - display(self._output_widget) - - @property - def is_executing(self): - return self._is_executing - - @is_executing.setter - def is_executing(self, is_executing): - if not is_executing and self._file_logger_id is not None: - logger.info("Wrote logs to " + str(self._log_file.absolute())) - logger.trace(f"Removing generated file logger {self._file_logger_id}") - logger.remove(self._file_logger_id) - logger.trace("File logger removed") - # ensure that an execution without logging after one with it doesn't break - self._log_file = None - self._file_logger_id = None - if not is_executing and self._data_file: - logger.info("Wrote data to " + str(self._data_file.absolute())) - logger._data_file = None - - # this deactivates sensor monitoring and button usability - if not is_executing: - self._end_loop = True - logger.trace(f"_end_loop for {self} is now True.") - - logger.trace(f"{repr(self)}.is_executing is now {is_executing}") - self._is_executing = is_executing - - @property - def paused(self): - return self._paused - - @paused.setter - def paused(self, paused): - - # pausing a sped up dry run is meaningless - if type(self.dry_run) == int: - warn("Pausing a speed run is 
not supported. This will have no effect.") - - # issue a warning if the user overuses the pause button - if len(self._pause_times) >= 3: - logger.warning("Pausing repeatedly may adversely affect protocol timing.") - - if paused and not self._paused: - logger.warning("Paused execution.") - self._pause_times.append(dict(start=time.time())) - elif not paused and self._paused: - self._pause_times[-1]["stop"] = time.time() - logger.warning("Resumed execution.") - self._paused = paused - - # control the pause button - self._pause_button.description = "Resume" if paused else "Pause" - self._pause_button.button_style = "success" if paused else "" - self._pause_button.icon = "play" if paused else "pause" diff --git a/flowchem/core/graph/__init__.py b/flowchem/core/graph/__init__.py deleted file mode 100644 index ebdf1899..00000000 --- a/flowchem/core/graph/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .devicegraph import DeviceGraph diff --git a/flowchem/core/graph/devicegraph.py b/flowchem/core/graph/devicegraph.py deleted file mode 100644 index e344ca8f..00000000 --- a/flowchem/core/graph/devicegraph.py +++ /dev/null @@ -1,281 +0,0 @@ -""" DeviceGraph class. Represents all the hardware available for experiments. """ -from __future__ import annotations - -from typing import Any, Iterable, List, Optional, Union - -import networkx as nx -from loguru import logger - -from flowchem.assemblies import Assembly -from flowchem.components.properties import Component, MultiportComponentMixin -from flowchem.components.stdlib import Tube -from flowchem.exceptions import InvalidConfiguration -from flowchem.units import flowchem_ureg - - -class DeviceGraph: - """ - Represents the device graph. 
- - This borrows logic from mw.Apparatus and ChempilerGraph - """ - - _id_counter = 0 - - def __init__(self, name: Optional[str] = None): - # if given a name, then name the apparatus, else default to a sequential name - if name is not None: - self.name = name - else: - self.name = "DeviceGraph" + str(DeviceGraph._id_counter) - DeviceGraph._id_counter += 1 - - # NetworkX Multi directed graph object - self.graph: nx.MultiDiGraph = nx.MultiDiGraph() - - def add_device(self, device: Any): - """Add a device or list of devices to the graph""" - - if isinstance(device, Iterable): - for component in device: - self._add_device(component) - else: - self._add_device(device) - - def _add_device(self, device: Union[Component, Assembly]): - """Adds a single device to the graph""" - assert isinstance( - device, (Component, Assembly) - ), "Device must be a Component or a component assembly!" - self.graph.add_node(device) - logger.debug(f"Added device <{device.name}> to the device graph {self.name}") - - def add_connection( - self, - origin: Union[str, Component], - destination: Union[str, Component], - origin_port: Optional[Union[str, int]] = None, - destination_port: Optional[Union[str, int]] = None, - ): - """ - Add a connection to the graph, given either names or objects to be linked. - - Note: if strings are passed for origin/destination, the corresponding node MUST already be part of the graph! - Note: if the origin or destination are Component instances that are not yet part of the graph, - they will be added to the graph. - """ - - # If device names are passed, get the device objects - try: - if isinstance(origin, str): - origin = self[origin] - assert isinstance(origin, Component), "Origin must be a Component!" - if isinstance(destination, str): - destination = self[destination] - assert isinstance( - destination, Component - ), "Destination must be a Component!" 
- except KeyError as key_error: - logger.exception( - "A connection was attempted by node name with nodes that are not part of the graph!" - ) - raise InvalidConfiguration("Invalid nodes for connection!") from key_error - - # If ports are specified, ensure the values are valid with the respective component - if origin_port is not None: - assert isinstance( - origin, MultiportComponentMixin - ), "Only MappedComponents have ports!" - assert origin_port in origin.port, "The port specified was not found!" - - if destination_port is not None: - assert isinstance( - destination, MultiportComponentMixin - ), "Only MappedComponents have ports!" - assert destination_port in destination.port, ( - f"The port {destination_port} was not found in {destination}" - f"[ports available are: {destination.port}]!" - ) - - # Add the connection - self.graph.add_edge( - origin, destination, from_port=origin_port, to_port=destination_port - ) - - def __repr__(self): - return f"" - - def __str__(self): - return f"DeviceGraph {self.name} with {len(self)} devices." 
- - def __len__(self): - return self.graph.number_of_nodes() - - def __contains__(self, item): - try: - self[item] - except KeyError: - return False - return True - - def __getitem__(self, item): - """ - Utility method - - DeviceGraph['name'] gives the device with that name - DeviceGraph[class] returns a list of devices of that type - DeviceGraph[device_instance] returns true if the object is part of the graph - """ - # If a type is passed return devices with that type - if isinstance(item, type): - return [device for device in self.graph.nodes if isinstance(device, item)] - - # If a string is passed return the device with that name - if isinstance(item, str): - for node in self.graph.nodes: - if node.name == item: - return node - raise KeyError(f"No component named '{item}' in {repr(self)}.") - - # a shorthand way to check if a component is in the apparatus - if item in self.graph.nodes: - return item - - raise KeyError(f"{repr(item)} is not in {repr(self)}.") - - def component_from_origin_and_port( - self, origin: Component, port: Union[str, int] - ) -> Component: - """Returns the component that is connected to the origin component in the port provided.""" - - assert origin in self, "The origin component is not part of the graph!" - assert isinstance( - origin, MultiportComponentMixin - ), "Only MappedComponents have ports!" - - for _, to_component, data in self.graph.out_edges(origin, data=True): - if data["from_port"] == port: - return to_component - raise KeyError(f"No component connected to port {port}") - - def visualize(self): - """Visualize the graph""" - import matplotlib.pyplot as plt - - nx.draw(self.graph, with_labels=True) - plt.show() - - def explode_all(self): - """Explode all devices in the graph""" - # Copy list of nodes to prevent change during iteration - original_nodes = list(self.graph.nodes) - for device in original_nodes: - # if a device has an 'explode' method it means it is a device group. 
- # Note: this naive approach would fail on nested groups - if hasattr(device, "explode"): - logger.debug(f"Exploding {device.name}") - device.explode(self) - - def validate(self) -> bool: - """Validates the graph. This is called by Protocol when the DeviceGraph is used.""" - - # Make sure that all the components are connected - return nx.is_weakly_connected(self.graph) - - def summarize(self): - """ - Prints a summary table of the DeviceGraph. - Rich takes care of the formatting both in console and jupyter cells. - """ - from rich.table import Table - - # Components table - components_table = Table(title="Components") - - # Columns: Name, Type - components_table.add_column("Name") - components_table.add_column("Type") - - # Fill rows with all devices while skipping tubes (saving them for the second table) - tubes: List[Tube] = [] - for component in sorted(self.graph.nodes, key=lambda x: x.__class__.__name__): - if component.__class__.__name__ != "Tube": - components_table.add_row(component.name, component.__class__.__name__) - else: - tubes.append(component) - - # Tubing table - tubing_table = Table( - "From", "To", "Length", "I.D.", "O.D.", "Volume", "Material", title="Tubing" - ) - - # store and calculate the computed totals for tubing - total_length = 0 * flowchem_ureg.mm - total_volume = 0 * flowchem_ureg.ml - - last_tube = tubes[-1] - for tube in tubes: - total_length += tube.length - total_volume += tube.volume - from_component = next(self.graph.predecessors(tube)) - to_component = next(self.graph.successors(tube)) - - # Draw a line after the last tube - end_section = tube is last_tube - - tubing_table.add_row( - from_component.name, - to_component.name, - f"{tube.length:~H}", - f"{tube.ID:~H}", - f"{tube.OD:~H}", - f"{tube.volume.to('ml'):.4f~H}", - tube.material, - end_section=end_section, - ) - - tubing_table.add_row( - "Total", - "n/a", - f"{total_length:~H}", - "n/a", - "n/a", - f"{total_volume.to('ml'):.4f~H}", - "n/a", - ) - - # Print tables - from 
rich.console import Console - - console = Console() - console.print(components_table) - console.print(tubing_table) - - -if __name__ == "__main__": - from flowchem.core.graph.parser import parse_graph_file - - graph = parse_graph_file("sample_config.yml") - graph.summarize() - - graph.explode_all() - graph.summarize() - - # from flowchem import Protocol - # a = graph.to_apparatus() - # print(a) - # p = Protocol(a) - # - # from datetime import timedelta - # t0 = timedelta(seconds=0) - # - # # p.add(graph["quencher"], start=t0, duration=timedelta(seconds=10), rate="0.1 ml/min") - # p.add( - # graph["activator"], start=t0, duration=timedelta(seconds=10), rate="0.1 ml/min" - # ) - # print(graph["chiller"]) - # print(type(graph["chiller"])) - # p.add(graph["chiller"], start=t0, duration=timedelta(seconds=10), temp="45 degC") - # - # E = p.execute(dry_run=False) - # # E.visualize() diff --git a/flowchem/core/graph/devicenode.py b/flowchem/core/graph/devicenode.py deleted file mode 100644 index 0ff08720..00000000 --- a/flowchem/core/graph/devicenode.py +++ /dev/null @@ -1,73 +0,0 @@ -""" For each device node described in the configuration [graph] instantiated it and create endpoints """ -import inspect -import warnings - -from fastapi import APIRouter -from loguru import logger - -from flowchem import Spinsolve -from flowchem.components.properties import ActiveComponent -from flowchem.core.server.routers import spinsolve_get_router - - -class DeviceNode: - """Represent a node in the device graph, holds the HW object and its metadata/config.""" - - # Router generators for device class that do not implement self.get_router() - # All callable take the device obj and return an APIRouter - router_generator = {Spinsolve: spinsolve_get_router} - - def __init__(self, device_config, obj_type): - self._router = None - - # No configuration for t-mixer et al. 
- if device_config is None: - device_config = {} - - # Ensure the name is set - if "name" not in device_config: - warnings.warn("Device name not set, using class name") - device_config["name"] = obj_type.__name__ - - # DEVICE INSTANTIATION - try: - # Special class method for initialization required for some devices - if hasattr(obj_type, "from_config"): - self.device = obj_type.from_config(**device_config) - else: - self.device = obj_type(**device_config) - logger.debug(f"Created {self.device.name} instance: {self.device}") - except TypeError as error: - raise ConnectionError( - f"Wrong configuration provided for device: {device_config.get('name')} of type {obj_type}!\n" - f"Configuration: {device_config}\n" - f"Accepted parameters: {inspect.getfullargspec(obj_type).args}" - ) from error - - @property - def router(self): - """Returns an APIRouter associated with the device""" - if self._router: - return self._router - - if hasattr(self.device, "get_router"): - router = self.device.get_router() - else: - try: - router = DeviceNode.router_generator[type(self.device)](self.device) - except KeyError: - # Only warn no router for active components - if isinstance(self.device, ActiveComponent): - logger.warning( - f"No router available for device '{self.device.name}'" - f"[Class: {type(self.device).__name__}]" - ) - router = APIRouter() - - # Router name is lowercase with no whitespace - router_name = self.device.name.replace(" ", "").lower() - - router.prefix = f"/{router_name}" - router.tags = [router_name] - self._router = router - return self._router diff --git a/flowchem/core/graph/flowchem-graph-spec.schema b/flowchem/core/graph/flowchem-graph-spec.schema deleted file mode 100644 index 1d7427e0..00000000 --- a/flowchem/core/graph/flowchem-graph-spec.schema +++ /dev/null @@ -1,520 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://cambiegroup.github.io/flowchem/flowchem-graph-spec.json", - "type": "object", - "title": "Flowchem 
hardware description", - "description": "Defines a graph of devices.", - "properties": { - "version": { - "type": "string", - "description": "Version of the graph file; flowchem will reject versions it does not understand." - }, - "devices": { - "type": "array", - "description": "Devices, nodes of the graph", - "uniqueItems": false, - "items": { - "anyOf": [ - { - "$ref": "#/definitions/huberchiller" - }, - { - "$ref": "#/definitions/ml600" - }, - { - "$ref": "#/definitions/spinsolve" - }, - { - "$ref": "#/definitions/pressure-sensor" - }, - { - "$ref": "#/definitions/flowir" - }, - { - "$ref": "#/definitions/manson" - }, - { - "$ref": "#/definitions/r4heater" - }, - { - "$ref": "#/definitions/elite11infuseonly" - }, - { - "$ref": "#/definitions/elite11infusewithdraw" - }, - { - "$ref": "#/definitions/azura-compact" - }, - { - "$ref": "#/definitions/knauer-valve-12" - }, - { - "$ref": "#/definitions/knauer-valve-16" - }, - { - "$ref": "#/definitions/knauer-valve-6-6" - }, - { - "$ref": "#/definitions/knauer-valve-6-2" - }, - { - "$ref": "#/definitions/clarity_interface" - }, - { - "$ref": "#/definitions/vici-valco" - }, - { - "required": [ - "TMixer" - ] - }, - { - "required": [ - "LTF_HTM_ST_3_1" - ] - }, - { - "required": [ - "VesselChemicals" - ] - }, - { - "required": [ - "Dummy" - ] - }, - { - "required": [ - "DummyPump" - ] - }, - { - "required": [ - "DummySensor" - ] - }, - { - "required": [ - "DummyValve" - ] - }, - { - "required": [ - "DummyTempControl" - ] - } - ] - } - }, - "connections": { - "type": "array", - "description": "Connections, edges of the graph", - "uniqueItems": false, - "items": { - "type": "object" - } - } - }, - "required": [ - "version", - "devices", - "connections" - ], - "additionalProperties": false, - "definitions": { - "huberchiller": { - "id": "#/definitions/huberchiller", - "additionalProperties": false, - "properties": { - "HuberChiller": { - "properties": { - "name": { - "type": "string" - }, - "port": { - "type": "string" - 
}, - "baudrate": { - "type": "number" - } - }, - "required": [ - "port", - "name" - ] - } - } - }, - "ml600": { - "id": "#/definitions/ml600", - "additionalProperties": false, - "properties": { - "ML600": { - "properties": { - "port": { - "type": "string" - }, - "syringe_volume": { - "type": "string" - }, - "address": { - "type": "number" - }, - "name": { - "type": "string" - }, - "baudrate": { - "type": "number" - } - }, - "required": [ - "port", - "syringe_volume", - "name" - ] - } - } - }, - "spinsolve": { - "id": "#/definitions/spinsolve", - "additionalProperties": false, - "properties": { - "Spinsolve": { - "properties": { - "name": { - "type": "string" - }, - "host": { - "type": "string" - }, - "port": { - "type": "number" - }, - "data_folder": { - "type": "string" - }, - "xml_schema": { - "type": "string" - }, - "sample_name": { - "type": "string" - }, - "solvent": { - "type": "string" - } - }, - "required": [ - "host", - "data_folder", - "name" - ] - } - } - }, - "pressure-sensor": { - "id": "#/definitions/pressure-sensor", - "additionalProperties": false, - "properties": { - "PressureSensor": { - "required": [ - "name" - ], - "properties": { - "name": { - "type": "string" - }, - "sensor_min_bar": { - "type": "number" - }, - "sensor_max_bar": { - "type": "number" - }, - "vint_serial_number": { - "type": "number" - }, - "vint_channel": { - "type": "number" - }, - "phidget_is_remote": { - "type": "boolean" - } - } - } - } - }, - "flowir": { - "id": "#/definitions/flowir", - "additionalProperties": false, - "properties": { - "FlowIR": { - "required": [ - "name" - ], - "properties": { - "url": { - "type": "string" - }, - "name": { - "type": "string" - } - } - } - } - }, - "manson": { - "id": "#/definitions/manson", - "additionalProperties": false, - "properties": { - "MansonPowerSupply": { - "properties": { - "name": { - "type": "string" - }, - "port": { - "type": "string" - }, - "baudrate": { - "type": "number" - } - }, - "required": [ - "port", - "name" - ] - 
} - } - }, - "r4heater": { - "id": "#/definitions/r4heater", - "additionalProperties": false, - "properties": { - "R4Heater": { - "properties": { - "name": { - "type": "string" - }, - "port": { - "type": "string" - } - }, - "required": [ - "port", - "name" - ] - } - } - }, - "elite11infuseonly": { - "id": "#/definitions/elite11infuseonly", - "additionalProperties": false, - "properties": { - "Elite11InfuseOnly": { - "type": "object", - "properties": { - "port": { - "type": "string" - }, - "address": { - "type": "number" - }, - "diameter": { - "type": "string" - }, - "syringe_volume": { - "type": "string" - } - }, - "required": [ - "port", - "syringe_volume", - "diameter" - ] - } - } - }, - "elite11infusewithdraw": { - "id": "#/definitions/elite11infusewithdraw", - "additionalProperties": false, - "properties": { - "Elite11InfuseWithdraw": { - "type": "object", - "properties": { - "port": { - "type": "string" - }, - "address": { - "type": "number" - }, - "diameter": { - "type": "string" - }, - "syringe_volume": { - "type": "string" - } - }, - "required": [ - "port", - "syringe_volume", - "diameter" - ] - } - } - }, - "azura-compact": { - "id": "#/definitions/azura-compact", - "additionalProperties": false, - "properties": { - "AzuraCompactPump": { - "properties": { - "name": { - "type": "string" - }, - "mac_address": { - "type": "string" - }, - "ip_address": { - "type": "string" - } - }, - "oneOf": [ - { - "required": [ - "mac_address", - "name" - ] - }, - { - "required": [ - "ip_address", - "name" - ] - } - ] - } - } - }, - "knauer-valve-12": { - "id": "#/definitions/knauer-valve-12", - "additionalProperties": false, - "properties": { - "Knauer12PortValve": { - "properties": { - "name": { - "type": "string" - }, - "mac_address": { - "type": "string" - } - }, - "required": [ - "mac_address", - "name" - ] - } - } - }, - "knauer-valve-16": { - "id": "#/definitions/knauer-valve-16", - "additionalProperties": false, - "properties": { - "Knauer16PortValve": { - 
"properties": { - "name": { - "type": "string" - }, - "mac_address": { - "type": "string" - } - }, - "required": [ - "mac_address", - "name" - ] - } - } - }, - "knauer-valve-6-6": { - "id": "#/definitions/knauer-valve-6-6", - "additionalProperties": false, - "properties": { - "Knauer6Port6PositionValve": { - "properties": { - "name": { - "type": "string" - }, - "mac_address": { - "type": "string" - } - }, - "required": [ - "mac_address", - "name" - ] - } - } - }, - "knauer-valve-6-2": { - "id": "#/definitions/knauer-valve-6-2", - "additionalProperties": false, - "properties": { - "Knauer6Port2PositionValve": { - "properties": { - "name": { - "type": "string" - }, - "mac_address": { - "type": "string" - } - }, - "required": [ - "mac_address", - "name" - ] - } - } - }, - "clarity_interface": { - "id": "#/definitions/clarity_interface", - "additionalProperties": false, - "properties": { - "ClarityInterface": { - "properties": { - "name": { - "type": "string" - }, - "instrument_number": { - "type": "number" - } - }, - "required": [ - "instrument_number", - "name" - ] - } - } - }, - "vici-valco": { - "id": "#/definitions/vici-valco", - "additionalProperties": false, - "properties": { - "ViciValco": { - "properties": { - "name": { - "type": "string" - }, - "address": { - "type": "number" - }, - "port": { - "type": "string" - } - }, - "required": [ - "port", - "name" - ] - } - } - } - } -} diff --git a/flowchem/core/graph/owen_config.yml b/flowchem/core/graph/owen_config.yml deleted file mode 100644 index 5654e800..00000000 --- a/flowchem/core/graph/owen_config.yml +++ /dev/null @@ -1,91 +0,0 @@ -devices: - - donor: - Elite11InfuseOnly: - port: COM11 - diameter: 4.6 mm - syringe_volume: 1 ml - address: 0 - - activator: - Elite11InfuseOnly: - port: COM11 - diameter: 4.6 mm - syringe_volume: 1 ml - address: 1 - - quencher: - AzuraCompactPump: -# mac_address: 00:80:A3:BA:C3:4A - ip_address: 192.168.10.113 - max_pressure: 13 bar - - sample_loop: - ViciValco: - port: COM13 - 
address: 0 - - chiller: -# Dummy: - HuberChiller: - port: COM3 - - reactor: - LTF_HTM_ST_3_1: - - -connections: - - Tube: - from: - device: donor - to: - device: reactor - position: 1 - - length: 0.1 m - inner-diameter: 0.760 mm - outer-diameter: 1.6 mm - material: "PFA" - - - Tube: - from: - device: activator - to: - device: reactor - position: 2 - - length: 0.1 m - inner-diameter: 0.760 mm - outer-diameter: 1.6 mm - material: "PFA" - - - Interface: - from: - device: chiller - - to: - device: reactor - - - Tube: - from: - device: quencher - to: - device: reactor - position: 3 - - length: 0.1 m - inner-diameter: 0.760 mm - outer-diameter: 1.6 mm - material: "PFA" - - - Tube: - from: - device: reactor - position: 4 - to: - device: sample_loop - - length: 0.1 m - inner-diameter: 0.760 mm - outer-diameter: 1.6 mm - material: "PFA" diff --git a/flowchem/core/graph/owen_config2.yml b/flowchem/core/graph/owen_config2.yml deleted file mode 100644 index eea2db4c..00000000 --- a/flowchem/core/graph/owen_config2.yml +++ /dev/null @@ -1,92 +0,0 @@ -version: "1.0" -devices: - -# donor: -# Elite11InfuseOnly: -# port: COM1 -# diameter: 4.6 mm -# syringe_volume: 1 ml -# address: 0 -# -# activator: -# Elite11InfuseOnly: -# port: COM2 -# diameter: 4.6 mm -# syringe_volume: 1 ml -# address: 1 - - quencher: - AzuraCompactPump: -# mac_address: 00:80:A3:BA:C3:4A - ip_address: 192.168.10.113 - max_pressure: 13 bar - -# sample_loop: -# ViciValco: -# port: COM4 -# address: 0 -# -# chiller: -# HuberChiller: -# port: COM5 - - reactor: - LTF_HTM_ST_3_1: - - -physical_connections: - - Tube: - from: - device: donor - to: - device: reactor - port: INLET_1 - - length: 0.1 m - inner-diameter: 0.760 mm - outer-diameter: 1.6 mm - material: "PFA" - - - Tube: - from: - device: activator - to: - device: reactor - port: INLET_2 - - length: 0.1 m - inner-diameter: 0.760 mm - outer-diameter: 1.6 mm - material: "PFA" - - - Tube: - from: - device: quencher - to: - device: reactor - port: QUENCHER - - 
length: 0.1 m - inner-diameter: 0.760 mm - outer-diameter: 1.6 mm - material: "PFA" - - - Tube: - from: - device: reactor - port: OUTLET - to: - device: sample_loop - - length: 0.1 m - inner-diameter: 0.760 mm - outer-diameter: 1.6 mm - material: "PFA" - -logical_connections: - - Interface: - from: - device: chiller - - to: - device: reactor diff --git a/flowchem/core/graph/parser.py b/flowchem/core/graph/parser.py deleted file mode 100644 index d9e94bd0..00000000 --- a/flowchem/core/graph/parser.py +++ /dev/null @@ -1,163 +0,0 @@ -" Parse a device graph file and instantiate DeviceGraph object. " -import inspect -import itertools -from pathlib import Path -from types import ModuleType -from typing import Dict, Iterable, Union - -import yaml -from loguru import logger - -import flowchem.assemblies -from flowchem.components.stdlib import Tube -from flowchem.core.graph.devicegraph import DeviceGraph -from flowchem.core.graph.devicenode import DeviceNode -from flowchem.core.graph.validation import validate_graph -from flowchem.exceptions import InvalidConfiguration - -# Packages containing the device class definitions. -# Devices' classes must be in the module top level to be found. -DEVICE_MODULES = [ - flowchem.components.devices, - flowchem.components.stdlib, - flowchem.components.dummy, - flowchem.assemblies, -] - - -def get_device_class_mapper(modules: Iterable[ModuleType]) -> Dict[str, type]: - """ - Given an iterable of modules containing the device classes, return a - dictionary Dict[device_class_name, DeviceClass] - - Args: - modules (Iterable[ModuleType]): The modules to inspect for devices. - Only class in the top level of each module will be extracted. - Returns: - device_dict (Dict[str, type]): Dict of device class names and their - respective classes, i.e. {device_class_name: DeviceClass}. - """ - # Get (name, obj) tuple for the top level of each module. 
- objects_in_modules = [ - inspect.getmembers(module, inspect.isclass) for module in modules - ] - - # Return them as dict (itertools to flatten the nested, per module, lists) - return dict(itertools.chain.from_iterable(objects_in_modules)) - - -def parse_device_section(devices: Dict, graph: DeviceGraph): - """Parse the devices' section of the graph config""" - - # Device mapper needed for device instantiation - device_mapper = get_device_class_mapper(DEVICE_MODULES) - logger.debug(f"Device classes found: {device_mapper.keys()}") - - # Parse devices - for device_node in devices: - device_class, device_config = next(iter(device_node.items())) - try: - obj_type = device_mapper[device_class] - except KeyError as error: - logger.exception( - f"Device of type {device_class} unknown! [Known devices: {device_mapper.keys()}]" - ) - raise InvalidConfiguration( - f"Device of type {device_class} unknown! \n" - f"[Known devices: {list(device_mapper.keys())}]" - ) from error - - # Create device object and add it to the graph - device = DeviceNode(device_config, obj_type).device - graph.add_device(device) - - -def parse_connection_section(connections: Dict, graph: DeviceGraph): - """Parse connections from the graph config""" - for edge in connections: - edge_class, edge_config = next(iter(edge.items())) - - if "Tube" in edge_class: - _parse_tube_connection(edge_config, graph) - elif "Interface" in edge_class: - _parse_interface_connection(edge_config, graph) - else: - raise InvalidConfiguration(f"Invalid connection type in {edge}") - - -def _parse_tube_connection(tube_config, graph: DeviceGraph): - """ - The Tube object is a convenience object for connecting devices without explicitly creating the - in-between tube node. 
- """ - tube = Tube( - length=tube_config["length"], - ID=tube_config["inner-diameter"], - OD=tube_config["outer-diameter"], - material=tube_config["material"], - ) - graph.add_device(tube) - - # Create logic connections for newly created tube - inlet = { - "from": dict( - device=tube_config["from"]["device"], - port=tube_config["from"].get("port", None), - ), - "to": dict(device=tube.name), - } - _parse_interface_connection(inlet, graph) - - outlet = { - "from": dict(device=tube.name), - "to": dict( - device=tube_config["to"]["device"], port=tube_config["to"].get("port", None) - ), - } - _parse_interface_connection(outlet, graph) - - -def _parse_interface_connection(iface_config, graph: DeviceGraph): - """Parse a dict containing the Tube connection and returns the Connection""" - graph.add_connection( - origin=iface_config["from"]["device"], - destination=iface_config["to"]["device"], - origin_port=iface_config["from"].get("port", None), - destination_port=iface_config["to"].get("port", None), - ) - - -def parse_graph_config(graph_config: Dict, name: str = None) -> DeviceGraph: - """Parse a graph config and returns a DeviceGraph object.""" - - # Validate graph - validate_graph(graph_config) - - # Create DeviceGraph object - device_graph = DeviceGraph(name) - - # Parse devices - parse_device_section(graph_config["devices"], device_graph) - - # Parse connections - parse_connection_section(graph_config["connections"], device_graph) - - logger.info(f"Parsed graph {name}") - return device_graph - - -def parse_graph_file(file: Union[str, Path]): - """Parse a graph config file and returns a DeviceGraph object.""" - file_path = Path(file) - name = file_path.stem - - with file_path.open(encoding="utf-8") as stream: - try: - config = yaml.safe_load(stream) - except yaml.parser.ParserError as parser_error: - logger.exception(parser_error) - raise InvalidConfiguration( - f"Invalid YAML in graph {file_path}" - ) from parser_error - - return parse_graph_config(config, name) 
diff --git a/flowchem/core/graph/sample_config.yml b/flowchem/core/graph/sample_config.yml deleted file mode 100644 index ec268b4e..00000000 --- a/flowchem/core/graph/sample_config.yml +++ /dev/null @@ -1,100 +0,0 @@ -version: "1.1" - -# Nodes -devices: - - # Test device - - Dummy: - name: dummydevice1 - - # Analytics and sensors -# - Spinsolve: -# name: nmr -# host: BSMC-7WP43Y1 -# port: 13000 -# data_folder: W:\BS-FlowChemistry\Equipment\Magritek_NMR\data_exchange - -# - FlowIR: -# name: ir - -# - PressureSensor: -# name: p_sensor -# vint_serial_number: 627768 - - # Temperature controller - - R4Heater: - name: heater - port: COM4 - - - HuberChiller: - name: chiller - port: COM1 - - # Pumps - - ML600: - name: pump1 - port: COM5 - address: 1 - syringe_volume: 5 ml - - - ML600: - name: pump2 - port: COM5 - address: 2 - syringe_volume: 5 ml - -# - Elite11InfuseOnly: -# name: pump3 -# port: COM6 -# address: 1 -# syringe_volume: 5 ml -# diameter: 12.23 mm - -# - AzuraCompactPump: -# name: pump4 -# mac_address: 00:12:a2:b4:33:11 - - # Valves -# - Knauer16PortValve: -# name: valve1 -# mac_address: 00:00:00:00:1a:ff -# -# - Knauer12PortValve: -# name: valve2 -# mac_address: 00:00:00:00:1a:fe -# -# - Knauer6Port6PositionValve: -# name: valve3 -# mac_address: 00:00:00:00:1a:fc -# -# - Knauer6Port2PositionValve: -# name: valve4 -# mac_address: 00:00:00:00:1a:fb -# -# - ViciValco: -# name: valve5 -# port: COM5 -# address: 0 - -# # Other -# - MansonPowerSupply: -# name: power-supply -# port: COM7 - - - VesselChemicals: - name: reagent1 - description: Long description - -connections: - - Tube: - from: - device: pump1 - position: 0 - to: - device: reagent1 - position: 0 - - length: 0.1 m - inner-diameter: 0.760 mm - outer-diameter: 1.6 mm - material: "PFA" diff --git a/flowchem/core/graph/validation.py b/flowchem/core/graph/validation.py deleted file mode 100644 index e63b2d59..00000000 --- a/flowchem/core/graph/validation.py +++ /dev/null @@ -1,30 +0,0 @@ -""" Validate graph 
files """ -from __future__ import annotations - -import json -import os -from typing import Dict - -import jsonschema - -# Validation schema for graph file -SCHEMA = os.path.join( - os.path.dirname(os.path.realpath(__file__)), "../graph/flowchem-graph-spec.schema" -) - - -def load_graph_schema(): - """Loads and return the DeviceGraph schema.""" - with open(SCHEMA, "r", encoding="utf-8") as file_handle: - schema = json.load(file_handle) - jsonschema.Draft7Validator.check_schema(schema) - return schema - - -def validate_graph(graph: Dict): - """ - Validate a graph file. - """ - schema = load_graph_schema() - jsonschema.validate(graph, schema=schema) - assert graph["version"] == "1.1" diff --git a/flowchem/core/protocol.py b/flowchem/core/protocol.py deleted file mode 100644 index 647159dd..00000000 --- a/flowchem/core/protocol.py +++ /dev/null @@ -1,518 +0,0 @@ -import json -from copy import deepcopy -from datetime import timedelta -from math import isclose -from os import PathLike -from typing import Any, Dict, Iterable, List, MutableMapping, Optional, Union - -import altair as alt -import pandas as pd -import yaml -from IPython import get_ipython -from IPython.display import Code - -from flowchem.components.properties import ( - ActiveComponent, - Component, - MultiportComponentMixin, - TempControl, -) -from flowchem.core.experiment import Experiment -from flowchem.core.graph.devicegraph import DeviceGraph -from flowchem.units import flowchem_ureg - - -class Protocol: - """ - A set of procedures for a DeviceGraph. - - A protocol is defined as a list of procedures, atomic steps for the individual active components of a DeviceGraph. - - Arguments: - - `graph`: The DeviceGraph object for which the protocol is being defined. - - `name`: The name of the protocol. Defaults to "Protocol_X" where *X* is protocol count. - - `description`: A longer description of the protocol. - - Attributes: - - `graph`: The apparatus for which the protocol is being defined. 
- - `description`: A longer description of the protocol. - - `is_executing`: Whether the protocol is executing. - - `name`: The name of the protocol. Defaults to "Protocol_X" where *X* is protocol count. - - `procedures`: A list of the procedures for the protocol in which each procedure is a dict. - - `was_executed`: Whether the protocol was executed. - """ - - _id_counter = 0 - - def __init__( - self, - graph: DeviceGraph, - name: Optional[str] = None, - description: Optional[str] = None, - ): - """See main docstring.""" - # type checking - if not isinstance(graph, DeviceGraph): - raise TypeError( - f"Must pass an Apparatus object. Got {type(graph)}, " - "which is not an instance of flowchem.DeviceGraph." - ) - - # store the passed args - self.graph = graph - self.description = description - - # generate the name - if name is not None: - self.name = name - else: - self.name = "Protocol_" + str(Protocol._id_counter) - Protocol._id_counter += 1 - - # ensure apparatus is valid - if not graph.validate(): - raise ValueError("DeviceGraph is not valid.") - - # default values - self.procedures: List[ - Dict[str, Union[float, None, ActiveComponent, Dict[str, Any]]] - ] = [] - - def __repr__(self): - return f"<{self.__str__()}>" - - def __str__(self): - return f"Protocol {self.name} defined over {repr(self.graph)}" - - def _check_component_kwargs(self, component: ActiveComponent, **kwargs) -> None: - """Checks that the given keyword arguments are valid for a component.""" - for kwarg, value in kwargs.items(): - # check that the component even has the attribute - if not hasattr(component, kwarg): - # id nor determine valid attrs for the error message - valid_attrs = [x for x in vars(component).keys()] - # we don't care about the name attr - valid_attrs = [x for x in valid_attrs if x != "name"] - # or internal ones - valid_attrs = [x for x in valid_attrs if not x.startswith("_")] - - msg = f"Invalid attribute {kwarg} for {component}. 
" - msg += f"Valid attributes are {valid_attrs}" - raise ValueError(msg) - - # for kwargs that will be converted later, just check that the units match - if isinstance(component.__dict__[kwarg], flowchem_ureg.Quantity): - try: - value_dim = flowchem_ureg.parse_expression(value).dimensionality - except AttributeError: - value_dim = type(value) - kwarg_dim = component.__dict__[kwarg].dimensionality - - # perform the check - if value_dim != kwarg_dim: - msg = f"Bad dimensionality of {kwarg} for {component}. " - msg += f"Expected {kwarg_dim} but got {value_dim}." - raise ValueError(msg) - - # if it's not a quantity, check the types - elif not isinstance(value, type(component.__dict__[kwarg])): - expected_type = type(component.__dict__[kwarg]) - msg = "Bad type matching. " - msg += f"Expected '{kwarg}' to an instance of {expected_type} but got" - msg += f"{repr(value)}, which is of type {type(value)}." - raise ValueError(msg) - - def _add_single( - self, - component: ActiveComponent, - start: Union[str, timedelta], - stop=None, - duration=None, - **kwargs, - ) -> None: - """Adds a single procedure to the protocol. - - See add() for full documentation. - """ - - # make sure that the component being added is part of the apparatus - assert component in self.graph, f"{component} must be part of the apparatus." - - # don't let users give empty procedures - if not kwargs: - raise RuntimeError( - "No kwargs supplied. " - "This will not manipulate the state of your synthesizer. " - "Ensure your call to add() is valid." - ) - - # FIXME procedures are XDLexe like, the actual valve position should be passed directly, resolve before! 
- # If a MultiportComponentMixin component is passed together with a new port position, check validity - if isinstance(component, MultiportComponentMixin) and "setting" in kwargs: - if isinstance(kwargs["setting"], Component): - assert self.graph.graph.has_edge(component, kwargs["setting"]) - assert ( - self.graph.graph[component][kwargs["setting"]][0]["from_port"] - in component.port - ) - if isinstance(kwargs["setting"], str): - to_component = self.graph[kwargs["setting"]] - assert self.graph.graph.has_edge(component, to_component) - assert ( - self.graph.graph[component][to_component]["from_port"] - in component.port - ) - if isinstance(kwargs["setting"], int): - assert kwargs["setting"] in component.port - - # make sure the component and keywords are valid - self._check_component_kwargs(component, **kwargs) - - # parse the start time - if start is None: - start = "0 secs" - if isinstance(start, timedelta): - start = str(start.total_seconds()) + " seconds" - start_time = flowchem_ureg.parse_expression(start) - - # Stop or duration - if stop is not None and duration is not None: - raise RuntimeError("Must provide one of stop and duration, not both.") - - # Parse duration - if duration is not None: - if isinstance(duration, timedelta): - duration = str(duration.total_seconds()) + " seconds" - stop_time = start_time + flowchem_ureg.parse_expression(duration) - # Parse stop - else: - assert stop is not None - if isinstance(stop, timedelta): - stop = str(stop.total_seconds()) + " seconds" - stop_time = flowchem_ureg.parse_expression(stop) - - if start_time > stop_time: - raise ValueError("Procedure beginning is after procedure end.") - - # a little magic for temperature controllers - if isinstance(component, TempControl): - if kwargs.get("temp") is not None and kwargs.get("active") is None: - kwargs["active"] = True - elif not kwargs.get("active") and kwargs.get("temp") is None: - kwargs["temp"] = "0 degC" - elif kwargs["active"] and kwargs.get("temp") is None: - 
raise RuntimeError( - f"TempControl {component} is activated but temperature " - "setting is not given. Specify 'temp' in your call to add()." - ) - - # add the procedure to the procedure list - self.procedures.append( - dict( - start=start_time.m_as("second"), - stop=stop_time.m_as("second"), - component=component, - params=kwargs, - ) - ) - - def add( - self, - component: Union[ActiveComponent, Iterable[ActiveComponent]], - start=None, - stop=None, - duration=None, - **kwargs, - ): - """ - Adds a procedure to the protocol. - - ::: warning - If stop and duration are both `None`, the procedure's stop time will be inferred as the end of the protocol. - ::: - - Arguments: - - `component_added`: The component(s) for which the procedure being added. If an interable, all components will have the same parameters. - - `start`: The start time of the procedure relative to the start of the protocol, such as `"5 seconds"`. May also be a `datetime.timedelta`. Defaults to `"0 seconds"`, *i.e.* the beginning of the protocol. - - `stop`: The stop time of the procedure relative to the start of the protocol, such as `"30 seconds"`. May also be a `datetime.timedelta`. May not be given if `duration` is used. - duration: The duration of the procedure, such as "1 hour". May not be used if `stop` is used. - - `**kwargs`: The state of the component for the procedure. - - Raises: - - `TypeError`: A component is not of the correct type (*i.e.* a Component object) - - `ValueError`: An error occurred when attempting to parse the kwargs. - - `RuntimeError`: Stop time of procedure is unable to be determined or invalid component. 
- """ - - if isinstance(component, Iterable): - for _component in component: - self._add_single( - _component, start=start, stop=stop, duration=duration, **kwargs - ) - else: - self._add_single( - component, start=start, stop=stop, duration=duration, **kwargs - ) - - @property - def _inferred_duration(self): - # infer the duration of the protocol - computed_durations = sorted( - [x["stop"] for x in self.procedures], - key=lambda z: z if z is not None else 0, - ) - if all([x is None for x in computed_durations]): - raise RuntimeError( - "Unable to automatically infer duration of protocol. " - "Must define stop or duration for at least one procedure" - ) - return computed_durations[-1] - - def _compile( - self, dry_run: bool = True, _visualization: bool = False - ) -> Dict[ActiveComponent, List[Dict[str, Union[float, str, Dict[str, Any]]]]]: - """ - Compile the protocol into a dict of devices and their procedures. - - Returns: - - A dict with components as keys and lists of their procedures as the value. - The elements of the list of procedures are dicts with two keys: - "time" in seconds - "params", whose value is a dict of parameters for the procedure. - - Raises: - - `RuntimeError`: When compilation fails. - """ - output = {} - - # Only compile active components - for component in self.graph[ActiveComponent]: - # determine the procedures for each component - component_procedures: List[MutableMapping] = sorted( - [x for x in self.procedures if x["component"] == component], - key=lambda x: x["start"], - ) - - # validate component - try: - component._validate(dry_run=dry_run) - except Exception as e: - raise RuntimeError( - f"{component} isn't valid. Got error: '{str(e)}'." 
- ) from e - - # Validates procedures for component - component.validate_procedures(component_procedures) - - # give the component instructions at all times - compiled = [] - for i, procedure in enumerate(component_procedures): - if _visualization: - compiled.append( - dict( - start=procedure["start"], - stop=procedure["stop"], - params=procedure["params"], - ) - ) - else: - compiled.append( - dict(time=procedure["start"], params=procedure["params"]) - ) - - # if the procedure is over at the same time as the next - # procedure begins, don't go back to the base state - try: - if isclose( - component_procedures[i + 1]["start"], procedure["stop"] - ): - continue - except IndexError: - pass - - # otherwise, go back to base state - new_state = { - "time": procedure["stop"], - "params": component._base_state, - } - compiled.append(new_state) - - output[component] = compiled - - # raise warning if duration is explicitly given but not used? - return output - - def to_dict(self): - compiled = deepcopy(self._compile(dry_run=True)) - compiled = {k.name: v for (k, v) in compiled.items()} - return compiled - - def to_list(self): - output = [] - for procedure in deepcopy(self.procedures): - procedure["component"] = procedure["component"].name - output.append(procedure) - return output - - def yaml(self) -> Union[str, Code]: - """ - Outputs the uncompiled procedures to YAML. - - Internally, this is a conversion of the output of `Protocol.json` for the purpose of enhanced human readability. - - Returns: - - YAML of the procedure list. - When in Jupyter, this string is wrapped in an `IPython.display.Code` object for nice syntax highlighting. - - """ - compiled_yaml = yaml.safe_dump(self.to_list(), default_flow_style=False) - - if get_ipython(): - return Code(compiled_yaml, language="yaml") - return compiled_yaml - - def json(self) -> Union[str, Code]: - """ - Outputs the uncompiled procedures to JSON. - - Returns: - - JSON of the protocol. 
- When in Jupyter, this string is wrapped in a `IPython.display.Code` object for nice syntax highlighting. - """ - compiled_json = json.dumps(self.to_list(), sort_keys=True, indent=4) - - if get_ipython(): - return Code(compiled_json, language="json") - return compiled_json - - def visualize(self, legend: bool = False, width=500, renderer: str = "notebook"): - """ - Generates a Gantt plot visualization of the protocol. - - Arguments: - - `legend`: Whether to show a legend. - - `renderer`: Which renderer to use. Defaults to "notebook" but can also be "jupyterlab", or "nteract", depending on the development environment. If not in a Jupyter Notebook, this argument is ignored. - - `width`: The width of the Gantt chart. - - Returns: - - An interactive visualization of the protocol. - """ - - # don't try to render a visualization to the notebook if we're not in one - if get_ipython(): - alt.renderers.enable(renderer) - - for component, procedures in self._compile(_visualization=True).items(): - # generate a dict that will be a row in the dataframe - for procedure in procedures: - procedure["component"] = str(component) - procedure["start"] = pd.Timestamp(procedure["start"], unit="s") - procedure["stop"] = pd.Timestamp(procedure["stop"], unit="s") - - # hoist the params to the main dict - assert isinstance(procedure["params"], dict) # needed for typing - for k, v in procedure["params"].items(): - procedure[k] = v - - # show what the valve is actually connecting to - if ( - isinstance(component, MultiportComponentMixin) - and "setting" in procedure.keys() - ): - mapped_component = self.graph.component_from_origin_and_port(component, procedure["setting"]) # type: ignore - procedure["mapped component"] = mapped_component.name - # TODO: make this deterministic for color coordination - procedure["params"] = json.dumps(procedure["params"]) - - # prettify the tooltips - tooltips = [ - alt.Tooltip("utchoursminutesseconds(start):T", title="start (h:m:s)"), - 
alt.Tooltip("utchoursminutesseconds(stop):T", title="stop (h:m:s)"), - "component", - ] - - # just add the params to the tooltip - tooltips.extend( - [ - x - for x in procedures[0].keys() - if x not in ["component", "start", "stop", "params"] - ] - ) - - # generate the component's graph - source = pd.DataFrame(procedures) - component_chart = ( - alt.Chart(source, width=width) - .mark_bar() - .encode( - x="utchoursminutesseconds(start):T", - x2="utchoursminutesseconds(stop):T", - y="component", - color=alt.Color("params:N", legend=None) - if not legend - else "params", - tooltip=tooltips, - ) - ) - - # label the axes - component_chart.encoding.x.title = "Experiment Elapsed Time (h:m:s)" - component_chart.encoding.y.title = "Component" - - # combine with the other charts - try: - chart += component_chart # type: ignore - except NameError: - chart = component_chart - - return chart.interactive() - - def execute( - self, - dry_run: Union[bool, int] = False, - verbosity: str = "info", - confirm: bool = False, - strict: bool = True, - log_file: Union[str, bool, PathLike, None] = True, - log_file_verbosity: Optional[str] = "trace", - log_file_compression: Optional[str] = None, - data_file: Union[str, bool, PathLike, None] = True, - ) -> Experiment: - """ - Executes the procedure. - - Arguments: - - `confirm`: Whether to bypass the manual confirmation message before execution. - - `dry_run`: Whether to simulate the experiment or actually perform it. Defaults to `False`, - which means executing the protocol on real hardware. If an integer greater than zero, - the dry run will execute at that many times speed. - - `strict`: Whether to stop execution upon encountering any errors. - If False, errors will be noted but ignored. - - `verbosity`: The level of logging verbosity. One of "critical", "error", "warning", "success", "info", "debug", or "trace" in descending order of severity. 
"debug" and (especially) "trace" are not meant to be used regularly, as they generate significant amounts of usually useless information. However, these verbosity levels are useful for tracing where exactly a bug was generated, especially if no error message was thrown. - - `log_file`: The file to write the logs to during execution. If `True`, the data will be written to a file in `~/.mechwolf` with the filename `{experiment_id}.log.jsonl`. If falsey, no logs will be written to the file. - - `log_file_verbosity`: How verbose the logs in file should be. By default, it is "trace", which is the most verbose logging available. If `None`, it will use the same level as `verbosity`. - - `log_file_compression`: Whether to compress the log file after the experiment. - - `data_file`: The file to write the experimental data to during execution. If `True`, the data will be written to a file in `~/.mechwolf` with the filename `{experiment_id}.data.jsonl`. If falsey, no data will be written to the file. - - Returns: - - An `Experiment` object. In a Jupyter notebook, the object yields an interactive visualization. If protocol execution fails for any reason that does not raise an error, the return type is None. - - Raises: - - `RuntimeError`: When attempting to execute a protocol on invalid components. 
- """ - - # the Experiment object is going to hold all the info - E = Experiment(self) - E._execute( - dry_run=dry_run, - verbosity=verbosity, - confirm=confirm, - strict=strict, - log_file=log_file, - log_file_verbosity=log_file_verbosity, - log_file_compression=log_file_compression, - data_file=data_file, - ) - - return E diff --git a/flowchem/core/server/api_server.py b/flowchem/core/server/api_server.py deleted file mode 100644 index 4b649c0f..00000000 --- a/flowchem/core/server/api_server.py +++ /dev/null @@ -1,87 +0,0 @@ -"""" Run with uvicorn main:app """ -from pathlib import Path -from typing import Dict - -import yaml -from flowchem.core.graph.validation import validate_graph -from fastapi import FastAPI -from fastapi.responses import HTMLResponse -from loguru import logger - -import flowchem -from flowchem.core.graph.devicenode import DeviceNode -from flowchem.core.graph.parser import DEVICE_MODULES, get_device_class_mapper -from flowchem.exceptions import InvalidConfiguration - - -def create_server_from_config(config: Dict = None, config_file: Path = None) -> FastAPI: - """ - Based on the yaml device graph provided, creates device objects and connect to them + . - - config: Path to the yaml file with the device config or dict. - """ - - assert ( - config is not None - and config_file is None - or config is None - and config_file is not None - ) - - if config_file is not None: - with config_file.open() as stream: - config = yaml.safe_load(stream) - - assert isinstance(config, dict) # This is here just to make mypy happy. 
- - # Validate config - validate_graph(config) - - # FastAPI server - app = FastAPI(title="flowchem", version=flowchem.__version__) - - # Device mapper - device_mapper = get_device_class_mapper(DEVICE_MODULES) - logger.debug( - f"The following device classes have been found: {device_mapper.keys()}" - ) - - # Parse list of devices and generate endpoints - for device_name, node_config in config["devices"].items(): - # Schema validation ensures only 1 hit here - try: - device_class = [ - name for name in device_mapper.keys() if name in node_config - ].pop() - except IndexError as error: - raise InvalidConfiguration( - f"No class available for device '{device_name}'" - ) from error - - # Object type - obj_type = device_mapper[device_class] - device_config = node_config[device_class] - - node = DeviceNode(device_config, obj_type) - logger.debug(f"Created device <{device_name}> with config: {device_config}") - - # Add to App - app.include_router( - node.router, prefix=node.router.prefix, tags=node.router.tags - ) - logger.debug(f"Router for <{device_name}> added to app!") - - return app - - -if __name__ == "__main__": - myapp = create_server_from_config(config_file=Path("../graph/sample_config.yml")) - - @myapp.get("/", response_class=HTMLResponse, include_in_schema=False) - def root(): - """Server root""" - return "

Flowchem Device Server!

" "API Reference" - - import uvicorn - - uvicorn.run(myapp, host="127.0.0.1") diff --git a/flowchem/core/server/routers/README.md b/flowchem/core/server/routers/README.md deleted file mode 100644 index 9e128d83..00000000 --- a/flowchem/core/server/routers/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# CUSTOM ROUTERS - -The routers in this folder are needed as shims for device classes that do not implement a `self.get_router() -> APIRouter()` method. diff --git a/flowchem/core/server/routers/Spinsolve_router.py b/flowchem/core/server/routers/Spinsolve_router.py deleted file mode 100644 index b635ccf0..00000000 --- a/flowchem/core/server/routers/Spinsolve_router.py +++ /dev/null @@ -1,47 +0,0 @@ -""" Router for a Spinsolve object """ -from fastapi import APIRouter - -from flowchem import Spinsolve - - -def spinsolve_get_router(device: Spinsolve) -> APIRouter: - """Adds an APIRouter on top of an existing Spinsolve object""" - router = APIRouter() - - @router.get("/solvent") - async def get_solvent(): - """ - - Returns: - - """ - return device.solvent - - @router.put("/solvent/{solvent_name}") - async def set_solvent(solvent_name: str): - """ - - Args: - solvent_name: - """ - device.solvent = solvent_name - - @router.get("/sample-name") - async def get_sample(): - """ - - Returns: - - """ - return device.sample - - @router.put("/sample-name/{value}") - async def set_sample(value: str): - """ - - Args: - value: - """ - device.sample = value - - return router diff --git a/flowchem/core/server/routers/__init__.py b/flowchem/core/server/routers/__init__.py deleted file mode 100644 index 44ff03c3..00000000 --- a/flowchem/core/server/routers/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .Spinsolve_router import spinsolve_get_router diff --git a/flowchem/exceptions.py b/flowchem/exceptions.py deleted file mode 100644 index bffafd90..00000000 --- a/flowchem/exceptions.py +++ /dev/null @@ -1,17 +0,0 @@ -""" Exceptions used in the flowchem module. 
""" - - -class DeviceError(BaseException): - """Generic DeviceError""" - - -class InvalidConfiguration(DeviceError): - """The configuration provided is not valid, e.g. no connection w/ device obtained""" - - -class ActuationError(DeviceError): - """The attempted move did not succeed""" - - -class ProtocolCancelled(Exception): - """The protocol was cancelled by the user""" diff --git a/flowchem/units.py b/flowchem/units.py deleted file mode 100644 index e17a946e..00000000 --- a/flowchem/units.py +++ /dev/null @@ -1,6 +0,0 @@ -""" Unit-conversion related functions """ -import pint - -flowchem_ureg = pint.UnitRegistry(autoconvert_offset_to_baseunit=True) -flowchem_ureg.define("step = []") -flowchem_ureg.define("stroke = 48000 * step") diff --git a/flowchem/utils/deploy.py b/flowchem/utils/deploy.py deleted file mode 100644 index 4d6f561c..00000000 --- a/flowchem/utils/deploy.py +++ /dev/null @@ -1,78 +0,0 @@ -""" Util function to install dependencies on PC with no internet access. """ -import argparse -import subprocess -import sys -from pathlib import Path - -EXCHANGE_FOLDER = Path(r"W:\BS-FlowChemistry\Resources\python_packages_local") - - -def install_from_folder(package: str, folder: Path): - """pip-install packages locally available in a folder.""" - subprocess.check_call( - [ - sys.executable, - "-m", - "pip", - "install", - "--no-index", - f"--find-links={folder.as_posix()}", - package, - ] - ) - - -def download_to_folder(package: str, folder: Path): - """pip-download packages to a local folder.""" - subprocess.check_call( - [sys.executable, "-m", "pip", "download", package, "-d", folder.as_posix()] - ) - - -def get_package_list(): - """Return the list of packages to download/install from the requirements file""" - req_file = Path("../../requirements.txt") - if not req_file.exists(): - print( - "run pip-compile first to create a requirements.txt file [pip install pip-tools to install pip-compile]" - ) - - package = [] - - with req_file.open(encoding="utf-8") as 
file_handle: - lines = file_handle.readlines() - for line in lines: - # Ignore nmrglue - if "nmrglue" in line: - continue - - package.append(line.split("~=")[0]) - - return package - - -def download_all(target_folder=EXCHANGE_FOLDER): - """Downloads all packages in requirements.""" - for package in get_package_list(): - download_to_folder(package, target_folder) - print(f"Downloaded {package} to {target_folder.as_posix()}") - - -def install_all(target_folder=EXCHANGE_FOLDER): - """Installs all packages in requirements.""" - for package in get_package_list(): - install_from_folder(package, target_folder) - print(f"Installed {package} from {target_folder.as_posix()}") - - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument("--download", help="Download packages", action="store_true") - parser.add_argument("--install", help="Install packages", action="store_true") - args = parser.parse_args() - if args.download: - download_all() - elif args.install: - install_all() - else: - print("Nothing to do! either download or install!") diff --git a/flowchem/utils/reaction_input_helper.py b/flowchem/utils/reaction_input_helper.py deleted file mode 100644 index ecd45382..00000000 --- a/flowchem/utils/reaction_input_helper.py +++ /dev/null @@ -1,28 +0,0 @@ -""" Helper functions to populate the ORD-SCHEMA ReactionInput object. """ -from ord_schema.proto.reaction_pb2 import FlowRate, ReactionInput - -# from ord_schema.proto.reaction_pb2 import Compound, CrudeComponent, ReactionRole, ReactionIdentifier, ... 
-from flowchem.units import flowchem_ureg - - -def add_flowrate_to_input(reaction: ReactionInput, flowrate_text: str): - """Add a flowrate to a reaction.""" - - # Parse the flowrate - flowrate = flowchem_ureg(flowrate_text) - assert ( - flowrate.units == flowchem_ureg.volume / flowchem_ureg.time - ), "Flowrate must be in units of volume/time" - - # Convert it to ml/min (we could use different ORD values, but this is easier) - flowrate.ito(flowchem_ureg.milliliter / flowchem_ureg.minute) - - # Make it into a protobuf - flow_rate_pb = FlowRate() - flow_rate_pb.value = flowrate.magnitude - flow_rate_pb.units = FlowRate.FlowRateUnit.Value("MILLILITER_PER_MINUTE") - - # Add the flow rate to the reaction - reaction.flow_rate.CopyFrom(flow_rate_pb) - - return reaction diff --git a/noxfile.py b/noxfile.py deleted file mode 100644 index 60dc0d47..00000000 --- a/noxfile.py +++ /dev/null @@ -1,34 +0,0 @@ -import nox - -nox.options.stop_on_first_error = True - - -@nox.session(python=["3.9"]) -def black(session): - session.install("black") - session.run("black", "flowchem", "--check") - - -@nox.session(python=["3.9", "3.10"]) -def lint(session): - session.install("flake8") - session.run( - "flake8", - "flowchem", - "--count", - "--select=E9,F63,F7,F82", - "--show-source", - "--statistics", - ) - - -@nox.session(python=["3.9", "3.10"]) -def type_check(session): - session.install("mypy") - session.run("mypy", "--install-types", "--non-interactive", "flowchem") - - -@nox.session(python=["3.9", "3.10"]) -def tests(session): - session.install(".[test]") - session.run("pytest") diff --git a/pyproject.toml b/pyproject.toml index 59317f22..f902dc3e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,26 +1,131 @@ [build-system] -requires = ["setuptools", "wheel"] +requires = ["setuptools>64", "wheel", "pip>=22"] build-backend = "setuptools.build_meta" +[project] +name = "flowchem" +version = "1.0.0-alpha1" +description = "Flowchem is a python library to control a variety of 
instruments commonly found in chemistry labs." +readme = "README.md" +requires-python = ">=3.10" +license = { text = "MIT" } +keywords = ["chemistry", "automation", "laboratory", "science"] +authors = [ + { name = "Dario Cambié", email = "2422614+dcambie@users.noreply.github.com" }, + { name = "Jakob Wolf", email = "Jakob.Wolf@mpikg.mpg.de" }, + { name = "Wei-Hsin Hsu", email = "Wei-hsin.Hsu@mpikg.mpg.de" }, +] +maintainers = [ + { name = "Dario Cambié", email = "2422614+dcambie@users.noreply.github.com" } +] +classifiers = [ + "License :: OSI Approved :: MIT License" +] +dependencies = [ + "aioserial>=1.3.0", + "asyncua>=0.9.92", + "fastapi>=0.65.2", + "loguru>=0.5.0", + "lxml>=4.6.4", + "packaging>=21.3", + "pint>=0.16.1", # See hgrecco/pint#1642 + "pydantic>=1.8.2", + "pyserial>=3", + "rich_click>=0.3.0", + 'tomli; python_version<"3.11"', + "uvicorn>=0.13.4", + "zeroconf>=0.39.4", +] + +[project.optional-dependencies] +dev = [ + "mypy", + "flake8", + "black", + "pylint", + "tox", + "pre-commit" +] +test = [ + "flowchem-test", + "pytest", + "pytest-asyncio", + "pytest-cov", + "pytest-mock", + "httpx", + "requests", +] +phidget = [ + "phidget22>=1.7.20211005", +] +plot = [ + "matplotlib>=3.5.0", +] +docs = [ + "mistune==0.8.4", # Due to sphinx-contrib/openapi#121 + "myst-parser", + "sphinx", + "sphinx-autodoc-typehints", + "sphinx-rtd-theme", + "sphinxcontrib-openapi", +] +types = [ + "types-lxml", + "data-science-types", +] + +[project.urls] +homepage = "https://github.com/cambiegroup/flowchem" +documentation = "https://flowchem.readthedocs.io" +repository = "https://github.com/cambiegroup/flowchem" + +[project.scripts] +flowchem = "flowchem.__main__:main" +flowchem-autodiscover = "flowchem.autodiscover:main" + +[tool.setuptools] +package-dir = {"" = "src"} + +[tool.setuptools.packages.find] +where = ["src"] + +[tool.setuptools.package-data] +flowchem = ["py.typed"] + +[tool.mypy] +ignore_missing_imports = true +python_version = "3.10" + [tool.pytest.ini_options] 
testpaths = "tests" asyncio_mode = "auto" addopts = "-m 'not HApump and not Spinsolve and not FlowIR and not KPump' --cov=flowchem --cov-fail-under=30" + markers = [ "HApump: tests requiring a local HA Elite11 connected.", "Spinsolve: tests requiring a connection to Spinsolve.", "FlowIR: tests requiring a connection to a FlowIR.", "KPump: tests for Azura compact" ] -[tool.mypy] -ignore_missing_imports = true -python_version = 3.9 - -[tool.pylint.'MESSAGES CONTROL'] -max-line-length = 120 -#load-plugins = "perflint" -extension-pkg-allow-list = 'pydantic, lxml' -#disable = "C0330, R0201" +[tool.tox] +legacy_tox_ini = """ +[tox] +envlist = mypy,flake8,test +isolated_build = true +minversion = 3.10 -[tool.isort] -profile = "black" +[testenv:{mypy,flake8,test}] +envdir = {toxworkdir}/.tox +changedir = {envtmpdir} +extras = + dev + test +description = + mypy: Run mypy + flake8: Run flake8 + test: Run the tests with pytest under {basepython} +commands = + mypy: mypy --check-untyped-defs --python-version 3.10 --ignore-missing-imports {toxinidir}/src + flake8: flake8 --count --select=E9,F63,F7,F82 --show-source --statistics + test: python -m pytest {posargs} {toxinidir} +""" diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index b9556603..00000000 --- a/requirements.txt +++ /dev/null @@ -1,369 +0,0 @@ -# -# This file is autogenerated by pip-compile with python 3.8 -# To update, run: -# -# pip-compile -# -absl-py==1.0.0 - # via ord-schema -aiofiles==0.8.0 - # via - # asyncua - # flowchem (setup.py) -aioserial==1.3.0 - # via flowchem (setup.py) -aiosqlite==0.17.0 - # via asyncua -altair==4.1.0 - # via flowchem (setup.py) -anyio==3.4.0 - # via starlette -argon2-cffi==21.1.0 - # via notebook -asgiref==3.4.1 - # via uvicorn -asteval==0.9.25 - # via lmfit -asyncua==0.9.96 - # via flowchem (setup.py) -attrs==21.2.0 - # via jsonschema -backcall==0.2.0 - # via ipython -bleach==4.1.0 - # via nbconvert -bokeh==2.4.2 - # via flowchem (setup.py) 
-cachetools==4.2.4 - # via google-auth -certifi==2021.10.8 - # via requests -cffi==1.15.0 - # via - # argon2-cffi - # cryptography -charset-normalizer==2.0.8 - # via requests -click==8.0.3 - # via uvicorn -colorama==0.4.4 - # via - # click - # ipython - # loguru - # rich -commonmark==0.9.1 - # via rich -cryptography==36.0.0 - # via asyncua -cycler==0.11.0 - # via matplotlib -debugpy==1.5.1 - # via ipykernel -decorator==5.1.0 - # via ipython -defusedxml==0.7.1 - # via nbconvert -entrypoints==0.3 - # via - # altair - # jupyter-client - # nbconvert -fastapi==0.70.0 - # via flowchem (setup.py) -fonttools==4.28.2 - # via matplotlib -future==0.18.2 - # via uncertainties -getmac==0.8.2 - # via flowchem (setup.py) -google-api-core==2.2.2 - # via google-api-python-client -google-api-python-client==2.32.0 - # via flowchem (setup.py) -google-auth==2.3.3 - # via - # google-api-core - # google-api-python-client - # google-auth-httplib2 -google-auth-httplib2==0.1.0 - # via google-api-python-client -googleapis-common-protos==1.53.0 - # via google-api-core -graphviz==0.19 - # via flowchem (setup.py) -h11==0.12.0 - # via uvicorn -httplib2==0.20.2 - # via - # google-api-python-client - # google-auth-httplib2 -idna==3.3 - # via - # anyio - # requests -ifaddr==0.1.7 - # via zeroconf -importlib-resources==5.4.0 - # via jsonschema -ipykernel==6.6.0 - # via - # ipywidgets - # notebook -ipython==7.31.1 - # via - # flowchem (setup.py) - # ipykernel - # ipywidgets -ipython-genutils==0.2.0 - # via - # ipywidgets - # nbformat - # notebook -ipywidgets==7.6.5 - # via flowchem (setup.py) -jedi==0.18.1 - # via ipython -jinja2==3.0.3 - # via - # altair - # bokeh - # nbconvert - # notebook -jsonschema==4.2.1 - # via - # altair - # flowchem (setup.py) - # nbformat -jupyter-client==7.1.0 - # via - # ipykernel - # nbclient - # notebook -jupyter-core==4.11.2 - # via - # jupyter-client - # nbconvert - # nbformat - # notebook -jupyterlab-pygments==0.1.2 - # via nbconvert -jupyterlab-widgets==1.0.2 - # 
via ipywidgets -kiwisolver==1.3.2 - # via matplotlib -lmfit==1.0.3 - # via flowchem (setup.py) -loguru==0.6.0 - # via flowchem (setup.py) -lxml==4.9.1 - # via flowchem (setup.py) -markupsafe==2.0.1 - # via jinja2 -matplotlib==3.5.0 - # via flowchem (setup.py) -matplotlib-inline==0.1.3 - # via - # ipykernel - # ipython -mistune==2.0.3 - # via nbconvert -nbclient==0.5.9 - # via nbconvert -nbconvert==6.5.1 - # via notebook -nbformat==5.1.3 - # via - # ipywidgets - # nbclient - # nbconvert - # notebook -nest-asyncio==1.5.4 - # via - # jupyter-client - # nbclient - # notebook -networkx==2.6.3 - # via flowchem (setup.py) -nmrglue==0.8 - # via flowchem (setup.py) -notebook==6.4.12 - # via widgetsnbextension -numpy==1.22.0 - # via - # altair - # bokeh - # flowchem (setup.py) - # lmfit - # matplotlib - # nmrglue - # pandas - # scipy -ord-schema==0.3.0 - # via flowchem (setup.py) -packaging==21.3 - # via - # bleach - # bokeh - # flowchem (setup.py) - # matplotlib - # pint - # setuptools-scm -pandas==1.3.4 - # via - # altair - # flowchem (setup.py) -pandocfilters==1.5.0 - # via nbconvert -parso==0.8.3 - # via jedi -pickleshare==0.7.5 - # via ipython -pillow==9.0.1 - # via - # bokeh - # matplotlib -pint==0.18 - # via flowchem (setup.py) -prometheus-client==0.12.0 - # via notebook -prompt-toolkit==3.0.23 - # via ipython -protobuf==3.19.5 - # via - # google-api-core - # googleapis-common-protos -pyasn1==0.4.8 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.2.8 - # via google-auth -pycparser==2.21 - # via cffi -pydantic==1.8.2 - # via - # fastapi - # flowchem (setup.py) -pygments==2.10.0 - # via - # ipython - # jupyterlab-pygments - # nbconvert - # rich -pyparsing==3.0.6 - # via - # httplib2 - # matplotlib - # packaging -pyrsistent==0.18.0 - # via jsonschema -pyserial==3.5 - # via - # aioserial - # flowchem (setup.py) -python-dateutil==2.8.2 - # via - # asyncua - # jupyter-client - # matplotlib - # pandas -pytz==2021.3 - # via - # asyncua - # pandas -pywin32==302 - # via 
jupyter-core -pywinpty==1.1.6 - # via terminado -pyyaml==6.0 - # via - # bokeh - # flowchem (setup.py) -pyzmq==22.3.0 - # via - # jupyter-client - # notebook -requests==2.26.0 - # via google-api-core -rich==10.15.2 - # via flowchem (setup.py) -rsa==4.8 - # via google-auth -scipy==1.7.3 - # via - # flowchem (setup.py) - # lmfit - # nmrglue -send2trash==1.8.0 - # via notebook -setuptools-scm==6.3.2 - # via matplotlib -six==1.16.0 - # via - # absl-py - # bleach - # google-auth - # google-auth-httplib2 - # python-dateutil -sniffio==1.2.0 - # via anyio -sortedcontainers==2.4.0 - # via asyncua -starlette==0.16.0 - # via fastapi -terminado==0.12.1 - # via notebook -testpath==0.5.0 - # via nbconvert -tomli==1.2.2 - # via setuptools-scm -toolz==0.11.2 - # via altair -tornado==6.1 - # via - # bokeh - # ipykernel - # jupyter-client - # notebook - # terminado -traitlets==5.1.1 - # via - # ipykernel - # ipython - # ipywidgets - # jupyter-client - # jupyter-core - # matplotlib-inline - # nbclient - # nbconvert - # nbformat - # notebook -typing-extensions==4.0.1 - # via - # aiosqlite - # bokeh - # pydantic -uncertainties==3.1.6 - # via lmfit -uritemplate==4.1.1 - # via google-api-python-client -urllib3==1.26.7 - # via requests -uvicorn==0.15.0 - # via flowchem (setup.py) -wcwidth==0.2.5 - # via prompt-toolkit -webencodings==0.5.1 - # via bleach -widgetsnbextension==3.5.2 - # via ipywidgets -win32-setctime==1.0.4 - # via loguru -zeroconf==0.37.0 - # via flowchem (setup.py) -zipp==3.6.0 - # via importlib-resources - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index a0b44b4a..00000000 --- a/setup.cfg +++ /dev/null @@ -1,82 +0,0 @@ -[metadata] -name = flowchem -version = 0.0.8 -author = Dario Cambié, Jakob Wolf, Wei-hsin Hsu -author_email = dario.cambie@mpikg.mpg.de, jakob.wolf@mpikg.mpg.de, Wei-hsin.Hsu@mpikg.mpg.de -description = Flowchem is a python library to automate flow 
chemistry experiments. -long_description = file: README.md -long_description_content_type = text/markdown -url = https://github.com/cambiegroup/flowchem -metadata_version = 2.2 -license = MIT -license_files = LICENSE -classifiers = - License :: OSI Approved :: MIT License - -[options] -packages = find: -include_package_data = True -install_requires = - aiofiles>=0.8.0 - aioserial>=1.3.0 - altair>=4.1.0 - asyncua>=0.9.92 - bokeh>=2.4.2 - fastapi>=0.65.2 - getmac>=0.8.2 - graphviz>=0.19 - ipython>=7.30.1 - ipywidgets>=7.6.5 - jsonschema>=4.2.1 - lmfit>=1.0.3 - loguru>=0.5.0 - lxml>=4.6.4 - matplotlib>=3.5.0 - networkx>=2.6.3 - nmrglue >= 0.8 - numpy>=1.20.3 - ord-schema>=0.3.0 - packaging>=21.3 - pandas>=1.0.0 - pint>=0.16.1 - pydantic>=1.8.2 - pyserial>=3 - pyyaml>=6.0 - scipy>=1.6.3, <1.8 - unsync>=1.0.0 - uvicorn>=0.13.4 - zeroconf>=0.36.2 - google-api-python-client - rich - ipywidgets -python_requires = >=3.8 - -[options.extras_require] -;nmr = nmrglue, lxml, matplotlib, packaging -;http = zeroconf, fastapi, uvicorn -phidget=phidget22>=1.7.20211005 - -; DEVELOPER DEPENDENCIES -dev = mypy - flake8 - black - isort - pylint - perflint - pre-commit -test = pytest - pytest-asyncio - pytest-cov - -; ALL -all = phidget22>=1.7.20211005 - mypy - flake8 - black - pytest - pytest-asyncio - pytest-cov - -;[options.entry_points] -;console_scripts = -; flowchem=flowchem.cli:main diff --git a/setup.py b/setup.py deleted file mode 100644 index 60684932..00000000 --- a/setup.py +++ /dev/null @@ -1,3 +0,0 @@ -from setuptools import setup - -setup() diff --git a/src/flowchem/__init__.py b/src/flowchem/__init__.py new file mode 100644 index 00000000..840d1742 --- /dev/null +++ b/src/flowchem/__init__.py @@ -0,0 +1,17 @@ +# Single-sourcing version, Option 5 in https://packaging.python.org/en/latest/guides/single-sourcing-package-version/ +from importlib.metadata import PackageNotFoundError +from importlib.metadata import version + +try: + __version__ = version(__name__) +except 
PackageNotFoundError: + __version__ = "unknown" +finally: + del version, PackageNotFoundError + +# Unit registry +import pint + +ureg = pint.UnitRegistry(autoconvert_offset_to_baseunit=True) +ureg.define("step = []") +ureg.define("stroke = 48000 * step") diff --git a/src/flowchem/__main__.py b/src/flowchem/__main__.py new file mode 100644 index 00000000..c4cb1eca --- /dev/null +++ b/src/flowchem/__main__.py @@ -0,0 +1,63 @@ +""" +Entry-point module for the command line prefixer, called in case you use `python -m flowchem`. +Why does this file exist, and why `__main__`? For more info, read: +- https://www.python.org/dev/peps/pep-0338/ +- https://docs.python.org/3/using/cmdline.html#cmdoption-m +""" +import asyncio +import sys +from pathlib import Path + +import rich_click as click +import uvicorn +from loguru import logger + +from flowchem import __version__ +from flowchem.server.api_server import run_create_server_from_file + + +@click.argument("device_config_file", type=click.Path(), required=True) +@click.option( + "-l", "--log", "logfile", type=click.Path(), default=None, help="Save logs to file." +) +@click.option( + "-h", "--host", "host", type=str, default="127.0.0.1", help="Server host." +) +@click.version_option() +@click.command() +def main(device_config_file, logfile, host): + """ + Flowchem main program. + + Parse DEVICE_CONFIG_FILE and starts a server exposing the devices via RESTful API. + """ + + if sys.platform == "win32": + asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) + + logger.info(f"Starting flowchem v. 
{__version__}!") + if logfile: + logger.add(Path(logfile)) + logger.debug(f"Starting server with configuration file: '{device_config_file}'") + + async def main_loop(): + """The loop must be shared between uvicorn and flowchem.""" + flowchem_instance = await run_create_server_from_file( + Path(device_config_file), host=host + ) + config = uvicorn.Config( + flowchem_instance["api_server"], + host=host, + port=flowchem_instance["port"], + log_level="info", + timeout_keep_alive=3600, + ) + server = uvicorn.Server(config) + logger.info(f"I got a server it is {server}") + await server.serve() + + asyncio.run(main_loop()) + + +if __name__ == "__main__": + main() diff --git a/src/flowchem/autodiscover.py b/src/flowchem/autodiscover.py new file mode 100644 index 00000000..be98f037 --- /dev/null +++ b/src/flowchem/autodiscover.py @@ -0,0 +1,131 @@ +"""This module is used to autodiscover any supported devices connected to the PC.""" +from pathlib import Path + +import aioserial +import rich_click as click +import serial.tools.list_ports as list_ports +from loguru import logger + +from flowchem.devices.hamilton.ml600_finder import ml600_finder +from flowchem.devices.harvardapparatus.elite11_finder import elite11_finder +from flowchem.devices.huber.huber_finder import chiller_finder +from flowchem.devices.knauer.knauer_finder import knauer_finder + +SERIAL_DEVICE_INSPECTORS = (ml600_finder, elite11_finder, chiller_finder) + + +def inspect_serial_ports() -> set[str]: + """Search for known devices on local serial ports and generate config stubs.""" + port_available = [comport.device for comport in list_ports.comports()] + logger.info( + f"Found the following serial port(s) on the current device: {port_available}" + ) + + dev_found_config: set[str] = set() + # Loop each serial port + for serial_port in port_available: + logger.info(f"Looking for known devices on {serial_port}...") + # Check if the serial port is available (i.e. 
not already open) + try: + port = aioserial.Serial(serial_port) + port.close() + except OSError: + logger.info(f"Skipping {serial_port} (cannot be opened: already in use?)") + continue + + # For each port try all functions that can detect serial port devices + for inspector in SERIAL_DEVICE_INSPECTORS: + # a list of config is return by the inspector, if len(config) == 0 then it is falsy + if config := inspector(serial_port): + dev_found_config.update(config) + break + logger.info(f"No known device found on {serial_port}") + + return dev_found_config + + +def inspect_eth(source_ip): + """Search for known devices on ethernet and generate config stubs.""" + logger.info("Starting ethernet detection") + + return knauer_finder(source_ip) + + +@click.command() +@click.option( + "--output", + "-o", + type=click.Path(), + help="Output file", + show_default=True, + default="flowchem_config.toml", +) +@click.option( + "--overwrite", + "-w", + is_flag=True, + help="Overwrite existing configuration file if present", +) +@click.option("--safe", "-s", is_flag=True, help="Run only safe modules.") +@click.option( + "--assume-yes", + "--yes", + "-y", + is_flag=True, + help="Assume 'yes' as answer to all prompts and run non-interactively.", +) +@click.option( + "--source-ip", + help="Source IP for broadcast packets. (Relevant if multiple eth interface are available)", + default=None, +) +def main(output, overwrite, safe, assume_yes, source_ip): + """Auto-find devices connected to the current PC.""" + # Validate output location + if Path(output).exists() and not overwrite: + logger.error( + f"Output file `{output}` already existing! Use `--overwrite` to replace it." + ) + return + + # Ask confirmation for serial communication + confirm = False + if not safe and not assume_yes: + logger.warning( + "The autodiscover include modules that involve communication over serial ports." 
+ ) + logger.warning("These modules are *not* guaranteed to be safe!") + logger.warning( + "Unsupported devices could be placed in an unsafe state as result of the discovery process!" + ) + confirm = click.confirm("Do you want to include the search for serial devices?") + + # Search serial devices + if not safe and (assume_yes or confirm): + serial_config = inspect_serial_ports() + else: + serial_config = set() + + # Search ethernet devices + eth_config = inspect_eth(source_ip) + + # Print results + if not serial_config and not eth_config: + logger.error(f"No device found! The output file `{output}` won't be created.") + return + logger.info(f"Found {len(serial_config) + len(eth_config)} devices!") + + # Print configuration + configuration = "".join(serial_config) + "".join(eth_config) + logger.info( + f"The following configuration will be written to `{output}:\n{configuration}" + ) + + # Write to file + with Path(output).open("w", encoding="utf-8") as f: + f.write(configuration) + logger.info(f"Configuration written to `{output}`!") + + +if __name__ == "__main__": + main() diff --git a/src/flowchem/components/README.md b/src/flowchem/components/README.md new file mode 100644 index 00000000..778ae99d --- /dev/null +++ b/src/flowchem/components/README.md @@ -0,0 +1,11 @@ +# flowchem/components + +This folder contains the model components defining the public API of flowchem devices. + +Ideally, for components of the same type (e.g. SyringePumps) it should be possible to change from one manufacturer to +another one by simply updating the configuration file, while the public API remains unchanged. + +It is, however, still possible for individual devices to support additional commands, beyond the minimum set defined by +this specs. The use of such commands is discouraged as it limits the portability of any derived code. + +For a list of all components consult the documentation. 
diff --git a/flowchem/utils/__init__.py b/src/flowchem/components/__init__.py similarity index 100% rename from flowchem/utils/__init__.py rename to src/flowchem/components/__init__.py diff --git a/tests/__init__.py b/src/flowchem/components/analytics/__init__.py similarity index 100% rename from tests/__init__.py rename to src/flowchem/components/analytics/__init__.py diff --git a/src/flowchem/components/analytics/dad_control.py b/src/flowchem/components/analytics/dad_control.py new file mode 100644 index 00000000..d9faa655 --- /dev/null +++ b/src/flowchem/components/analytics/dad_control.py @@ -0,0 +1,22 @@ +"""A Diode Array Detector control component.""" +from flowchem.components.base_component import FlowchemComponent +from flowchem.devices.flowchem_device import FlowchemDevice + + +class DADControl(FlowchemComponent): + def __init__(self, name: str, hw_device: FlowchemDevice): + """DAD Control component.""" + super().__init__(name, hw_device) + self.add_api_route("/lamp", self.get_lamp, methods=["GET"]) + self.add_api_route("/lamp", self.set_lamp, methods=["PUT"]) + + # Ontology: diode array detector + self.metadata.owl_subclass_of = "http://purl.obolibrary.org/obo/CHMO_0002503" + + async def get_lamp(self): + """Lamp status.""" + ... + + async def set_lamp(self, state: bool, lamp_name: str): + """Lamp status.""" + ... diff --git a/src/flowchem/components/analytics/hplc_control.py b/src/flowchem/components/analytics/hplc_control.py new file mode 100644 index 00000000..f149ee8d --- /dev/null +++ b/src/flowchem/components/analytics/hplc_control.py @@ -0,0 +1,31 @@ +"""An HPLC control component.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + +from flowchem.components.base_component import FlowchemComponent + +if TYPE_CHECKING: + from flowchem.devices.flowchem_device import FlowchemDevice + + +class HPLCControl(FlowchemComponent): + def __init__(self, name: str, hw_device: FlowchemDevice): + """HPLC Control component. 
Sends methods, starts run, do stuff.""" + super().__init__(name, hw_device) + self.add_api_route("/run-sample", self.run_sample, methods=["PUT"]) + self.add_api_route("/send-method", self.send_method, methods=["PUT"]) + + # Ontology: high performance liquid chromatography instrument + self.metadata.owl_subclass_of = "http://purl.obolibrary.org/obo/OBI_0001057" + + async def send_method(self, method_name): + """Submits a method to the HPLC. + + This is e.g. useful when the injection is automatically triggerd when switching a valve. + """ + ... + + async def run_sample(self, sample_name: str, method_name: str): + """Runs a sample at the HPLC with the provided sample name and method.""" + ... diff --git a/src/flowchem/components/analytics/ir_control.py b/src/flowchem/components/analytics/ir_control.py new file mode 100644 index 00000000..a187a3e4 --- /dev/null +++ b/src/flowchem/components/analytics/ir_control.py @@ -0,0 +1,36 @@ +"""An IR control component.""" +from pydantic import BaseModel + +from flowchem.components.base_component import FlowchemComponent +from flowchem.devices.flowchem_device import FlowchemDevice + + +class IRSpectrum(BaseModel): + """ + IR spectrum class. + + Consider rampy for advance features (baseline fit, etc.) + See e.g. https://github.com/charlesll/rampy/blob/master/examples/baseline_fit.ipynb + """ + + wavenumber: list[float] + intensity: list[float] + + +class IRControl(FlowchemComponent): + def __init__(self, name: str, hw_device: FlowchemDevice): + """HPLC Control component. Sends methods, starts run, do stuff.""" + super().__init__(name, hw_device) + self.add_api_route("/acquire-spectrum", self.acquire_spectrum, methods=["PUT"]) + self.add_api_route("/stop", self.stop, methods=["PUT"]) + + # Ontology: high performance liquid chromatography instrument + self.metadata.owl_subclass_of = "http://purl.obolibrary.org/obo/OBI_0001057" + + async def acquire_spectrum(self) -> IRSpectrum: # type: ignore + """Acquire an IR spectrum.""" + ... 
+ + async def stop(self): + """Stops acquisition and exit gracefully.""" + ... diff --git a/src/flowchem/components/analytics/nmr_control.py b/src/flowchem/components/analytics/nmr_control.py new file mode 100644 index 00000000..b29e66cd --- /dev/null +++ b/src/flowchem/components/analytics/nmr_control.py @@ -0,0 +1,22 @@ +"""An NMR control component.""" +from flowchem.components.base_component import FlowchemComponent +from flowchem.devices.flowchem_device import FlowchemDevice + + +class NMRControl(FlowchemComponent): + def __init__(self, name: str, hw_device: FlowchemDevice): + """NMR Control component.""" + super().__init__(name, hw_device) + self.add_api_route("/acquire-spectrum", self.acquire_spectrum, methods=["PUT"]) + self.add_api_route("/stop", self.stop, methods=["PUT"]) + + # Ontology: fourier transformation NMR instrument + self.metadata.owl_subclass_of = "http://purl.obolibrary.org/obo/OBI_0000487" + + async def acquire_spectrum(self): + """Acquire an NMR spectrum.""" + ... + + async def stop(self): + """Stops acquisition and exit gracefully.""" + ... 
diff --git a/src/flowchem/components/base_component.py b/src/flowchem/components/base_component.py new file mode 100644 index 00000000..b8849f18 --- /dev/null +++ b/src/flowchem/components/base_component.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +from collections.abc import Callable +from typing import TYPE_CHECKING + +from fastapi import APIRouter +from loguru import logger +from pydantic import BaseModel + +if TYPE_CHECKING: + from flowchem.devices.flowchem_device import FlowchemDevice +from flowchem.devices.flowchem_device import DeviceInfo + + +class ComponentInfo(BaseModel): + """Metadata associated with flowchem components.""" + + name = "" + owl_subclass_of = "http://purl.obolibrary.org/obo/OBI_0000968" # 'device' + hw_device: DeviceInfo + + +class FlowchemComponent: + def __init__(self, name: str, hw_device: FlowchemDevice): + """Initialize component.""" + self.name = name + self.hw_device = hw_device + self.metadata = ComponentInfo( + hw_device=self.hw_device.get_metadata(), name=name + ) + + # Initialize router + self._router = APIRouter( + prefix=f"/{hw_device.name}/{name}", tags=[hw_device.name] + ) + self.add_api_route( + "/", + self.get_metadata, + methods=["GET"], + response_model=ComponentInfo, + ) + + @property + def router(self): + """Return the API Router. 
Serves as hook for subclass to add routes.""" + return self._router + + def add_api_route(self, path: str, endpoint: Callable, **kwargs): + """Hook for subclasses to add routes to router.""" + logger.debug(f"Adding route {path} for router of {self.name}") + self._router.add_api_route(path, endpoint, **kwargs) + + def get_metadata(self) -> ComponentInfo: + """Return metadata.""" + return self.metadata diff --git a/src/flowchem/components/pumps/__init__.py b/src/flowchem/components/pumps/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/flowchem/components/pumps/base_pump.py b/src/flowchem/components/pumps/base_pump.py new file mode 100644 index 00000000..71385bf1 --- /dev/null +++ b/src/flowchem/components/pumps/base_pump.py @@ -0,0 +1,35 @@ +"""Base pump component.""" +from flowchem.components.base_component import FlowchemComponent +from flowchem.devices.flowchem_device import FlowchemDevice + + +class BasePump(FlowchemComponent): + def __init__(self, name: str, hw_device: FlowchemDevice): + """A generic pump.""" + super().__init__(name, hw_device) + self.add_api_route("/infuse", self.infuse, methods=["PUT"]) + self.add_api_route("/stop", self.stop, methods=["PUT"]) + self.add_api_route("/is-pumping", self.is_pumping, methods=["GET"]) + if self.is_withdrawing_capable(): + self.add_api_route("/withdraw", self.withdraw, methods=["PUT"]) + + async def infuse(self, rate: str = "", volume: str = "") -> bool: # type: ignore + """Start infusion.""" + ... + + async def stop(self) -> bool: # type: ignore + """Stop pumping.""" + ... + + async def is_pumping(self) -> bool: # type: ignore + """Is pump running?""" + ... + + @staticmethod + def is_withdrawing_capable() -> bool: # type: ignore + """Can the pump reverse its normal flow direction?""" + ... + + async def withdraw(self, rate: str = "", volume: str = "") -> bool: # type: ignore + """Pump in the opposite direction of infuse.""" + ... 
diff --git a/src/flowchem/components/pumps/hplc_pump.py b/src/flowchem/components/pumps/hplc_pump.py new file mode 100644 index 00000000..cc45c29f --- /dev/null +++ b/src/flowchem/components/pumps/hplc_pump.py @@ -0,0 +1,14 @@ +"""Syringe pump component, two flavours, infuse only, infuse-withdraw.""" +from loguru import logger + +from flowchem.components.pumps.base_pump import BasePump +from flowchem.devices.flowchem_device import FlowchemDevice + + +class HPLCPump(BasePump): + def __init__(self, name: str, hw_device: FlowchemDevice): + """A generic Syringe pump.""" + super().__init__(name, hw_device) + + # Ontology: HPLC isocratic pump + self.metadata.owl_subclass_of = "http://purl.obolibrary.org/obo/OBI_0000556" diff --git a/src/flowchem/components/pumps/syringe_pump.py b/src/flowchem/components/pumps/syringe_pump.py new file mode 100644 index 00000000..9fb087d4 --- /dev/null +++ b/src/flowchem/components/pumps/syringe_pump.py @@ -0,0 +1,10 @@ +"""Syringe pump component, two flavours, infuse only, infuse-withdraw.""" +from flowchem.components.base_component import ComponentInfo +from flowchem.components.pumps.base_pump import BasePump + + +class SyringePump(BasePump): + def get_metadata(self) -> ComponentInfo: + # Ontology: syringe pump + self.metadata.owl_subclass_of = "http://purl.obolibrary.org/obo/OBI_0400100" + return super().get_metadata() diff --git a/src/flowchem/components/sensors/__init__.py b/src/flowchem/components/sensors/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/flowchem/components/sensors/pressure_sensor.py b/src/flowchem/components/sensors/pressure_sensor.py new file mode 100644 index 00000000..326af868 --- /dev/null +++ b/src/flowchem/components/sensors/pressure_sensor.py @@ -0,0 +1,19 @@ +"""Pressure sensor.""" +from .sensor import Sensor +from flowchem.devices.flowchem_device import FlowchemDevice + + +class PressureSensor(Sensor): + """A pressure sensor.""" + + def __init__(self, name: str, hw_device: 
FlowchemDevice): + """A generic Syringe pump.""" + super().__init__(name, hw_device) + self.add_api_route("/read-pressure", self.read_pressure, methods=["GET"]) + + # Ontology: Pressure Sensor Device + self.metadata.owl_subclass_of = "http://purl.obolibrary.org/obo/NCIT_C50167" + + async def read_pressure(self, units: str = "bar"): + """Read from sensor, result to be expressed in units (optional).""" + ... diff --git a/src/flowchem/components/sensors/sensor.py b/src/flowchem/components/sensors/sensor.py new file mode 100644 index 00000000..76a573b0 --- /dev/null +++ b/src/flowchem/components/sensors/sensor.py @@ -0,0 +1,14 @@ +"""Sensor device.""" +from __future__ import annotations + +from flowchem.components.base_component import FlowchemComponent +from flowchem.devices.flowchem_device import FlowchemDevice + + +class Sensor(FlowchemComponent): + """A generic sensor.""" + + def __init__(self, name: str, hw_device: FlowchemDevice): + super().__init__(name, hw_device) + # Ontology: HPLC isocratic pump + self.metadata.owl_subclass_of = "http://purl.obolibrary.org/obo/NCIT_C50166" diff --git a/src/flowchem/components/technical/__init__.py b/src/flowchem/components/technical/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/flowchem/components/technical/power_control.py b/src/flowchem/components/technical/power_control.py new file mode 100644 index 00000000..9feabe45 --- /dev/null +++ b/src/flowchem/components/technical/power_control.py @@ -0,0 +1,54 @@ +"""Power control, sets both voltage and current. 
(Could be split in two, unnecessarty for now).""" +from __future__ import annotations + +import pint + +from flowchem.components.base_component import FlowchemComponent +from flowchem.devices.flowchem_device import FlowchemDevice + + +class PowerControl(FlowchemComponent): + """A generic temperature controller.""" + + def __init__( + self, + name: str, + hw_device: FlowchemDevice, + ): + """Create a TemperatureControl object.""" + super().__init__(name, hw_device) + + self.add_api_route("/power-on", self.power_on, methods=["PUT"]) + self.add_api_route("/power-off", self.power_off, methods=["PUT"]) + + # Current + self.add_api_route("/current", self.get_current, methods=["GET"]) + self.add_api_route("/current", self.set_current, methods=["PUT"]) + + # Voltage + self.add_api_route("/voltage", self.get_voltage, methods=["GET"]) + self.add_api_route("/voltage", self.set_voltage, methods=["PUT"]) + + async def set_current(self, current: str): + """Set the target current to the given string in natural language.""" + ... + + async def get_current(self) -> float: # type: ignore + """Return current in Ampere.""" + ... + + async def set_voltage(self, voltage: str): + """Set the target voltage to the given string in natural language.""" + ... + + async def get_voltage(self) -> float: # type: ignore + """Return current in Volt.""" + ... + + async def power_on(self): + """Turn on temperature control.""" + ... + + async def power_off(self): + """Turn off temperature control.""" + ... 
diff --git a/src/flowchem/components/technical/temperature_control.py b/src/flowchem/components/technical/temperature_control.py new file mode 100644 index 00000000..5fdb3804 --- /dev/null +++ b/src/flowchem/components/technical/temperature_control.py @@ -0,0 +1,80 @@ +"""Temperature control, either for heating or cooling.""" +from __future__ import annotations + +from typing import NamedTuple +from typing import TYPE_CHECKING + +import pint +from loguru import logger + +from flowchem import ureg +from flowchem.components.base_component import FlowchemComponent + +if TYPE_CHECKING: + from flowchem.devices.flowchem_device import FlowchemDevice + + +class TempRange(NamedTuple): + min: pint.Quantity = ureg.Quantity("-100 °C") + max: pint.Quantity = ureg.Quantity("+250 °C") + + +class TemperatureControl(FlowchemComponent): + """A generic temperature controller.""" + + def __init__(self, name: str, hw_device: FlowchemDevice, temp_limits: TempRange): + """Create a TemperatureControl object.""" + super().__init__(name, hw_device) + + self.add_api_route("/temperature", self.set_temperature, methods=["PUT"]) + self.add_api_route("/temperature", self.get_temperature, methods=["GET"]) + + self.add_api_route("/power-on", self.power_on, methods=["PUT"]) + self.add_api_route("/power-off", self.power_off, methods=["PUT"]) + + self.add_api_route("/target-reached", self.is_target_reached, methods=["GET"]) + self.add_api_route("/limits", self.temperature_limits, methods=["GET"]) + + self._limits = temp_limits + + async def set_temperature(self, temp: str) -> pint.Quantity: + """Set the target temperature to the given string in natural language.""" + if temp.isnumeric(): + temp = temp + "°C" + logger.warning("No units provided to set_temperature, assuming Celsius.") + set_t = ureg.Quantity(temp) + + if set_t < self._limits[0]: + set_t = self._limits[0] + logger.warning( + f"Temperature requested {set_t} is out of range [{self._limits}] for {self.name}!" 
+ f"Setting to {self._limits[0]} instead." + ) + + if set_t > self._limits[1]: + set_t = self._limits[1] + logger.warning( + f"Temperature requested {set_t} is out of range [{self._limits}] for {self.name}!" + f"Setting to {self._limits[1]} instead." + ) + return set_t + + async def get_temperature(self) -> float: # type: ignore + """Return temperature in Celsius.""" + ... + + async def is_target_reached(self) -> bool: # type: ignore + """Return True if the set temperature target has been reached.""" + ... + + async def temperature_limits(self) -> TempRange: + """Return a dict with `min` and `max` temperature in Celsius.""" + return self._limits + + async def power_on(self): + """Turn on temperature control.""" + ... + + async def power_off(self): + """Turn off temperature control.""" + ... diff --git a/src/flowchem/components/test.py b/src/flowchem/components/test.py new file mode 100644 index 00000000..c6240eda --- /dev/null +++ b/src/flowchem/components/test.py @@ -0,0 +1,9 @@ +"""A test component.""" +from flowchem.components.base_component import FlowchemComponent +from flowchem.devices.flowchem_device import FlowchemDevice + + +class TestComponent(FlowchemComponent): + def __init__(self, name: str, hw_device: FlowchemDevice): + """Initialize a TestComponent with the provided endpoints.""" + super().__init__(name, hw_device) diff --git a/src/flowchem/components/valves/__init__.py b/src/flowchem/components/valves/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/flowchem/components/valves/base_valve.py b/src/flowchem/components/valves/base_valve.py new file mode 100644 index 00000000..9ef137b4 --- /dev/null +++ b/src/flowchem/components/valves/base_valve.py @@ -0,0 +1,72 @@ +"""Generic valve.""" +from __future__ import annotations + +from pydantic import BaseModel + +from flowchem.components.base_component import FlowchemComponent +from flowchem.devices.flowchem_device import FlowchemDevice + + +class ValveInfo(BaseModel): + ports: 
list[str] + positions: dict[str, list[tuple[str, str]]] + + +class BaseValve(FlowchemComponent): + """An abstract class for devices of type valve. + + .. warning:: + Device objects should not directly generate components with this object but rather a more specific valve type, + such as `InjectionValve` or `MultiPositionValve`. + + All valves are characterized by: + + - a `positions` attribute, which is a set of strings representing the valve positions. + - a `set_position()` method + - a `get_position()` method + """ + + def __init__( + self, + name: str, + hw_device: FlowchemDevice, + positions: dict[str, list[tuple[str, str]]], + ports: list[str], + ): + """ + Create a valve object. + + Args: + name: device name, passed to FlowchemComponent. + hw_device: the object that controls the hardware. + positions: list of string representing the valve ports. The order in the list reflect the physical world. + This potentially enables to select rotation direction to avoid specific interactions. + """ + assert len(set(positions)) == len(positions), "Positions are unique" + self._positions = positions + self._ports = ports # This is necessary because the port order cannot be determined from ValvePosition only + + super().__init__(name, hw_device) + + self.add_api_route("/position", self.get_position, methods=["GET"]) + self.add_api_route("/position", self.set_position, methods=["PUT"]) + self.add_api_route("/connections", self.connections, methods=["GET"]) + + async def get_position(self) -> str: # type: ignore + """Get the current position of the valve.""" + ... + + async def set_position(self, position: str) -> bool: + """Set the valve to the specified position.""" + assert position in self._positions.keys() + return True + + def connections(self) -> ValveInfo: + """ + Get the list of all available positions for this valve. + + These are the human-friendly port names, and they do not necessarily match the port names used in the + communication with the device. + E.g. 
positions "load" and "inject" could translate to positions "1" and "2". + """ + return ValveInfo(ports=self._ports, positions=self._positions) diff --git a/src/flowchem/components/valves/distribution_valves.py b/src/flowchem/components/valves/distribution_valves.py new file mode 100644 index 00000000..9c02f4f9 --- /dev/null +++ b/src/flowchem/components/valves/distribution_valves.py @@ -0,0 +1,111 @@ +"""Distribution valves, generally connected to syringe pumps, direct the flow from a fixed port to one of the others.""" +from flowchem.components.valves.base_valve import BaseValve +from flowchem.devices.flowchem_device import FlowchemDevice + + +class TwoPortDistribution(BaseValve): + def __init__(self, name: str, hw_device: FlowchemDevice): + positions = { + "input": [("pump", "input")], + "output": [("pump", "output")], + } + super().__init__(name, hw_device, positions, ports=["pump", "input", "output"]) + + +class SixPortDistribution(BaseValve): + def __init__(self, name: str, hw_device: FlowchemDevice): + positions = { + "1": [("pump", "1")], + "2": [("pump", "2")], + "3": [("pump", "3")], + "4": [("pump", "4")], + "5": [("pump", "5")], + "6": [("pump", "6")], + } + super().__init__( + name, hw_device, positions, ports=["pump", "1", "2", "3", "4", "5", "6"] + ) + + +class TwelvePortDistribution(BaseValve): + def __init__(self, name: str, hw_device: FlowchemDevice): + positions = { + "1": [("pump", "1")], + "2": [("pump", "2")], + "3": [("pump", "3")], + "4": [("pump", "4")], + "5": [("pump", "5")], + "6": [("pump", "6")], + "7": [("pump", "7")], + "8": [("pump", "8")], + "9": [("pump", "9")], + "10": [("pump", "10")], + "11": [("pump", "11")], + "12": [("pump", "12")], + } + super().__init__( + name, + hw_device, + positions, + ports=[ + "pump", + "1", + "2", + "3", + "4", + "5", + "6", + "7", + "8", + "9", + "10", + "11", + "12", + ], + ) + + +class SixteenPortDistribution(BaseValve): + def __init__(self, name: str, hw_device: FlowchemDevice): + positions = { + 
"1": [("pump", "1")], + "2": [("pump", "2")], + "3": [("pump", "3")], + "4": [("pump", "4")], + "5": [("pump", "5")], + "6": [("pump", "6")], + "7": [("pump", "7")], + "8": [("pump", "8")], + "9": [("pump", "9")], + "10": [("pump", "10")], + "11": [("pump", "11")], + "12": [("pump", "12")], + "13": [("pump", "13")], + "14": [("pump", "14")], + "15": [("pump", "15")], + "16": [("pump", "16")], + } + super().__init__( + name, + hw_device, + positions, + ports=[ + "pump", + "1", + "2", + "3", + "4", + "5", + "6", + "7", + "8", + "9", + "10", + "11", + "12", + "13", + "14", + "15", + "16", + ], + ) diff --git a/src/flowchem/components/valves/injection_valves.py b/src/flowchem/components/valves/injection_valves.py new file mode 100644 index 00000000..4f7f3819 --- /dev/null +++ b/src/flowchem/components/valves/injection_valves.py @@ -0,0 +1,16 @@ +"""Injection valves are multiport, two-position valves, e.g. 6-2 commonly used w/ injection loops for HPLC injection.""" +from flowchem.components.valves.base_valve import BaseValve +from flowchem.devices.flowchem_device import FlowchemDevice + + +class SixPortTwoPosition(BaseValve): + def __init__(self, name: str, hw_device: FlowchemDevice): + # These are hardware-port, only input and output are routable from the fixed syringe. + # All three are listed as this simplifies the creation of graphs + positions = { + "load": [("1", "2"), ("3", "4"), ("5", "6")], + "inject": [("6", "1"), ("2", "3"), ("4", "5")], + } + super().__init__( + name, hw_device, positions, ports=["1", "2", "3", "4", "5", "6"] + ) diff --git a/src/flowchem/devices/README.md b/src/flowchem/devices/README.md new file mode 100644 index 00000000..fc3b564e --- /dev/null +++ b/src/flowchem/devices/README.md @@ -0,0 +1,5 @@ +# flowchem/devices + +This folder contains the drivers for the devices supported, grouped by manufacturer. + +See the docs for a guide on how to add support for a new device. 
diff --git a/src/flowchem/devices/__init__.py b/src/flowchem/devices/__init__.py new file mode 100644 index 00000000..117ca973 --- /dev/null +++ b/src/flowchem/devices/__init__.py @@ -0,0 +1,14 @@ +# Add all flowchem-device classes to the flowchem.device namespace +# This is needed by config parser and hides the complexity of the folder hierarchy to the library users. +# All * are defined as __all__ in the corresponding submodule to simplify name changes / refactoring. +from .dataapex import * +from .hamilton import * +from .harvardapparatus import * +from .huber import * +from .knauer import * +from .magritek import * +from .manson import * +from .mettlertoledo import * +from .phidgets import * +from .vapourtec import * +from .vicivalco import * diff --git a/src/flowchem/devices/dataapex/__init__.py b/src/flowchem/devices/dataapex/__init__.py new file mode 100644 index 00000000..412caa4c --- /dev/null +++ b/src/flowchem/devices/dataapex/__init__.py @@ -0,0 +1,3 @@ +from .clarity import Clarity + +__all__ = ["Clarity"] diff --git a/src/flowchem/devices/dataapex/clarity.py b/src/flowchem/devices/dataapex/clarity.py new file mode 100644 index 00000000..9efe862f --- /dev/null +++ b/src/flowchem/devices/dataapex/clarity.py @@ -0,0 +1,83 @@ +"""Controls a local ClarityChrom instance via the CLI interface.""" +# See https://www.dataapex.com/documentation/Content/Help/110-technical-specifications/110.020-command-line-parameters/110.020-command-line-parameters.htm?Highlight=command%20line +import asyncio +from pathlib import Path +from shutil import which + +from loguru import logger + +from .clarity_hplc_control import ClarityComponent +from flowchem.devices.flowchem_device import DeviceInfo +from flowchem.devices.flowchem_device import FlowchemDevice +from flowchem.people import * + + +class Clarity(FlowchemDevice): + def __init__( + self, + name, + executable: str = r"C:\claritychrom\bin\claritychrom.exe", + instrument_number: int = 1, + startup_time: float = 20, + 
startup_method: str = "", + cmd_timeout: float = 3, + user: str = "admin", + password: str = "", + cfg_file: str = "", + ): + super().__init__(name=name) + + # Validate executable + if which(executable): + self.executable = executable + else: + assert '"' not in executable + self.executable = f'"{executable}"' + assert which(executable) or Path(executable).is_file(), "Valid executable found" + + # Save instance variables + self.instrument = instrument_number + self.startup_time = startup_time + self.cmd_timeout = cmd_timeout + + # Pre-form initialization command to avoid passing tons of vars to initialize() + self._init_command = "" + self._init_command += f" cfg={cfg_file}" if cfg_file else "" + self._init_command += f" u={user}" if user else "" + self._init_command += f" p={password}" if password else "" + self._init_command += f' "{startup_method}"' + + async def initialize(self): + """Start ClarityChrom and wait for it to be responsive.""" + await self.execute_command(self._init_command) + logger.info(f"Clarity startup: waiting {self.startup_time} seconds") + await asyncio.sleep(self.startup_time) + + def metadata(self) -> DeviceInfo: + """Return hw device metadata.""" + return DeviceInfo( + authors=[dario, jakob, wei_hsin], + maintainers=[dario], + manufacturer="DataApex", + model="Clarity Chromatography", + ) + + async def execute_command(self, command: str, without_instrument_num: bool = False): + """Execute claritychrom.exe command.""" + if without_instrument_num: + cmd_string = self.executable + f" {command}" + else: + cmd_string = self.executable + f" i={self.instrument} {command}" + + logger.debug(f"Executing Clarity command `{command}`") + process = await asyncio.create_subprocess_shell(cmd_string) + try: + await asyncio.wait_for(process.wait(), timeout=self.cmd_timeout) + return True + except TimeoutError: + logger.error(f"Subprocess timeout expired (timeout = {self.cmd_timeout} s)") + return False + + def get_components(self): + """Return an 
HPLC_Control component.""" + return (ClarityComponent(name="clarity", hw_device=self),) diff --git a/src/flowchem/devices/dataapex/clarity_hplc_control.py b/src/flowchem/devices/dataapex/clarity_hplc_control.py new file mode 100644 index 00000000..f545de54 --- /dev/null +++ b/src/flowchem/devices/dataapex/clarity_hplc_control.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from flowchem.components.analytics.hplc_control import HPLCControl + +if TYPE_CHECKING: + from flowchem.devices import Clarity + + +class ClarityComponent(HPLCControl): + hw_device: Clarity # for typing's sake + + def __init__(self, name: str, hw_device: Clarity): + """Device-specific initialization.""" + super().__init__(name, hw_device) + # Clarity-specific command + self.add_api_route("/exit", self.exit, methods=["PUT"]) + + async def exit(self) -> bool: + """Exit Clarity Chrom.""" + return await self.hw_device.execute_command("exit", without_instrument_num=True) + + async def send_method(self, method_name) -> bool: + """ + Sets the HPLC method (i.e. a file with .MET extension) to the instrument. + + Make sure to select 'Send Method to Instrument' option in Method Sending Options dialog in System Configuration. + """ + return await self.hw_device.execute_command(f" {method_name}") + + async def run_sample(self, sample_name: str, method_name: str) -> bool: + """ + Run one analysis on the instrument. + + Note that it takes at least 2 sec until the run actually starts (depending on instrument configuration). + While the export of the chromatogram in e.g. ASCII format can be achieved programmatically via the CLI, the best + solution is to enable automatic data export for all runs of the HPLC as the chromatogram will be automatically + exported as soon as the run is finished. 
+ """ + if not await self.hw_device.execute_command(f'set_sample_name="{sample_name}"'): + return False + if not await self.send_method(method_name): + return False + return await self.hw_device.execute_command( + f"run={self.hw_device.instrument}", without_instrument_num=True + ) diff --git a/src/flowchem/devices/flowchem_device.py b/src/flowchem/devices/flowchem_device.py new file mode 100644 index 00000000..f9a03dee --- /dev/null +++ b/src/flowchem/devices/flowchem_device.py @@ -0,0 +1,54 @@ +"""Base object for all hardware-control device classes.""" +from abc import ABC +from collections.abc import Iterable +from typing import TYPE_CHECKING + +from pydantic import BaseModel + +from flowchem import __version__ + +if TYPE_CHECKING: + from flowchem.components.base_component import FlowchemComponent + + +class Person(BaseModel): + name: str + email: str + + +class DeviceInfo(BaseModel): + """Metadata associated with hardware devices.""" + + backend = f"flowchem v. {__version__}" + authors: "list[Person]" + maintainers: "list[Person]" + manufacturer: str + model: str + serial_number = "unknown" + version = "" + additional_info: dict = {} + + +DeviceInfo.update_forward_refs() + + +class FlowchemDevice(ABC): + """ + Base flowchem device. + + All hardware-control classes must subclass this to signal they are flowchem-device and be enabled for initializaiton + during config parsing. 
+ """ + + def __init__(self, name): + """All device have a name, which is the key in the config dict thus unique.""" + self.name = name + + async def initialize(self): + pass + + def get_metadata(self) -> DeviceInfo: + return self.metadata # type: ignore + + def components(self) -> Iterable["FlowchemComponent"]: + return () diff --git a/src/flowchem/devices/hamilton/__init__.py b/src/flowchem/devices/hamilton/__init__.py new file mode 100644 index 00000000..6b8cda9c --- /dev/null +++ b/src/flowchem/devices/hamilton/__init__.py @@ -0,0 +1,4 @@ +"""Hamilton devices.""" +from .ml600 import ML600 + +__all__ = ["ML600"] diff --git a/src/flowchem/devices/hamilton/ml600.py b/src/flowchem/devices/hamilton/ml600.py new file mode 100644 index 00000000..701f6cc9 --- /dev/null +++ b/src/flowchem/devices/hamilton/ml600.py @@ -0,0 +1,502 @@ +"""Control Hamilton ML600 syringe pump via the protocol1/RNO+.""" +from __future__ import annotations + +import string +import warnings +from dataclasses import dataclass +from typing import TYPE_CHECKING + +import aioserial +from loguru import logger + +from flowchem import ureg +from flowchem.devices.flowchem_device import DeviceInfo +from flowchem.devices.flowchem_device import FlowchemDevice +from flowchem.devices.hamilton.ml600_pump import ML600Pump +from flowchem.devices.hamilton.ml600_valve import ML600Valve +from flowchem.exceptions import InvalidConfiguration +from flowchem.people import * + +if TYPE_CHECKING: + import pint + + +# i.e. PUMP_ADDRESS = {1: 'a', 2: 'b', 3: 'c', 4: 'd', ..., 16: 'p'} +# Note ':' is used for broadcast within the daisy chain. 
+PUMP_ADDRESS = dict(enumerate(string.ascii_lowercase[:16], start=1)) + + +@dataclass +class Protocol1Command: + """Class representing a pump command and its expected reply.""" + + command: str + target_pump_num: int = 1 + target_syringe: str = "" + command_value: str = "" + optional_parameter: str = "" + parameter_value: str = "" + execution_command: str = "R" # Execute + + def compile(self) -> str: + """Create actual command byte by prepending pump address to command and appending executing command.""" + compiled_command = ( + f"{PUMP_ADDRESS[self.target_pump_num]}" + f"{self.target_syringe}" + f"{self.command}{self.command_value}" + ) + + if self.parameter_value: + compiled_command += f"{self.optional_parameter}{self.parameter_value}" + + return compiled_command + self.execution_command + + +class HamiltonPumpIO: + """Setup with serial parameters, low level IO.""" + + ACKNOWLEDGE = chr(6) + NEGATIVE_ACKNOWLEDGE = chr(21) + DEFAULT_CONFIG = { + "timeout": 0.1, + "baudrate": 9600, + "parity": aioserial.PARITY_EVEN, + "stopbits": aioserial.STOPBITS_ONE, + "bytesize": aioserial.SEVENBITS, + } + + def __init__(self, aio_port: aioserial.Serial): + """Initialize serial port, not pumps.""" + self._serial = aio_port + self.num_pump_connected: int | None = ( + None # Set by `HamiltonPumpIO.initialize()` + ) + + @classmethod + def from_config(cls, config): + """Create HamiltonPumpIO from config.""" + configuration = HamiltonPumpIO.DEFAULT_CONFIG | config + + try: + serial_object = aioserial.AioSerial(**configuration) + except aioserial.SerialException as serial_exception: + raise InvalidConfiguration( + f"Cannot connect to the pump on the port <{configuration.get('port')}>" + ) from serial_exception + + return cls(serial_object) + + async def initialize(self, hw_initialization: bool = True): + """Ensure connection with pump and initialize it (if hw_initialization is True).""" + self.num_pump_connected = await self._assign_pump_address() + if hw_initialization: + await 
self._hw_init() + + async def _assign_pump_address(self) -> int: + """ + Auto assign pump addresses. + + To be run on init, auto assign addresses to pumps based on their position in the daisy chain. + A custom command syntax with no addresses is used here so read and write has been rewritten + """ + try: + await self._write_async(b"1a\r") + except aioserial.SerialException as e: + raise InvalidConfiguration from e + + reply = await self._read_reply_async() + if not reply or reply[:1] != "1": + raise InvalidConfiguration(f"No pump found on {self._serial.port}") + # reply[1:2] should be the address of the last pump. However, this does not work reliably. + # So here we enumerate the pumps explicitly instead + last_pump = 0 + for pump_num, address in PUMP_ADDRESS.items(): + await self._write_async(f"{address}UR\r".encode("ascii")) + if "NV01" in await self._read_reply_async(): + last_pump = pump_num + else: + break + logger.debug(f"Found {last_pump} pumps on {self._serial.port}!") + return int(last_pump) + + async def _hw_init(self): + """Send to all pumps the HW initialization command (i.e. 
homing).""" + await self._write_async(b":XR\r") # Broadcast: initialize + execute + # Note: no need to consume reply here because there is none (since we are using broadcast) + + async def _write_async(self, command: bytes): + """Write a command to the pump.""" + await self._serial.write_async(command) + logger.debug(f"Command {repr(command)} sent!") + + async def _read_reply_async(self) -> str: + """Read the pump reply from serial communication.""" + reply_string = await self._serial.readline_async() + logger.debug(f"Reply received: {reply_string}") + return reply_string.decode("ascii") + + def parse_response(self, response: str) -> str: + """Split a received line in its components: status, reply.""" + status, reply = response[:1], response[1:] + + assert status in (self.ACKNOWLEDGE, self.NEGATIVE_ACKNOWLEDGE) + if status == self.NEGATIVE_ACKNOWLEDGE: + logger.warning("Negative acknowledge received") + warnings.warn("Negative acknowledge reply: check command syntax!") + + return reply.rstrip() # removes trailing + + async def write_and_read_reply_async(self, command: Protocol1Command) -> str: + """Send a command to the pump, read the replies and returns it, optionally parsed.""" + self._serial.reset_input_buffer() + await self._write_async(f"{command.compile()}\r".encode("ascii")) + response = await self._read_reply_async() + + if not response: + raise InvalidConfiguration( + f"No response received from pump! " + f"Maybe wrong pump address? (Set to {command.target_pump_num})" + ) + + return self.parse_response(response) + + +class ML600(FlowchemDevice): + """ML600 implementation according to manufacturer docs. Tested on a 61501-01 (i.e. single syringe system). + + From manufacturer docs: + To determine the volume dispensed per step the total syringe volume is divided by + 48,000 steps. All Hamilton instrument syringes are designed with a 60 mm stroke + length and the Microlab 600 is designed to move 60 mm in 48,000 steps. 
For + example to dispense 9 mL from a 10 mL syringe you would determine the number of + steps by multiplying 48000 steps (9 mL/10 mL) to get 43,200 steps. + """ + + DEFAULT_CONFIG = { + "default_infuse_rate": "1 ml/min", + "default_withdraw_rate": "1 ml/min", + } + + metadata = DeviceInfo( + authors=[dario, jakob, wei_hsin], + maintainers=[dario], + manufacturer="Hamilton", + model="ML600", + ) + + # This class variable is used for daisy chains (i.e. multiple pumps on the same serial connection). Details below. + _io_instances: set[HamiltonPumpIO] = set() + # The mutable object (a set) as class variable creates a shared state across all the instances. + # When several pumps are daisy-chained on the same serial port, they need to all access the same Serial object, + # because access to the serial port is exclusive by definition (also locking there ensure thread safe operations). + + # Only Hamilton syringes are compatible w/ the ML600, and they come on a limited set of sizes. (Values in ml) + VALID_SYRINGE_VOLUME = { + 0.01, + 0.025, + 0.05, + 0.1, + 0.25, + 0.5, + 1.0, + 2.5, + 5.0, + 10.0, + 25.0, + 50.0, + } + + def __init__( + self, + pump_io: HamiltonPumpIO, + syringe_volume: str, + name: str, + address: int = 1, + **config, + ): + """ + Default constructor, needs an HamiltonPumpIO object. See from_config() class method for config-based init. + + Args: + pump_io: An HamiltonPumpIO w/ serial connection to the daisy chain w/ target pump. + syringe_volume: Volume of the syringe used, either a Quantity or number in ml. + address: number of pump in array, 1 for first one, auto-assigned on init based on position. + name: 'cause naming stuff is important. + """ + super().__init__(name) + # HamiltonPumpIO + self.pump_io = pump_io + ML600._io_instances.add(self.pump_io) # See above for details. + + # Pump address is the pump sequence number if in chain. Count starts at 1, default. 
+ self.address = int(address) + + # Syringe pumps only perform linear movement, and the volume displaced is function of the syringe loaded. + try: + self.syringe_volume = ureg.Quantity(syringe_volume) + except AttributeError as attribute_error: + logger.error(f"Invalid syringe volume {syringe_volume}!") + raise InvalidConfiguration( + f"Invalid syringe volume provided." + f"The syringe volume is a string with units! e.g. '5 ml'" + ) from attribute_error + + if self.syringe_volume.m_as("ml") not in ML600.VALID_SYRINGE_VOLUME: + raise InvalidConfiguration( + f"The specified syringe volume ({syringe_volume}) is invalid!\n" + f"The volume (in ml) has to be one of {ML600.VALID_SYRINGE_VOLUME}" + ) + + self._steps_per_ml = ureg.Quantity(f"{48000 / self.syringe_volume} step/ml") + self._offset_steps = 100 # Steps added to each absolute move command, to decrease wear and tear at volume = 0 + self._max_vol = (48000 - self._offset_steps) * ureg.step / self._steps_per_ml + + # This enables to configure on per-pump basis uncommon parameters + self.config = ML600.DEFAULT_CONFIG | config + + @classmethod + def from_config(cls, **config): + """This class method is used to create instances via config file by the server for HTTP interface.""" + # Many pump can be present on the same serial port with different addresses. + # This shared list of HamiltonPumpIO objects allow shared state in a borg-inspired way, avoiding singletons + # This is only relevant to programmatic instantiation, i.e. when from_config() is called per each pump from a + # config file, as it is the case in the HTTP server. 
+ pumpio = None + for obj in ML600._io_instances: + # noinspection PyProtectedMember + if obj._serial.port == config.get("port"): + pumpio = obj + break + + # If not existing serial object are available for the port provided, create a new one + if pumpio is None: + # Remove ML600-specific keys to only have HamiltonPumpIO's kwargs + config_for_pumpio = { + k: v + for k, v in config.items() + if k not in ("syringe_volume", "address", "name") + } + pumpio = HamiltonPumpIO.from_config(config_for_pumpio) + + return cls( + pumpio, + syringe_volume=config.get("syringe_volume", ""), + address=config.get("address", 1), + name=config.get("name", ""), + ) + + async def initialize(self, hw_init=False, init_speed: str = "200 sec / stroke"): + """Must be called after init before anything else.""" + await self.pump_io.initialize() + # Test connectivity by querying the pump's firmware version + fw_cmd = Protocol1Command(command="U", target_pump_num=self.address) + self.metadata.version = await self.pump_io.write_and_read_reply_async(fw_cmd) + logger.info( + f"Connected to Hamilton ML600 {self.name} - FW version: {self.metadata.version}!" + ) + + if hw_init: + await self.initialize_pump(speed=ureg.Quantity(init_speed)) + + async def send_command_and_read_reply(self, command: Protocol1Command) -> str: + """Send a command to the pump. Here we just add the right pump number.""" + command.target_pump_num = self.address + return await self.pump_io.write_and_read_reply_async(command) + + def _validate_speed(self, speed: pint.Quantity | None) -> str: + """ + Validate the speed. + + Given a speed (seconds/stroke) returns a valid value for it, and a warning if out of bounds. + """ + # Validated speeds are used as command argument, with empty string being the default for None + if speed is None: + return "" + + # Alert if out of bounds but don't raise exceptions, according to general philosophy. 
+ # Target flow rate too high + if speed < ureg.Quantity("2 sec/stroke"): + speed = ureg.Quantity("2 sec/stroke") + warnings.warn( + f"Desired speed ({speed}) is unachievable!" + f"Set to {self._seconds_per_stroke_to_flowrate(speed)}" + f"Wrong units? A bigger syringe is needed?" + ) + + # Target flow rate too low + if speed > ureg.Quantity("3692 sec/stroke"): + speed = ureg.Quantity("3692 sec/stroke") + warnings.warn( + f"Desired speed ({speed}) is unachievable!" + f"Set to {self._seconds_per_stroke_to_flowrate(speed)}" + f"Wrong units? A smaller syringe is needed?" + ) + + return str(round(speed.m_as("sec / stroke"))) + + async def initialize_pump(self, speed: pint.Quantity | None = None): + """ + Initialize both syringe and valve. + + speed: 2-3692 in seconds/stroke + """ + init_pump = Protocol1Command( + command="X", + optional_parameter="S", + parameter_value=self._validate_speed(speed), + ) + return await self.send_command_and_read_reply(init_pump) + + # async def initialize_valve(self): + # """Initialize valve only.""" + # return await self.send_command_and_read_reply(Protocol1Command(command="LX")) + + # async def initialize_syringe(self, speed: pint.Quantity | None = None): + # """ + # Initialize syringe only. + # + # speed: 2-3692 in seconds/stroke + # """ + # init_syringe = Protocol1Command( + # command="X1", + # optional_parameter="S", + # parameter_value=self._validate_speed(speed), + # ) + # return await self.send_command_and_read_reply(init_syringe) + + def flowrate_to_seconds_per_stroke(self, flowrate: pint.Quantity): + """ + Convert flow rates to steps per seconds. + + To determine the volume dispensed per step the total syringe volume is divided by + 48,000 steps. All Hamilton instrument syringes are designed with a 60 mm stroke + length and the Microlab 600 is designed to move 60 mm in 48,000 steps. 
For + example to dispense 9 mL from a 10 mL syringe you would determine the number of + steps by multiplying 48000 steps (9 mL/10 mL) to get 43,200 steps. + """ + flowrate_in_steps_sec = flowrate * self._steps_per_ml + return (1 / flowrate_in_steps_sec).to("second/stroke") + + def _seconds_per_stroke_to_flowrate(self, second_per_stroke) -> float: + """Converts seconds per stroke to flow rate. Only internal use.""" + flowrate = 1 / (second_per_stroke * self._steps_per_ml) + return flowrate.to("ml/min") + + def _volume_to_step_position(self, volume: pint.Quantity) -> int: + """Convert a volume to a step position.""" + # noinspection PyArgumentEqualDefault + steps = volume * self._steps_per_ml + return round(steps.m_as("steps")) + self._offset_steps + + async def _to_step_position( + self, position: int, speed: pint.Quantity | None = None + ): + """Absolute move to step position.""" + abs_move_cmd = Protocol1Command( + command="M", + optional_parameter="S", + command_value=str(position), + parameter_value=self._validate_speed(speed), + ) + return await self.send_command_and_read_reply(abs_move_cmd) + + async def get_current_volume(self) -> pint.Quantity: + """Return current syringe position in ml.""" + syringe_pos = await self.send_command_and_read_reply( + Protocol1Command(command="YQP") + ) + + current_steps = (int(syringe_pos) - self._offset_steps) * ureg.step + return current_steps / self._steps_per_ml + + async def to_volume(self, target_volume: pint.Quantity, rate: pint.Quantity): + """Absolute move to volume provided.""" + speed = self.flowrate_to_seconds_per_stroke(rate) + await self._to_step_position( + self._volume_to_step_position(target_volume), speed + ) + logger.debug(f"Pump {self.name} set to volume {target_volume} at speed {speed}") + + async def pause(self): + """Pause any running command.""" + return await self.send_command_and_read_reply( + Protocol1Command(command="", execution_command="K") + ) + + async def resume(self): + """Resume any paused 
command.""" + return await self.send_command_and_read_reply( + Protocol1Command(command="", execution_command="$") + ) + + async def stop(self): + """Stop and abort any running command.""" + await self.pause() + return await self.send_command_and_read_reply( + Protocol1Command(command="", execution_command="V") + ) + + async def wait_until_idle(self): + """Return when no more commands are present in the pump buffer.""" + logger.debug(f"ML600 pump {self.name} wait until idle...") + while not self.is_idle(): + await asyncio.sleep(0.1) + logger.debug(f"...ML600 pump {self.name} idle now!") + + async def version(self) -> str: + """Return the current firmware version reported by the pump.""" + return await self.send_command_and_read_reply(Protocol1Command(command="U")) + + async def is_idle(self) -> bool: + """Check if the pump is idle (actually check if the last command has ended).""" + return ( + await self.send_command_and_read_reply(Protocol1Command(command="F")) == "Y" + ) + + async def get_valve_position(self) -> str: + """Represent the position of the valve: getter returns Enum, setter needs Enum.""" + return await self.send_command_and_read_reply(Protocol1Command(command="LQP")) + + async def set_valve_position( + self, + target_position: str, + wait_for_movement_end: bool = True, + ): + """ + Set valve position. + + wait_for_movement_end is defaulted to True as it is a common mistake not to wait... + """ + await self.send_command_and_read_reply( + Protocol1Command(command="LP0", command_value=target_position) + ) + logger.debug(f"{self.name} valve position set to position {target_position}") + if wait_for_movement_end: + await self.wait_until_idle() + + # async def get_return_steps(self) -> int: + # """Return steps' getter. 
Applied to the end of a downward syringe movement to removes mechanical slack.""" + # steps = await self.send_command_and_read_reply(Protocol1Command(command="YQN")) + # return int(steps) + # + # async def set_return_steps(self, target_steps: int): + # """Return steps' setter. Applied to the end of a downward syringe movement to removes mechanical slack.""" + # target_steps = str(int(target_steps)) + # return await self.send_command_and_read_reply(Protocol1Command(command="YSN", command_value=target_steps)) + + def components(self): + """Return a Syringe and a Valve component.""" + return ML600Pump("pump", self), ML600Valve("valve", self) + + +if __name__ == "__main__": + import asyncio + + conf = { + "port": "COM12", + "address": 1, + "name": "test1", + "syringe_volume": 5, + } + pump1 = ML600.from_config(**conf) + asyncio.run(pump1.initialize_pump()) diff --git a/src/flowchem/devices/hamilton/ml600_finder.py b/src/flowchem/devices/hamilton/ml600_finder.py new file mode 100644 index 00000000..0e271599 --- /dev/null +++ b/src/flowchem/devices/hamilton/ml600_finder.py @@ -0,0 +1,45 @@ +"""This module is used to discover the serial address of any ML600 connected to the PC.""" +import asyncio +from textwrap import dedent + +from loguru import logger + +from flowchem.devices.hamilton.ml600 import HamiltonPumpIO +from flowchem.devices.hamilton.ml600 import InvalidConfiguration + + +def ml600_finder(serial_port) -> set[str]: + """Try to initialize an ML600 on every available COM port.""" + logger.debug(f"Looking for ML600 pumps on {serial_port}...") + # Static counter for device type across different serial ports + if "counter" not in ml600_finder.__dict__: + ml600_finder.counter = 0 # type: ignore + dev_config: set[str] = set() + + try: + link = HamiltonPumpIO.from_config({"port": serial_port}) + except InvalidConfiguration: + return dev_config + + try: + asyncio.run(link.initialize(hw_initialization=False)) + except InvalidConfiguration: + # This is necessary only on 
failure to release the port for the other inspector + link._serial.close() + return dev_config + + for count in range(link.num_pump_connected): + logger.info(f"Pump ML600 found on <{serial_port}> address {count + 1}") + + ml600_finder.counter += 1 # type: ignore + dev_config.add( + dedent( + f"\n\n[device.ml600-{ml600_finder.counter}]" # type: ignore + f"""type = "ML600" + port = "{serial_port}" + address = {count + 1} + syringe_volume = "XXX ml" # Specify syringe volume here!\n""" + ) + ) + + return dev_config diff --git a/src/flowchem/devices/hamilton/ml600_pump.py b/src/flowchem/devices/hamilton/ml600_pump.py new file mode 100644 index 00000000..442251dc --- /dev/null +++ b/src/flowchem/devices/hamilton/ml600_pump.py @@ -0,0 +1,77 @@ +"""ML600 component relative to pumping.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + +from loguru import logger + +from flowchem import ureg +from flowchem.components.pumps.syringe_pump import SyringePump + +if TYPE_CHECKING: + from .ml600 import ML600 + + +class ML600Pump(SyringePump): + hw_device: ML600 # for typing's sake + + @staticmethod + def is_withdrawing_capable(): + """ML600 can withdraw.""" + return True + + async def is_pumping(self) -> bool: + """True if pump is moving.""" + return await self.hw_device.is_idle() is False + + async def stop(self): + """Stops pump.""" + await self.hw_device.stop() + + async def infuse(self, rate: str = "", volume: str = "") -> bool: + """Start infusion with given rate and volume (both optional). + + If no rate is specified, the default (1 ml/min) is used, can be set on per-pump basis via `default_infuse_rate` + If no volume is specified, the max possible volume is infused. 
+ """ + if not rate: + rate = self.hw_device.config.get("default_infuse_rate") # type: ignore + + if not volume: + target_vol = ureg.Quantity("0 ml") + else: + current_volume = await self.hw_device.get_current_volume() + target_vol = current_volume - ureg.Quantity(volume) + if target_vol < 0: + logger.error( + f"Cannot infuse target volume {volume}! " + f"Only {current_volume} in the syringe!" + ) + return False + + await self.hw_device.to_volume(target_vol, ureg.Quantity(rate)) + return True + + async def withdraw(self, rate: str = "1 ml/min", volume: str | None = None) -> bool: + """Start withdraw with given rate and volume (both optional). + + If no rate is specified, the default (1 ml/min) is used, can be set on per-pump basis via `default_withdraw_rate` + If no volume is specified, the max possible volume is infused. + """ + if not rate: + rate = self.hw_device.config["default_withdraw_rate"] + + if volume is None: + target_vol = self.hw_device.syringe_volume + else: + current_volume = await self.hw_device.get_current_volume() + target_vol = current_volume + ureg.Quantity(volume) + if target_vol > self.hw_device.syringe_volume: + logger.error( + f"Cannot withdraw target volume {volume}! " + f"Max volume left is {self.hw_device.syringe_volume - current_volume}!" 
+ ) + return False + + await self.hw_device.to_volume(target_vol, ureg.Quantity(rate)) + return True diff --git a/src/flowchem/devices/hamilton/ml600_valve.py b/src/flowchem/devices/hamilton/ml600_valve.py new file mode 100644 index 00000000..1d63c783 --- /dev/null +++ b/src/flowchem/devices/hamilton/ml600_valve.py @@ -0,0 +1,40 @@ +"""ML600 component relative to valve switching.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + +from loguru import logger + +from flowchem.components.valves.distribution_valves import TwoPortDistribution + +if TYPE_CHECKING: + from .ml600 import ML600 + + +class ML600Valve(TwoPortDistribution): + hw_device: ML600 # for typing's sake + + position_mapping = { + "input": "9", # 9 is default inlet, i.e. 1 + "output": "10", # 10 is default outlet, i.e. 3 + } + + async def set_position(self, position: str) -> bool: + """Set pump to position.""" + await super().set_position(position) # Validation + return await self.hw_device.set_valve_position( + target_position=ML600Valve.position_mapping[position], + wait_for_movement_end=True, + ) + + async def get_position(self) -> str: + """Current pump position.""" + pos = await self.hw_device.get_valve_position() + reverse_position_mapping = { + v: k for k, v in ML600Valve.position_mapping.items() + } + try: + return reverse_position_mapping[pos] + except KeyError: + logger.error(f"Unknown valve position returned {pos}") + return "" diff --git a/src/flowchem/devices/harvardapparatus/__init__.py b/src/flowchem/devices/harvardapparatus/__init__.py new file mode 100644 index 00000000..16bdeb7c --- /dev/null +++ b/src/flowchem/devices/harvardapparatus/__init__.py @@ -0,0 +1,4 @@ +"""Harvard Apparatus devices.""" +from .elite11 import Elite11 + +__all__ = ["Elite11"] diff --git a/src/flowchem/devices/harvardapparatus/_pumpio.py b/src/flowchem/devices/harvardapparatus/_pumpio.py new file mode 100644 index 00000000..84412c5d --- /dev/null +++ 
b/src/flowchem/devices/harvardapparatus/_pumpio.py @@ -0,0 +1,143 @@ +from dataclasses import dataclass +from enum import Enum + +import aioserial +from loguru import logger + +from flowchem.exceptions import DeviceError +from flowchem.exceptions import InvalidConfiguration + + +class PumpStatus(Enum): + """Possible pump statuses, as defined by the reply prompt.""" + + IDLE = ":" + INFUSING = ">" + WITHDRAWING = "<" + TARGET_REACHED = "T" + STALLED = "*" + + +@dataclass +class Protocol11Command: + """Class representing a pump command.""" + + command: str + pump_address: int + arguments: str + + +class HarvardApparatusPumpIO: + """Setup with serial parameters, low level IO.""" + + DEFAULT_CONFIG = {"timeout": 0.1, "baudrate": 115200} + + def __init__(self, port: str, **kwargs): + # Merge default settings, including serial, with provided ones. + configuration = dict(HarvardApparatusPumpIO.DEFAULT_CONFIG, **kwargs) + + try: + self._serial = aioserial.AioSerial(port, **configuration) + except aioserial.SerialException as serial_exception: + logger.error(f"Cannot connect to the Pump on the port <{port}>") + raise InvalidConfiguration( + f"Cannot connect to the Pump on the port <{port}>" + ) from serial_exception + + async def _write(self, command: Protocol11Command): + """Write a command to the pump.""" + command_msg = f"{command.pump_address}{command.command} {command.arguments}\r\n" + + try: + await self._serial.write_async(command_msg.encode("ascii")) + except aioserial.SerialException as serial_exception: + raise InvalidConfiguration from serial_exception + logger.debug(f"Sent {repr(command_msg)}!") + + async def _read_reply(self) -> list[str]: + """Read the pump reply from serial communication.""" + reply_string = [] + + for line in await self._serial.readlines_async(): + reply_string.append(line.decode("ascii").strip()) + logger.debug(f"Received {repr(line)}!") + + # First line is usually empty, but some prompts such as T* actually leak into this line sometimes. 
+ reply_string.pop(0) + return [x for x in reply_string if x] # remove empty strings from reply_string + + @staticmethod + def parse_response_line(line: str) -> tuple[int, PumpStatus, str]: + """Split a received line in its components: address, prompt and reply body.""" + assert len(line) >= 3 + pump_address, status = int(line[0:2]), PumpStatus(line[2:3]) + + # Target reached is the only two-character status + if status is PumpStatus.TARGET_REACHED: + return pump_address, status, line[4:] + return pump_address, status, line[3:] + + @staticmethod + def parse_response( + response: list[str], + ) -> tuple[list[int], list[PumpStatus], list[str]]: + """Aggregate address prompt and reply body from all the reply lines and return them.""" + parsed_lines = list(map(HarvardApparatusPumpIO.parse_response_line, response)) + return zip(*parsed_lines) # type: ignore + + @staticmethod + def check_for_errors(response_line, command_sent): + """Further response parsing, checks for error messages.""" + error_string = ( + "Command error", + "Unknown command", + "Argument error", + "Out of range", + ) + if any([e in response_line for e in error_string]): + logger.error( + f"Error for command {command_sent} on pump {command_sent.pump_address}!" + f"Reply: {response_line}" + ) + raise DeviceError("Command error") + + async def write_and_read_reply( + self, command: Protocol11Command, return_parsed: bool = True + ) -> list[str]: + """ + Send a command to the pump, read the replies and return it, optionally parsed. + + If unparsed reply is a List[str] with raw replies. + If parsed reply is a List[str] w/ reply body (address and prompt removed from each line). + """ + self._serial.reset_input_buffer() + await self._write(command) + response = await self._read_reply() + + if not response: + logger.error("No reply received from pump!") + raise InvalidConfiguration(f"No response received. 
Is the address right?") + + pump_address, status, parsed_response = self.parse_response(response) + + # All the replies came from the target pump + assert all(address == command.pump_address for address in pump_address) + + # No stall reply is present + if PumpStatus.STALLED in status: + logger.error("Pump stalled!") + raise DeviceError("Pump stalled! Press display on pump to clear error :(") + + # Check for error in the last response line + self.check_for_errors(response_line=response[-1], command_sent=command) + return parsed_response if return_parsed else response + + def autodiscover_address(self) -> int: + """Autodiscover pump address based on response received.""" + self._serial.write(b"\r\n") + self._serial.readline() + prompt = self._serial.readline() + valid_status = [status.value for status in PumpStatus] + address = 0 if prompt[0:2].decode() in valid_status else int(prompt[0:2]) + logger.debug(f"Address detected as {address}") + return address diff --git a/src/flowchem/devices/harvardapparatus/elite11.py b/src/flowchem/devices/harvardapparatus/elite11.py new file mode 100644 index 00000000..045e4b24 --- /dev/null +++ b/src/flowchem/devices/harvardapparatus/elite11.py @@ -0,0 +1,407 @@ +"""This module is used to control Harvard Apparatus Elite 11 syringe pump via the 11 protocol.""" +from __future__ import annotations + +import asyncio +import warnings + +import pint +from loguru import logger +from pydantic import BaseModel + +from ._pumpio import HarvardApparatusPumpIO +from ._pumpio import Protocol11Command +from ._pumpio import PumpStatus +from flowchem import ureg +from flowchem.devices.flowchem_device import DeviceInfo +from flowchem.devices.flowchem_device import FlowchemDevice +from flowchem.devices.harvardapparatus.elite11_pump import Elite11PumpOnly +from flowchem.devices.harvardapparatus.elite11_pump import Elite11PumpWithdraw +from flowchem.exceptions import InvalidConfiguration +from flowchem.people import * + + +class PumpInfo(BaseModel): + 
""" + Detailed pump info. e.g.: + + ('Pump type Pump 11', + 'Pump type string 11 ELITE I/W Single', + 'Display type Sharp', + 'Steps per rev 400', + 'Gear ratio 1:1', + 'Pulley ratio 2.4:1', + 'Lead screw 24 threads per inch', + 'Microstepping 16 microsteps per step', + 'Low speed limit 27 seconds', + 'High speed limit 26 microseconds', + 'Motor polarity Reverse', + 'Min syringe size 0.1 mm', + 'Max syringe size 33 mm', + 'Min raw force % 20%', + 'Max raw force % 80%', + 'Encoder 100 lines', + 'Direction Infuse/withdraw', + 'Programmable Yes', + 'Limit switches No', + 'Command set None', '') + """ + + pump_type: str + pump_description: str + infuse_only: bool + + @classmethod + def parse_pump_string(cls, metrics_text: list[str]): + """Parse pump response string into model.""" + pump_type, pump_description, infuse_only = "", "", True + for line in metrics_text: + if line.startswith("Pump type "): + pump_type = line[9:].strip() + elif line.startswith("Pump type string"): + pump_description = line[16:].strip() + elif line.startswith("Direction"): + infuse_only = "withdraw" not in line + return cls( + pump_type=pump_type, + pump_description=pump_description, + infuse_only=infuse_only, + ) + + +class Elite11(FlowchemDevice): + """ + Controls Harvard Apparatus Elite11 syringe pumps. + + The same protocol (Protocol11) can be used on other HA pumps, but is untested. + Several pumps can be daisy-chained on the same serial connection, if so address 0 must be the first one. + Read the manufacturer manual for more details. + """ + + # This class variable is used for daisy chains (i.e. multiple pumps on the same serial connection). 
+ _io_instances: set[HarvardApparatusPumpIO] = set() + + def __init__( + self, + pump_io: HarvardApparatusPumpIO, + syringe_diameter: str = "", + syringe_volume: str = "", + address: int = 0, + name: str = "", + force: int = 30, + ): + super().__init__(name) + + # Create communication + self.pump_io = pump_io + Elite11._io_instances.add(self.pump_io) + + self.address = address + self._infuse_only = True # Actual value set in initialize + + # syringe diameter and volume, and force will be set in initialize() + self._force = force + if syringe_diameter: + self._diameter = syringe_diameter + else: + raise InvalidConfiguration("Please provide the syringe diameter!") + + if syringe_volume: + self._syringe_volume = syringe_volume + else: + raise InvalidConfiguration("Please provide the syringe volume!") + + self.metadata = DeviceInfo( + authors=[dario, jakob, wei_hsin], + maintainers=[dario], + manufacturer="HarvardApparatus", + model="Elite11", + version="", + ) + + @classmethod + def from_config( + cls, + port: str, + syringe_diameter: str, + syringe_volume: str, + address: int = 0, + name: str = "", + force: int = 30, + **serial_kwargs, + ): + """ + Programmatic instantiation from configuration. + + Many pump can be present on the same serial port with different addresses. + This shared list of PumpIO objects allow shared state in a borg-inspired way, avoiding singletons + This is only relevant to programmatic instantiation, i.e. when from_config() is called per each pump from a + config file, as it is the case in the HTTP server. + Pump_IO() manually instantiated are not accounted for. 
+ """ + pumpio = None + for obj in Elite11._io_instances: + if obj._serial.port == port: + pumpio = obj + break + + # If not existing serial object are available for the port provided, create a new one + if pumpio is None: + pumpio = HarvardApparatusPumpIO(port, **serial_kwargs) + + return cls( + pumpio, + address=address, + name=name, + syringe_diameter=syringe_diameter, + syringe_volume=syringe_volume, + force=force, + ) + + async def initialize(self): + """ + Initialize Elite11. + + Query model and version number of firmware to check if pump is connected. + Responds with a load of stuff, but the last three characters + are the prompt XXY, where XX is the address and Y is pump status. + The status can be one of the three: [":", ">" "<"] respectively + when stopped, running forwards (pumping), or backwards (withdrawing). + The prompt is used to confirm that the address is correct. + """ + # Autodetect address if none provided + if self.address == 0: + self.address = self.pump_io.autodiscover_address() + + # Test communication and return InvalidConfiguration on failure + try: + await self.stop() + except IndexError as index_e: + raise InvalidConfiguration( + f"Check pump address! Currently {self.address=}" + ) from index_e + + # Sets syringe parameters + await self.set_syringe_diameter(ureg.Quantity(self._diameter)) + await self.set_syringe_volume(ureg.Quantity(self._syringe_volume)) + await self.set_force(self._force) + + logger.info( + f"Connected to '{self.name}'! 
[{self.pump_io._serial.name}:{self.address}]" + ) + version = await self.version() + self.metadata.version = version.split(" ")[-1] + + # Clear target volume eventually set to prevent pump from stopping prematurely + await self.set_target_volume("0 ml") + + # Get pump type + pump_info = await self.pump_info() + self._infuse_only = True if pump_info.infuse_only else False + + @staticmethod + def _parse_version(version_text: str) -> tuple[int, int, int]: + """Extract semver from Elite11 version string, e.g. '11 ELITE I/W Single 3.0.4'.""" + version = version_text.split(" ")[-1] + digits = version.split(".") + return int(digits[0]), int(digits[1]), int(digits[2]) + + async def _send_command_and_read_reply( + self, command: str, parameter="", parse=True, multiline=False + ): + """Send a command based on its template and return the corresponding reply as str.""" + cmd = Protocol11Command( + command=command, + pump_address=self.address, + arguments=parameter, + ) + reply = await self.pump_io.write_and_read_reply(cmd, return_parsed=parse) + if multiline: + return reply + else: + return reply[0] + + async def get_syringe_diameter(self) -> str: + """Get syringe diameter in mm. A value between 1 and 33 mm.""" + return await self._send_command_and_read_reply("diameter") + + async def set_syringe_diameter(self, diameter: pint.Quantity): + """Set syringe diameter. This can be set in the interval 1 mm to 33 mm.""" + if not 1 * ureg.mm <= diameter <= 33 * ureg.mm: + logger.warning( + f"Invalid diameter provided: {diameter}! [Valid range: 1-33 mm]" + ) + return False + + await self._send_command_and_read_reply( + "diameter", parameter=f"{diameter.to('mm').magnitude:.4f} mm" + ) + + async def get_syringe_volume(self) -> str: + """Return the syringe volume as str w/ units.""" + return await self._send_command_and_read_reply("svolume") # e.g. 
'100 ml' + + async def set_syringe_volume(self, volume: pint.Quantity): + """Set the syringe volume in ml.""" + await self._send_command_and_read_reply( + "svolume", parameter=f"{volume.m_as('ml'):.15f} m" + ) + + async def get_force(self): + """ + Pump force, in percentage. + + Manufacturer suggested values are: + stainless steel: 100% + plastic syringes: 50% if volume <= 5 ml else 100% + glass/glass: 30% if volume <= 20 ml else 50% + glass/plastic: 30% if volume <= 250 ul, 50% if volume <= 5ml else 100% + """ + percent = await self._send_command_and_read_reply("FORCE") + return int(percent[:-1]) + + async def set_force(self, force_percent: int): + """Set the pump force, see `Elite11.get_force()` for suggested values.""" + await self._send_command_and_read_reply( + "FORCE", parameter=str(int(force_percent)) + ) + + async def _bound_rate_to_pump_limits(self, rate: str) -> float: + """ + Bound the rate provided to pump's limit. + + These are function of the syringe diameter. + NOTE: Infusion and withdraw limits are equal! + """ + # Get current pump limits (those are function of the syringe diameter) + limits_raw = await self._send_command_and_read_reply("irate lim") + + # Lower limit usually expressed in nl/min so unit-aware quantities are needed + lower_limit, upper_limit = map(ureg, limits_raw.split(" to ")) + + # Also add units to the provided rate + set_rate = ureg.Quantity(rate) + + # Bound rate to acceptance range + if set_rate < lower_limit: + logger.warning( + f"The requested rate {rate} is lower than the minimum possible ({lower_limit})!" + f"Setting rate to {lower_limit} instead!" + ) + set_rate = lower_limit + + if set_rate > upper_limit: + logger.warning( + f"The requested rate {rate} is higher than the maximum possible ({upper_limit})!" + f"Setting rate to {upper_limit} instead!" 
+ ) + set_rate = upper_limit + + return set_rate.to("ml/min").magnitude + + async def version(self) -> str: + """Return the current firmware version reported by the pump.""" + return await self._send_command_and_read_reply( + "VER" + ) # '11 ELITE I/W Single 3.0.4 + + async def is_moving(self) -> bool: + """Evaluate prompt for current status, i.e. moving or not.""" + status = await self._send_command_and_read_reply(" ", parse=False) + prompt = PumpStatus(status[2:3]) + return prompt in (PumpStatus.INFUSING, PumpStatus.WITHDRAWING) + + async def infuse(self): + """Run pump in infuse mode.""" + await self._send_command_and_read_reply("irun") + logger.info("Pump infusion started!") + return True + + async def withdraw(self): + """Activate pump to run in withdraw mode.""" + await self._send_command_and_read_reply("wrun") + logger.info("Pump withdraw started!") + return True + + async def stop(self): + """Stop pump.""" + await self._send_command_and_read_reply("stp") + logger.info("Pump stopped") + + async def wait_until_idle(self): + """Wait until the pump is not moving.""" + while await self.is_moving(): + await asyncio.sleep(0.05) + + async def get_flow_rate(self) -> float: + """Return the infusion rate as str w/ units.""" + flow_value = await self._send_command_and_read_reply("irate") + flowrate = ureg.Quantity(flow_value) + logger.debug(f"Current infusion flow rate is {flowrate}") + return flowrate.m_as("ml/min") + + async def set_flow_rate(self, rate: str): + """Set the infusion rate.""" + set_rate = await self._bound_rate_to_pump_limits(rate=rate) + await self._send_command_and_read_reply( + "irate", parameter=f"{set_rate:.10f} m/m" + ) + + async def get_withdrawing_flow_rate(self) -> float: + """Return the withdrawing flow rate as ml/min.""" + flow_value = await self._send_command_and_read_reply("wrate") + flowrate = ureg.Quantity(flow_value) + logger.debug(f"Current withdraw flow rate is {flowrate}") + return flowrate.m_as("ml/min") + + async def 
set_withdrawing_flow_rate(self, rate: str): + """Set the infusion rate.""" + set_rate = await self._bound_rate_to_pump_limits(rate=rate) + await self._send_command_and_read_reply("wrate", parameter=f"{set_rate} m/m") + + async def set_target_volume(self, volume: str): + """Set target volume in ml. If the volume is set to 0, the target is cleared.""" + target_volume = ureg.Quantity(volume) + if target_volume.magnitude == 0: + await self._send_command_and_read_reply("ctvolume") + else: + set_vol = await self._send_command_and_read_reply( + "tvolume", parameter=f"{target_volume.m_as('ml')} m" + ) + if "Argument error" in set_vol: + warnings.warn( + f"Cannot set target volume of {target_volume} with a " + f"{self.get_syringe_volume()} syringe!" + ) + + async def pump_info(self) -> PumpInfo: + """Return pump info.""" + parsed_multiline_response = await self._send_command_and_read_reply( + "metrics", multiline=True + ) + return PumpInfo.parse_pump_string(parsed_multiline_response) + + def components(self): + """Return pump component.""" + if self._infuse_only: + return (Elite11PumpOnly("pump", self),) + else: + return (Elite11PumpWithdraw("pump", self),) + + +if __name__ == "__main__": + pump = Elite11.from_config( + port="COM4", syringe_volume="10 ml", syringe_diameter="10 mm" + ) + + async def main(): + """Test function.""" + await pump.initialize() + # assert await pump.get_infused_volume() == 0 + await pump.set_syringe_diameter("30 mm") + await pump.set_flow_rate("0.1 ml/min") + await pump.set_target_volume("0.05 ml") + await pump.infuse() + await asyncio.sleep(2) + await pump.pump_info() + + asyncio.run(main()) diff --git a/src/flowchem/devices/harvardapparatus/elite11_finder.py b/src/flowchem/devices/harvardapparatus/elite11_finder.py new file mode 100644 index 00000000..58a13a75 --- /dev/null +++ b/src/flowchem/devices/harvardapparatus/elite11_finder.py @@ -0,0 +1,66 @@ +"""This module is used to discover the serial address of any Elite11 connected to the PC.""" 
+import asyncio +from textwrap import dedent + +from loguru import logger + +from flowchem.devices.harvardapparatus.elite11 import Elite11 +from flowchem.devices.harvardapparatus.elite11 import HarvardApparatusPumpIO +from flowchem.exceptions import InvalidConfiguration + + +# noinspection PyProtectedMember +def elite11_finder(serial_port) -> set[str]: + """Try to initialize an Elite11 on every available COM port. [Does not support daisy-chained Elite11!]""" + logger.debug(f"Looking for Elite11 pumps on {serial_port}...") + # Static counter for device type across different serial ports + if "counter" not in elite11_finder.__dict__: + elite11_finder.counter = 0  # type: ignore + + try: + link = HarvardApparatusPumpIO(port=serial_port) + except InvalidConfiguration: + # This is necessary only on failure to release the port for the other inspector + return set() + + # Check for echo + link._serial.write(b"\r\n") + if link._serial.readline() != b"\n": + # This is necessary only on failure to release the port for the other inspector + link._serial.close() + return set() + + # Parse status prompt + pump = link._serial.readline().decode("ascii") + if pump[0:2].isdigit(): + address = int(pump[0:2]) + else: + address = 0 + + try: + test_pump = Elite11( + link, + syringe_diameter="20 mm", + syringe_volume="10 ml", + address=address, + ) + info = asyncio.run(test_pump.pump_info()) + except InvalidConfiguration: + # This is necessary only on failure to release the port for the other inspector + link._serial.close() + return set() + + p_type = "Elite11InfuseOnly" if info.infuse_only else "Elite11InfuseWithdraw" + logger.info(f"Pump {p_type} found on <{serial_port}>") + + elite11_finder.counter += 1  # type: ignore + return set( + dedent( + f"\n\n[device.elite11-{elite11_finder.counter}]\n"  # type:ignore + f"""type = "{p_type}" + port = "{serial_port}" + address = {address} + syringe_diameter = "XXX mm"    # Specify syringe diameter!
+ syringe_volume = "YYY ml" # Specify syringe volume!\n""" + ) + ) diff --git a/src/flowchem/devices/harvardapparatus/elite11_pump.py b/src/flowchem/devices/harvardapparatus/elite11_pump.py new file mode 100644 index 00000000..cf74dc4f --- /dev/null +++ b/src/flowchem/devices/harvardapparatus/elite11_pump.py @@ -0,0 +1,62 @@ +"""Elite11 pump component.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + +from loguru import logger + +if TYPE_CHECKING: + from .elite11 import Elite11 +from flowchem.components.pumps.syringe_pump import SyringePump + + +class Elite11PumpOnly(SyringePump): + hw_device: Elite11 # for typing's sake + + @staticmethod + def is_withdrawing_capable(): + """Elite11 w/o withdraw option.""" + return False + + async def is_pumping(self) -> bool: + """True if pump is moving.""" + return await self.hw_device.is_moving() + + async def stop(self): + """Stops pump.""" + await self.hw_device.stop() + + async def infuse(self, rate: str = "", volume: str = "") -> bool: + """Infuse.""" + if await self.is_pumping(): + logger.warning("Cannot start infusion: pump already moving!") + return False + + if rate: # Else previous rate will be used + await self.hw_device.set_flow_rate(rate) + + if volume: + await self.hw_device.set_target_volume(volume) + + return await self.hw_device.infuse() + + +class Elite11PumpWithdraw(Elite11PumpOnly): + @staticmethod + def is_withdrawing_capable(): + """Elite11 w/ withdraw option.""" + return True + + async def withdraw(self, rate: str = "1 ml/min", volume: str | None = None) -> bool: + """Withdraw.""" + if await self.is_pumping(): + logger.warning("Cannot start withdrawing: pump already moving!") + return False + + if rate: # Else previous rate will be used + await self.hw_device.set_withdrawing_flow_rate(rate) + + if volume: # FIXME check if target volume also works for withdrawing! 
+ await self.hw_device.set_target_volume(volume) + + return await self.hw_device.withdraw() diff --git a/src/flowchem/devices/huber/__init__.py b/src/flowchem/devices/huber/__init__.py new file mode 100644 index 00000000..aae2b910 --- /dev/null +++ b/src/flowchem/devices/huber/__init__.py @@ -0,0 +1,4 @@ +"""Huber's devices.""" +from .chiller import HuberChiller + +__all__ = ["HuberChiller"] diff --git a/src/flowchem/devices/huber/chiller.py b/src/flowchem/devices/huber/chiller.py new file mode 100644 index 00000000..6538f1a6 --- /dev/null +++ b/src/flowchem/devices/huber/chiller.py @@ -0,0 +1,355 @@ +"""Huber chiller control driver.""" +import asyncio + +import aioserial +import pint +from loguru import logger + +from flowchem import ureg +from flowchem.components.technical.temperature_control import TempRange +from flowchem.devices.flowchem_device import DeviceInfo +from flowchem.devices.flowchem_device import FlowchemDevice +from flowchem.devices.huber.huber_temperature_control import HuberTemperatureControl +from flowchem.devices.huber.pb_command import PBCommand +from flowchem.exceptions import InvalidConfiguration +from flowchem.people import * + + +class HuberChiller(FlowchemDevice): + """Control class for Huber chillers.""" + + DEFAULT_CONFIG = { + "timeout": 0.1, + "baudrate": 9600, + "parity": aioserial.PARITY_NONE, + "stopbits": aioserial.STOPBITS_ONE, + "bytesize": aioserial.EIGHTBITS, + } + + def __init__( + self, + aio: aioserial.AioSerial, + name="", + min_temp: float = -150, + max_temp: float = 250, + ): + super().__init__(name) + self._serial = aio + self._device_sn: int = None # type: ignore + self._min_t: float = min_temp + self._max_t: float = max_temp + + @classmethod + def from_config(cls, port, name=None, **serial_kwargs): + """ + Create instance from config dict. Used by server to initialize obj from config. + + Only required parameter is 'port'. 
Optional 'loop' + others (see AioSerial()) + """ + # Merge default settings, including serial, with provided ones. + configuration = HuberChiller.DEFAULT_CONFIG | serial_kwargs + + try: + serial_object = aioserial.AioSerial(port, **configuration) + except (OSError, aioserial.SerialException) as serial_exception: + raise InvalidConfiguration( + f"Cannot connect to the HuberChiller on the port <{port}>" + ) from serial_exception + + return cls(serial_object, name) + + async def initialize(self): + """Ensure the connection w/ device is working.""" + self._device_sn = await self.serial_number() + if self._device_sn == 0: + raise InvalidConfiguration("No reply received from Huber Chiller!") + logger.debug(f"Connected with Huber Chiller S/N {self._device_sn}") + + # Validate temperature limits + device_limits = await self.temperature_limits() + if self._min_t < device_limits[0]: + logger.warning( + f"The device minimum temperature is higher than the specified minimum temperature!" + f"The lowest possible temperature will be {device_limits[0]} °C" + ) + self._min_t = device_limits[0] + + if self._max_t > device_limits[1]: + logger.warning( + f"The device maximum temperature is lower than the specified maximum temperature!" + f"The maximum possible temperature will be {device_limits[1]} °C" + ) + self._max_t = device_limits[1] + + def metadata(self) -> DeviceInfo: + """Return hw device metadata.""" + return DeviceInfo( + authors=[dario, jakob, wei_hsin], + maintainers=[dario], + manufacturer="Huber", + model="generic chiller", + serial_number=self._device_sn, + ) + + async def _send_command_and_read_reply(self, command: str) -> str: + """Send a command to the chiller and read the reply. + + Args: + command (str): string to be transmitted + + Returns: + str: reply received + """ + # Send command. 
Using PBCommand ensure command validation, see PBCommand.to_chiller() + pb_command = PBCommand(command.upper()) + await self._serial.write_async(pb_command.to_chiller()) + logger.debug(f"Command {command[0:8]} sent to chiller!") + + # Receive reply and return it after decoding + try: + reply = await asyncio.wait_for(self._serial.readline_async(), 1) + except asyncio.TimeoutError: + logger.error("No reply received! Unsupported command?") + return "" + + logger.debug(f"Reply received: {reply}") + return reply.decode("ascii") + + async def get_temperature(self) -> float: + """Get temperature. Process preferred, otherwise internal.""" + if process_t := await self.process_temperature(): + return process_t + return await self.internal_temperature() # type: ignore + + async def get_temperature_setpoint(self) -> float | None: + """Return the set point used by temperature controller. Internal if not probe, otherwise process temp.""" + reply = await self._send_command_and_read_reply("{M00****") + return PBCommand(reply).parse_temperature() + + async def set_temperature(self, temp: pint.Quantity): + """Set the set point used by temperature controller. Internal if not probe, otherwise process temp.""" + await self._send_command_and_read_reply("{M00" + self._temp_to_string(temp)) + + async def target_reached(self) -> bool: + """Trivially implemented as delta(currentT-setT) < 1°C.""" + current_t = await self.get_temperature() + set_t = await self.get_temperature_setpoint() + if set_t: + return abs(current_t - set_t) < 1 + return False + + async def internal_temperature(self) -> float | None: + """Return internal temp (bath temperature).""" + reply = await self._send_command_and_read_reply("{M01****") + return PBCommand(reply).parse_temperature() + + async def process_temperature(self) -> float | None: + """Return the current process temperature. 
If not T probe, the temperature is None.""" + reply = await self._send_command_and_read_reply("{M3A****") + return PBCommand(reply).parse_temperature() + + async def temperature_limits(self) -> tuple[float, float]: + """Return minimum/maximum accepted value for the temperature setpoint (in Celsius).""" + min_reply = await self._send_command_and_read_reply("{M30****") + min_t = PBCommand(min_reply).parse_temperature() + max_reply = await self._send_command_and_read_reply("{M31****") + max_t = PBCommand(max_reply).parse_temperature() + return min_t, max_t + + async def serial_number(self) -> int: + """Get serial number.""" + serial1 = await self._send_command_and_read_reply("{M1B****") + serial2 = await self._send_command_and_read_reply("{M1C****") + pb1, pb2 = PBCommand(serial1), PBCommand(serial2) + if pb1.data and pb2.data: + return int(pb1.data + pb2.data, 16) + else: + return 0 + + @staticmethod + def _temp_to_string(temp: pint.Quantity) -> str: + """From temperature to string for command. f^-1 of PCommand.parse_temperature.""" + assert ( + ureg.Quantity("-151 °C") <= temp <= ureg.Quantity("327 °C") + ), "Protocol temperature limits" + # Hexadecimal two's complement + return f"{int(temp.m_as('°C') * 100) & 65535:04X}" + + @staticmethod + def _int_to_string(number: int) -> str: + """From int to string for command. 
f^-1 of PCommand.parse_integer.""" + return f"{number:04X}" + + def get_components(self): + """Return a TemperatureControl component.""" + temperature_limits = TempRange( + min=ureg.Quantity(self._min_t), max=ureg.Quantity(self._max_t) + ) + return HuberTemperatureControl("temperature-control", self, temperature_limits) + + # async def return_temperature(self) -> float | None: + # """Return the temp of the thermal fluid flowing back to the device.""" + # reply = await self._send_command_and_read_reply("{M02****") + # return PBCommand(reply).parse_temperature() + # + # async def pump_pressure(self) -> str: + # """Return the pump pressure in mbar (note that you probably want barg, i.e. to remove 1 bar).""" + # reply = await self._send_command_and_read_reply("{M03****") + # pressure = PBCommand(reply).parse_integer() + # return str(ureg(f"{pressure} mbar")) + # + # async def current_power(self) -> str: + # """Return the current power in Watts (negative for cooling, positive for heating).""" + # reply = await self._send_command_and_read_reply("{M04****") + # power = PBCommand(reply).parse_integer() + # return str(ureg(f"{power} watt")) + # + # async def status(self) -> dict[str, bool]: + # """Return the info contained in `vstatus1` as dict.""" + # reply = await self._send_command_and_read_reply("{M0A****") + # return PBCommand(reply).parse_status1() + # + # async def status2(self) -> dict[str, bool]: + # """Return the info contained in `vstatus2` as dict.""" + # reply = await self._send_command_and_read_reply("{M3C****") + # return PBCommand(reply).parse_status2() + # + # async def is_temperature_control_active(self) -> bool: + # """Return whether temperature control is active or not.""" + # reply = await self._send_command_and_read_reply("{M14****") + # return PBCommand(reply).parse_boolean() + # + # async def is_circulation_active(self) -> bool: + # """Return whether temperature control is active or not.""" + # reply = await 
self._send_command_and_read_reply("{M16****") + # return PBCommand(reply).parse_boolean() + # + # async def start_circulation(self): + # """Start circulation pump.""" + # await self._send_command_and_read_reply("{M160001") + # + # async def stop_circulation(self): + # """Stop circulation pump.""" + # await self._send_command_and_read_reply("{M160000") + # + # async def pump_speed(self) -> str: + # """Return current circulation pump speed (in rpm).""" + # reply = await self._send_command_and_read_reply("{M26****") + # return PBCommand(reply).parse_rpm() + # + # async def pump_speed_setpoint(self) -> str: + # """Return the set point of the circulation pump speed (in rpm).""" + # reply = await self._send_command_and_read_reply("{M48****") + # return PBCommand(reply).parse_rpm() + # + # async def set_pump_speed(self, rpm: str): + # """Set the pump speed, in rpm. See device display for range.""" + # parsed_rpm = ureg(rpm) + # await self._send_command_and_read_reply( + # "{M48" + self._int_to_string(parsed_rpm.m_as("rpm")) + # ) + # + # async def cooling_water_temp(self) -> float | None: + # """Return the cooling water inlet temperature (in Celsius).""" + # reply = await self._send_command_and_read_reply("{M2C****") + # return PBCommand(reply).parse_temperature() + # + # async def cooling_water_pressure(self) -> float | None: + # """Return the cooling water inlet pressure (in mbar).""" + # reply = await self._send_command_and_read_reply("{M2D****") + # if pressure := PBCommand(reply).parse_integer() == 64536: + # return None + # return pressure + # + # async def cooling_water_temp_outflow(self) -> float | None: + # """Return the cooling water outlet temperature (in Celsius).""" + # reply = await self._send_command_and_read_reply("{M4C****") + # return PBCommand(reply).parse_temperature() + # + # async def alarm_max_internal_temp(self) -> float | None: + # """Return the max internal temp before the alarm is triggered and a fault generated.""" + # reply = await 
self._send_command_and_read_reply("{M51****") + # return PBCommand(reply).parse_temperature() + # + # async def set_alarm_max_internal_temp(self, set_temp: str): + # """Set the max internal temp before the alarm is triggered and a fault generated.""" + # temp = ureg(set_temp) + # await self._send_command_and_read_reply("{M51" + self._temp_to_string(temp)) + # + # async def alarm_min_internal_temp(self) -> float | None: + # """Return the min internal temp before the alarm is triggered and a fault generated.""" + # reply = await self._send_command_and_read_reply("{M52****") + # return PBCommand(reply).parse_temperature() + # + # async def set_alarm_min_internal_temp(self, set_temp: str): + # """Set the min internal temp before the alarm is triggered and a fault generated.""" + # temp = ureg(set_temp) + # await self._send_command_and_read_reply("{M52" + self._temp_to_string(temp)) + # + # async def alarm_max_process_temp(self) -> float | None: + # """Return the max process temp before the alarm is triggered and a fault generated.""" + # reply = await self._send_command_and_read_reply("{M53****") + # return PBCommand(reply).parse_temperature() + # + # async def set_alarm_max_process_temp(self, set_temp: str): + # """Set the max process temp before the alarm is triggered and a fault generated.""" + # temp = ureg(set_temp) + # await self._send_command_and_read_reply("{M53" + self._temp_to_string(temp)) + # + # async def alarm_min_process_temp(self) -> float | None: + # """Return the min process temp before the alarm is triggered and a fault generated.""" + # reply = await self._send_command_and_read_reply("{M54****") + # return PBCommand(reply).parse_temperature() + # + # async def set_alarm_min_process_temp(self, set_temp: str): + # """Set the min process temp before the alarm is triggered and a fault generated.""" + # temp = ureg(set_temp) + # await self._send_command_and_read_reply("{M54" + self._temp_to_string(temp)) + # + # async def set_ramp_duration(self, 
ramp_time: str): + # """Set the duration (in seconds) of a ramp to the temperature set by a later call to ramp_to_temperature.""" + # parsed_time = ureg(ramp_time) + # await self._send_command_and_read_reply( + # "{M59" + self._int_to_string(parsed_time.m_as("s")) + # ) + # + # async def ramp_to_temperature(self, temperature: str): + # """Set the duration (in seconds) of a ramp to the temperature set by a later call to start_ramp().""" + # temp = ureg(temperature) + # await self._send_command_and_read_reply("{M5A" + self._temp_to_string(temp)) + # + # async def is_venting(self) -> bool: + # """Whether the chiller is venting or not.""" + # reply = await self._send_command_and_read_reply("{M6F****") + # return PBCommand(reply).parse_boolean() + # + # async def start_venting(self): + # """Start venting. ONLY USE DURING SETUP! READ THE MANUAL!""" + # await self._send_command_and_read_reply("{M6F0001") + # + # async def stop_venting(self): + # """Stop venting.""" + # await self._send_command_and_read_reply("{M6F0000") + # + # async def is_draining(self) -> bool: + # """Whether the chiller is venting or not.""" + # reply = await self._send_command_and_read_reply("{M70****") + # return PBCommand(reply).parse_boolean() + # + # async def start_draining(self): + # """Start venting. ONLY USE DURING SHUT DOWN! 
READ THE MANUAL!""" + #     await self._send_command_and_read_reply("{M700001") + # + # async def stop_draining(self): + #     """Stop venting.""" + #     await self._send_command_and_read_reply("{M700000") + + +if __name__ == "__main__": + chiller = HuberChiller(aioserial.AioSerial(port="COM8")) + + async def main(chiller): + await chiller.initialize() + print(f"S/N is {await chiller.serial_number()}") + + asyncio.run(main(chiller)) diff --git a/src/flowchem/devices/huber/huber_finder.py b/src/flowchem/devices/huber/huber_finder.py new file mode 100644 index 00000000..6d7d1202 --- /dev/null +++ b/src/flowchem/devices/huber/huber_finder.py @@ -0,0 +1,35 @@ +"""This module is used to discover the serial address of any Huber chiller connected to the PC.""" +import asyncio +from textwrap import dedent + +from loguru import logger + +from flowchem.devices.huber.chiller import HuberChiller +from flowchem.exceptions import InvalidConfiguration + + +# noinspection PyProtectedMember +def chiller_finder(serial_port) -> set[str]: + """Try to initialize a Huber chiller on every available COM port.""" + logger.debug(f"Looking for Huber chillers on {serial_port}...") + + try: + chill = HuberChiller.from_config(port=serial_port) + except InvalidConfiguration: + return set() + + try: + asyncio.run(chill.initialize()) + except InvalidConfiguration: + chill._serial.close() + return set() + + logger.info(f"Chiller #{chill._device_sn} found on <{serial_port}>") + + return { + dedent( + f"""\n\n[device.huber-{chill._device_sn}] + type = "HuberChiller" + port = "{serial_port}"\n""" + ) + } diff --git a/src/flowchem/devices/huber/huber_temperature_control.py b/src/flowchem/devices/huber/huber_temperature_control.py new file mode 100644 index 00000000..7e2f4ad7 --- /dev/null +++ b/src/flowchem/devices/huber/huber_temperature_control.py @@ -0,0 +1,37 @@ +"""Huber TemperatureControl component.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from .chiller import
HuberChiller + +from loguru import logger + +from flowchem import ureg +from flowchem.components.technical.temperature_control import TemperatureControl + + +class HuberTemperatureControl(TemperatureControl): + hw_device: HuberChiller # for typing's sake + + async def set_temperature(self, temp: str): + """Set the target temperature to the given string in natural language.""" + set_t = await super().set_temperature(temp) + return await self.hw_device.set_temperature(set_t) + + async def get_temperature(self) -> float: + """Return temperature in Celsius.""" + return await self.hw_device.get_temperature() + + async def is_target_reached(self) -> bool: + """Return True if the set temperature target has been reached.""" + return await self.hw_device.target_reached() + + async def power_on(self): + """Turn on temperature control.""" + return await self.hw_device._send_command_and_read_reply("{M140001") + + async def power_off(self): + """Turn off temperature control.""" + return await self.hw_device._send_command_and_read_reply("{M140000") diff --git a/src/flowchem/devices/huber/pb_command.py b/src/flowchem/devices/huber/pb_command.py new file mode 100644 index 00000000..bf2d59d0 --- /dev/null +++ b/src/flowchem/devices/huber/pb_command.py @@ -0,0 +1,97 @@ +from dataclasses import dataclass + +from flowchem import ureg + + +@dataclass +class PBCommand: + """Class representing a PBCommand.""" + + command: str + + def to_chiller(self) -> bytes: + """Validate and encode to bytes array to be transmitted.""" + self.validate() + return self.command.encode("ascii") + + def validate(self): + """Check command structure to be compliant with PB format.""" + if len(self.command) == 8: + self.command += "\r\n" + # 10 characters + assert len(self.command) == 10 + # Starts with { + assert self.command[0] == "{" + # M for master (commands) S for slave (replies). + assert self.command[1] in ("M", "S") + # Address, i.e. the desired function. Hex encoded. 
+ assert 0 <= int(self.command[2:4], 16) < 256 + # Value + assert self.command[4:8] == "****" or 0 <= int(self.command[4:8], 16) <= 65536 + # EOL + assert self.command[8:10] == "\r\n" + + @property + def data(self) -> str: + """Data portion of PBCommand.""" + return self.command[4:8] + + def parse_temperature(self) -> float: + """Parse a device temp from hex string to celsius float.""" + # self.data is the two's complement 16-bit signed hex, see manual + temp = ( + (int(self.data, 16) - 65536) / 100 + if int(self.data, 16) > 32767 + else (int(self.data, 16)) / 100 + ) + # Note: -151 used for invalid temperatures! + return temp + + def parse_integer(self) -> int: + """Parse a device reply from hexadecimal string to base 10 integers.""" + return int(self.data, 16) + + def parse_rpm(self) -> str: + """Parse a device reply from hexadecimal string to rpm.""" + return str(ureg.Quantity(f"{self.parse_integer()} rpm")) + + def parse_bits(self) -> list[bool]: + """Parse a device reply from hexadecimal string to 16 constituting bits.""" + bits = f"{int(self.data, 16):016b}" + return [bool(int(x)) for x in bits] + + def parse_boolean(self): + """Parse a device reply from hexadecimal string (0x0000 or 0x0001) to boolean.""" + return self.parse_integer() == 1 + + def parse_status1(self) -> dict[str, bool]: + """Parse response to status1 command and returns dict.""" + bits = self.parse_bits() + return { + "temp_ctl_is_process": bits[0], + "circulation_active": bits[1], + "refrigerator_on": bits[2], + "temp_is_process": bits[3], + "circulating_pump": bits[4], + "cooling_power_available": bits[5], + "tkeylock": bits[6], + "is_pid_auto": bits[7], + "error": bits[8], + "warning": bits[9], + "int_temp_mode": bits[10], + "ext_temp_mode": bits[11], + "dv_e_grade": bits[12], + "power_failure": bits[13], + "freeze_protection": bits[14], + } + + def parse_status2(self) -> dict[str, bool]: + """Parse response to status2 command and returns dict. 
See manufacturer docs for more info""" + bits = self.parse_bits() + return { + "controller_is_external": bits[0], + "drip_tray_full": bits[5], + "venting_active": bits[7], + "venting_successful": bits[8], + "venting_monitored": bits[9], + } diff --git a/src/flowchem/devices/knauer/__init__.py b/src/flowchem/devices/knauer/__init__.py new file mode 100644 index 00000000..c3f3e960 --- /dev/null +++ b/src/flowchem/devices/knauer/__init__.py @@ -0,0 +1,13 @@ +"""Knauer's devices.""" +from .azura_compact import AzuraCompact +from .dad import KnauerDAD +from .knauer_finder import knauer_finder +from .valve import KnauerValve + + +__all__ = [ + "knauer_finder", + "AzuraCompact", + "KnauerDAD", + "KnauerValve", +] diff --git a/flowchem/components/devices/Knauer/Knauer_common.py b/src/flowchem/devices/knauer/_common.py similarity index 80% rename from flowchem/components/devices/Knauer/Knauer_common.py rename to src/flowchem/devices/knauer/_common.py index c189fa68..da43f44c 100644 --- a/flowchem/components/devices/Knauer/Knauer_common.py +++ b/src/flowchem/devices/knauer/_common.py @@ -1,24 +1,20 @@ -""" -Module for communication with Knauer pumps and valves. -""" +"""Module for communication with Knauer devices.""" import asyncio from loguru import logger -from flowchem.components.devices.Knauer.Knauer_autodiscover import autodiscover_knauer +from .knauer_finder import autodiscover_knauer from flowchem.exceptions import InvalidConfiguration class KnauerEthernetDevice: - """ - Common base class for shared logic across Knauer pumps and valves. - """ + """Common base class for shared logic across Knauer pumps and valves.""" TCP_PORT = 10001 BUFFER_SIZE = 1024 _id_counter = 0 - def __init__(self, ip_address, mac_address, name: str = None): + def __init__(self, ip_address, mac_address, **kwargs): """ Knauer Ethernet Device - either pump or valve. 
@@ -28,11 +24,11 @@ def __init__(self, ip_address, mac_address, name: str = None): Note that for configuration files, the MAC address is preferred as it is static. Args: - ip_address: IP address of Knauer device - mac_address: MAC address of Knauer device + ip_address: device IP address (only 1 of either IP or MAC address is needed) + mac_address: device MAC address (only 1 of either IP or MAC address is needed) name: name of device (optional) """ - super().__init__(name) # type: ignore + super().__init__(**kwargs) # MAC address if mac_address: @@ -44,11 +40,11 @@ def __init__(self, ip_address, mac_address, name: str = None): self._reader: asyncio.StreamReader = None # type: ignore self._writer: asyncio.StreamWriter = None # type: ignore - # Note: the pump requires "\n\r" as EOL, the valves "\r\n"! So this is set by sublcasses + # Note: the pump requires "\n\r" as EOL, the valves "\r\n"! So this is set by the subclasses self.eol = b"" def _ip_from_mac(self, mac_address: str) -> str: - """Gets IP from MAC.""" + """Get IP from MAC.""" # Autodiscover IP from MAC address available_devices = autodiscover_knauer() # IP if found, None otherwise @@ -62,12 +58,12 @@ def _ip_from_mac(self, mac_address: str) -> str: return ip_address async def initialize(self): - """Initialize connection""" + """Initialize connection.""" # Future used to set shorter timeout than default future = asyncio.open_connection(host=self.ip_address, port=10001) try: self._reader, self._writer = await asyncio.wait_for(future, timeout=3) - except ConnectionError as connection_error: + except OSError as connection_error: logger.exception(connection_error) raise InvalidConfiguration( f"Cannot open connection with device {self.__class__.__name__} at IP={self.ip_address}" @@ -80,6 +76,7 @@ async def initialize(self): async def _send_and_receive(self, message: str) -> str: self._writer.write(message.encode("ascii") + self.eol) + await self._writer.drain() logger.debug(f"WRITE >>> '{message}' ") reply = await 
self._reader.readuntil(separator=b"\r") logger.debug(f"READ <<< '{reply.decode().strip()}' ") diff --git a/flowchem/components/devices/Knauer/AzuraCompactPump.py b/src/flowchem/devices/knauer/azura_compact.py similarity index 64% rename from flowchem/components/devices/Knauer/AzuraCompactPump.py rename to src/flowchem/devices/knauer/azura_compact.py index 5003f299..9c051f25 100644 --- a/flowchem/components/devices/Knauer/AzuraCompactPump.py +++ b/src/flowchem/devices/knauer/azura_compact.py @@ -1,29 +1,31 @@ -""" -Knauer pump control. -""" +"""Knauer pump control.""" import asyncio import warnings from enum import Enum -from typing import List +import pint from loguru import logger -from flowchem.components.devices.Knauer.Knauer_common import KnauerEthernetDevice -from flowchem.components.stdlib import Pump +from flowchem import ureg +from flowchem.devices.flowchem_device import DeviceInfo +from flowchem.devices.flowchem_device import FlowchemDevice +from flowchem.devices.knauer._common import KnauerEthernetDevice +from flowchem.devices.knauer.azura_compact_pump import AzuraCompactPump +from flowchem.devices.knauer.azura_compact_sensor import AzuraCompactSensor from flowchem.exceptions import DeviceError -from flowchem.units import flowchem_ureg +from flowchem.people import * FLOW = "FLOW" # 0-50000 µL/min, int only! -HEADTYPE = "HEADTYPE" # 10, 50 ml. Value refers to highest flowrate in ml/min -PMIN10 = "PMIN10" # 0-400 in 0.1 MPa, use to avoid dryrunning -PMIN50 = "PMIN50" # 0-150 in 0.1 MPa, use to avoid dryrunning +HEADTYPE = "HEADTYPE" # 10, 50 ml. 
Value refers to the highest flow rate in ml/min +PMIN10 = "PMIN10" # 0-400 in 0.1 MPa, use to avoid running dry +PMIN50 = "PMIN50" # 0-150 in 0.1 MPa, use to avoid running dry PMAX10 = "PMAX10" # 0-400 in 0.1 MPa, chosen automatically by selecting pump head -PMAX50 = "PMAX50" # 0-150 in 0.1 MPa, chosen automatically by selecting pumphead +PMAX50 = "PMAX50" # 0-150 in 0.1 MPa, chosen automatically by selecting pump head IMIN10 = "IMIN10" # 0-100 minimum motor current IMIN50 = "IMIN50" # 0-100 minimum motor current STARTLEVEL = "STARTLEVEL" # 0, 1 configures start. 0 -> only start pump when shorted to GND, 1 -> always allow start ERRIO = "ERRIO" # 0, 1 write/read error in/output ??? sets errio either 1 or 0, reports errio:ok -STARTMODE = "STARTMODE" # 0, 1; 0=pause pump after switchon, 1=start immediatley with previous set flow rate +STARTMODE = "STARTMODE" # 0, 1; 0=pause pump after switch on, 1=start immediately with previous set flow rate ADJ10 = "ADJ10" # 100-2000 ADJ50 = "ADJ50" # 100-2000 CORR10 = "CORR10" # 0-300 @@ -46,64 +48,58 @@ class AzuraPumpHeads(Enum): FLOWRATE_TEN_ML = 10 -class AzuraCompactPump(KnauerEthernetDevice, Pump): +# noinspection DuplicatedCode +class AzuraCompact(KnauerEthernetDevice, FlowchemDevice): """Control module for Knauer Azura Compact pumps.""" - metadata = { - "author": [ - { - "first_name": "Jakob", - "last_name": "Wolf", - "email": "jakob.wolf@mpikg.mpg.de", - "institution": "Max Planck Institute of Colloids and Interfaces", - "github_username": "JB-Wolf", - }, - { - "first_name": "Dario", - "last_name": "Cambie", - "email": "dario.cambie@mpikg.mpg.de", - "institution": "Max Planck Institute of Colloids and Interfaces", - "github_username": "dcambie", - }, - ], - "stability": "beta", - "supported": True, - } + metadata = DeviceInfo( + authors=[dario, jakob, wei_hsin], + maintainers=[dario], + manufacturer="knauer", + model="Azura Compact", + ) def __init__( - self, ip_address=None, mac_address=None, name=None, max_pressure: str 
= None + self, + ip_address=None, + mac_address=None, + max_pressure: str = "", + min_pressure: str = "", + name="", ): - super().__init__(ip_address, mac_address, name) + super().__init__(ip_address, mac_address, name=name) self.eol = b"\n\r" # All the following are set upon initialize() - self.max_allowed_pressure, self.max_allowed_flow = 0, 0 + self.max_allowed_pressure = 0 + self.max_allowed_flow = 0 self._headtype = None - self._running = None - self._pressure_limit = max_pressure + self._running: bool = None # type: ignore + self._pressure_max = max_pressure + self._pressure_min = min_pressure - self.rate = flowchem_ureg.parse_expression("0 ml/min") - self._base_state = dict(rate="0 mL/min") + self.rate = ureg.parse_expression("0 ml/min") async def initialize(self): - """Initialize connection""" + """Initialize connection.""" # Here the magic happens... await super().initialize() # Here it is checked that the device is a pump and not a valve await self.get_headtype() # Place pump in remote control - await self.set_remote() + await self.remote_control() # Also ensure rest state is not pumping. - await self.stop_flow() + await self.stop() - if self._pressure_limit is not None: - await self.set_maximum_pressure(self._pressure_limit) + if self._pressure_max: + await self.set_maximum_pressure(self._pressure_max) + if self._pressure_min: + await self.set_minimum_pressure(self._pressure_min) @staticmethod def error_present(reply: str) -> bool: - """True if there are errors, False otherwise. Warns for errors.""" - + """Return True if there are errors, False otherwise. 
Warns for errors.""" # ERRORS: is the expected answer to read_errors() if not reply.startswith("ERROR") or reply.startswith("ERRORS:"): return False @@ -121,8 +117,9 @@ def error_present(reply: str) -> bool: async def _transmit_and_parse_reply(self, message: str) -> str: """ - sends command and receives reply, deals with all communication based stuff and checks - that the valve is of expected type + Send command and receive reply. + + Deals with all communication based stuff and checks that the valve is of expected type. :param message: :return: reply: str """ @@ -150,7 +147,7 @@ async def _transmit_and_parse_reply(self, message: str) -> str: return reply async def create_and_send_command( - self, message, setpoint: int = None, setpoint_range: tuple = None + self, message, setpoint: int | None = None, setpoint_range: tuple | None = None ): """ Create and sends a message from the command. @@ -185,7 +182,7 @@ async def create_and_send_command( @property def _headtype(self): - """Internal state reflecting pump one, use set_headtype() to change in pump!""" + """Return internal head type. Use `set_headtype()` to change in pump.""" return self.__headtype @_headtype.setter @@ -198,7 +195,7 @@ def _headtype(self, htype): self.max_allowed_pressure, self.max_allowed_flow = 150, 50000 async def get_headtype(self) -> AzuraPumpHeads: - """Returns pump's head type.""" + """Return pump's head type.""" head_type_id = await self.create_and_send_command(HEADTYPE) try: headtype = AzuraPumpHeads(int(head_type_id)) @@ -214,43 +211,41 @@ async def get_headtype(self) -> AzuraPumpHeads: return headtype async def set_headtype(self, head_type: AzuraPumpHeads): - """Sets pump's head type.""" + """Set pump's head type.""" await self.create_and_send_command(HEADTYPE, setpoint=head_type.value) # Update internal property (changes max flowrate etc.) 
self._headtype = head_type logger.debug(f"Head type set to {head_type}") - async def get_flow(self) -> str: - """Gets flow rate.""" + async def get_flow_rate(self) -> float: + """Get flow rate in ml/min.""" flow_value = await self.create_and_send_command(FLOW) - flowrate = flowchem_ureg(f"{flow_value} ul/min") + flowrate = ureg.Quantity(f"{flow_value} ul/min") logger.debug(f"Current flow rate is {flowrate}") - return str(flowrate.to("ml/min")) + return flowrate.m_as("ml/min") - async def set_flow(self, flowrate: str = None): - """Sets flow rate. + async def set_flow_rate(self, rate: pint.Quantity): + """Set flow rate. - :param flowrate: string with units + Args: + rate (str): value with units """ - parsed_flowrate = flowchem_ureg(flowrate) await self.create_and_send_command( FLOW, - setpoint=round(parsed_flowrate.m_as("ul/min")), + setpoint=round(rate.m_as("ul/min")), setpoint_range=(0, self.max_allowed_flow + 1), ) - logger.info(f"Flow set to {flowrate}") + logger.info(f"Flow set to {rate}") async def get_minimum_pressure(self): - """Gets minimum pressure. The pump stops if the measured P is lower than this.""" - + """Get minimum pressure. The pump stops if the measured P is lower than this.""" command = PMIN10 if self._headtype == AzuraPumpHeads.FLOWRATE_TEN_ML else PMIN50 - p_min = await self.create_and_send_command(command) * flowchem_ureg.bar + p_min = await self.create_and_send_command(command) * ureg.bar return str(p_min) async def set_minimum_pressure(self, value: str = "0 bar"): - """Sets minimum pressure. The pump stops if the measured P is lower than this.""" - - pressure = flowchem_ureg(value) + """Set minimum pressure. 
The pump stops if the measured P is lower than this.""" + pressure = ureg.Quantity(value) command = PMIN10 if self._headtype == AzuraPumpHeads.FLOWRATE_TEN_ML else PMIN50 await self.create_and_send_command( command, @@ -260,16 +255,14 @@ async def set_minimum_pressure(self, value: str = "0 bar"): logger.info(f"Minimum pressure set to {pressure}") async def get_maximum_pressure(self) -> str: - """Gets maximum pressure. The pumps stop if the measured P is higher than this.""" - + """Get maximum pressure. The pumps stop if the measured P is higher than this.""" command = PMAX10 if self._headtype == AzuraPumpHeads.FLOWRATE_TEN_ML else PMAX50 - p_max = await self.create_and_send_command(command) * flowchem_ureg.bar + p_max = await self.create_and_send_command(command) * ureg.bar return str(p_max) async def set_maximum_pressure(self, value: str): - """Sets maximum pressure. The pumps stop if the measured P is higher than this.""" - - pressure = flowchem_ureg(value) + """Set maximum pressure. The pumps stop if the measured P is higher than this.""" + pressure = ureg.Quantity(value) command = PMAX10 if self._headtype == AzuraPumpHeads.FLOWRATE_TEN_ML else PMAX50 await self.create_and_send_command( command, @@ -279,7 +272,7 @@ async def set_maximum_pressure(self, value: str): logger.info(f"Maximum pressure set to {pressure}") async def set_minimum_motor_current(self, setpoint=None): - """Sets minimum motor current.""" + """Set minimum motor current.""" command = IMIN10 if self._headtype == AzuraPumpHeads.FLOWRATE_TEN_ML else IMIN50 reply = await self.create_and_send_command( @@ -288,15 +281,13 @@ async def set_minimum_motor_current(self, setpoint=None): logger.debug(f"Minimum motor current set to {setpoint}, returns {reply}") async def is_start_in_required(self): - """ - Check state of START IN. See require_start_in() for details. - """ + """Check state of START IN. 
See require_start_in() for details.""" runlevel = await self.create_and_send_command(STARTLEVEL) return not bool(int(runlevel)) async def require_start_in(self, value: bool = True): """ - Configures START IN. If required, the pump starts only if the STARTIN pin is shortened to GND. + Configure START IN. If required, the pump starts only if the STARTIN pin is shortened to GND. True = Pump starts the flow at short circuit contact only. (Start In <> Ground). [0] False = Pump starts the flow without a short circuit contact. (Start In <> Ground). [1] @@ -306,13 +297,13 @@ async def require_start_in(self, value: bool = True): logger.debug(f"Start in required set to {value}") async def is_autostart_enabled(self): - """Returns the default behaviour of the pump upon power on.""" + """Return the default behaviour of the pump upon power on.""" reply = await self.create_and_send_command(STARTMODE) return bool(int(reply)) async def enable_autostart(self, value: bool = True): """ - Sets the default behaviour of the pump upon power on. + Set the default behaviour of the pump upon power on. :param value: False: pause pump after switch on. True: start pumping with previous flow rate at startup :return: device message @@ -321,13 +312,13 @@ async def enable_autostart(self, value: bool = True): logger.debug(f"Autostart set to {value}") async def get_adjusting_factor(self): - """Gets the adjust parameter. Not clear what it is.""" + """Get the adjust parameter. Not clear what it is.""" command = ADJ10 if self._headtype == AzuraPumpHeads.FLOWRATE_TEN_ML else ADJ50 reply = await self.create_and_send_command(command) return int(reply) - async def set_adjusting_factor(self, setpoint: int = None): - """Sets the adjust parameter. Not clear what it is.""" + async def set_adjusting_factor(self, setpoint: int | None = None): + """Set the adjust parameter. 
Not clear what it is.""" command = ADJ10 if self._headtype == AzuraPumpHeads.FLOWRATE_TEN_ML else ADJ50 reply = await self.create_and_send_command( command, setpoint=setpoint, setpoint_range=(0, 2001) @@ -335,23 +326,21 @@ async def set_adjusting_factor(self, setpoint: int = None): logger.debug(f"Adjusting factor of set to {setpoint}, returns {reply}") async def get_correction_factor(self): - """Gets the correction factor. Not clear what it is.""" + """Get the correction factor. Not clear what it is.""" command = CORR10 if self._headtype == AzuraPumpHeads.FLOWRATE_TEN_ML else CORR50 return int(await self.create_and_send_command(command)) async def set_correction_factor(self, setpoint=None): - """Sets the correction factor. Not clear what it is.""" + """Set the correction factor. Not clear what it is.""" command = CORR10 if self._headtype == AzuraPumpHeads.FLOWRATE_TEN_ML else CORR50 reply = await self.create_and_send_command( command, setpoint=setpoint, setpoint_range=(0, 301) ) logger.debug(f"Correction factor set to {setpoint}, returns {reply}") - async def read_pressure(self) -> str: - """If the pump has a pressure sensor, returns pressure. 
Read-only property of course.""" - pressure = await self._transmit_and_parse_reply(PRESSURE) * flowchem_ureg.bar - logger.debug(f"Pressure measured = {pressure}") - return str(pressure) + async def read_pressure(self) -> pint.Quantity: + """Return pressure if the pump has a pressure sensor.""" + return await self._transmit_and_parse_reply(PRESSURE) * ureg.bar async def read_extflow(self) -> float: """Read the set flowrate from analog in.""" @@ -359,52 +348,49 @@ async def read_extflow(self) -> float: logger.debug(f"Extflow reading returns {ext_flow}") return float(ext_flow) - async def read_errors(self) -> List[int]: - """Returns the last 5 errors.""" + async def read_errors(self) -> list[int]: + """Return the last 5 errors.""" last_5_errors = await self.create_and_send_command(ERRORS) logger.debug(f"Error reading returns {last_5_errors}") - parsed_errors = [int(err_code) for err_code in last_5_errors.split(",")] - return parsed_errors + return [int(err_code) for err_code in last_5_errors.split(",")] async def read_motor_current(self): - """Returns motor current, relative in percent 0-100.""" + """Return motor current, relative in percent 0-100.""" current_percent = int(await self.create_and_send_command(IMOTOR)) logger.debug(f"Motor current reading returns {current_percent} %") return current_percent - async def start_flow(self): - """Starts flow""" + async def infuse(self): + """Start running pump at the given rate.""" await self._transmit_and_parse_reply(PUMP_ON) self._running = True - logger.info("Pump switched on") + logger.info("Pump started!") - async def stop_flow(self): - """Stops flow""" + async def stop(self): + """Stop flow.""" await self._transmit_and_parse_reply(PUMP_OFF) self._running = False - logger.info("Pump not pumping") + logger.info("Pump stopped") def is_running(self): """Get pump state.""" return self._running async def set_local(self, state: bool = True): - """Relinquish remote control""" + """Relinquish remote control.""" await 
self.create_and_send_command(LOCAL, setpoint=int(state)) logger.debug(f"Local control set to {state}") - async def set_remote(self, state: bool = True): + async def remote_control(self, state: bool = True): """Set remote control on or off.""" - await self.create_and_send_command(REMOTE, setpoint=int(state)) + if state: + await self.create_and_send_command(REMOTE, setpoint=1) + else: + await self.create_and_send_command(LOCAL, setpoint=1) logger.debug(f"Remote control set to {state}") - async def set_errio(self, param: bool): - """no idea what this exactly does...""" - await self.create_and_send_command(ERRIO, setpoint=int(param)) - logger.debug(f"Set errio {param}") - async def is_analog_control_enabled(self): - """Returns the status of the external flow control via analog input.""" + """Return the status of the external flow control via analog input.""" reply = await self.create_and_send_command(EXTCONTR) return bool(int(reply)) @@ -417,33 +403,9 @@ async def enable_analog_control(self, value: bool): await self.create_and_send_command(EXTCONTR, setpoint=int(value)) logger.debug(f"External control set to {value}") - async def __aenter__(self): - await self.initialize() - return self - - async def __aexit__(self, exc_type, exc_value, traceback): - await self.stop_flow() - - async def _update(self): - """Called automatically to change flow rate.""" - - if self.rate == 0: - await self.stop_flow() - else: - await self.set_flow(self.rate) - await self.start_flow() - - def get_router(self): - """Creates an APIRouter for this object.""" - from fastapi import APIRouter - - router = APIRouter() - router.add_api_route("/flow", self.get_flow, methods=["GET"]) - router.add_api_route("/flow", self.set_flow, methods=["PUT"]) - router.add_api_route("/pressure", self.read_pressure, methods=["GET"]) - router.add_api_route("/start", self.start_flow, methods=["PUT"]) - router.add_api_route("/stop", self.stop_flow, methods=["PUT"]) - return router + def components(self): + """Create a 
Pump and a Sensor components.""" + return AzuraCompactPump("pump", self), AzuraCompactSensor("pressure", self) if __name__ == "__main__": @@ -453,14 +415,20 @@ def get_router(self): if sys.platform == "win32": asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) - p = AzuraCompactPump(ip_address="192.168.10.113") + p = AzuraCompact(ip_address="192.168.1.119") - async def main(pump: AzuraCompactPump): - """Test function""" + async def main(pump: AzuraCompact): + """Test function.""" await pump.initialize() - await pump.set_flow("0.1 ml/min") - await pump.start_flow() + c = pump.components() + print(c) + pc: AzuraCompactPump = c[0] + print(pc) + print(await pc.infuse(rate="0.1 ml/min")) + await pump.set_flow_rate(ureg.Quantity("0.1 ml/min")) + await pump.infuse() await asyncio.sleep(5) - await pump.stop_flow() + await pump.stop() + print(await pc.is_pumping()) asyncio.run(main(p)) diff --git a/src/flowchem/devices/knauer/azura_compact_pump.py b/src/flowchem/devices/knauer/azura_compact_pump.py new file mode 100644 index 00000000..5f174ae5 --- /dev/null +++ b/src/flowchem/devices/knauer/azura_compact_pump.py @@ -0,0 +1,58 @@ +"""Azura compact pump component.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + +from loguru import logger + +from ... 
import ureg + +if TYPE_CHECKING: + from .azura_compact import AzuraCompact +from flowchem.components.pumps.hplc_pump import HPLCPump + + +def isfloat(rate: str) -> bool: + try: + float(rate) + return True + except ValueError: + return False + + +class AzuraCompactPump(HPLCPump): + hw_device: AzuraCompact # for typing's sake + + def __init__(self, name: str, hw_device: AzuraCompact): + """Initialize component.""" + super().__init__(name, hw_device) + + async def infuse(self, rate: str = "", volume: str = "") -> bool: + """Start infusion.""" + if volume: + logger.warning(f"Volume parameter ignored: not supported by {self.name}!") + + if isfloat(rate): + rate = "0 ml/min" + if rate.isnumeric(): + rate += " ml/min" + logger.warning("Units missing, assuming ml/min!") + + parsed_flowrate = ureg.Quantity(rate) + + await self.hw_device.set_flow_rate(rate=parsed_flowrate) + return await self.hw_device.infuse() + + async def stop(self) -> bool: + """Stop pumping.""" + await self.hw_device.stop() + return True + + async def is_pumping(self) -> bool: + """Is pump running?""" + return self.hw_device.is_running() + + @staticmethod + def is_withdrawing_capable() -> bool: + """Can the pump reverse its normal flow direction?""" + return False diff --git a/src/flowchem/devices/knauer/azura_compact_sensor.py b/src/flowchem/devices/knauer/azura_compact_sensor.py new file mode 100644 index 00000000..9f0b9083 --- /dev/null +++ b/src/flowchem/devices/knauer/azura_compact_sensor.py @@ -0,0 +1,18 @@ +"""Azura compact sensor component.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from .azura_compact import AzuraCompact + +from flowchem.components.sensors.pressure_sensor import PressureSensor + + +class AzuraCompactSensor(PressureSensor): + hw_device: AzuraCompact # for typing's sake + + async def read_pressure(self, units: str = "bar"): + """Read from sensor, result to be expressed in units (optional).""" + pressure = await 
self.hw_device.read_pressure() + return pressure.m_as(units) diff --git a/src/flowchem/devices/knauer/dad.py b/src/flowchem/devices/knauer/dad.py new file mode 100644 index 00000000..e22aa083 --- /dev/null +++ b/src/flowchem/devices/knauer/dad.py @@ -0,0 +1,92 @@ +"""Control module for the Knauer DAD.""" +import asyncio + +from loguru import logger + +from flowchem.components.analytics.dad_control import DADControl +from flowchem.devices.flowchem_device import FlowchemDevice +from flowchem.devices.knauer._common import KnauerEthernetDevice +from flowchem.devices.list_known_device_type import autodiscover_third_party +from flowchem.exceptions import InvalidConfiguration + +try: + from flowchem_knauer import KnauerDADCommands + + HAS_DAD_COMMANDS = True +except ImportError: + HAS_DAD_COMMANDS = False + + +class KnauerDAD(KnauerEthernetDevice, FlowchemDevice): + """DAD control class.""" + + def __init__( + self, + ip_address=None, + mac_address=None, + name: str | None = None, + turn_on_d2: bool = True, + turn_on_halogen: bool = True, + ): + super().__init__(ip_address, mac_address, name=name) + self.eol = b"\n\r" + self._d2 = turn_on_d2 + self._hal = turn_on_halogen + self._state_d2 = False + self._state_hal = False + + if not HAS_DAD_COMMANDS: + raise InvalidConfiguration( + "You tried to use a Knauer DAD device but the relevant commands are missing!\n" + "Unfortunately, we cannot publish those as they were provided under NDA.\n" + "Contact Knauer for further assistance." 
+ ) + + self.cmd = KnauerDADCommands() + + async def initialize(self): + """Initialize connection.""" + await super().initialize() + + if self._d2: + await self.d2(True) + await asyncio.sleep(1) + if self._hal: + await self.hal(True) + await asyncio.sleep(15) + + async def d2(self, state: bool = True) -> str: + """Turn off or on the deuterium lamp.""" + cmd = self.cmd.D2_LAMP_ON if state else self.cmd.D2_LAMP_OFF + self._state_d2 = state + return await self._send_and_receive(cmd) + + async def hal(self, state: bool = True) -> str: + """Turn off or on the halogen lamp.""" + cmd = self.cmd.HAL_LAMP_ON if state else self.cmd.HAL_LAMP_OFF + self._state_hal = state + return await self._send_and_receive(cmd) + + def components(self): + return (KnauerDADControl("dad", self),) + + +class KnauerDADControl(DADControl): + hw_device: KnauerDAD + + async def get_lamp(self): + """Lamp status.""" + return { + "d2": self.hw_device._state_d2, + "hal": self.hw_device._state_hal, + } + + async def set_lamp(self, state: bool, lamp_name: str): + """Lamp status.""" + match lamp_name: + case "d2": + await self.hw_device.d2(state) + case "hal": + await self.hw_device.hal(state) + case _: + logger.error("unknown lamp name!") diff --git a/src/flowchem/devices/knauer/knauer_finder.py b/src/flowchem/devices/knauer/knauer_finder.py new file mode 100644 index 00000000..4709601d --- /dev/null +++ b/src/flowchem/devices/knauer/knauer_finder.py @@ -0,0 +1,205 @@ +"""Autodiscover Knauer devices on network.""" +import asyncio +import queue +import socket +import sys +import time +from textwrap import dedent +from threading import Thread + +import rich_click as click +from loguru import logger + +from flowchem.vendor.getmac import get_mac_address + +__all__ = ["autodiscover_knauer", "knauer_finder"] + +Address = tuple[str, int] + + +class BroadcastProtocol(asyncio.DatagramProtocol): + """See `https://gist.github.com/yluthu/4f785d4546057b49b56c`.""" + + def __init__(self, target: Address, 
response_queue: queue.Queue): + self.target = target + self.loop = asyncio.get_event_loop() + self._queue = response_queue + + def connection_made(self, transport: asyncio.transports.DatagramTransport): # type: ignore + """Send the magic broadcast package for autodiscovery.""" + sock = transport.get_extra_info("socket") # type: socket.socket + sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) # sets to broadcast + transport.sendto(b"\x00\x01\x00\xf6", self.target) + + def datagram_received(self, data: bytes | str, addr: Address): + """Add received data to queue.""" + logger.trace(f"Received data from {addr}") + self._queue.put(addr[0]) + + +async def get_device_type(ip_address: str) -> str: + """Return either 'Pump', 'Valve' or 'Unknown'.""" + fut = asyncio.open_connection(host=ip_address, port=10001) + try: + reader, writer = await asyncio.wait_for(fut, timeout=3) + except ConnectionError: + return "ConnectionError" + except asyncio.TimeoutError: + if ip_address == "192.168.1.2": + return "TimeoutError - Nice FlowIR that you have :D" + return "TimeoutError" + + # Test Pump + writer.write(b"HEADTYPE:?\n\r") + try: + reply = await asyncio.wait_for(reader.readuntil(separator=b"\r"), timeout=1) + except asyncio.TimeoutError: + return "TimeoutError" + + if reply.startswith(b"HEADTYPE"): + logger.debug(f"Device {ip_address} is a Pump") + return "AzuraCompact" + + # Test Valve + writer.write(b"T:?\n\r") + reply = await reader.readuntil(separator=b"\r") + if reply.startswith(b"VALVE"): + logger.debug(f"Device {ip_address} is a Valve") + return "KnauerValve" + + return "Unknown" + + +def _get_local_ip() -> str: + """Guess the most suitable local IP for autodiscovery.""" + # These are all the local IPs (different interfaces) + machine_ips = [_[4][0] for _ in socket.getaddrinfo(socket.gethostname(), None)] + + # 192.168 subnet 1st priority + if local_ip := next((ip for ip in machine_ips if ip.startswith("192.168.")), False): + return local_ip # type: ignore + + # 
10.0 subnet 2nd priority + if local_ip := next((ip for ip in machine_ips if ip.startswith("10.")), False): + return local_ip # type: ignore + + # 100.x subnet 3rd priority (Tailscale) + if local_ip := next((ip for ip in machine_ips if ip.startswith("100.")), False): + return local_ip # type: ignore + + logger.warning(f"Could not reliably determine local IP!") + hostname = socket.gethostname() + + # Only accept local IP + if ( + hostname.startswith("192.168") + or hostname.startswith("192.168") + or hostname.startswith("100.") + ): + return socket.gethostbyname(hostname) + else: + return "" + + +async def send_broadcast_and_receive_replies(source_ip): + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = asyncio.new_event_loop() + + device_q: queue.Queue = queue.Queue() + transport, protocol = await loop.create_datagram_endpoint( + lambda: BroadcastProtocol(("255.255.255.255", 30718), response_queue=device_q), + local_addr=(source_ip, 28688), + allow_broadcast=True, + ) + try: + await asyncio.sleep(2) # Serve for 1 hour. + + finally: + transport.close() + + device_list = [] + # Get all device from queue (nobody should need has more than 40 devices, right?) + for _ in range(40): + try: + device_list.append(device_q.get_nowait()) + except queue.Empty: + break + + return device_list + + +def autodiscover_knauer(source_ip: str = "") -> dict[str, str]: + """ + Automatically find Knauer ethernet device on the network and returns the IP associated to each MAC address. + Note that the MAC is the key here as it is the parameter used in configuration files. + Knauer devices only support DHCP so static IPs are not an option. + Args: + source_ip: source IP for autodiscover (only relevant if multiple network interfaces are available!) + Returns: + List of tuples (IP, MAC, device_type), one per device replying to autodiscover + """ + # Define source IP resolving local hostname. 
+ if not source_ip: + source_ip = _get_local_ip() + if not source_ip: + logger.warning("Please provide a valid source IP for broadcasting.") + return dict() + logger.info(f"Starting detection from IP {source_ip}") + + device_list = asyncio.run(send_broadcast_and_receive_replies(source_ip)) + + device_info: dict[str, str] = {} + device_ip: str + # We got replies from IPs, let's find their MACs + for device_ip in device_list: + logger.debug(f"Got a reply from {device_ip}") + # MAC address + mac = get_mac_address(ip=device_ip) + if mac: + device_info[mac] = device_ip + return device_info + + +def knauer_finder(source_ip=None): + """Execute autodiscovery. This is the entry point of the `knauer-finder` CLI command.""" + # This is a bug of asyncio on Windows :| + if sys.platform == "win32": + asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) + + # Autodiscover devices (returns dict with MAC as index, IP as value) + devices = autodiscover_knauer(source_ip) + dev_config = set() + + for mac_address, ip in devices.items(): + logger.info(f"Determining device type for device at {ip} [{mac_address}]") + # Device Type + device_type = asyncio.run(get_device_type(ip)) + logger.info(f"Found a {device_type} on IP {ip}") + + if device_type == "AzuraCompact": + dev_config.add( + dedent( + f"""\n\n + [device.pump-{mac_address[-8:-6] + mac_address[-5:-3] + mac_address[-2:]}] + type = "AzuraCompact" + ip_address = "{ip}" # MAC address during discovery: {mac_address} + # max_pressure = "XX bar" + # min_pressure = "XX bar"\n""" + ) + ) + elif device_type == "KnauerValve": + dev_config.add( + dedent( + f"""\n\n[device.valve-{mac_address[-8:-6] + mac_address[-5:-3] + mac_address[-2:]}] + type = "KnauerValve" + ip_address = "{ip}" # MAC address during discovery: {mac_address}\n""" + ) + ) + + return dev_config + + +if __name__ == "__main__": + knauer_finder() diff --git a/flowchem/components/devices/Knauer/KnauerValve.py b/src/flowchem/devices/knauer/valve.py similarity 
index 55% rename from flowchem/components/devices/Knauer/KnauerValve.py rename to src/flowchem/devices/knauer/valve.py index c414c861..63c70ecd 100644 --- a/flowchem/components/devices/Knauer/KnauerValve.py +++ b/src/flowchem/devices/knauer/valve.py @@ -1,19 +1,22 @@ -""" Knauer valve control. """ - +"""Knauer valve control.""" import warnings from enum import Enum from loguru import logger -from flowchem.components.devices.Knauer.Knauer_common import KnauerEthernetDevice -from flowchem.components.properties import Valve +from flowchem.devices.flowchem_device import DeviceInfo +from flowchem.devices.flowchem_device import FlowchemDevice +from flowchem.devices.knauer._common import KnauerEthernetDevice +from flowchem.devices.knauer.valve_component import Knauer12PortDistribution +from flowchem.devices.knauer.valve_component import Knauer16PortDistribution +from flowchem.devices.knauer.valve_component import Knauer6PortDistribution +from flowchem.devices.knauer.valve_component import KnauerInjectionValve from flowchem.exceptions import DeviceError +from flowchem.people import * class KnauerValveHeads(Enum): - """ - Four different valve types can be used. 6port2position valve, and 6, 12, 16 multi-position valves - """ + """Four different valve types can be used. 6port2position valve, and 6, 12, 16 multi-position valves.""" SIX_PORT_TWO_POSITION = "LI" SIX_PORT_SIX_POSITION = "6" @@ -21,11 +24,11 @@ class KnauerValveHeads(Enum): SIXTEEN_PORT_SIXTEEN_POSITION = "16" -class KnauerValve(KnauerEthernetDevice, Valve): +class KnauerValve(KnauerEthernetDevice, FlowchemDevice): """ Control Knauer multi position valves. - Valve type can be 6, 12, 16 or it can be 6 ports, two positions, which will be simply 2 (two states) + Valve type can be 6, 12, 16, or it can be 6 ports, two positions, which will be simply 2 (two states) in this case, the response for T is LI. 
Load and inject can be switched by sending L or I maybe valves should have an initial state which is set during init and updated, if no change don't schedule command EN: https://www.knauer.net/Dokumente/valves/azura/manuals/v6860_azura_v_2.1s_user-manual_en.pdf @@ -33,26 +36,31 @@ class KnauerValve(KnauerEthernetDevice, Valve): DIP switch for valve selection """ - def __init__(self, ip_address=None, mac_address=None, name=None): - super().__init__(ip_address, mac_address, name) + def __init__(self, ip_address=None, mac_address=None, **kwargs): + super().__init__(ip_address, mac_address, **kwargs) self.eol = b"\r\n" - # These are set during initialize() - self.valve_type = None - self._position = None + self.valve_type = None # Set during initialize() async def initialize(self): - """Initialize connection""" - # Here the magic happens... + """Initialize connection.""" + # The connection is established in KnauerEthernetDevice.initialize() await super().initialize() # Detect valve type and state self.valve_type = await self.get_valve_type() - self._position = await self.get_current_position() + + def metadata(self) -> DeviceInfo: + """Return hw device metadata.""" + return DeviceInfo( + authors=[dario, jakob, wei_hsin], + maintainers=[dario], + manufacturer="Knauer", + model="Valve", + ) @staticmethod def handle_errors(reply: str): - """True if there are errors, False otherwise. Warns for errors.""" - + """Return True if there are errors, False otherwise. Warns for errors.""" if not reply.startswith("E"): return @@ -92,10 +100,13 @@ def handle_errors(reply: str): async def _transmit_and_parse_reply(self, message: str) -> str: """ - Sends command, receives reply and parse it. + Send command, receive reply and parse it. 
+ + Args: + message (str): command to be sent - :param message: str with command to be sent - :return: reply: str with reply + Returns: + str: reply """ reply = await self._send_and_receive(message) self.handle_errors(reply) @@ -110,27 +121,9 @@ async def _transmit_and_parse_reply(self, message: str) -> str: return reply - async def get_current_position(self) -> str: - """Return current valve position.""" - return await self._transmit_and_parse_reply("P") - - async def switch_to_position(self, position: str): - """Move valve to position.""" - position = str(position).upper() - - # switching necessary? - if position == self._position: - logger.debug("Target position == current position. No movement needed.") - return - - self._position = position - - # Switch to position - await self._transmit_and_parse_reply(position) - async def get_valve_type(self): """ - Gets valve type, if returned value is not supported throws an error. + Get valve type, if returned value is not supported throws an error. 
Note that this method is called during initialize(), therefore it is in line with the general philosophy of the module to 'fail early' upon init and avoiding @@ -152,59 +145,38 @@ async def get_valve_type(self): logger.info(f"Valve connected, type: {headtype}.") return headtype + async def get_raw_position(self) -> str: + """Return current valve position, following valve nomenclature.""" + return await self._transmit_and_parse_reply("P") -class Knauer6Port2PositionValve(KnauerValve): - """KnauerValve of type SIX_PORT_TWO_POSITION""" - - async def initialize(self): - """Ensure valve type""" - await super().initialize() - assert self.valve_type == KnauerValveHeads.SIX_PORT_TWO_POSITION - - -class Knauer6Port6PositionValve(KnauerValve): - """KnauerValve of type SIX_PORT_SIX_POSITION""" - - async def initialize(self): - """Ensure valve type""" - await super().initialize() - assert self.valve_type == KnauerValveHeads.SIX_PORT_SIX_POSITION - - -class Knauer12PortValve(KnauerValve): - """KnauerValve of type TWELVE_PORT_TWELVE_POSITION""" - - async def initialize(self): - """Ensure valve type""" - await super().initialize() - assert self.valve_type == KnauerValveHeads.TWELVE_PORT_TWELVE_POSITION - - -class Knauer16PortValve(KnauerValve): - """KnauerValve of type SIXTEEN_PORT_SIXTEEN_POSITION""" + async def set_raw_position(self, position: str) -> bool: + """Sets the valve position, following valve nomenclature.""" + return await self._transmit_and_parse_reply(position) != "" - async def initialize(self): - """Ensure valve type""" - await super().initialize() - assert self.valve_type == KnauerValveHeads.SIXTEEN_PORT_SIXTEEN_POSITION + def components(self): + """Create the right type of Valve components based on head type.""" + match self.valve_type: + case KnauerValveHeads.SIX_PORT_TWO_POSITION: + return KnauerInjectionValve("injection-valve", self) + case KnauerValveHeads.SIX_PORT_SIX_POSITION: + return Knauer6PortDistribution("distribution-valve", self) + case 
KnauerValveHeads.TWELVE_PORT_TWELVE_POSITION: + return Knauer12PortDistribution("distribution-valve", self) + case KnauerValveHeads.SIXTEEN_PORT_SIXTEEN_POSITION: + return Knauer16PortDistribution("distribution-valve", self) if __name__ == "__main__": - # This is a bug of asyncio on Windows :| import asyncio - import sys - - if sys.platform == "win32": - asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) v = KnauerValve(ip_address="192.168.1.176") - async def main(valve: KnauerValve): - """test function""" + async def main(valve): + """Test function.""" await valve.initialize() - await valve.switch_to_position("I") - print(await valve.get_current_position()) - await valve.switch_to_position("L") - print(await valve.get_current_position()) + await valve.set_raw_position("I") + print(await valve.get_raw_position()) + await valve.set_raw_position("L") + print(await valve.get_raw_position()) asyncio.run(main(v)) diff --git a/src/flowchem/devices/knauer/valve_component.py b/src/flowchem/devices/knauer/valve_component.py new file mode 100644 index 00000000..38e155cd --- /dev/null +++ b/src/flowchem/devices/knauer/valve_component.py @@ -0,0 +1,74 @@ +"""Knauer valve component.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from .valve import KnauerValve +from flowchem.components.valves.distribution_valves import SixPortDistribution +from flowchem.components.valves.distribution_valves import SixteenPortDistribution +from flowchem.components.valves.distribution_valves import TwelvePortDistribution +from flowchem.components.valves.injection_valves import SixPortTwoPosition + + +class KnauerInjectionValve(SixPortTwoPosition): + hw_device: KnauerValve # for typing's sake + position_mapping = {"load": "L", "inject": "I"} + _reverse_position_mapping = {v: k for k, v in position_mapping.items()} + + async def get_position(self) -> str: + """Get current valve position.""" + pos = await 
self.hw_device.get_raw_position() + assert pos in ("L", "I"), "Valve position is 'I' or 'L'" + return self._reverse_position_mapping[pos] + + async def set_position(self, position: str): + """Move valve to position.""" + await super().set_position(position) + target_pos = self.position_mapping[position] + return await self.hw_device.set_raw_position(target_pos) + + +class Knauer6PortDistribution(SixPortDistribution): + """KnauerValve of type SIX_PORT_SIX_POSITION.""" + + hw_device: KnauerValve # for typing's sake + + async def get_position(self) -> str: + """Get current valve position.""" + return await self.hw_device.get_raw_position() + + async def set_position(self, position: str): + """Move valve to position.""" + await super().set_position(position) + return await self.hw_device.set_raw_position(position) + + +class Knauer12PortDistribution(TwelvePortDistribution): + """KnauerValve of type TWELVE_PORT_TWELVE_POSITION.""" + + hw_device: KnauerValve # for typing's sake + + async def get_position(self) -> str: + """Get current valve position.""" + return await self.hw_device.get_raw_position() + + async def set_position(self, position: str): + """Move valve to position.""" + await super().set_position(position) + return await self.hw_device.set_raw_position(position) + + +class Knauer16PortDistribution(SixteenPortDistribution): + """KnauerValve of type SIXTEEN_PORT_SIXTEEN_POSITION.""" + + hw_device: KnauerValve # for typing's sake + + async def get_position(self) -> str: + """Get current valve position.""" + return await self.hw_device.get_raw_position() + + async def set_position(self, position: str): + """Move valve to position.""" + await super().set_position(position) + return await self.hw_device.set_raw_position(position) diff --git a/src/flowchem/devices/known_plugins.py b/src/flowchem/devices/known_plugins.py new file mode 100644 index 00000000..87021ed3 --- /dev/null +++ b/src/flowchem/devices/known_plugins.py @@ -0,0 +1,3 @@ +plugin_devices = { + "Spinsolve":
"flowchem-spinsolve", +} diff --git a/src/flowchem/devices/list_known_device_type.py b/src/flowchem/devices/list_known_device_type.py new file mode 100644 index 00000000..5917b943 --- /dev/null +++ b/src/flowchem/devices/list_known_device_type.py @@ -0,0 +1,63 @@ +"""Auto-discover the device classes present in the device sub-folders and in the installed plugins.""" +import inspect +from importlib.metadata import entry_points +from typing import Any + +from loguru import logger + +import flowchem.devices +from flowchem.devices.flowchem_device import FlowchemDevice + + +def is_device_class(test_object): + """Return true if the object is a subclass of FlowchemDevice.""" + if getattr(test_object, "__module__", None) is None: + return + return ( + inspect.isclass(test_object) + and issubclass(test_object, FlowchemDevice) + and test_object.__name__ != "FlowchemDevice" + ) + + +def _autodiscover_devices_in_module(module) -> dict[str, Any]: + """Given a module, autodiscover the device classes and return them as dict(name, object).""" + device_classes = inspect.getmembers(module, is_device_class) + logger.debug(f"Found {len(device_classes)} device type(s) in {module.__name__}") + # Dict of device class names and their respective classes, i.e. {device_class_name: DeviceClass}. + return {obj_class[0]: obj_class[1] for obj_class in device_classes} + + +def autodiscover_first_party() -> dict[str, Any]: + """Get classes from `flowchem.devices` subpackages.""" + return _autodiscover_devices_in_module(flowchem.devices) + + +def autodiscover_third_party() -> dict[str, Any]: + """ + Get classes from packages with a `flowchem.devices` entrypoint. + + A plugin structure can be used to add devices from an external package via setuptools entry points. 
+ See https://packaging.python.org/en/latest/guides/creating-and-discovering-plugins/#using-package-metadata + """ + return { + k: v + for ep in entry_points(group="flowchem.devices") + for (k, v) in _autodiscover_devices_in_module(ep.load()).items() + } + + +def autodiscover_device_classes(): + """Get all the device-controlling classes, either from `flowchem.devices` or third party packages.""" + first = autodiscover_first_party() + # logger.info(f"Found {len(first)} 1st-party device type! {list(first.keys())}") + third = autodiscover_third_party() + # logger.info(f"Found {len(third)} 3rd-party device type! {list(third.keys())}") + + return third | first # First party devices will overwrite the third party ones. + + +if __name__ == "__main__": + logger.debug( + f"The following device types were found: {list(autodiscover_device_classes().keys())}" + ) diff --git a/src/flowchem/devices/magritek/__init__.py b/src/flowchem/devices/magritek/__init__.py new file mode 100644 index 00000000..e3f12b19 --- /dev/null +++ b/src/flowchem/devices/magritek/__init__.py @@ -0,0 +1,6 @@ +"""Magritek's Spinsolve.""" +from .spinsolve import * + +__all__ = [ + "Spinsolve", +] diff --git a/flowchem/components/devices/Magritek/msg_maker.py b/src/flowchem/devices/magritek/_msg_maker.py similarity index 55% rename from flowchem/components/devices/Magritek/msg_maker.py rename to src/flowchem/devices/magritek/_msg_maker.py index e6f8107f..62b2c9d4 100644 --- a/flowchem/components/devices/Magritek/msg_maker.py +++ b/src/flowchem/devices/magritek/_msg_maker.py @@ -1,14 +1,11 @@ -""" Functions related to the construction of instrument request """ - +"""Functions for the construction of XML requests for Spinsolve.""" from pathlib import WindowsPath from lxml import etree def create_message(sub_element_name, attributes=None): - """ - Create a minimal XML tree with Message as root and sub_element as child tag - """ + """Create a minimal XML tree with Message as root and sub_element as child 
tag.""" if attributes is None: attributes = {} @@ -19,8 +16,9 @@ def create_message(sub_element_name, attributes=None): def set_attribute(name, value="") -> etree.Element: """ - Creates a Set message. - Used for name = {Solvent | Sample} + indirectly by UserData and DataFolder + Create a Set message. + + Used for name = {Solvent | Sample} + indirectly by UserData and DataFolder. """ base = create_message("Set") attribute = etree.SubElement(base.find("./Set"), name) @@ -30,8 +28,9 @@ def set_attribute(name, value="") -> etree.Element: def get_request(name) -> etree.Element: """ - Creates a Get message. - Used for name = {Solvent | Sample | UserData} + indirectly by UserData and DataFolder + Create a Get element. + + Used for name = {Solvent | Sample | UserData} + indirectly by UserData and DataFolder. """ base = create_message("GetRequest") etree.SubElement(base.find("./GetRequest"), name) @@ -39,9 +38,7 @@ def get_request(name) -> etree.Element: def set_data_folder(location) -> etree.Element: - """ - Create a Set DataFolder message - """ + """Create a Set DataFolder message.""" # Get base request data_folder = set_attribute("DataFolder") @@ -54,31 +51,13 @@ def set_data_folder(location) -> etree.Element: return data_folder -def set_user_data(data: dict) -> etree.Element: - """ - Given a dict with custom data, it creates a Set/UserData message.
- Those data are saved in acq.par - """ - user_data = set_attribute("UserData") - for key, value in data.items(): - etree.SubElement( - user_data.find(".//UserData"), "Data", dict(key=key, value=value) - ) - return user_data - - -def create_protocol_message( - protocol_name: str, protocol_options: dict -) -> etree.Element: - """ - Create an XML request to run a protocol - """ - xml_root = create_message("Start", {"protocol": protocol_name}) - +def create_protocol_message(name: str, options: dict) -> etree.Element: + """Create an XML request to run a protocol.""" + xml_root = create_message("Start", {"protocol": name}) start_tag = xml_root.find("Start") - for key, value in protocol_options.items(): - # All options are SubElements of the Start tag! + # All options are sent as Start tag SubElements + for key, value in options.items(): + etree.SubElement(start_tag, "Option", {"name": f"{key}", "value": f"{value}"}) return xml_root diff --git a/src/flowchem/devices/magritek/_parser.py b/src/flowchem/devices/magritek/_parser.py new file mode 100644 index 00000000..6990264c --- /dev/null +++ b/src/flowchem/devices/magritek/_parser.py @@ -0,0 +1,43 @@ +"""Functions related to instrument response parsing.""" +import warnings +from enum import Enum + +from lxml import etree + + +class StatusNotification(Enum): + """Represent the type of the status notification.""" + + STARTED = 1 # 'Running' state received, starting protocol + RUNNING = 2 # All good, Progress received, protocol is running + STOPPING = 3 # Abort called, waiting for current scan end + FINISHING = 4 # Upon 'Ready' state: with this also processing/saving data is over + COMPLETED = 5 # Upon Completed notification + ERROR = 6 # If an error occurs + UNKNOWN = 7 + + +def parse_status_notification(xml_message: etree.Element): + """Parse a status notification reply.""" + status_notification = xml_message.find(".//StatusNotification") + assert status_notification is not None, "a StatusNotification tree is needed" + + # StatusNotification child can be State (w/ submsg), Progress, Completed or Error + match status_notification[0].tag,
status_notification[0].get("status"): + case ["State", "Running"]: + status = StatusNotification.STARTED + case ["State", "Ready"]: + status = StatusNotification.FINISHING + case ["State", "Stopping"]: + status = StatusNotification.STOPPING + case ["Progress", None]: + status = StatusNotification.RUNNING + case ["Completed", None]: + status = StatusNotification.COMPLETED + case ["Error", None]: + status = StatusNotification.ERROR + case _: + warnings.warn("Could not recognize StatusNotification state!") + status = StatusNotification.UNKNOWN + + return status, status_notification[0].get("dataFolder") diff --git a/src/flowchem/devices/magritek/spinsolve.py b/src/flowchem/devices/magritek/spinsolve.py new file mode 100644 index 00000000..26f9d7a5 --- /dev/null +++ b/src/flowchem/devices/magritek/spinsolve.py @@ -0,0 +1,391 @@ +"""Spinsolve module.""" +import asyncio +import pprint as pp +import warnings +from pathlib import Path + +from fastapi import BackgroundTasks +from loguru import logger +from lxml import etree +from packaging import version + +from flowchem.devices.flowchem_device import DeviceInfo +from flowchem.devices.flowchem_device import FlowchemDevice +from flowchem.devices.magritek._msg_maker import create_message +from flowchem.devices.magritek._msg_maker import create_protocol_message +from flowchem.devices.magritek._msg_maker import get_request +from flowchem.devices.magritek._msg_maker import set_attribute +from flowchem.devices.magritek._msg_maker import set_data_folder +from flowchem.devices.magritek._parser import parse_status_notification +from flowchem.devices.magritek._parser import StatusNotification +from flowchem.devices.magritek.spinsolve_control import SpinsolveControl +from flowchem.devices.magritek.utils import create_folder_mapper +from flowchem.devices.magritek.utils import get_my_docs_path +from flowchem.people import * + +__all__ = ["Spinsolve"] + + +class Spinsolve(FlowchemDevice): + """Spinsolve class, gives access to the 
spectrometer remote control API.""" + + def __init__( + self, + host="127.0.0.1", + port: int | None = 13000, + name: str | None = None, + xml_schema=None, + data_folder=None, + solvent: str | None = "Chloroform-d1", + sample_name: str | None = "Unnamed automated experiment", + remote_to_local_mapping: list[str] | None = None, + ): + """Control a Spinsolve instance via HTTP XML API.""" + super().__init__(name) + + self.metadata = DeviceInfo( + authors=[dario, jakob, wei_hsin], + maintainers=[dario], + manufacturer="Magritek", + model="Spinsolve", + ) + + self.host, self.port = host, port + + # The Qs must exist before the response is received to be awaited + # Could be generated programmatically from RemoteControl.xsd, but it's not worth it + self._replies_by_type: dict[str, asyncio.Queue] = { + "HardwareResponse": asyncio.Queue(), + "AvailableProtocolOptionsResponse": asyncio.Queue(), + "GetResponse": asyncio.Queue(), + "StatusNotification": asyncio.Queue(), + } + + # Set experimental variable + self._data_folder = data_folder + + # An optional mapping between remote and local folder location can be used for remote use + if remote_to_local_mapping is not None: + self._folder_mapper = create_folder_mapper(*remote_to_local_mapping) + assert ( + self._folder_mapper(self._data_folder) is not None + ) # Ensure mapper validity. + else: + self._folder_mapper = None # type: ignore + + # Sets default sample, solvent value and user data + self.sample, self.solvent = sample_name, solvent + self.protocols: dict[str, dict] = {} + self.user_data = {"control_software": "flowchem"} + + # XML schema for reply validation. Reply validation is completely optional! + if xml_schema is None: + # This is the default location upon Spinsolve installation. + # However, remote control can be from remote, i.e. 
not Spinsolve PC so this ;) + default_schema = ( + get_my_docs_path() / "Magritek" / "Spinsolve" / "RemoteControl.xsd" + ) + try: + self.schema = etree.XMLSchema(file=str(default_schema)) + except etree.XMLSchemaParseError: # i.e. not found + self.schema = None + else: + self.schema = xml_schema + + # IOs (these are set upon initialization w/ initialize) + self._io_reader: asyncio.StreamReader = None # type: ignore + self._io_writer: asyncio.StreamWriter = None # type: ignore + self.reader: asyncio.Task = None # type: ignore + # Each protocol adds a new Path to the list, run_protocol returns the ID of the next protocol + self._result_folders: list[Path] = [] + + async def initialize(self): + """Initiate connection with a running Spinsolve instance.""" + try: + self._io_reader, self._io_writer = await asyncio.open_connection( + self.host, self.port + ) + logger.debug(f"Connected to {self.host}:{self.port}") + except OSError as e: + raise ConnectionError( + f"Error connecting to {self.host}:{self.port} -- {e}" + ) from e + + # Start reader thread + self.reader = asyncio.create_task( + self.connection_listener(), name="Connection listener" + ) + + # This request is used to check if the instrument is connected + hw_info = await self.hw_request() + if hw_info.find(".//ConnectedToHardware").text != "true": + raise ConnectionError("Spectrometer not connected to Spinsolve's PC!") + + # If connected parse and log instrument info + self.metadata.version = hw_info.find(".//SpinsolveSoftware").text + hardware_type = hw_info.find(".//SpinsolveType").text + self.metadata.additional_info["hardware_type"] = hardware_type + logger.debug(f"Connected to model {hardware_type}, SW: {self.metadata.version}") + + # Load available protocols + await self.load_protocols() + + # Finally, check version + if version.parse(self.metadata.version) < version.parse("1.18.1.3062"): + warnings.warn( + f"Spinsolve v. {self.metadata.version} is not supported!" + f"Upgrade to a more recent version! 
(at least 1.18.1.3062)" + ) + + await self.set_data_folder(self._data_folder) + + async def connection_listener(self): + """Listen for replies and puts them in the queue.""" + logger.debug("Spinsolve connection listener started!") + parser = etree.XMLParser() + while True: + raw_tree = await self._io_reader.readuntil(b"") + logger.debug(f"Read reply {raw_tree!r}") + + # Try parsing reply and skip if not valid + try: + parsed_tree = etree.fromstring(raw_tree, parser) + except etree.XMLSyntaxError: + warnings.warn(f"Cannot parse response XML {raw_tree!r}") + continue + + # Validate reply if schema was provided + if self.schema: + try: + self.schema.validate(parsed_tree) + except etree.XMLSyntaxError as syntax_error: + warnings.warn( + f"Invalid XML received! [Validation error: {syntax_error}]" + ) + + # Add to reply queue of the given tag-type + await self._replies_by_type[parsed_tree[0].tag].put(parsed_tree) + + async def get_solvent(self) -> str: + """Get current solvent.""" + await self.send_message(get_request("Solvent")) + reply = await self._replies_by_type["GetResponse"].get() + return reply.find(".//Solvent").text + + async def set_solvent(self, solvent: str): + """Set solvent.""" + await self.send_message(set_attribute("Solvent", solvent)) + + async def get_sample(self) -> str: + """Get current sample.""" + await self.send_message(get_request("Sample")) + reply = await self._replies_by_type["GetResponse"].get() + return reply.find(".//Sample").text + + async def set_sample(self, sample: str): + """Set the sample name (it will appear in `acqu.par`).""" + await self.send_message(set_attribute("Sample", sample)) + + async def set_data_folder(self, location: str): + """Set location of the data folder (where FIDs are saved to).""" + if location is not None: + self._data_folder = location + await self.send_message(set_data_folder(location)) + + async def get_user_data(self) -> dict: + """Get user data. 
These will appear in `acqu.par`.""" + await self.send_message(get_request("UserData")) + reply = await self._replies_by_type["GetResponse"].get() + return { + data_item.get("key"): data_item.get("value") + for data_item in reply.findall(".//Data") + } + + async def set_user_data(self, data: dict): + """Set user data. The items provide will appear in `acqu.par`.""" + user_data = set_attribute("UserData") + for key, value in data.items(): + etree.SubElement(user_data.find(".//UserData"), "Data", {key: value}) + await self.send_message(user_data) + + async def _transmit(self, message: bytes): + """Send the message to the spectrometer.""" + # This assertion is here for mypy ;) + assert isinstance( + self._io_writer, asyncio.StreamWriter + ), "The connection was not initialized!" + self._io_writer.write(message) + await self._io_writer.drain() + + async def send_message(self, root: etree.Element): + """Send the tree connected XML etree.Element provided.""" + # Turn the etree.Element provided into an ElementTree + tree = etree.ElementTree(root) + + # The request must include the XML declaration + message = etree.tostring(tree, xml_declaration=True, encoding="utf-8") + + # Send request + logger.debug(f"Transmitting request to spectrometer: {message}") + await self._transmit(message) + + async def hw_request(self): + """Send an HW request to the spectrometer, receive the reply and returns it.""" + await self.send_message(create_message("HardwareRequest")) + return await self._replies_by_type["HardwareResponse"].get() + + async def load_protocols(self): + """Get a list of available protocol on the current spectrometer.""" + await self.send_message(create_message("AvailableProtocolOptionsRequest")) + reply = await self._replies_by_type["AvailableProtocolOptionsResponse"].get() + + # Parse reply and construct the dict with protocols available + for element in reply.findall(".//Protocol"): + protocol_name = element.get("protocol") + self.protocols[protocol_name] = { + 
option.get("name"): [value.text for value in option.findall("Value")] + for option in element.findall("Option") + } + + async def run_protocol( + self, name, background_tasks: BackgroundTasks, options=None + ) -> int: + """ + Run a protocol. + + Return the ID of the protocol (needed to get results via `get_result_folder`). -1 for errors. + """ + # All protocol names are UPPERCASE, so force upper here to avoid case issues + name = name.upper() + if name not in self.protocols: + warnings.warn( + f"The protocol requested '{name}' is not available on the spectrometer!\n" + f"Valid options are: {pp.pformat(sorted(self.protocols.keys()))}" + ) + return -1 + + # Validate protocol options (check values and remove invalid ones, with warning) + options_validated = self._validate_protocol_request(name, options) + + # Flush all previous StatusNotification replies from queue. + # This is needed as sometimes FINISHED is received before other notification that remain unconsumed + for _ in range(self._replies_by_type["StatusNotification"].qsize()): + self._replies_by_type["StatusNotification"].get_nowait() + + # Start protocol + await self.send_message(create_protocol_message(name, options_validated)) + + # Rest of protocol as bg task to avoid timeout on API reply + # See https://fastapi.tiangolo.com/tutorial/background-tasks/ + background_tasks.add_task(self._check_notifications) + + return len(self._result_folders) + + async def _check_notifications(self): + """Read all the StatusNotification and returns the dataFolder.""" + self._protocol_running = True + remote_folder = Path() + while True: + # Get all StatusNotification + status_update = await self._replies_by_type["StatusNotification"].get() + + # Parse them + status, folder = parse_status_notification(status_update) + logger.debug(f"Status update: Status is {status} and data folder={folder}") + + # When I get a finishing response end protocol and return the data folder! 
+ if status is StatusNotification.FINISHING: + remote_folder = Path(folder) + break + + if status is StatusNotification.ERROR: + # Usually device busy + warnings.warn("Error detected on running protocol -- aborting.") + await self.abort() # Abort running experiment + break + + logger.info(f"Protocol over - remote data folder is {remote_folder}") + # Add result folder to self._result_folders + if self._folder_mapper is not None: + self._result_folders.append(self._folder_mapper(remote_folder)) + else: + self._result_folders.append(remote_folder) + + self._protocol_running = False + + async def is_protocol_running(self) -> bool: + """Return True if a protocol is running, otherwise False.""" + return self._protocol_running + + async def get_result_folder(self, result_id: int | None = None) -> str: + """Get the result folder with the given ID or the last one if no ID is specified. Empty str if not existing.""" + # If no result_id get last + if result_id is None: + result_id = -1 + try: + folder = str(self._result_folders[result_id]) + except IndexError: + folder = "" + + return folder + + async def abort(self): + """Abort running command.""" + await self.send_message(create_message("Abort")) + + def list_protocols(self) -> list[str]: + """Return known protocol names.""" + return list(self.protocols.keys()) + + def _validate_protocol_request(self, protocol_name, protocol_options) -> dict: + """Ensure the validity of protocol name and options based on spectrometer reported parameters.""" + # Valid option for protocol + valid_options = self.protocols.get(protocol_name) + if valid_options is None or protocol_options is None: + return {} + + # For each option, check if valid. If not, remove it, raise warning and continue + for option_name, option_value in list(protocol_options.items()): + if option_name not in valid_options: + protocol_options.pop(option_name) + warnings.warn( + f"Invalid option {option_name} for protocol {protocol_name} -- DROPPED!" 
class SpinsolveControl(NMRControl):
    hw_device: Spinsolve  # for typing's sake

    def __init__(self, name: str, hw_device: Spinsolve):  # type:ignore
        """NMR control component: expose Spinsolve settings and protocol handling over the API."""
        super().__init__(name, hw_device)
        dev = self.hw_device
        # (path, endpoint, HTTP verb) for every device-backed route.
        device_routes = (
            ("/solvent", dev.get_solvent, "GET"),
            ("/solvent", dev.set_solvent, "PUT"),
            ("/sample-name", dev.get_sample, "GET"),
            ("/sample-name", dev.set_sample, "PUT"),
            ("/user-data", dev.get_user_data, "GET"),
            ("/user-data", dev.set_user_data, "PUT"),
            ("/protocol-list", dev.list_protocols, "GET"),
            ("/spectrum-folder", dev.get_result_folder, "GET"),
            ("/is-busy", dev.is_protocol_running, "GET"),
        )
        for path, endpoint, verb in device_routes:
            self.add_api_route(path, endpoint, methods=[verb])

    async def acquire_spectrum(self, background_tasks: BackgroundTasks, protocol="H", options=None) -> int:  # type: ignore
        """
        Acquire an NMR spectrum.

        Return an ID to be passed to get_result_folder, it will return the result folder after acquisition end.
        """
        return await self.hw_device.run_protocol(
            name=protocol, background_tasks=background_tasks, options=options
        )

    async def stop(self):
        """Abort the protocol currently running on the spectrometer."""
        return await self.hw_device.abort()
""" -import ctypes.wintypes +"""Various utility functions for the Magritek device.""" +import ctypes +from collections.abc import Callable from pathlib import Path -from typing import Callable, Union from loguru import logger from flowchem.exceptions import InvalidConfiguration -def get_my_docs_path(): - """ +def get_my_docs_path() -> Path: + """Get my docs path on Windows. + Spinsolve control software is only available on Windows, so lack of cross-platform support is unavoidable. XSD and XML schema are installed in my documents, whose location, if custom, can be obtained as follows. """ @@ -17,30 +18,30 @@ def get_my_docs_path(): csidl_personal = 5 # My Documents shgfp_type_current = 0 # Get current, not default value - buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH) - ctypes.windll.shell32.SHGetFolderPathW( + buf = ctypes.create_unicode_buffer(2000) + ctypes.windll.shell32.SHGetFolderPathW( # type: ignore None, csidl_personal, None, shgfp_type_current, buf ) return Path(buf.value) def create_folder_mapper( - remote_root: Path, local_root: Path -) -> Callable[[Union[Path, str]], Path]: - """ - Returns a function that converts path relative to remote_root to their corresponding on local_root - Used when using spinsolve on a remote PC to share the result data via a remotely mounted network drive + remote_root: str | Path, local_root: str | Path +) -> Callable[[Path | str], Path]: + """Return a function that converts path relative to remote_root to their corresponding on local_root. + + Used when using spinsolve on a remote PC to share the result data via a remotely mounted network drive. """ - def folder_mapper(path_to_be_translated: Union[Path, str]): - """ - Given a remote path converts it to the corresponding local location, or None + warning if not possible. 
- """ + def folder_mapper(path_to_be_translated: Path | str): + """Convert remote path to the corresponding local location, or None + warning if not possible.""" # Ensures it is a Path object if isinstance(path_to_be_translated, str): path_to_be_translated = Path(path_to_be_translated) nonlocal remote_root, local_root + remote_root = Path(remote_root) + local_root = Path(local_root) # If relative translate is not error # NOTE: Path.is_relative_to() is available from Py 3.9 only. NBD as this is not often used. if not path_to_be_translated.is_relative_to(remote_root): diff --git a/src/flowchem/devices/manson/__init__.py b/src/flowchem/devices/manson/__init__.py new file mode 100644 index 00000000..e9402ae7 --- /dev/null +++ b/src/flowchem/devices/manson/__init__.py @@ -0,0 +1,4 @@ +"""Manson devices.""" +from .powersupply import MansonPowerSupply + +__all__ = ["MansonPowerSupply"] diff --git a/src/flowchem/devices/manson/manson_component.py b/src/flowchem/devices/manson/manson_component.py new file mode 100644 index 00000000..bec7da98 --- /dev/null +++ b/src/flowchem/devices/manson/manson_component.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from flowchem.components.technical.power_control import PowerControl + +if TYPE_CHECKING: + from flowchem.devices import MansonPowerSupply + + +class MansonPowerControl(PowerControl): + hw_device: MansonPowerSupply # for typing's sake + + async def set_current(self, current: str): + """Set the target current to the given string in natural language.""" + return await self.hw_device.set_current(current) + + async def get_current(self) -> float: + """Return current in Ampere.""" + return await self.hw_device.get_output_current() + + async def set_voltage(self, voltage: str): + """Set the target voltage to the given string in natural language.""" + return await self.hw_device.set_voltage(voltage) + + async def get_voltage(self) -> float: + """Return current in Volt.""" + return await 
self.hw_device.get_output_voltage() + + async def power_on(self): + """Turn on temperature control.""" + return await self.hw_device.output_on() + + async def power_off(self): + """Turn off temperature control.""" + return await self.hw_device.output_off() diff --git a/flowchem/components/devices/Manson/manson.py b/src/flowchem/devices/manson/powersupply.py similarity index 62% rename from flowchem/components/devices/Manson/manson.py rename to src/flowchem/devices/manson/powersupply.py index cf50aaeb..1b17d1e4 100644 --- a/flowchem/components/devices/Manson/manson.py +++ b/src/flowchem/devices/manson/powersupply.py @@ -1,35 +1,34 @@ -""" -Original code from Manson website with edits. -No license originally specified. -""" - +"""Control module for Manson lab power supply unites.""" +# Note: Original code from Manson website with edits. No license originally specified. import re import warnings -from typing import List, Literal, Tuple, Union +from typing import Literal import aioserial from loguru import logger -from flowchem.components.properties import ActiveComponent -from flowchem.exceptions import DeviceError, InvalidConfiguration -from flowchem.units import flowchem_ureg +from flowchem import ureg +from flowchem.devices.flowchem_device import DeviceInfo +from flowchem.devices.flowchem_device import FlowchemDevice +from flowchem.devices.manson.manson_component import MansonPowerControl +from flowchem.exceptions import DeviceError +from flowchem.exceptions import InvalidConfiguration +from flowchem.people import * -class MansonPowerSupply(ActiveComponent): - """Control module for Manson Power Supply (e.g. used to power LEDs in the photo-rector or as potentiostat)""" +class MansonPowerSupply(FlowchemDevice): + """Control module for Manson Power Supply (e.g. 
used to power LEDs in the photo-rector or as potentiostat).""" MODEL_ALT_RANGE = ["HCS-3102", "HCS-3014", "HCS-3204", "HCS-3202"] - def __init__(self, aio: aioserial.AioSerial, name=None): - """ - Control class for Manson Power Supply. - """ - + def __init__(self, aio: aioserial.AioSerial, name=""): + """Control class for Manson Power Supply.""" super().__init__(name) self._serial = aio + self.model_info = "" @classmethod - def from_config(cls, port, name=None, **serial_kwargs): + def from_config(cls, port, name="", **serial_kwargs): """ Create instance from config dict. Used by server to initialize obj from config. @@ -46,26 +45,33 @@ def from_config(cls, port, name=None, **serial_kwargs): async def initialize(self): """Ensure the connection w/ device is working.""" - model_info = await self.get_info() - if model_info == "": + self.model_info = await self.get_info() + if self.model_info == "": raise DeviceError("Communication with device failed!") if await self.get_info() not in self.MODEL_ALT_RANGE: raise InvalidConfiguration( f"Device is not supported! 
[Supported models: {self.MODEL_ALT_RANGE}]" ) + def metadata(self) -> DeviceInfo: + """Return hw device metadata.""" + return DeviceInfo( + authors=[dario, jakob, wei_hsin], + maintainers=[dario], + manufacturer="Manson", + model=self.model_info, + ) + @staticmethod def _format_voltage(voltage_value: str) -> str: - """Format a voltage in the format the power supply understands""" - - voltage = flowchem_ureg(voltage_value) + """Format a voltage in the format the power supply understands.""" + voltage = ureg.Quantity(voltage_value) # Zero fill by left pad with zeros, up to three digits return str(voltage.m_as("V") * 10).zfill(3) async def _format_amperage(self, amperage_value: str) -> str: - """Format a current intensity in the format the power supply understands""" - - current = flowchem_ureg(amperage_value) + """Format a current intensity in the format the power supply understands.""" + current = ureg.Quantity(amperage_value) multiplier = 100 if await self.get_info() in self.MODEL_ALT_RANGE else 10 return str(current.m_as("A") * multiplier).zfill(3) @@ -73,8 +79,7 @@ async def _send_command( self, command: str, ) -> str: - """Internal function to send command and read reply.""" - + """Send command and read reply.""" # Flush buffer self._serial.reset_input_buffer() @@ -90,7 +95,7 @@ async def _send_command( return "\n".join(reply_string) async def get_info(self) -> str: - """Returns the model name of the connected device""" + """Return the model name of the connected device.""" response = await self._send_command("GMOD") pattern = re.compile(r".*\d{4}\s") @@ -103,61 +108,61 @@ async def get_info(self) -> str: return "" async def output_on(self) -> bool: - """Turn on electricity on output""" + """Turn on electricity on output.""" response = await self._send_command("SOUT0") return response == "OK" async def output_off(self) -> bool: - """Turn off electricity on output""" + """Turn off electricity on output.""" response = await self._send_command("SOUT1") return 
response == "OK" async def get_output_read( self, - ) -> Tuple[str, str, Union[Literal["CC"], Literal["CV"], Literal["NN"]]]: - """Returns actual values of voltage, current and mode""" + ) -> tuple[float, float, Literal["CC"] | Literal["CV"] | Literal["NN"]]: + """Return actual values of voltage, current and mode.""" response = await self._send_command("GETD") try: - volt = float(response[0:4]) / 100 * flowchem_ureg.volt - curr = float(response[4:8]) / 100 * flowchem_ureg.ampere + volt = float(response[0:4]) / 100 * ureg.volt + curr = float(response[4:8]) / 100 * ureg.ampere except ValueError: warnings.warn("Invalid values from device!") - return "0 V", "0 A", "NN" + return 0, 0, "NN" if response[8:9] == "0": - return str(volt), str(curr), "CV" + return volt.m_as("V"), curr.m_as("A"), "CV" if response[8:9] == "1": - return str(volt), str(curr), "CC" - return str(volt), str(curr), "NN" + return volt.m_as("V"), curr.m_as("A"), "CC" + return volt.m_as("V"), curr.m_as("A"), "NN" - async def get_output_voltage(self) -> str: - """Returns output voltage in Volt""" + async def get_output_voltage(self) -> float: + """Return output voltage in Volt.""" voltage, _, _ = await self.get_output_read() return voltage - async def get_output_current(self) -> str: - """Returns output current in Ampere""" + async def get_output_current(self) -> float: + """Return output current in ampere.""" _, current, _ = await self.get_output_read() return current async def get_output_mode(self) -> Literal["CC", "CV", "NN"]: - """Returns output mode: either current control (CC) or voltage control (CV)""" + """Return output mode: either current control (CC) or voltage control (CV).""" _, _, mode = await self.get_output_read() return mode async def get_output_power(self) -> str: - """Returns output power in watts""" - voltage, intensity, _ = await self.get_output_read() - power = flowchem_ureg(voltage) * flowchem_ureg(intensity) + """Return output power in watts.""" + voltage, current, _ = await 
self.get_output_read() + power = ureg.Quantity(f"{voltage} V") * ureg.Quantity(f"{current} A") return str(power.to("W")) - async def get_max(self) -> Tuple[str, str]: - """Returns maximum voltage and current, as tuple, or False.""" + async def get_max(self) -> tuple[str, str]: + """Return maximum voltage and current, as tuple, or False.""" response = await self._send_command("GMAX") - max_v_raw = int(response[0:3]) * flowchem_ureg.volt - max_c_raw = int(response[3:6]) * flowchem_ureg.ampere + max_v_raw = int(response[0:3]) * ureg.volt + max_c_raw = int(response[3:6]) * ureg.ampere max_v = max_v_raw / 10 # Some models report current as 0.1 A others at 0.01 A @@ -165,14 +170,14 @@ async def get_max(self) -> Tuple[str, str]: divider = 100 if model in self.MODEL_ALT_RANGE else 10 return str(max_v), str(max_c_raw / divider) - async def get_setting(self) -> Tuple[str, str]: - """Returns current setting as tuple (voltage, current).""" + async def get_setting(self) -> tuple[str, str]: + """Return current setting as tuple (voltage, current).""" response = await self._send_command("GETS") # RegEx to only keep numbers response = re.sub(r"\D", "", response) - v_setting = float(response[0:3]) / 10 * flowchem_ureg.volt - c_setting = float(response[3:6]) * flowchem_ureg.ampere + v_setting = float(response[0:3]) / 10 * ureg.volt + c_setting = float(response[3:6]) * ureg.ampere if await self.get_info() in self.MODEL_ALT_RANGE: c_setting /= 10 @@ -180,21 +185,19 @@ async def get_setting(self) -> Tuple[str, str]: return str(v_setting), str(c_setting / 10) async def set_voltage(self, voltage: str) -> bool: - """Set target voltage""" - + """Set target voltage.""" cmd = "VOLT" + self._format_voltage(voltage) response = await self._send_command(cmd) return response == "OK" async def set_current(self, current: str) -> bool: - """Set target current""" - + """Set target current.""" cmd = "CURR" + await self._format_amperage(current) response = await self._send_command(cmd) return response 
== "OK" - async def set_all_preset(self, preset: List[Tuple[str, str]]) -> bool: - """Set all 3 preset memory position with voltage/current values""" + async def set_all_preset(self, preset: list[tuple[str, str]]) -> bool: + """Set all 3 preset memory position with voltage/current values.""" command = "PROM" for set_values in preset: @@ -208,7 +211,7 @@ async def set_all_preset(self, preset: List[Tuple[str, str]]) -> bool: return True async def set_preset(self, index: int, voltage: str, current: str) -> bool: - """Set preset position index with the provided values of voltage and current""" + """Set preset position index with the provided values of voltage and current.""" preset = await self.get_all_preset() try: volt_str = self._format_voltage(voltage) @@ -219,8 +222,8 @@ async def set_preset(self, index: int, voltage: str, current: str) -> bool: return False return await self.set_all_preset(preset) - async def get_all_preset(self) -> List[Tuple[str, str]]: - """Get voltage and current for all 3 memory preset position""" + async def get_all_preset(self) -> list[tuple[str, str]]: + """Get voltage and current for all 3 memory preset position.""" response = await self._send_command("GETM") response_lines = response.split("\r") @@ -245,13 +248,13 @@ async def get_all_preset(self) -> List[Tuple[str, str]]: if await self.get_info() in self.MODEL_ALT_RANGE: voltage = [x / 10 for x in voltage] - voltage_str = [str(flowchem_ureg.Quantity(x, "V")) for x in voltage] - current_str = [str(flowchem_ureg.Quantity(x, "A")) for x in current] + voltage_str = [str(ureg.Quantity(x, "V")) for x in voltage] + current_str = [str(ureg.Quantity(x, "A")) for x in current] return list(zip(voltage_str, current_str)) - async def get_preset(self, index) -> Tuple[str, str]: - """Get voltage and current for given preset position [0..2]""" + async def get_preset(self, index) -> tuple[str, str]: + """Get voltage and current for given preset position [0..2].""" all_preset = await 
self.get_all_preset() try: return all_preset[index] @@ -260,7 +263,7 @@ async def get_preset(self, index) -> Tuple[str, str]: return "", "" async def run_preset(self, index: int) -> bool: - """Set Voltage and Current using values saved in one of the three memory locations: 0, 1 or 2""" + """Set Voltage and Current using values saved in one of the three memory locations: 0, 1 or 2.""" if not 0 <= int(index) < 3: warnings.warn(f"Invalid preset value: <{index}>!") return False @@ -269,36 +272,35 @@ async def run_preset(self, index: int) -> bool: return response == "OK" async def remove_protection(self) -> bool: - """I guess it removes overvoltage protection?""" + """Remove overvoltage protection. Maybe.""" response = await self._send_command("SPRO0") return bool(response) async def add_protection(self) -> bool: - """I guess it adds overvoltage protection?""" + """Add overvoltage protection. Maybe.""" response = await self._send_command("SPRO1") return bool(response) async def set_voltage_and_current(self, voltage: str, current: str): - """Convenience method to set both voltage and current""" + """Set both voltage and current.""" await self.set_voltage(voltage) await self.set_current(current) - def get_router(self): - """Creates an APIRouter for this MansonPowerSupply instance.""" - from fastapi import APIRouter - - router = APIRouter() - router.add_api_route("/output/on", self.output_on, methods=["GET"]) - router.add_api_route("/output/off", self.output_off, methods=["GET"]) - router.add_api_route("/output/power", self.get_output_power, methods=["GET"]) - router.add_api_route("/output/mode", self.get_output_mode, methods=["GET"]) - router.add_api_route("/voltage/read", self.get_output_voltage, methods=["GET"]) - router.add_api_route("/voltage/max", self.set_voltage, methods=["PUT"]) - router.add_api_route("/current/read", self.get_output_current, methods=["GET"]) - router.add_api_route("/current/max", self.set_current, methods=["PUT"]) - 
router.add_api_route("/protection/add", self.add_protection, methods=["GET"]) - router.add_api_route( - "/protection/remove", self.remove_protection, methods=["GET"] - ) - - return router + def get_components(self): + """Return an TemperatureControl component.""" + return (MansonPowerControl("power-control", self),) + + # def get_router(self, prefix: str | None = None): + # """Create an APIRouter for this MansonPowerSupply instance.""" + # router = super().get_router() + # + # router.add_api_route("/on", self.output_on, methods=["GET"]) + # router.add_api_route("/off", self.output_off, methods=["GET"]) + # router.add_api_route("/output/power", self.get_output_power, methods=["GET"]) + # router.add_api_route("/output/mode", self.get_output_mode, methods=["GET"]) + # router.add_api_route("/voltage/read", self.get_output_voltage, methods=["GET"]) + # router.add_api_route("/voltage/max", self.set_voltage, methods=["PUT"]) + # router.add_api_route("/current/read", self.get_output_current, methods=["GET"]) + # router.add_api_route("/current/max", self.set_current, methods=["PUT"]) + # + # return router diff --git a/src/flowchem/devices/mettlertoledo/__init__.py b/src/flowchem/devices/mettlertoledo/__init__.py new file mode 100644 index 00000000..410d6739 --- /dev/null +++ b/src/flowchem/devices/mettlertoledo/__init__.py @@ -0,0 +1,4 @@ +"""MettlerToledo devices.""" +from .icir import IcIR + +__all__ = ["IcIR"] diff --git a/src/flowchem/devices/mettlertoledo/icir.py b/src/flowchem/devices/mettlertoledo/icir.py new file mode 100644 index 00000000..ea49a154 --- /dev/null +++ b/src/flowchem/devices/mettlertoledo/icir.py @@ -0,0 +1,340 @@ +"""Async implementation of FlowIR.""" +import asyncio +import datetime +from pathlib import Path + +from asyncua import Client +from asyncua import ua +from loguru import logger +from pydantic import BaseModel + +from flowchem.components.analytics.ir_control import IRSpectrum +from flowchem.devices.flowchem_device import DeviceInfo +from 
flowchem.devices.flowchem_device import FlowchemDevice +from flowchem.devices.mettlertoledo.icir_control import IcIRControl +from flowchem.exceptions import DeviceError +from flowchem.people import * + + +class ProbeInfo(BaseModel): + """Dictionary returned from iCIR with probe info.""" + + spectrometer: str + spectrometer_SN: int + probe_SN: int + detector: str + apodization: str + ip_address: str + probe_type: str + sampling_interval: str + resolution: int + scan_option: str + gain: int + + +class IcIR(FlowchemDevice): + """Object to interact with the iCIR software controlling the FlowIR and ReactIR.""" + + metadata = DeviceInfo( + authors=[dario, jakob, wei_hsin], + maintainers=[dario], + manufacturer="Mettler-Toledo", + model="iCIR", + version="", + ) + + iC_OPCUA_DEFAULT_SERVER_ADDRESS = "opc.tcp://localhost:62552/iCOpcUaServer" + _supported_versions = {"7.1.91.0"} + SOFTWARE_VERSION = "ns=2;s=Local.iCIR.SoftwareVersion" + CONNECTION_STATUS = "ns=2;s=Local.iCIR.ConnectionStatus" + PROBE_DESCRIPTION = "ns=2;s=Local.iCIR.Probe1.ProbeDescription" + PROBE_STATUS = "ns=2;s=Local.iCIR.Probe1.ProbeStatus" + LAST_SAMPLE_TIME = "ns=2;s=Local.iCIR.Probe1.LastSampleTime" + SAMPLE_COUNT = "ns=2;s=Local.iCIR.Probe1.SampleCount" + SPECTRA_TREATED = "ns=2;s=Local.iCIR.Probe1.SpectraTreated" + SPECTRA_RAW = "ns=2;s=Local.iCIR.Probe1.SpectraRaw" + SPECTRA_BACKGROUND = "ns=2;s=Local.iCIR.Probe1.SpectraBackground" + START_EXPERIMENT = "ns=2;s=Local.iCIR.Probe1.Methods.Start Experiment" + STOP_EXPERIMENT = "ns=2;s=Local.iCIR.Probe1.Methods.Stop" + METHODS = "ns=2;s=Local.iCIR.Probe1.Methods" + + counter = 0 + + def __init__(self, template: str, url="", name=""): + """Initiate connection with OPC UA server.""" + super().__init__(name) + + # Default (local) url if none provided + if not url: + url = self.iC_OPCUA_DEFAULT_SERVER_ADDRESS + self.opcua = Client(url) + + self._template = template + + async def initialize(self): + """Initialize, check connection and start acquisition.""" 
+ try: + await self.opcua.connect() + except asyncio.TimeoutError as timeout_error: + raise DeviceError( + f"Could not connect to FlowIR on {self.opcua.server_url}!" + ) from timeout_error + + # Ensure iCIR version is supported + self.metadata.version = await self.opcua.get_node( + self.SOFTWARE_VERSION + ).get_value() # "7.1.91.0" + + await self.check_version() + logger.debug("FlowIR initialized!") + + if not await self.is_iCIR_connected(): + raise DeviceError("Device not connected! Check iCIR...") + + # Start acquisition! Ensures the device is ready when a spectrum is needed + await self.start_experiment(name="Flowchem", template=self._template) + probe = await self.probe_info() + self.metadata.additional_info = probe.dict() + + def is_local(self): + """Return true if the server is on the same machine running the python code.""" + return any( + x in self.opcua.server_url.netloc for x in ("localhost", "127.0.0.1") + ) + + async def check_version(self): + """Check if iCIR is installed and open and if the version is supported.""" + try: + if self.metadata.version not in self._supported_versions: + logger.warning( + f"The current version of iCIR [self.version] has not been tested!" + f"Pleas use one of the supported versions: {self._supported_versions}" + ) + except ua.UaStatusCodeError as error: # iCIR app closed + raise DeviceError( + "iCIR app not installed or closed or no instrument available!" + ) from error + + # noinspection PyPep8Naming + async def is_iCIR_connected(self) -> bool: + """Check connection with instrument.""" + return await self.opcua.get_node(self.CONNECTION_STATUS).get_value() + + async def probe_info(self) -> ProbeInfo: + """Return FlowIR probe information.""" + probe_info = await self.opcua.get_node(self.PROBE_DESCRIPTION).get_value() + return self.parse_probe_info(probe_info) + + async def probe_status(self): + """Return current probe status. 
Possible values are 'Running', 'Not running' (+ more?).""" + return await self.opcua.get_node(self.PROBE_STATUS).get_value() + + async def last_sample_time(self) -> datetime.datetime: + """Return date/time of the latest scan.""" + return await self.opcua.get_node(self.LAST_SAMPLE_TIME).get_value() + + async def sample_count(self) -> int | None: + """Sample count (integer autoincrement) watch for changes to ensure latest spectrum is recent.""" + return await self.opcua.get_node(self.SAMPLE_COUNT).get_value() + + @staticmethod + def _normalize_template_name(template_name) -> str: + """Add `.iCIRTemplate` extension to string if not already present.""" + return ( + template_name + if template_name.endswith(".iCIRTemplate") + else template_name + ".iCIRTemplate" + ) + + @staticmethod + def is_template_name_valid(template_name: str) -> bool: + r""" + Check template name validity. For the template folder location read below. + + From Mettler Toledo docs: + You can use the Start method to create and run a new experiment in one of the iC analytical applications + (i.e. iC IR, iC FBRM, iC Vision, iC Raman). Note that you must provide the name of an existing experiment + template file that can be used as a basis for the new experiment. + The template file must be located in a specific folder on the iC OPC UA Server computer. + This is usually C:\\ProgramData\\METTLER TOLEDO\\iC OPC UA Server\\1.2\\Templates. 
+ """ + template_dir = Path( + r"C:\ProgramData\METTLER TOLEDO\iC OPC UA Server\1.2\Templates" + ) + if not template_dir.exists() or not template_dir.is_dir(): + logger.warning("iCIR template folder not found on the local PC!") + return False + + # Ensures the name has been provided with no extension (common mistake) + template_name = IcIR._normalize_template_name(template_name) + + return any( + existing_tmpl.name == template_name + for existing_tmpl in template_dir.glob("*.iCIRTemplate") + ) + + @staticmethod + def parse_probe_info(probe_info_reply: str) -> ProbeInfo: + """Convert the device reply into a ProbeInfo dictionary. + + Example probe_info_reply reply is: + 'FlowIR; SN: 2989; Detector: DTGS; Apodization: HappGenzel; IP Address: 192.168.1.2; + Probe: DiComp (Diamond); SN: 14570173; Interface: FlowIR™ Sensor; Sampling: 4000 to 650 cm-1; + Resolution: 8; Scan option: AutoSelect; Gain: 232;' + """ + fields = probe_info_reply.split(";") + probe_info = { + "spectrometer": fields[0], + "spectrometer_SN": fields[1].split(": ")[1], + "probe_SN": fields[6].split(": ")[1], + } + + # Use aliases, i.e. translate API names (left) to dict key (right) + translate_attributes = { + "Detector": "detector", + "Apodization": "apodization", + "IP Address": "ip_address", + "Probe": "probe_type", + "Sampling": "sampling_interval", + "Resolution": "resolution", + "Scan option": "scan_option", + "Gain": "gain", + } + for element in fields: + if ":" in element: + piece = element.split(":") + if piece[0].strip() in translate_attributes: + probe_info[translate_attributes[piece[0].strip()]] = piece[ + 1 + ].strip() + + return probe_info # type: ignore + + @staticmethod + async def _wavenumber_from_spectrum_node(node) -> list[float]: + """Get the X-axis value of a spectrum. This is necessary as they change e.g. 
with resolution.""" + node_property = await node.get_properties() + x_axis = await node_property[0].get_value() + return x_axis.AxisSteps + + @staticmethod + async def spectrum_from_node(node) -> IRSpectrum: + """Given a Spectrum node returns it as IRSpectrum.""" + try: + intensity = await node.get_value() + wavenumber = await IcIR._wavenumber_from_spectrum_node(node) + return IRSpectrum(wavenumber=wavenumber, intensity=intensity) + + except ua.uaerrors.BadOutOfService: + return IRSpectrum(wavenumber=[], intensity=[]) + + async def last_spectrum_treated(self) -> IRSpectrum: + """Return an IRSpectrum element for the last acquisition.""" + return await IcIR.spectrum_from_node(self.opcua.get_node(self.SPECTRA_TREATED)) + + async def last_spectrum_raw(self) -> IRSpectrum: + """RAW result latest scan.""" + return await IcIR.spectrum_from_node(self.opcua.get_node(self.SPECTRA_RAW)) + + async def last_spectrum_background(self) -> IRSpectrum: + """RAW result latest scan.""" + return await IcIR.spectrum_from_node( + self.opcua.get_node(self.SPECTRA_BACKGROUND) + ) + + async def start_experiment( + self, template: str, name: str = "Unnamed flowchem exp." + ): + r"""Start an experiment on iCIR. + + Args: + template: name of the experiment template, should be in the Templtates folder on the PC running iCIR. + That usually is C:\\ProgramData\\METTLER TOLEDO\\iC OPC UA Server\1.2\\Templates + name: experiment name. + """ + template = self._normalize_template_name(template) + if self.is_local() and self.is_template_name_valid(template) is False: + raise DeviceError( + f"Cannot start template {template}: name not valid! Check if is in: " + r"C:\ProgramData\METTLER TOLEDO\iC OPC UA Server\1.2\Templates" + ) + if await self.probe_status() == "Running": + logger.warning( + "I was asked to start an experiment while a current experiment is already running!" + "I will have to stop that first! 
Sorry for that :)" + ) + # Stop running experiment and wait for the spectrometer to be ready + await self.stop_experiment() + await self.wait_until_idle() + + start_xp_nodeid = self.opcua.get_node(self.START_EXPERIMENT).nodeid + method_parent = self.opcua.get_node(self.METHODS) + try: + # Collect_bg does not seem to work in automation, set to false and do not expose in start_experiment()! + collect_bg = False + await method_parent.call_method(start_xp_nodeid, name, template, collect_bg) + except ua.uaerrors.Bad as error: + raise DeviceError( + "The experiment could not be started!\n" + "Check iCIR status and close any open experiment." + ) from error + logger.info(f"FlowIR experiment {name} started with template {template}!") + return True + + async def stop_experiment(self): + """ + Stop the experiment currently running. + + Note: the call does not make the instrument idle: you need to wait for the current scan to end! + """ + method_parent = self.opcua.get_node(self.METHODS) + stop_nodeid = self.opcua.get_node(self.STOP_EXPERIMENT).nodeid + await method_parent.call_method(stop_nodeid) + + async def wait_until_idle(self): + """Wait until no experiment is running.""" + while await self.probe_status() == "Running": + await asyncio.sleep(0.2) + + def components(self): + """Return an IRSpectrometer component.""" + return (IcIRControl("ir-control", self),) + + +if __name__ == "__main__": + ... 
+ # async def main(): + # opcua_client = Client( + # url=FlowIR.iC_OPCUA_DEFAULT_SERVER_ADDRESS.replace("localhost", "BSMC-YMEF002121") + # ) + # + # async with FlowIR(opcua_client) as ir_spectrometer: + # await ir_spectrometer.check_version() + # + # if await ir_spectrometer.is_iCIR_connected(): + # print("FlowIR connected!") + # else: + # raise ConnectionError("FlowIR not connected :(") + # + # template_name = "15_sec_integration.iCIRTemplate" + # await ir_spectrometer.start_experiment( + # name="reaction_monitoring", template=template_name + # ) + # + # spectrum = await ir_spectrometer.last_spectrum_treated() + # while len(spectrum.intensity) == 0: + # spectrum = await ir_spectrometer.last_spectrum_treated() + # + # for x in range(3): + # spectra_count = await ir_spectrometer.sample_count() + # + # while await ir_spectrometer.sample_count() == spectra_count: + # await asyncio.sleep(1) + # + # print("New spectrum!") + # spectrum = await ir_spectrometer.last_spectrum_treated() + # print(spectrum) + # + # await ir_spectrometer.stop_experiment() + # + # asyncio.run(main()) diff --git a/src/flowchem/devices/mettlertoledo/icir_control.py b/src/flowchem/devices/mettlertoledo/icir_control.py new file mode 100644 index 00000000..f34e469f --- /dev/null +++ b/src/flowchem/devices/mettlertoledo/icir_control.py @@ -0,0 +1,38 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from flowchem.components.analytics.ir_control import IRControl +from flowchem.components.analytics.ir_control import IRSpectrum + +if TYPE_CHECKING: + from .icir import IcIR + + +class IcIRControl(IRControl): + hw_device: IcIR # for typing's sake + + def __init__(self, name: str, hw_device: IcIR): # type:ignore + """HPLC Control component. 
Sends methods, starts run, do stuff.""" + super().__init__(name, hw_device) + self.add_api_route("/spectrum-count", self.spectrum_count, methods=["GET"]) + + async def acquire_spectrum(self, treated: bool = True) -> IRSpectrum: + """ + Acquire an IR spectrum. + + Background subtraction performed if treated=True, else a raw scan is returned. + """ + if treated: + return await self.hw_device.last_spectrum_treated() + else: + return await self.hw_device.last_spectrum_raw() + + async def spectrum_count(self) -> int: + if (count := await self.hw_device.sample_count()) is not None: + return count + else: + return -1 + + async def stop(self): + return await self.hw_device.stop_experiment() diff --git a/src/flowchem/devices/phidgets/__init__.py b/src/flowchem/devices/phidgets/__init__.py new file mode 100644 index 00000000..f00623a2 --- /dev/null +++ b/src/flowchem/devices/phidgets/__init__.py @@ -0,0 +1,4 @@ +"""Phidget-based devices.""" +from .pressure_sensor import PhidgetPressureSensor + +__all__ = ["PhidgetPressureSensor"] diff --git a/flowchem/components/devices/Phidgets/phidget.py b/src/flowchem/devices/phidgets/pressure_sensor.py similarity index 56% rename from flowchem/components/devices/Phidgets/phidget.py rename to src/flowchem/devices/phidgets/pressure_sensor.py index de608dd8..8b08abc9 100644 --- a/flowchem/components/devices/Phidgets/phidget.py +++ b/src/flowchem/devices/phidgets/pressure_sensor.py @@ -1,47 +1,42 @@ -""" Use Phidgets to control lab devices. So far, only 4..20mA interface for Swagelock Pressure-sensor """ +"""Use Phidgets to control lab devices.
So far, only 4..20mA interface for Swagelock Pressure-sensor.""" import time -import warnings -from typing import Optional, Tuple +import pint from loguru import logger -from flowchem.components.properties import Sensor +from flowchem.devices.flowchem_device import DeviceInfo +from flowchem.devices.flowchem_device import FlowchemDevice +from flowchem.devices.phidgets.pressure_sensor_component import ( + PhidgetPressureSensorComponent, +) +from flowchem.people import * try: from Phidget22.Devices.CurrentInput import CurrentInput, PowerSupply from Phidget22.Devices.Log import Log, LogLevel from Phidget22.PhidgetException import PhidgetException + + HAS_PHIDGET = True except ImportError: HAS_PHIDGET = False -else: - try: - Log.enable(LogLevel.PHIDGET_LOG_INFO, "phidget.log") - except (OSError, FileNotFoundError): - warnings.warn( - "Phidget22 package installed but Phidget library not found!\n" - "Get it from https://www.phidgets.com/docs/Operating_System_Support" - ) - HAS_PHIDGET = False - except PhidgetException as phidget_e: - HAS_PHIDGET = "Logging already enabled" in phidget_e.description - else: - HAS_PHIDGET = True -from flowchem.exceptions import DeviceError, InvalidConfiguration -from flowchem.units import flowchem_ureg + +from flowchem.exceptions import InvalidConfiguration +from flowchem import ureg -class PressureSensor(Sensor): - """Use a Phidget current input to translate a Swagelock 4..20mA signal to the corresponding pressure value""" +class PhidgetPressureSensor(FlowchemDevice): + """Use a Phidget current input to translate a Swagelock 4..20mA signal to the corresponding pressure value.""" def __init__( self, - pressure_range: Tuple[str, str] = ("0 bar", "10 bar"), - vint_serial_number: int = None, - vint_channel: int = None, + pressure_range: tuple[str, str] = ("0 bar", "10 bar"), + vint_serial_number: int = -1, + vint_channel: int = -1, phidget_is_remote: bool = False, - name: Optional[str] = None, + name: str = "", ): + """Initialize PressureSensor 
with the given pressure range (sensor-specific!).""" super().__init__(name=name) if not HAS_PHIDGET: raise InvalidConfiguration( @@ -50,15 +45,15 @@ def __init__( # Sensor range sensor_min, sensor_max = pressure_range - self._min_pressure = flowchem_ureg(sensor_min) - self._max_pressure = flowchem_ureg(sensor_max) + self._min_pressure = ureg.Quantity(sensor_min) + self._max_pressure = ureg.Quantity(sensor_max) # current meter self.phidget = CurrentInput() - # Ensure connection with the right sensor (ideally these are from graph) - if vint_serial_number: + # Ensure connection with the right sensor (ideally these are from config) + if vint_serial_number > -1: self.phidget.setDeviceSerialNumber(vint_serial_number) - if vint_channel: + if vint_channel > -1: self.phidget.setChannel(vint_channel) # Fancy remote sensors? @@ -72,45 +67,44 @@ def __init__( try: self.phidget.openWaitForAttachment(1000) logger.debug("Pressure sensor connected!") - except PhidgetException as phdget_error: - raise DeviceError( - "Cannot connect to sensor! Check settings..." - ) from phdget_error + except PhidgetException as phidget_error: + raise InvalidConfiguration( + "Cannot connect to sensor! Check it is not already opened elsewhere and settings..." 
+ ) # Set power supply to 24V self.phidget.setPowerSupply(PowerSupply.POWER_SUPPLY_24V) self.phidget.setDataInterval(200) # 200ms + self.metadata = DeviceInfo( + authors=[dario, jakob, wei_hsin], + maintainers=[dario], + manufacturer="Phidget", + model="VINT", + serial_number=vint_serial_number, + ) + def __del__(self): + """Ensure connection closure upon deletion.""" self.phidget.close() - def get_router(self): - """Creates an APIRouter for this object.""" - from fastapi import APIRouter - - router = APIRouter() - router.add_api_route("/attached", self.is_attached, methods=["GET"]) - router.add_api_route("/pressure", self.read_pressure, methods=["GET"]) - - return router - def is_attached(self) -> bool: - """Whether the device is connected""" + """Whether the device is connected.""" return bool(self.phidget.getAttached()) - def _current_to_pressure(self, current_in_ampere: float) -> str: - """Converts current reading into pressure value""" + def _current_to_pressure(self, current_in_ampere: float) -> pint.Quantity: + """Convert current reading into pressure value.""" mill_amp = current_in_ampere * 1000 # minP..maxP is 4..20mA pressure_reading = self._min_pressure + ((mill_amp - 4) / 16) * ( self._max_pressure - self._min_pressure ) - logger.debug(f"Read pressure {pressure_reading} barg!") - return str(pressure_reading * flowchem_ureg.bar) + logger.debug(f"Read pressure {pressure_reading}!") + return pressure_reading - def read_pressure(self) -> str: + def read_pressure(self) -> pint.Quantity: # type: ignore """ - Read pressure from sensor, in bar. + Read pressure from the sensor and returns it as pint.Quantity. This is the main class method, and it never fails, but rather return None. Why? 
@@ -124,16 +118,20 @@ def read_pressure(self) -> str: """ try: current = self.phidget.getCurrent() - logger.debug(f"Current pressure: {current}") + logger.debug(f"Actual current: {current}") except PhidgetException: - warnings.warn("Cannot read pressure!") - return "" + logger.error("Cannot read pressure!") + return 0 * ureg.bar else: return self._current_to_pressure(current) + def components(self): + """Return a PressureSensor component.""" + return (PhidgetPressureSensorComponent("pressure-sensor", self),) + if __name__ == "__main__": - test = PressureSensor( + test = PhidgetPressureSensor( pressure_range=("0 bar", "25 bar"), vint_serial_number=627768, vint_channel=0, diff --git a/src/flowchem/devices/phidgets/pressure_sensor_component.py b/src/flowchem/devices/phidgets/pressure_sensor_component.py new file mode 100644 index 00000000..a2fca4d9 --- /dev/null +++ b/src/flowchem/devices/phidgets/pressure_sensor_component.py @@ -0,0 +1,21 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from flowchem.devices.flowchem_device import FlowchemDevice + +if TYPE_CHECKING: + from .pressure_sensor import PhidgetPressureSensor +from flowchem.components.sensors.pressure_sensor import PressureSensor + + +class PhidgetPressureSensorComponent(PressureSensor): + hw_device: PhidgetPressureSensor # just for typing + + def __init__(self, name: str, hw_device: FlowchemDevice): + """Phidget pressure sensor component.""" + super().__init__(name, hw_device) + + async def read_pressure(self, units: str = "bar"): + """Read from sensor, result to be expressed in units (optional).""" + return self.hw_device.read_pressure().m_as(units) diff --git a/src/flowchem/devices/vapourtec/__init__.py b/src/flowchem/devices/vapourtec/__init__.py new file mode 100644 index 00000000..b7efb46e --- /dev/null +++ b/src/flowchem/devices/vapourtec/__init__.py @@ -0,0 +1,4 @@ +"""Vapourtec devices.""" +from .r4_heater import R4Heater + +__all__ = ["R4Heater"] diff --git
a/src/flowchem/devices/vapourtec/r4_heater.py b/src/flowchem/devices/vapourtec/r4_heater.py new file mode 100644 index 00000000..e2c3733e --- /dev/null +++ b/src/flowchem/devices/vapourtec/r4_heater.py @@ -0,0 +1,186 @@ +""" Control module for the Vapourtec R4 heater """ +from __future__ import annotations + +import time +from collections import namedtuple +from collections.abc import Iterable + +import aioserial +import pint +from loguru import logger + +from flowchem import ureg +from flowchem.components.technical.temperature_control import TempRange +from flowchem.devices.flowchem_device import DeviceInfo +from flowchem.devices.flowchem_device import FlowchemDevice +from flowchem.devices.vapourtec.r4_heater_channel_control import R4HeaterChannelControl +from flowchem.exceptions import InvalidConfiguration +from flowchem.people import * + +try: + from flowchem_vapourtec import VapourtecR4Commands + + HAS_VAPOURTEC_COMMANDS = True +except ImportError: + HAS_VAPOURTEC_COMMANDS = False + + +class R4Heater(FlowchemDevice): + """R4 reactor heater control class.""" + + DEFAULT_CONFIG = { + "timeout": 0.1, + "baudrate": 19200, + "parity": aioserial.PARITY_NONE, + "stopbits": aioserial.STOPBITS_ONE, + "bytesize": aioserial.EIGHTBITS, + } + + ChannelStatus = namedtuple("ChannelStatus", "state, temperature") + + def __init__( + self, + name: str = "", + min_temp: float | list[float] = -100, + max_temp: float | list[float] = 250, + **config, + ): + super().__init__(name) + # Set min and max temp for all 4 channels + if not isinstance(min_temp, Iterable): + min_temp = [min_temp] * 4 + if not isinstance(max_temp, Iterable): + max_temp = [max_temp] * 4 + assert len(min_temp) == len(max_temp) == 4 + self._min_t = min_temp + self._max_t = max_temp + + if not HAS_VAPOURTEC_COMMANDS: + raise InvalidConfiguration( + "You tried to use a Vapourtec device but the relevant commands are missing!\n" + "Unfortunately, we cannot publish those as they were provided under NDA.\n" + "Contact 
Vapourtec for further assistance." + ) + + self.cmd = VapourtecR4Commands() + + # Merge default settings, including serial, with provided ones. + configuration = R4Heater.DEFAULT_CONFIG | config + try: + self._serial = aioserial.AioSerial(**configuration) + except aioserial.SerialException as ex: + raise InvalidConfiguration( + f"Cannot connect to the R4Heater on the port <{config.get('port')}>" + ) from ex + + self.metadata = DeviceInfo( + authors=[dario, jakob, wei_hsin], + maintainers=[dario], + manufacturer="Vapourtec", + model="R4 reactor module", + ) + + async def initialize(self): + """Ensure connection.""" + self.metadata.version = await self.version() + logger.info(f"Connected with R4Heater version {self.metadata.version}") + + async def _write(self, command: str): + """Writes a command to the pump""" + cmd = command + "\r\n" + await self._serial.write_async(cmd.encode("ascii")) + logger.debug(f"Sent command: {repr(command)}") + + async def _read_reply(self) -> str: + """Reads the pump reply from serial communication.""" + reply_string = await self._serial.readline_async() + logger.debug(f"Reply received: {reply_string.decode('ascii').rstrip()}") + return reply_string.decode("ascii") + + async def write_and_read_reply(self, command: str) -> str: + """Sends a command to the pump, read the replies and returns it, optionally parsed.""" + self._serial.reset_input_buffer() + await self._write(command) + response = await self._read_reply() + + if not response: + raise InvalidConfiguration("No response received from heating module!") + + return response.rstrip() + + async def version(self): + """Get firmware version.""" + return await self.write_and_read_reply(self.cmd.VERSION) + + async def set_temperature(self, channel, temperature: pint.Quantity): + """Set temperature to channel.""" + cmd = self.cmd.SET_TEMPERATURE.format( + channel=channel, temperature_in_C=round(temperature.m_as("°C")) + ) + await self.write_and_read_reply(cmd) + # Set temperature implies 
channel on + await self.power_on(channel) + # Verify it is not unplugged + status = await self.get_status(channel) + if status.state == "U": + logger.error( + f"TARGET CHANNEL {channel} UNPLUGGED! (Note: numbering starts at 0)" + ) + + async def get_status(self, channel) -> ChannelStatus: + """Get status from channel.""" + # This command is a bit fragile for unknown reasons. + failure = 0 + while True: + try: + raw_status = await self.write_and_read_reply( + self.cmd.GET_STATUS.format(channel=channel) + ) + return R4Heater.ChannelStatus(raw_status[:1], raw_status[1:]) + except InvalidConfiguration as ex: + failure += 1 + # Allows 3 failures cause the R4 is choosy at times... + if failure > 3: + raise ex + else: + continue + + async def get_temperature(self, channel): + """Get temperature (in Celsius) from channel.""" + state = await self.get_status(channel) + return None if state.temperature == "281.2" else state.temperature + + async def power_on(self, channel): + """Turn on channel.""" + await self.write_and_read_reply(self.cmd.POWER_ON.format(channel=channel)) + + async def power_off(self, channel): + """Turn off channel.""" + await self.write_and_read_reply(self.cmd.POWER_OFF.format(channel=channel)) + + def components(self): + temp_limits = { + ch_num: TempRange(min=ureg.Quantity(t[0]), max=ureg.Quantity(t[1])) + for ch_num, t in enumerate(zip(self._min_t, self._max_t)) + } + return [ + R4HeaterChannelControl(f"reactor{n+1}", self, n, temp_limits[n]) + for n in range(4) + ] + + +if __name__ == "__main__": + import asyncio + + heat = R4Heater(port="COM1") + + async def main(heat): + """test function""" + await heat.initialize() + # Get reactors + r1, r2, r3, r4 = heat.components() + + await r1.set_temperature("30 °C") + print(f"Temperature is {await r1.get_temperature()}") + + asyncio.run(main(heat)) diff --git a/src/flowchem/devices/vapourtec/r4_heater_channel_control.py b/src/flowchem/devices/vapourtec/r4_heater_channel_control.py new file mode 100644 index 
00000000..8ff63e23 --- /dev/null +++ b/src/flowchem/devices/vapourtec/r4_heater_channel_control.py @@ -0,0 +1,45 @@ +""" Control module for the Vapourtec R4 heater """ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from flowchem.components.technical.temperature_control import TemperatureControl +from flowchem.components.technical.temperature_control import TempRange + +if TYPE_CHECKING: + from .r4_heater import R4Heater + + +class R4HeaterChannelControl(TemperatureControl): + """R4 reactor heater channel control class.""" + + hw_device: R4Heater # for typing's sake + + def __init__( + self, name: str, hw_device: R4Heater, channel: int, temp_limits: TempRange + ): + """Create a TemperatureControl object.""" + super().__init__(name, hw_device, temp_limits) + self.channel = channel + + async def set_temperature(self, temp: str): + """Set the target temperature to the given string in natural language.""" + set_t = await super().set_temperature(temp) + return await self.hw_device.set_temperature(self.channel, set_t) + + async def get_temperature(self) -> float: # type: ignore + """Return temperature in Celsius.""" + return float(await self.hw_device.get_temperature(self.channel)) + + async def is_target_reached(self) -> bool: # type: ignore + """Return True if the set temperature target has been reached.""" + status = await self.hw_device.get_status(self.channel) + return status.state == "S" + + async def power_on(self): + """Turn on temperature control.""" + return await self.hw_device.power_on(self.channel) + + async def power_off(self): + """Turn off temperature control.""" + return await self.hw_device.power_off(self.channel) diff --git a/src/flowchem/devices/vicivalco/__init__.py b/src/flowchem/devices/vicivalco/__init__.py new file mode 100644 index 00000000..ae7d543f --- /dev/null +++ b/src/flowchem/devices/vicivalco/__init__.py @@ -0,0 +1,4 @@ +"""Vici Valco devices.""" +from .vici_valve import ViciValve + +__all__ = ["ViciValve"] 
diff --git a/src/flowchem/devices/vicivalco/vici_valve.py b/src/flowchem/devices/vicivalco/vici_valve.py new file mode 100644 index 00000000..b1810f09 --- /dev/null +++ b/src/flowchem/devices/vicivalco/vici_valve.py @@ -0,0 +1,239 @@ +"""This module is used to control Vici Valco Universal Electronic Actuators.""" +from __future__ import annotations + +from dataclasses import dataclass + +import aioserial +from loguru import logger + +from flowchem import ureg +from flowchem.devices.flowchem_device import DeviceInfo +from flowchem.devices.flowchem_device import FlowchemDevice +from flowchem.devices.vicivalco.vici_valve_component import ViciInjectionValve +from flowchem.exceptions import InvalidConfiguration +from flowchem.people import * + + +@dataclass +class ViciCommand: + """This class represent a command. Its bytes() method is transmitted to the valve.""" + + command: str + valve_id: int | None = None + value: str = "" + reply_lines: int = 1 + + def __str__(self): + """Provide a string representation of the command used, nice for logs.""" + address = str(self.valve_id) if self.valve_id is not None else "" + return f"{address} {self.command}{self.value}" + + def __bytes__(self): + """Byte representation of the command used for serial communication.""" + return str(self).encode("ascii") + + +class ViciValcoValveIO: + """Setup with serial parameters, low level IO.""" + + DEFAULT_CONFIG = { + "timeout": 0.5, + "baudrate": 9600, + "parity": aioserial.PARITY_NONE, + "stopbits": aioserial.STOPBITS_ONE, + "bytesize": aioserial.EIGHTBITS, + } + + def __init__(self, aio_port: aioserial.Serial): + """ + Initialize communication on the serial port where the valves are located and initialize them. + + Args: + aio_port: aioserial.Serial() object + """ + self._serial = aio_port + + @classmethod + def from_config(cls, port, **serial_kwargs): + """Create ViciValcoValveIO from config.""" + # Merge default serial settings with provided ones. 
+ configuration = dict(ViciValcoValveIO.DEFAULT_CONFIG, **serial_kwargs) + + try: + serial_object = aioserial.AioSerial(port, **configuration) + except aioserial.SerialException as serial_exception: + raise InvalidConfiguration( + f"Could not open serial port {port} with configuration {configuration}" + ) from serial_exception + + return cls(serial_object) + + async def _read_reply(self, lines: int) -> str: + """Read the valve reply from serial communication.""" + reply_string = "" + for _ in range(lines): + line = await self._serial.readline_async() + reply_string += line.decode("ascii") + + if reply_string: + logger.debug(f"Reply received: {reply_string}") + else: + raise InvalidConfiguration( + "No response received from valve! Check valve address?" + ) + + return reply_string.rstrip() + + async def write_and_read_reply(self, command: ViciCommand) -> str: + """Write command to valve and read reply.""" + # Make sure input buffer is empty + self._serial.reset_input_buffer() + + # Send command + await self._serial.write_async(bytes(command)) + logger.debug(f"Command {command} sent!") + + if command.reply_lines == 0: + return "" + else: + return await self._read_reply(command.reply_lines) + + @property + def name(self) -> str: + """Provide a nice-looking default name to valve based on its serial connection.""" + try: + return self._serial.name + except AttributeError: + return "" + + +class ViciValve(FlowchemDevice): + """ViciValco injection valves.""" + + # This class variable is used for daisy chains (i.e. multiple valves on the same serial connection). Details below. + _io_instances: set[ViciValcoValveIO] = set() + # When several valves are daisy-chained on the same serial port, they need to all access the *same* Serial object, + # because access to the serial port is exclusive by definition. + # The mutable object _io_instances as class variable creates a shared state across all the instances. 
+ + def __init__( + self, + valve_io: ViciValcoValveIO, + name: str = "", + address: int | None = None, + ): + """ + Create instance from an existing ViciValcoValveIO object. This allows dependency injection. + + See from_config() class method for config-based init. + + Args: + valve_io: An ViciValcoValveIO w/ serial connection to the daisy chain w/ target valve. + address: number of valve in array, 1 for first one, auto-assigned on init based on position. + name: 'cause naming stuff is important. + """ + self.valve_io = valve_io + ViciValve._io_instances.add(self.valve_io) + + # The valve name is used for logs and error messages. + self.name = name if name else f"Valve {self.valve_io.name}:{address}" + super().__init__(name=name) # type: ignore + + self.address = address + self._version = "" + + @classmethod + def from_config( + cls, + port: str, + address: int, + name: str = "", + **serial_kwargs, + ): + """Create instances via provided parameters to enable programmatic instantiation.""" + existing_io = [v for v in ViciValve._io_instances if v._serial.port == port] + + # If no existing serial object are available for the port provided, create a new one + if existing_io: + valve_io = existing_io.pop() + else: + valve_io = ViciValcoValveIO.from_config(port, **serial_kwargs) + + return cls(valve_io, address=address, name=name) + + async def initialize(self): + """Must be called after init before anything else.""" + # Learning positions is only needed if the valve head has been reinstalled. + await self.learn_positions() + + # Homing implies moving to position 1. 
+ await self.home() + + # Test connectivity by querying the valve's firmware version + self._version = await self.version() + logger.info(f"Connected to {self.name} - FW ver.: {self._version}!") + + def metadata(self) -> DeviceInfo: + """Return hw device metadata.""" + return DeviceInfo( + authors=[dario, jakob, wei_hsin], + maintainers=[dario], + manufacturer="Vici-Valco", + model="Universal Valve Actuator", + version=self._version, + ) + + async def learn_positions(self) -> None: + """Initialize valve only, there is no reply -> reply_lines = 0.""" + learn = ViciCommand(valve_id=self.address, command="LRN") + await self.valve_io.write_and_read_reply(learn) + + async def home(self) -> None: + """Initialize valve only: Move to Home position.""" + home = ViciCommand(valve_id=self.address, command="HM") + await self.valve_io.write_and_read_reply(home) + + # This seems necessary to make sure move is finished + await self.get_raw_position() + + async def version(self) -> str: + """Return the current firmware version reported by the valve.""" + version = ViciCommand(valve_id=self.address, command="VR", reply_lines=5) + return await self.valve_io.write_and_read_reply(version) + + async def get_raw_position(self) -> str: + """Represent the position of the valve.""" + current_pos = ViciCommand(valve_id=self.address, command="CP") + return await self.valve_io.write_and_read_reply(current_pos) + + async def set_raw_position(self, position: str): + """Set valve position.""" + valve_by_name_cw = ViciCommand( + valve_id=self.address, command="GO", value=position, reply_lines=0 + ) + await self.valve_io.write_and_read_reply(valve_by_name_cw) + + async def timed_toggle(self, injection_time: str): + """Switch valve to a position for a given time.""" + delay = ureg.Quantity(injection_time).to("ms") + set_delay = ViciCommand( + valve_id=self.address, command="DT", value=delay.magnitude + ) + await self.valve_io.write_and_read_reply(set_delay) + + time_toggle = 
ViciCommand(valve_id=self.address, command="TT") + await self.valve_io.write_and_read_reply(time_toggle) + + def components(self): + """Return a Valve component.""" + return (ViciInjectionValve("injection-valve", self),) + + +if __name__ == "__main__": + import asyncio + + valve1 = ViciValve.from_config(port="COM13", address=0, name="test1") + asyncio.run(valve1.initialize()) + + # Set position works with both strings and InjectionValvePosition + asyncio.run(valve1.set_raw_position("2")) diff --git a/src/flowchem/devices/vicivalco/vici_valve_component.py b/src/flowchem/devices/vicivalco/vici_valve_component.py new file mode 100644 index 00000000..fb478a22 --- /dev/null +++ b/src/flowchem/devices/vicivalco/vici_valve_component.py @@ -0,0 +1,27 @@ +"""Vici valve component.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from .vici_valve import ViciValve +from flowchem.components.valves.injection_valves import SixPortTwoPosition + + +class ViciInjectionValve(SixPortTwoPosition): + hw_device: ViciValve # for typing's sake + + position_mapping = {"load": "1", "inject": "2"} + _reverse_position_mapping = {v: k for k, v in position_mapping.items()} + + async def get_position(self) -> str: + """Get current valve position.""" + pos = await self.hw_device.get_raw_position() + assert pos in ("1", "2"), "Valve position is '1' or '2'" + return self._reverse_position_mapping[pos] + + async def set_position(self, position: str): + """Move valve to position.""" + await super().set_position(position) + target_pos = self.position_mapping[position] + return await self.hw_device.set_raw_position(target_pos) diff --git a/src/flowchem/exceptions.py b/src/flowchem/exceptions.py new file mode 100644 index 00000000..f50b295f --- /dev/null +++ b/src/flowchem/exceptions.py @@ -0,0 +1,9 @@ +"""Exceptions used in the flowchem module.""" + + +class DeviceError(Exception): + """Generic DeviceError.""" + + +class
InvalidConfiguration(DeviceError): + """The configuration provided is not valid, e.g. no connection w/ device obtained.""" diff --git a/src/flowchem/people.py b/src/flowchem/people.py new file mode 100644 index 00000000..d65583fc --- /dev/null +++ b/src/flowchem/people.py @@ -0,0 +1,7 @@ +from flowchem.devices.flowchem_device import Person + +__all__ = ["dario", "jakob", "wei_hsin"] + +dario = Person(name="Dario Cambiè", email="2422614+dcambie@users.noreply.github.com") +jakob = Person(name="Jakob Wolf", email="Jakob.Wolf@mpikg.mpg.de") +wei_hsin = Person(name="Wei-Hsin Hsu", email="Wei-hsin.Hsu@mpikg.mpg.de") diff --git a/src/flowchem/py.typed b/src/flowchem/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/src/flowchem/server/README.md b/src/flowchem/server/README.md new file mode 100644 index 00000000..f42433b7 --- /dev/null +++ b/src/flowchem/server/README.md @@ -0,0 +1,3 @@ +# flowchem/server + +This folder contains the modules related to the API Server and the Zeroconf (mDNS) server for flowchem devices.
diff --git a/src/flowchem/server/__init__.py b/src/flowchem/server/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/flowchem/server/api_server.py b/src/flowchem/server/api_server.py new file mode 100644 index 00000000..8d8d8bc4 --- /dev/null +++ b/src/flowchem/server/api_server.py @@ -0,0 +1,114 @@ +"""Run with `uvicorn main:app`.""" +import asyncio +from importlib.metadata import metadata +from io import BytesIO +from pathlib import Path +from typing import TypedDict + +from fastapi import FastAPI +from loguru import logger +from starlette.responses import RedirectResponse + +import flowchem +from flowchem.server.configuration_parser import parse_config +from flowchem.server.zeroconf_server import ZeroconfServer + + +class FlowchemInstance(TypedDict): + api_server: FastAPI + mdns_server: ZeroconfServer + port: int + + +async def run_create_server_from_file( + config_file: BytesIO | Path, host: str = "127.0.0.1" +) -> FlowchemInstance: + """Make create_server_from_file a sync function for CLI.""" + + return await create_server_from_file(config_file, host) + + +async def create_server_from_file( + config_file: BytesIO | Path, host: str +) -> FlowchemInstance: + """ + Based on the toml device config provided, initialize connection to devices and create API endpoints. + + config: Path to the toml file with the device config or dict. 
+ """ + # Parse config create object instances for all hw devices + parsed_config = parse_config(config_file) + + # Run `initialize` method of all hw devices + logger.info("Initializing devices (all devices are initialized in parallel)") + await asyncio.gather(*[dev.initialize() for dev in parsed_config["device"]]) + logger.info("Device initialization complete!") + + return await create_server_for_devices(parsed_config, host) + + +async def create_server_for_devices(config: dict, host="127.0.0.1") -> FlowchemInstance: + """Initialize and create API endpoints for device object provided.""" + dev_list = config["device"] + port = config.get("port", 8000) + + # FastAPI server + app = FastAPI( + title=f"Flowchem - {config.get('filename')}", + description=metadata("flowchem")["Summary"], + version=flowchem.__version__, + license_info={ + "name": "MIT License", + "url": "https://opensource.org/licenses/MIT", + }, + ) + + mdns = ZeroconfServer(port=port, debug=False) + api_base_url = r"http://" + f"{host}:{port}" + + @app.route("/") + def home_redirect_to_docs(root_path): + """Redirect root to `/docs` to enable interaction w/ API.""" + return RedirectResponse(url="/docs") + + # For each device get the relevant APIRouter(s) and add them to the app + for device in dev_list: + # Get components (some compounded devices can return multiple components) + components = device.components() + logger.info(f"Got {len(components)} components from {device.name}") + + for component in components: + # API endpoints registration + app.include_router(component.router, tags=component.router.tags) + logger.debug(f"Router <{component.router.prefix}> added to app!") + + # Advertise component via zeroconfig + await mdns.add_component( + name=component.router.prefix, url=api_base_url + component.router.prefix + ) + + return {"api_server": app, "mdns_server": mdns, "port": port} + + +if __name__ == "__main__": + + import io + import uvicorn + + async def main(): + flowchem_instance = await 
run_create_server_from_file( + config_file=io.BytesIO( + b"""[device.test-device]\n + type = "FakeDevice"\n""" + ) + ) + config = uvicorn.Config( + flowchem_instance["api_server"], + port=flowchem_instance["port"], + log_level="info", + timeout_keep_alive=3600, + ) + server = uvicorn.Server(config) + await server.serve() + + asyncio.run(main()) diff --git a/src/flowchem/server/configuration_parser.py b/src/flowchem/server/configuration_parser.py new file mode 100644 index 00000000..22b22059 --- /dev/null +++ b/src/flowchem/server/configuration_parser.py @@ -0,0 +1,187 @@ +"""Parse a device config file.""" +import inspect +import sys +import typing +from io import BytesIO +from pathlib import Path +from textwrap import dedent + +from flowchem.devices.flowchem_device import FlowchemDevice +from flowchem.devices.list_known_device_type import autodiscover_device_classes + +if sys.version_info >= (3, 11): + # noinspection PyUnresolvedReferences + import tomllib +else: + import tomli as tomllib + +from flowchem.devices.known_plugins import plugin_devices +from flowchem.exceptions import InvalidConfiguration +from loguru import logger + + +def parse_toml(stream: typing.BinaryIO) -> dict: + """ + Read the TOML configuration file and returns it as a dict. + + Extensive exception handling due to the error-prone human editing needed in the configuration file. + """ + try: + return tomllib.load(stream) + except tomllib.TOMLDecodeError as parser_error: + logger.exception(parser_error) + raise InvalidConfiguration( + f"The configuration provided does not contain valid TOML!" 
+ ) from parser_error + + +def parse_config(file_path: BytesIO | Path) -> dict: + """Parse a config file.""" + # StringIO used for testing without creating actual files + if isinstance(file_path, BytesIO): + config = parse_toml(file_path) + config["filename"] = "StringIO" + else: + assert ( + file_path.exists() and file_path.is_file() + ), f"{file_path} is a valid file" + + with file_path.open("rb") as stream: + config = parse_toml(stream) + + config["filename"] = file_path.stem + + return instantiate_device(config) + + +def instantiate_device(config: dict) -> dict: + """Instantiate all devices defined in the provided config dict.""" + assert "device" in config, "The configuration file must include a device section" + + # device_mapper is a dict mapping device type (str, as key) with the device class (obj, value). + # e.g. device_mapper["Spinsolve"] = Spinsolve class + device_mapper = autodiscover_device_classes() + + # Iterate on all devices, parse device-specific settings and instantiate the relevant objects + config["device"] = [ + parse_device(dev_settings, device_mapper) + for dev_settings in config["device"].items() + ] + logger.info("Configuration parsed!") + + return config + + +def parse_device(dev_settings, device_object_mapper) -> FlowchemDevice: + """ + Parse device config and return a device object. + + Exception handling to provide more specific and diagnostic messages upon errors in the configuration file. + """ + device_name, device_config = dev_settings + + # Get device class + try: + obj_type = device_object_mapper[device_config["type"]] + del device_config["type"] + except KeyError as error: + # If the device type specified is supported via a plugin we know of, alert user + if device_config["type"] in plugin_devices: + needed_plugin = plugin_devices[device_config["type"]] + logger.exception( + f"The device `{device_name}` of type `{device_config['type']}` needs a additional plugin" + f"Install {needed_plugin} to add support for it!" + f"e.g. 
`python -m pip install {needed_plugin}`" + ) + raise InvalidConfiguration(f"{needed_plugin} not installed.") + + logger.exception( + f"Device type `{device_config['type']}` unknown in 'device.{device_name}'!" + f"[Known types: {device_object_mapper.keys()}]" + ) + raise InvalidConfiguration( + f"Unknown device type `{device_config['type']}`." + ) from error + + # If the object has a 'from_config' method, use that for instantiation, otherwise try straight with the constructor. + try: + if hasattr(obj_type, "from_config"): + called = obj_type.from_config + device = obj_type.from_config(**device_config, name=device_name) + else: + called = obj_type.__init__ + device = obj_type(**device_config, name=device_name) + except TypeError as error: + logger.error(f"Wrong settings for device '{device_name}'!") + get_helpful_error_message(device_config, inspect.getfullargspec(called)) + raise ConnectionError( + f"Wrong configuration provided for device '{device_name}' of type {obj_type.__name__}!\n" + f"Configuration: {device_config}\n" + f"Accepted parameters: {inspect.getfullargspec(called).args}" + ) from error + + logger.debug( + f"Created device '{device.name}' instance: {device.__class__.__name__}" + ) + return device + + +def get_helpful_error_message(called_with: dict, arg_spec: inspect.FullArgSpec): + """Give helpful debugging text on configuration errors.""" + # First check if we have provided an argument that is not supported. + # Clearly no **kwargs should be defined in the signature otherwise all kwargs are ok + if arg_spec.varkw is None: + invalid_parameters = list(set(called_with.keys()).difference(arg_spec.args)) + if invalid_parameters: + logger.error( + f"The following parameters were not recognized: {invalid_parameters}" + ) + + # Then check if a mandatory argument was not satisfied. 
[1 to skip cls/self, -n to remove args w/ default] + num_default = 0 if arg_spec.defaults is None else len(arg_spec.defaults) + mandatory_args = arg_spec.args[1:-num_default] + missing_parameters = list(set(mandatory_args).difference(called_with.keys())) + if missing_parameters: + logger.error( + f"The following mandatory parameters were missing in the configuration: {missing_parameters}" + ) + + +if __name__ == "__main__": + cfg_txt = BytesIO( + dedent( + """config_version = "1.0" + simulation = true + + [device.donor] + type = "Elite11InfuseOnly" + port = "COM11" + address = 0 + syringe_diameter = "4.6 mm" + syringe_volume = "1 ml" + + [device.activator] + type = "Elite11InfuseOnly" + port = "COM11" + address= 1 + syringe_diameter = "4.6 mm" + syringe_volume = "1 ml" + + [device.quencher] + type = "AxuraCompactPump" + mac_address = "00:80:A3:BA:C3:4A" + max_pressure = "13 bar" + + [device.sample-loop] + type = "ViciValve" + port = "COM13" + address = 0 + + [device.chiller] + type = "HubeerChiller" + port = "COM3" + """ + ).encode("utf-8") + ) + cfg = parse_config(cfg_txt) + print(cfg) diff --git a/src/flowchem/server/sample_configuration.toml b/src/flowchem/server/sample_configuration.toml new file mode 100644 index 00000000..b375b96f --- /dev/null +++ b/src/flowchem/server/sample_configuration.toml @@ -0,0 +1,30 @@ +config_version = "1.0" +simulation = true + +[device.donor] +type = "Elite11InfuseOnly" +port = "COM11" +address = 0 +diameter = "4.6 mm" +syringe_volume = "1 ml" + +[device.activator] +type = "Elite11InfuseOnly" +port = "COM11" +address= 1 +diameter = "4.6 mm" +syringe_volume = "1 ml" + +[device.quencher] +type = "AxuraCompactPump" +mac_address = "00:80:A3:BA:C3:4A" +max_pressure = "13 bar" + +[device.sample-loop] +type = "ViciValve" +port = "COM13" +address = 0 + +[device.chiller] +type = "HubeerChiller" +port = "COM3" diff --git a/src/flowchem/server/zeroconf_server.py b/src/flowchem/server/zeroconf_server.py new file mode 100644 index 
00000000..4960c468 --- /dev/null +++ b/src/flowchem/server/zeroconf_server.py @@ -0,0 +1,75 @@ +"""Zeroconf (mDNS) server.""" +import hashlib +import uuid + +from loguru import logger +from zeroconf import get_all_addresses +from zeroconf import IPVersion +from zeroconf import ServiceInfo +from zeroconf import Zeroconf + + +class ZeroconfServer: + """ZeroconfServer to advertise FlowchemComponents.""" + + def __init__(self, port=8000, debug=False): + # Server properties + self.port = port + self.debug = debug + + self.server = Zeroconf(ip_version=IPVersion.V4Only) + + # Get list of host addresses + self.mdns_addresses = [ + ip + for ip in get_all_addresses() # Get all local IP + if ip not in ("127.0.0.1", "0.0.0.0") + and not ip.startswith("169.254") # Remove invalid IPs + ] + + @staticmethod + def _get_valid_service_name(name: str): + """Given a desired service name, returns a valid one ;)""" + candidate_name = f"{name}._labthing._tcp.local." + if len(candidate_name) < 64: + prefix = name + else: + logger.warning( + f"The device name '{name}' is too long to be used as identifier." + f"It will be trimmed to " + ) + # First 30 characters of name + 10 of hash for uniqueness (2^40 ~ 1E12 collision rate is acceptable). + # The hash is based on the (unique?) name end and limited to 64 i.e. max Blake2b key size + prefix = ( + name[:30] + + hashlib.blake2b( + key=name[-64:].encode("utf-8"), digest_size=10 + ).hexdigest() + ) + + return f"{prefix}._labthing._tcp.local." 
+ + async def add_component(self, name, url): + """Adds device to the server.""" + logger.debug(f"Adding zeroconf component {name}") + service_name = ZeroconfServer._get_valid_service_name(name) + + # LabThing service + service_info = ServiceInfo( + type_="_labthing._tcp.local.", + name=service_name, + port=self.port, + properties={ + "path": url, + "id": f"{service_name}:{uuid.uuid4()}".replace(" ", ""), + }, + parsed_addresses=self.mdns_addresses, + ) + + await self.server.async_register_service(service_info) + logger.debug(f"Registered {service_name} on the mDNS server! [ -> {url}]") + + +if __name__ == "__main__": + test = ZeroconfServer() + input() diff --git a/src/flowchem/vendor/LICENSE b/src/flowchem/vendor/LICENSE new file mode 100644 index 00000000..1a71c0f0 --- /dev/null +++ b/src/flowchem/vendor/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Christopher Goes + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/src/flowchem/vendor/README.md b/src/flowchem/vendor/README.md new file mode 100644 index 00000000..e3753dfd --- /dev/null +++ b/src/flowchem/vendor/README.md @@ -0,0 +1,4 @@ +# flowchem/vendor + +* **GetMac** The pypi package [getmac](https://pypi.org/project/getmac/) is included in the source as there are a couple of changes +compared to upstream, mainly deprecation of Py2 support to avoid false-positive in mypy. diff --git a/src/flowchem/vendor/__init__.py b/src/flowchem/vendor/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/flowchem/vendor/getmac.py b/src/flowchem/vendor/getmac.py new file mode 100644 index 00000000..f2c5318a --- /dev/null +++ b/src/flowchem/vendor/getmac.py @@ -0,0 +1,609 @@ +# Vendored from https://github.com/GhostofGoes/getmac with minor changes +"""Get the MAC address of remote hosts or network interfaces. +It provides a platform-independent interface to get the MAC addresses of: +* System network interfaces (by interface name) +* Remote hosts on the local network (by IPv4/IPv6 address or hostname) +It provides one function: `get_mac_address()` +Examples: + from getmac import get_mac_address + eth_mac = get_mac_address(interface="eth0") + win_mac = get_mac_address(interface="Ethernet 3") + ip_mac = get_mac_address(ip="192.168.0.1") + ip6_mac = get_mac_address(ip6="::1") + host_mac = get_mac_address(hostname="localhost") + updated_mac = get_mac_address(ip="10.0.0.1", network_request=True) +""" +import ctypes +import os +import platform +import re +import shlex +import socket +import struct +from subprocess import check_output +from subprocess import DEVNULL + +from loguru import logger + +# Configure logging +log = logger + +# Configurable settings +PORT = 55555 + +# Platform identifiers +_SYST = platform.system() +if _SYST == "Java": + try: + import java.lang + + _SYST = str(java.lang.System.getProperty("os.name")) + except ImportError: + log.critical("Can't determine OS: couldn't import java.lang on 
Jython") +WINDOWS = _SYST == "Windows" +DARWIN = _SYST == "Darwin" +OPENBSD = _SYST == "OpenBSD" +FREEBSD = _SYST == "FreeBSD" +BSD = OPENBSD or FREEBSD # Not including Darwin for now +WSL = False # Windows Subsystem for Linux (WSL) +LINUX = False +if _SYST == "Linux": + if "Microsoft" in platform.version(): + WSL = True + else: + LINUX = True + +PATH = os.environ.get("PATH", os.defpath).split(os.pathsep) +if not WINDOWS: + PATH.extend(("/sbin", "/usr/sbin")) + +# Use a copy of the environment so that we don't +# modify the process's current environment. +ENV = dict(os.environ) +ENV["LC_ALL"] = "C" # Ensure ASCII output so we parse correctly + +ARP_PATH = os.environ.get("ARP_PATH", "/proc/net/arp") + +# Constants +IP4 = 0 +IP6 = 1 +INTERFACE = 2 +HOSTNAME = 3 + +MAC_RE_COLON = r"([0-9a-fA-F]{2}(?::[0-9a-fA-F]{2}){5})" +MAC_RE_DASH = r"([0-9a-fA-F]{2}(?:-[0-9a-fA-F]{2}){5})" +MAC_RE_DARWIN = r"([0-9a-fA-F]{1,2}(?::[0-9a-fA-F]{1,2}){5})" + +# Used for mypy (a data type analysis tool) +# If you're copying the code, this section can be safely removed + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Optional + +# Ensure we only log the Python compatibility warning once +WARNED_UNSUPPORTED_PYTHONS = False + + +# noinspection PyBroadException +def get_mac_address( + interface=None, ip=None, ip6=None, hostname=None, network_request=True +): + # type: (Optional[str], Optional[str], Optional[str], Optional[str], bool) -> Optional[str] + """Get an Unicast IEEE 802 MAC-48 address from a local interface or remote host. + You must only use one of the first four arguments. If none of the arguments + are selected, the default network interface for the system will be used. + Exceptions will be handled silently and returned as a None. + For the time being, it assumes you are using Ethernet. + NOTES: + * You MUST provide str-typed arguments, REGARDLESS of Python version. 
+ * localhost/127.0.0.1 will always return '00:00:00:00:00:00' + Args: + interface (str): Name of a local network interface (e.g "Ethernet 3", "eth0", "ens32") + ip (str): Canonical dotted decimal IPv4 address of a remote host (e.g 192.168.0.1) + ip6 (str): Canonical shortened IPv6 address of a remote host (e.g ff02::1:ffe7:7f19) + hostname (str): DNS hostname of a remote host (e.g "router1.mycorp.com", "localhost") + network_request (bool): Send a UDP packet to a remote host to populate + the ARP/NDP tables for IPv4/IPv6. The port this packet is sent to can + be configured using the module variable `getmac.PORT`. + Returns: + Lowercase colon-separated MAC address, or None if one could not be + found or there was an error. + """ + if (hostname and hostname == "localhost") or (ip and ip == "127.0.0.1"): + return "00:00:00:00:00:00" + + # Resolve hostname to an IP address + if hostname: + ip = socket.gethostbyname(hostname) + + # Populate the ARP table by sending an empty UDP packet to a high port + if network_request and (ip or ip6): + if ip: + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + else: + s = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) + try: + if ip: + s.sendto(b"", (ip, PORT)) + else: + s.sendto(b"", (ip6, PORT)) + except Exception: # noqa: B902 + log.error("Failed to send ARP table population packet") + finally: + s.close() + + # Set up the address hunt based on the arguments specified + if ip6: + if not socket.has_ipv6: + log.error( + "Cannot get the MAC address of a IPv6 host: " + "IPv6 is not supported on this system" + ) + return None + elif ":" not in ip6: + log.error("Invalid IPv6 address: %s", ip6) + return None + to_find = ip6 + typ = IP6 + elif ip: + to_find = ip + typ = IP4 + else: # Default to searching for interface + typ = INTERFACE + if interface: + to_find = interface + else: + # Default to finding MAC of the interface with the default route + if WINDOWS and network_request: + to_find = _fetch_ip_using_dns() + typ = IP4 + 
elif WINDOWS: + to_find = "Ethernet" + elif BSD: + if OPENBSD: + to_find = _get_default_iface_openbsd() # type: ignore + else: + to_find = _get_default_iface_freebsd() # type: ignore + if not to_find: + to_find = "em0" + else: + to_find = _hunt_linux_default_iface() # type: ignore + if not to_find: + to_find = "en0" + + mac = _hunt_for_mac(to_find, typ, network_request) + # log.debug("Raw MAC found: %s", mac) + + # Check and format the result to be lowercase, colon-separated + if mac is not None: + mac = str(mac) + for garbage_string in ["b'", "'", "\\n", "\\r"]: + mac = mac.replace(garbage_string, "") + mac = mac.strip().lower().replace(" ", "").replace("-", ":") + + # Fix cases where there are no colons + if ":" not in mac and len(mac) == 12: + log.debug("Adding colons to MAC %s", mac) + mac = ":".join(mac[i : i + 2] for i in range(0, len(mac), 2)) + + # Pad single-character octets with a leading zero (e.g. Darwin's ARP output) + elif len(mac) < 17: + log.debug( + "Length of MAC %s is %d, padding single-character " "octets with zeros", + mac, + len(mac), + ) + parts = mac.split(":") + new_mac = [] + for part in parts: + if len(part) == 1: + new_mac.append("0" + part) + else: + new_mac.append(part) + mac = ":".join(new_mac) + + # MAC address should ALWAYS be 17 characters before being returned + if len(mac) != 17: + log.warning("MAC address %s is not 17 characters long!", mac) + mac = None + elif mac.count(":") != 5: + log.warning("MAC address %s is missing ':' characters", mac) + mac = None + return mac + + +def _search(regex, text, group_index=0): + # type: (str, str, int) -> Optional[str] + match = re.search(regex, text) + if match: + return match.groups()[group_index] + return None + + +def _popen(command, args): + # type: (str, str) -> str + for directory in PATH: + executable = os.path.join(directory, command) + if ( + os.path.exists(executable) + and os.access(executable, os.F_OK | os.X_OK) + and not os.path.isdir(executable) + ): + break + else: + 
executable = command + return _call_proc(executable, args) + + +def _call_proc(executable, args): + # type: (str, str) -> str + if WINDOWS: + cmd = executable + " " + args # type: ignore + else: + cmd = [executable] + shlex.split(args) # type: ignore + output = check_output(cmd, stderr=DEVNULL, env=ENV) + if isinstance(output, bytes): + return str(output, "utf-8") + else: + return str(output) + + +# noinspection PyBroadException +def _windows_ctypes_host(host): + # type: (str) -> Optional[str] + host = host.encode() # type: ignore + try: + inetaddr = ctypes.windll.wsock32.inet_addr(host) # type: ignore + if inetaddr in (0, -1): + raise Exception + except Exception: # noqa: BLK100 + hostip = socket.gethostbyname(host) + inetaddr = ctypes.windll.wsock32.inet_addr(hostip) # type: ignore + + buffer = ctypes.c_buffer(6) + addlen = ctypes.c_ulong(ctypes.sizeof(buffer)) + + send_arp = ctypes.windll.Iphlpapi.SendARP # type: ignore + if send_arp(inetaddr, 0, ctypes.byref(buffer), ctypes.byref(addlen)) != 0: + return None + + # Convert binary data into a string. 
+ macaddr = "" + for intval in struct.unpack("BBBBBB", buffer): # type: ignore + if intval > 15: + replacestr = "0x" + else: + replacestr = "x" + macaddr = "".join([macaddr, hex(intval).replace(replacestr, "")]) + return macaddr + + +def _fcntl_iface(iface): + # type: (str) -> str + import fcntl + + iface = iface.encode() # type: ignore + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + # 0x8927 = SIOCGIFADDR + info = fcntl.ioctl(s.fileno(), 0x8927, struct.pack("256s", iface[:15])) # type: ignore + return ":".join(["%02x" % ord(chr(char)) for char in info[18:24]]) + + +def _uuid_ip(ip): + # type: (str) -> Optional[str] + from uuid import _arp_getnode # type: ignore + + backup = socket.gethostbyname + try: + socket.gethostbyname = lambda x: ip + mac1 = _arp_getnode() + if mac1 is not None: + mac1 = _uuid_convert(mac1) + mac2 = _arp_getnode() + mac2 = _uuid_convert(mac2) + if mac1 == mac2: + return mac1 + except Exception: # noqa: B902 + raise + finally: + socket.gethostbyname = backup + return None + + +def _uuid_lanscan_iface(iface): + # type: (str) -> Optional[str] + from uuid import _find_mac # type: ignore + + iface = bytes(iface, "utf-8") # type: ignore + mac = _find_mac("lanscan", "-ai", [iface], lambda i: 0) + if mac: + return _uuid_convert(mac) + return None + + +def _uuid_convert(mac): + # type: (int) -> str + return ":".join(("%012X" % mac)[i : i + 2] for i in range(0, 12, 2)) + + +def _read_sys_iface_file(iface): + # type: (str) -> Optional[str] + data = _read_file("/sys/class/net/" + iface + "/address") + # Sometimes this can be empty or a single newline character + return None if data is not None and len(data) < 17 else data + + +def _read_arp_file(host): + # type: (str) -> Optional[str] + data = _read_file(ARP_PATH) + if data is not None and len(data) > 1: + # Need a space, otherwise a search for 192.168.16.2 + # will match 192.168.16.254 if it comes first! 
+ return _search(re.escape(host) + r" .+" + MAC_RE_COLON, data) + return None + + +def _arping_habets(host): + # type: (str) -> Optional[str] + """Parse https://github.com/ThomasHabets/arping output.""" + return _search( + r"^%s$" % MAC_RE_COLON, + _popen("arping", "-r -C 1 -c 1 %s" % host).strip(), + ) + + +def _arping_iputils(host): + # type: (str) -> Optional[str] + """Parse iputils arping output.""" + return _search( + rf" from {re.escape(host)} \[({MAC_RE_COLON})\]", + _popen("arping", "-f -c 1 %s" % host).strip(), + ) + + +def _read_file(filepath): + # type: (str) -> Optional[str] + try: + with open(filepath) as f: + return f.read() + except OSError: # noqa: B014 - This is IOError on Python 2.7 + log.debug("Could not find file: '%s'", filepath) + return None + + +def _hunt_for_mac(to_find, type_of_thing, net_ok=True): + # type: (Optional[str], int, bool) -> Optional[str] + """Tries a variety of methods to get a MAC address. + Format of method lists: + Tuple: (regex, regex index, command, command args) + Command args is a list of strings to attempt to use as arguments + lambda: Function to call + """ + if to_find is None: + log.warning("_hunt_for_mac() failed: to_find is None") + return None + if isinstance(to_find, bytes): + to_find = str(to_find, "utf-8") + + if WINDOWS and type_of_thing == INTERFACE: + methods = [ + # getmac - Connection Name + ( + r"\r\n" + to_find + r".*" + MAC_RE_DASH + r".*\r\n", + 0, + "getmac.exe", + ["/NH /V"], + ), + # ipconfig + ( + to_find + + r"(?:\n?[^\n]*){1,8}Physical Address[ .:]+" + + MAC_RE_DASH + + r"\r\n", + 0, + "ipconfig.exe", + ["/all"], + ), + # getmac - Network Adapter (the human-readable name) + ( + r"\r\n.*" + to_find + r".*" + MAC_RE_DASH + r".*\r\n", + 0, + "getmac.exe", + ["/NH /V"], + ), + # wmic - WMI command line utility + lambda x: _popen( + "wmic.exe", + "nic where \"NetConnectionID = '%s'\" get " "MACAddress /value" % x, + ) + .strip() + .partition("=")[2], + ] + elif (WINDOWS or WSL) and type_of_thing in 
[IP4, IP6, HOSTNAME]: + methods = [ + # arp -a - Parsing result with a regex + (MAC_RE_DASH, 0, "arp.exe", ["-a %s" % to_find]) + ] + + # Add methods that make network requests + # Insert it *after* arp.exe since that's probably faster. + if net_ok and type_of_thing != IP6 and not WSL: + methods.insert(1, _windows_ctypes_host) + elif (DARWIN or FREEBSD) and type_of_thing == INTERFACE: + methods = [ + (r"ether " + MAC_RE_COLON, 0, "ifconfig", [to_find]), + # Alternative match for ifconfig if it fails + (to_find + r".*ether " + MAC_RE_COLON, 0, "ifconfig", [""]), + (MAC_RE_COLON, 0, "networksetup", ["-getmacaddress %s" % to_find]), + ] + elif FREEBSD and type_of_thing in [IP4, IP6, HOSTNAME]: + methods = [ + ( + r"\(" + re.escape(to_find) + r"\)\s+at\s+" + MAC_RE_COLON, + 0, + "arp", + [to_find], + ) + ] + elif OPENBSD and type_of_thing == INTERFACE: + methods = [(r"lladdr " + MAC_RE_COLON, 0, "ifconfig", [to_find])] + elif OPENBSD and type_of_thing in [IP4, IP6, HOSTNAME]: + methods = [(re.escape(to_find) + r"[ ]+" + MAC_RE_COLON, 0, "arp", ["-an"])] + elif type_of_thing == INTERFACE: + methods = [ + _read_sys_iface_file, + _fcntl_iface, + # Fast modern Ubuntu ifconfig + (r"ether " + MAC_RE_COLON, 0, "ifconfig", [to_find]), + # Fast ifconfig + (r"HWaddr " + MAC_RE_COLON, 0, "ifconfig", [to_find]), + # Android 6.0.1 + (r"state UP.*\n.*ether " + MAC_RE_COLON, 0, "ip", ["link", "addr"]), + (r"wlan.*\n.*ether " + MAC_RE_COLON, 0, "ip", ["link", "addr"]), + (r"ether " + MAC_RE_COLON, 0, "ip", ["link", "addr"]), + # ip link (Don't use 'list' due to SELinux [Android 24+]) + # ( + # to_find + r".*\n.*link/ether " + MAC_RE_COLON, + # 0, + # "ip", + # ["link %s" % to_find, "link"], + # ), + # netstat + (to_find + r".*HWaddr " + MAC_RE_COLON, 0, "netstat", ["-iae"]), + # More variations of ifconfig + (to_find + r".*ether " + MAC_RE_COLON, 0, "ifconfig", [""]), + (to_find + r".*HWaddr " + MAC_RE_COLON, 0, "ifconfig", ["", "-a", "-v"]), + # Tru64 ('-av') + (to_find + r".*Ether " 
+ MAC_RE_COLON, 0, "ifconfig", ["-av"]), + _uuid_lanscan_iface, + ] + elif type_of_thing in [IP4, IP6, HOSTNAME]: + esc = re.escape(to_find) + methods = [ + _read_arp_file, + lambda x: _popen("ip", "neighbor show %s" % x) + .partition(x)[2] + .partition("lladdr")[2] + .strip() + .split()[0], + ( + r"\(" + esc + r"\)\s+at\s+" + MAC_RE_COLON, + 0, + "arp", + [to_find, "-an", "-an %s" % to_find], + ), + # Darwin oddness + ( + r"\(" + esc + r"\)\s+at\s+" + MAC_RE_DARWIN, + 0, + "arp", + [to_find, "-a", "-a %s" % to_find], + ), + _uuid_ip, + ] + # Add methods that make network requests + if net_ok and type_of_thing != IP6: + methods.extend((_arping_iputils, _arping_habets)) + else: + log.critical("Reached end of _hunt_for_mac() if-else chain!") + return None + return _try_methods(methods, to_find) + + +def _try_methods(methods, to_find=None): + # type: (list, Optional[str]) -> Optional[str] + """ + Runs the methods specified by _hunt_for_mac(). + + We try every method and see if it returned a MAC address. If it returns + None or raises an exception, we continue and try the next method. + """ + found = None + for m in methods: + try: + if isinstance(m, tuple): + for arg in m[3]: # list(str) + # Arguments: (regex, _popen(command, arg), regex index) + found = _search(m[0], _popen(m[2], arg), m[1]) + if found: # Skip remaining args AND remaining methods + break + elif callable(m): + if to_find is not None: + found = m(to_find) + else: + found = m() + else: + log.critical("Invalid type '%s' for method '%s'", type(m), str(m)) + except Exception as ex: # noqa: B902 + logger.debug(f"Ignore exception {ex}") + if found: # Skip remaining methods + break + return found + + +def _get_default_iface_linux(): + # type: () -> Optional[str] + """Get the default interface by reading /proc/net/route. + This is the same source as the `route` command, however it's much + faster to read this file than to call `route`. 
If it fails for whatever + reason, we can fall back on the system commands (e.g. for a platform + that has a route command, but maybe doesn't use /proc?). + """ + data = _read_file("/proc/net/route") + if data is not None and len(data) > 1: + for line in data.split("\n")[1:-1]: + iface_name, dest = line.split("\t")[:2] + if dest == "00000000": + return iface_name + return None + + +def _hunt_linux_default_iface(): + # type: () -> Optional[str] + # NOTE: for now, we check the default interface for WSL using the + # same methods as POSIX, since those parts of the net stack work fine. + methods = [ + _get_default_iface_linux, + lambda: _popen("route", "-n") + .partition("0.0.0.0")[2] + .partition("\n")[0] + .split()[-1], + lambda: _popen("ip", "route list 0/0") + .partition("dev")[2] + .partition("proto")[0] + .strip(), + ] + return _try_methods(methods) + + +def _get_default_iface_openbsd(): + # type: () -> Optional[str] + methods = [ + lambda: _popen("route", "-nq show -inet -gateway -priority 1") + .partition("127.0.0.1")[0] + .strip() + .rpartition(" ")[2] + ] + return _try_methods(methods) + + +def _get_default_iface_freebsd(): + # type: () -> Optional[str] + methods = [(r"default[ ]+\S+[ ]+\S+[ ]+(\S+)[\r\n]+", 0, "netstat", ["-r"])] + return _try_methods(methods) + + +def _fetch_ip_using_dns(): + # type: () -> str + """Determines the IP address of the default network interface. + Sends a UDP packet to Cloudflare's DNS (1.1.1.1), which should go through + the default interface. This populates the source address of the socket, + which we then inspect and return. 
+ """ + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.connect(("1.1.1.1", 53)) + ip = s.getsockname()[0] + s.close() # NOTE: sockets don't have context manager in 2.7 :( + return ip diff --git a/tests/cli/test_autodiscover_cli.py b/tests/cli/test_autodiscover_cli.py new file mode 100644 index 00000000..f09ad931 --- /dev/null +++ b/tests/cli/test_autodiscover_cli.py @@ -0,0 +1,14 @@ +from click.testing import CliRunner + +from flowchem.autodiscover import main + + +def test_cli(mocker): + runner = CliRunner() + + with runner.isolated_filesystem(): + result = runner.invoke( + main, + ["--assume-yes", "--safe"], + ) + assert result.exit_code == 0 diff --git a/tests/cli/test_flowchem_cli.py b/tests/cli/test_flowchem_cli.py new file mode 100644 index 00000000..7e98e4ce --- /dev/null +++ b/tests/cli/test_flowchem_cli.py @@ -0,0 +1,42 @@ +import asyncio +from pathlib import Path +from textwrap import dedent + +from click.testing import CliRunner + +from flowchem.__main__ import main + + +class FakeServer: + def __init__(self, config): + pass + + @staticmethod + async def serve(): + return None + + +def test_cli(mocker): + runner = CliRunner() + # Skip running server + mocker.patch("uvicorn.Server", return_value=FakeServer) + + with runner.isolated_filesystem(): + with open("test_configuration.toml", "w") as f: + f.write( + dedent( + """ + [device.test-device]\n + type = "FakeDevice"\n""" + ) + ) + + result = runner.invoke(main, ["test_configuration.toml"]) + assert result.exit_code == 0 + + result = runner.invoke( + main, ["test_configuration.toml", "--log", "logfile.log"] + ) + assert result.exit_code == 0 + assert Path("logfile.log").exists() + assert "Starting server" in Path("logfile.log").read_text() diff --git a/tests/test_FlowIR.py b/tests/devices/analytics/test_flowir.py similarity index 62% rename from tests/test_FlowIR.py rename to tests/devices/analytics/test_flowir.py index 6ac2136d..0927dd10 100644 --- a/tests/test_FlowIR.py +++ 
b/tests/devices/analytics/test_flowir.py @@ -5,8 +5,8 @@ import pytest -from flowchem.components.devices.MettlerToledo.iCIR_common import IRSpectrum -from flowchem import FlowIR +from flowchem.components.analytics.ir_control import IRSpectrum +from flowchem.devices.mettlertoledo.icir import IcIR def check_pytest_asyncio_installed(): @@ -20,32 +20,32 @@ def check_pytest_asyncio_installed(): @pytest.fixture() -async def flowir(): +async def spectrometer(): """Return local FlowIR object""" - return FlowIR( - FlowIR.iC_OPCUA_DEFAULT_SERVER_ADDRESS.replace("localhost", "BSMC-YMEF002121") + s = IcIR( + template="template", + url=IcIR.iC_OPCUA_DEFAULT_SERVER_ADDRESS.replace( + "localhost", "BSMC-YMEF002121" + ), ) + await s.initialize() + return s -@pytest.mark.asyncio @pytest.mark.FlowIR -async def test_connected(flowir): - async with flowir as spectrometer: - assert await spectrometer.is_iCIR_connected() +async def test_connected(spectrometer): + assert await spectrometer.is_iCIR_connected() -@pytest.mark.asyncio @pytest.mark.FlowIR -async def test_probe_info(flowir): - async with flowir as spectrometer: - info = await spectrometer.probe_info() - assert all( - field in info - for field in ("spectrometer", "spectrometer_SN", "probe_SN", "detector") - ) +async def test_probe_info(spectrometer): + info = await spectrometer.probe_info() + assert all( + field in info + for field in ("spectrometer", "spectrometer_SN", "probe_SN", "detector") + ) -@pytest.mark.asyncio @pytest.mark.FlowIR async def test_probe_status(flowir): async with flowir as spectrometer: @@ -53,26 +53,6 @@ async def test_probe_status(flowir): assert status == "Not running" -@pytest.mark.asyncio -@pytest.mark.FlowIR -async def test_is_running(flowir): - async with flowir as spectrometer: - # Check idle - assert await spectrometer.is_running() is False - - # Make busy - template_name = "15_sec_integration.iCIRTemplate" - await spectrometer.start_experiment(template=template_name) - - # Check busy - assert 
await spectrometer.is_running() is True - - # Restore idle - await spectrometer.stop_experiment() - await spectrometer.wait_until_idle() - - -@pytest.mark.asyncio @pytest.mark.FlowIR async def test_spectrum_acquisition(flowir): async with flowir as spectrometer: @@ -104,7 +84,6 @@ async def test_spectrum_acquisition(flowir): await spectrometer.wait_until_idle() -@pytest.mark.asyncio @pytest.mark.FlowIR async def test_spectra(flowir): # This implies the previous test run successfully, thus last spectrum is now diff --git a/tests/test_spinsolve.py b/tests/devices/analytics/test_spinsolve.py similarity index 91% rename from tests/test_spinsolve.py rename to tests/devices/analytics/test_spinsolve.py index 6ad77cf7..ef75ef32 100644 --- a/tests/test_spinsolve.py +++ b/tests/devices/analytics/test_spinsolve.py @@ -1,9 +1,11 @@ """ Test Spinsolve, needs actual connection with the device """ -import pytest import asyncio import time from pathlib import Path -from flowchem.components.devices.Magritek.spinsolve import Spinsolve + +import pytest + +from flowchem.devices import Spinsolve # Change to match your environment ;) @@ -55,23 +57,18 @@ def test_user_data(nmr: Spinsolve): @pytest.mark.Spinsolve -def test_hw_request(nmr: Spinsolve): - hw_tree = nmr.hw_request() +async def test_hw_request(nmr: Spinsolve): + hw_tree = await nmr.hw_request() assert hw_tree.find(".//SpinsolveSoftware") is not None @pytest.mark.Spinsolve -def test_request_available_protocols(nmr: Spinsolve): - protocols = nmr.request_available_protocols() +async def test_request_available_protocols(nmr: Spinsolve): + protocols = await nmr.load_protocols() assert isinstance(protocols, dict) assert "1D PROTON" in protocols -@pytest.mark.Spinsolve -def test_is_protocol_available(nmr: Spinsolve): - assert nmr.is_protocol_available("1D PROTON") - - @pytest.mark.Spinsolve def test_request_validation(nmr: Spinsolve): # VALID @@ -116,7 +113,7 @@ def test_request_validation(nmr: Spinsolve): ) with 
pytest.warns(UserWarning, match="Invalid option"): check_protocol = nmr._validate_protocol_request("1D EXTENDED+", partly_valid) - assert "balbla" not in check_protocol + assert "blabla" not in check_protocol @pytest.mark.Spinsolve diff --git a/tests/test_azura_compact.py b/tests/devices/pumps/test_azura_compact.py similarity index 68% rename from tests/test_azura_compact.py rename to tests/devices/pumps/test_azura_compact.py index 9bd03cca..8b733e87 100644 --- a/tests/test_azura_compact.py +++ b/tests/devices/pumps/test_azura_compact.py @@ -9,8 +9,8 @@ import pint import pytest -from flowchem import AzuraCompactPump -from flowchem.components.devices.Knauer.AzuraCompactPump import AzuraPumpHeads +from flowchem.devices.knauer.azura_compact import AzuraCompact +from flowchem.devices.knauer.azura_compact import AzuraPumpHeads if sys.platform == "win32": asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) @@ -32,14 +32,14 @@ def event_loop(request): @pytest.fixture(scope="session") async def pump(): """Change to match your hardware ;)""" - pump = AzuraCompactPump(ip_address="192.168.1.126") + pump = AzuraCompact(ip_address="192.168.1.126") await pump.initialize() return pump @pytest.mark.KPump @pytest.mark.asyncio -async def test_pumphead(pump: AzuraCompactPump): +async def test_pumphead(pump: AzuraCompact): assert await pump.get_headtype() in AzuraPumpHeads await pump.set_headtype(AzuraPumpHeads.FLOWRATE_TEN_ML) assert await pump.get_headtype() == AzuraPumpHeads.FLOWRATE_TEN_ML @@ -49,27 +49,27 @@ async def test_pumphead(pump: AzuraCompactPump): @pytest.mark.KPump @pytest.mark.asyncio -async def test_headtype(pump: AzuraCompactPump): +async def test_headtype(pump: AzuraCompact): assert await pump.get_headtype() in AzuraPumpHeads @pytest.mark.KPump @pytest.mark.asyncio -async def test_flow_rate(pump: AzuraCompactPump): - await pump.set_flow("1.25 ml/min") - await pump.start_flow() +async def test_flow_rate(pump: AzuraCompact): + await 
pump.set_flow_rate("1.25 ml/min") + await pump.infuse() # FIXME - assert pint.Quantity(await pump.get_flow()).magnitude == 1.25 - await pump.set_flow(f"{math.pi} ml/min") + assert pint.Quantity(await pump.get_flow_rate()).magnitude == 1.25 + await pump.set_flow_rate(f"{math.pi} ml/min") assert math.isclose( - pint.Quantity(await pump.get_flow()).magnitude, math.pi, abs_tol=1e-3 + pint.Quantity(await pump.get_flow_rate()).magnitude, math.pi, abs_tol=1e-3 ) - await pump.stop_flow() + await pump.stop() @pytest.mark.KPump @pytest.mark.asyncio -async def test_analog_control(pump: AzuraCompactPump): +async def test_analog_control(pump: AzuraCompact): await pump.enable_analog_control(True) assert await pump.is_analog_control_enabled() is True await pump.enable_analog_control(False) @@ -78,23 +78,23 @@ async def test_analog_control(pump: AzuraCompactPump): @pytest.mark.KPump @pytest.mark.asyncio -async def test_is_running(pump: AzuraCompactPump): - await pump.set_flow("1 ml/min") - await pump.start_flow() +async def test_is_running(pump: AzuraCompact): + await pump.set_flow_rate("1 ml/min") + await pump.infuse() assert pump.is_running() is True - await pump.stop_flow() + await pump.stop() @pytest.mark.KPump @pytest.mark.asyncio -async def test_motor_current(pump: AzuraCompactPump): - await pump.stop_flow() +async def test_motor_current(pump: AzuraCompact): + await pump.stop() assert await pump.read_motor_current() == 0 @pytest.mark.KPump @pytest.mark.asyncio -async def test_correction_factor(pump: AzuraCompactPump): +async def test_correction_factor(pump: AzuraCompact): init_val = await pump.get_correction_factor() await pump.set_correction_factor(0) assert await pump.get_correction_factor() == 0 @@ -103,7 +103,7 @@ async def test_correction_factor(pump: AzuraCompactPump): @pytest.mark.KPump @pytest.mark.asyncio -async def test_adjusting_factor(pump: AzuraCompactPump): +async def test_adjusting_factor(pump: AzuraCompact): init_val = await pump.get_adjusting_factor() await 
pump.set_adjusting_factor(1000) assert await pump.get_adjusting_factor() == 1000 @@ -112,7 +112,7 @@ async def test_adjusting_factor(pump: AzuraCompactPump): @pytest.mark.KPump @pytest.mark.asyncio -async def test_autostart(pump: AzuraCompactPump): +async def test_autostart(pump: AzuraCompact): await pump.enable_autostart() assert await pump.is_autostart_enabled() is True await pump.enable_autostart(False) @@ -120,7 +120,7 @@ async def test_autostart(pump: AzuraCompactPump): @pytest.mark.KPump @pytest.mark.asyncio -async def test_start_in(pump: AzuraCompactPump): +async def test_start_in(pump: AzuraCompact): await pump.require_start_in() assert await pump.is_start_in_required() is True await pump.require_start_in(False) diff --git a/tests/devices/pumps/test_hamilton.py b/tests/devices/pumps/test_hamilton.py new file mode 100644 index 00000000..b4432af0 --- /dev/null +++ b/tests/devices/pumps/test_hamilton.py @@ -0,0 +1,88 @@ +import asyncio +import sys + +import aioserial +import pytest +from fastapi import FastAPI +from httpx import AsyncClient + +from flowchem.server.api_server import create_server_for_devices +from flowchem.server.configuration_parser import instantiate_device + + +# class FakeSerial(aioserial.AioSerial): +# """Mock AioSerial.""" +# +# # noinspection PyMissingConstructor +# def __init__(self): +# self.fixed_reply = None +# self.last_command = b"" +# self.map_reply = { +# b"aUR\r": b"\x06NV01.01.a", +# b"1a\r": b"1", +# b"bUR\r": b"", +# b":XR\r": b"", +# +# } +# +# async def write_async(self, text: bytes): +# """Override AioSerial method""" +# self.last_command = text +# +# async def readline_async(self, size: int = -1) -> bytes: +# """Override AioSerial method""" +# if self.last_command == b"{MFFFFFF\r\n": +# await asyncio.sleep(999) +# if self.fixed_reply: +# return self.fixed_reply +# return self.map_reply[self.last_command] +# +# def __repr__(self): +# return "FakeSerial" +# +# @pytest.mark.skipif(sys.platform == "win32", reason="No 
mock_serial on windows") +# @pytest.fixture +# def devices() -> dict: +# """ML600 device.""" + + +# config = {"device": {}} +# config["device"]["ml600-test"] = { +# "type": "ML600", +# "port": mock_serial.port, +# "syringe_volume": "1 ml", +# } +# return instantiate_device(config) +# +# +# @pytest.fixture +# async def app(devices) -> FastAPI: +# """ML600-containing app.""" +# app = await create_server_for_devices(devices) +# +# # Ugly workaround, essentially startup hooks are not called with AsyncClient +# # See tiangolo/fastapi#2003 for details +# [await dev.initialize() for dev in devices["device"]] +# +# return app["api_server"] +# +# +# @pytest.mark.skipif(sys.platform == "win32", reason="No mock_serial on windows") +# @pytest.mark.anyio +# async def test_root(app): +# """Test root verifies app initialization (config validation/ML600 instantiation).""" +# async with AsyncClient(app=app, base_url="http://127.0.0.1:8000") as ac: +# response_root = await ac.get("/") +# response_docs = await ac.get("/docs") +# assert response_root.status_code == 307 +# assert response_docs.status_code == 200 +# +# +# @pytest.mark.skipif(sys.platform == "win32", reason="No mock_serial on windows") +# @pytest.mark.anyio +# async def test_get_position(app): +# """Test firmware_version.""" +# async with AsyncClient(app=app, base_url="http://127.0.0.1:8000/ml600-test") as ac: +# response = await ac.get("/pump/") +# assert response.status_code == 200 +# assert "NV01.01.a" in response.text diff --git a/tests/devices/pumps/test_hw_elite11.py b/tests/devices/pumps/test_hw_elite11.py new file mode 100644 index 00000000..1587824b --- /dev/null +++ b/tests/devices/pumps/test_hw_elite11.py @@ -0,0 +1,176 @@ +""" +HA Elite11 tests + +1. Update pump serial port and address below +2. 
Run with `python -m pytest ./tests -m HApump` from the root folder +""" +import asyncio +import math + +import pytest + +from flowchem import ureg +from flowchem.devices.harvardapparatus.elite11 import Elite11 +from flowchem.devices.harvardapparatus.elite11 import PumpStatus + + +@pytest.fixture(scope="session") +async def pump(): + """Change to match your hardware ;)""" + pump = Elite11.from_config( + port="COM4", syringe_volume="5 ml", syringe_diameter="20 mm" + ) + await pump.initialize() + return pump + + +async def move_infuse(pump): + await pump.set_syringe_diameter("10 mm") + await pump.set_flow_rate("1 ml/min") + await pump.set_target_volume("1 ml") + await pump.infuse() + + +@pytest.fixture(scope="session") +def event_loop(request): + loop = asyncio.get_event_loop_policy().new_event_loop() + yield loop + loop.close() + + +@pytest.mark.HApump +@pytest.mark.asyncio +async def test_version(pump: Elite11): + assert "11 ELITE" in await pump.version() + + +@pytest.mark.HApump +@pytest.mark.asyncio +async def test_status_idle(pump: Elite11): + await pump.stop() + assert await pump.get_status() is PumpStatus.IDLE + + +@pytest.mark.HApump +@pytest.mark.asyncio +async def test_status_infusing(pump: Elite11): + await move_infuse(pump) + assert await pump.get_status() is PumpStatus.INFUSING + await pump.stop() + + +@pytest.mark.HApump +@pytest.mark.asyncio +async def test_status_withdrawing(pump: Elite11): + await pump.set_syringe_diameter("10 mm") + await pump.set_withdrawing_flow_rate("1 ml/min") + await pump.withdraw() + assert await pump.get_status() is PumpStatus.WITHDRAWING + await pump.stop() + + +@pytest.mark.HApump +@pytest.mark.asyncio +async def test_is_moving(pump: Elite11): + assert await pump.is_moving() is False + await move_infuse(pump) + assert await pump.is_moving() is True + await pump.stop() + + +@pytest.mark.HApump +@pytest.mark.asyncio +async def test_syringe_volume(pump: Elite11): + await pump.set_syringe_volume("10 ml") + assert await 
pump.get_syringe_volume() == "10 ml" + await pump.set_syringe_volume(f"{math.pi} ml") + vol = ureg.Quantity(await pump.get_syringe_volume()).magnitude + assert math.isclose(vol, math.pi, abs_tol=10e-4) + await pump.set_syringe_volume("3e-05 ml") + vol = ureg.Quantity(await pump.get_syringe_volume()).magnitude + assert math.isclose(vol, 3e-5) + await pump.set_syringe_volume("50 ml") # Leave it high for next tests + + +@pytest.mark.HApump +@pytest.mark.asyncio +async def test_infusion_rate(pump: Elite11): + await pump.set_syringe_volume("10 ml") + await pump.set_flow_rate("5 ml/min") + assert await pump.get_flow_rate() + with pytest.warns(UserWarning): + await pump.set_flow_rate("121 ml/min") + rate = ureg.Quantity(await pump.get_flow_rate()).magnitude + assert math.isclose(rate, 12.49, rel_tol=0.01) + with pytest.warns(UserWarning): + await pump.set_flow_rate("0 ml/min") + rate = ureg.Quantity(await pump.get_flow_rate()).magnitude + assert math.isclose(rate, 1e-05, abs_tol=1e-5) + await pump.set_flow_rate(f"{math.pi} ml/min") + rate = ureg.Quantity(await pump.get_flow_rate()).magnitude + assert math.isclose(rate, math.pi, abs_tol=0.001) + + +@pytest.mark.HApump +@pytest.mark.asyncio +async def test_get_infused_volume(pump: Elite11): + await pump.clear_volumes() + assert await pump.get_infused_volume() == "0 ul" + await pump.set_syringe_diameter("30 mm") + await pump.set_flow_rate("5 ml/min") + await pump.set_target_volume("0.05 ml") + await pump.infuse() + await asyncio.sleep(2) + vol = ureg.Quantity(await pump.get_infused_volume()).to("ml").magnitude + assert math.isclose(vol, 0.05, abs_tol=1e-4) + + +@pytest.mark.HApump +@pytest.mark.asyncio +async def test_get_withdrawn_volume(pump: Elite11): + await pump.clear_volumes() + await pump.set_withdrawing_flow_rate("10 ml/min") + await pump.set_target_volume("0.1 ml") + await pump.withdraw() + await asyncio.sleep(1) + vol = ureg.Quantity(await pump.get_withdrawn_volume()).to("ml").magnitude + assert math.isclose(vol, 
0.1, abs_tol=1e-4) + + +@pytest.mark.HApump +@pytest.mark.asyncio +async def test_force(pump: Elite11): + await pump.set_force(10) + assert await pump.get_force() == 10 + await pump.set_force(50.2) + assert await pump.get_force() == 50 + assert await pump.get_force() == 50 + + +@pytest.mark.HApump +@pytest.mark.asyncio +async def test_diameter(pump: Elite11): + await pump.set_syringe_diameter("10 mm") + assert await pump.get_syringe_diameter() == "10.0000 mm" + + with pytest.warns(UserWarning): + await pump.set_syringe_diameter("34 mm") + + with pytest.warns(UserWarning): + await pump.set_syringe_diameter("0.01 mm") + + await pump.set_syringe_diameter(f"{math.pi} mm") + dia = ureg.Quantity(await pump.get_syringe_diameter()).magnitude + math.isclose(dia, math.pi) + + +@pytest.mark.HApump +@pytest.mark.asyncio +async def test_target_volume(pump: Elite11): + await pump.set_syringe_volume("10 ml") + await pump.set_target_volume(f"{math.pi} ml") + vol = ureg.Quantity(await pump.get_target_volume()).magnitude + assert math.isclose(vol, math.pi, abs_tol=10e-4) + await pump.set_target_volume("1e-04 ml") + vol = ureg.Quantity(await pump.get_target_volume()).magnitude + assert math.isclose(vol, 1e-4, abs_tol=10e-4) diff --git a/tests/devices/pumps/test_ml600.py b/tests/devices/pumps/test_ml600.py new file mode 100644 index 00000000..cd25f765 --- /dev/null +++ b/tests/devices/pumps/test_ml600.py @@ -0,0 +1,21 @@ +def test_infuse(): + ... + # test without parameters + # test with only rate + # test with only volume + # test with both + # test with too large volume + + +def test_stop(): + ... + # run, stop, is-pumping is False + + +def test_withdraw(): + ... 
+ # test without parameters + # test with only rate + # test with only volume + # test with both + # test with too large volume diff --git a/tests/devices/technical/test_huber.py b/tests/devices/technical/test_huber.py new file mode 100644 index 00000000..7bc0fdbd --- /dev/null +++ b/tests/devices/technical/test_huber.py @@ -0,0 +1,344 @@ +""" Test HuberChiller object. Does not require physical connection to the device. """ +import asyncio + +import aioserial +import pytest +from _pytest.logging import LogCaptureFixture +from loguru import logger + +from flowchem.devices.huber import HuberChiller +from flowchem.devices.huber.pb_command import PBCommand +from flowchem.exceptions import InvalidConfiguration + + +@pytest.fixture +def caplog(caplog: LogCaptureFixture): + handler_id = logger.add(caplog.handler, format="{message}") + yield caplog + logger.remove(handler_id) + + +def test_pbcommand_parse_temp(): + assert PBCommand("{S00F2DF").parse_temperature() == -33.61 + assert PBCommand("{S0004DA").parse_temperature() == 12.42 + + +def test_pbcommand_parse_int(): + assert PBCommand("{S000000").parse_integer() == 0 + assert PBCommand("{S00ffff").parse_integer() == 65535 + assert PBCommand("{S001234").parse_integer() == 4660 + + +def test_pbcommand_parse_bits(): + assert PBCommand("{S001234").parse_bits() == [ + False, + False, + False, + True, + False, + False, + True, + False, + False, + False, + True, + True, + False, + True, + False, + False, + ] + + +def test_pbcommand_parse_bool(): + assert PBCommand("{S000001").parse_boolean() is True + assert PBCommand("{S000000").parse_boolean() is False + + +def test_invalid_serial_port(): + with pytest.raises(InvalidConfiguration) as execinfo: + HuberChiller.from_config(port="COM99") + assert ( + str(execinfo.value) == "Cannot connect to the HuberChiller on the port " + ) + + +class FakeSerial(aioserial.AioSerial): + """Mock AioSerial.""" + + # noinspection PyMissingConstructor + def __init__(self): + self.fixed_reply = None 
+ self.last_command = b"" + self.map_reply = { + b"{M0A****\r\n": b"{S0AFFFF\r\n", # Fake status reply + b"{M3C****\r\n": b"{S3CFFFF\r\n", # Fake status2 reply + b"{M3A****\r\n": b"{S3A04DA\r\n", # Fake process temperature reply + b"{M00****\r\n": b"{S0004DA\r\n", # Fake setpoint reply + b"{M03****\r\n": b"{S030a00\r\n", # Fake pressure reply + b"{M04****\r\n": b"{S04000a\r\n", # Fake current power reply (10 W) + b"{M26****\r\n": b"{S26000a\r\n", # Fake current pump speed (10 rpm) + b"{M30****\r\n": b"{S30EC78\r\n", # Fake min temp -50.00 C + b"{M31****\r\n": b"{S303A98\r\n", # Fake max temp +150.00 C + b"{M00EC78\r\n": b"{S00EC78\r\n", # set temp to -50 + b"{M003A98\r\n": b"{S003A98\r\n", # set temp to 150 + b"{M0007D0\r\n": b"{S0007D0\r\n", # Reply to set temp 20 C + b"{M00F830\r\n": b"{S00F830\r\n", # Reply to set temp -20 C + } + + async def write_async(self, text: bytes): + """Override AioSerial method""" + self.last_command = text + + async def readline_async(self, size: int = -1) -> bytes: + """Override AioSerial method""" + if self.last_command == b"{MFFFFFF\r\n": + await asyncio.sleep(999) + if self.fixed_reply: + return self.fixed_reply + return self.map_reply[self.last_command] + + def __repr__(self): + return "FakeSerial" + + +@pytest.fixture(scope="session") +def chiller(): + """Chiller instance connected to FakeSerial""" + return HuberChiller(FakeSerial()) + + +async def test_no_reply(chiller, caplog): + reply = await chiller._send_command_and_read_reply("{MFFFFFF") + assert "No reply received!" 
in caplog.text + assert reply == "" + + +# async def test_status(chiller): +# chiller._serial.fixed_reply = None +# stat = await chiller.status() +# stat_content = list(stat.values()) +# assert all(stat_content) +# +# # Set reply in FakeSerial +# chiller._serial.fixed_reply = b"{S0A0000" +# stat = await chiller.status() +# stat_content = list(stat.values()) +# assert not any(stat_content) +# +# +# async def test_status2(chiller): +# chiller._serial.fixed_reply = None +# stat = await chiller.status2() +# stat_content = [x for x in stat.values()] +# assert all(stat_content) +# +# # Set reply in FakeSerial +# chiller._serial.fixed_reply = b"{S0A0000" +# stat = await chiller.status2() +# stat_content = [x for x in stat.values()] +# assert not any(stat_content) +# +# +# async def test_get_temperature_setpoint(chiller): +# chiller._serial.fixed_reply = None +# +# temp = await chiller.get_temperature() +# assert temp == 12.42 +# +# chiller._serial.fixed_reply = b"{S00F2DF" +# temp = await chiller.get_temperature() +# assert temp == -33.61 +# +# +# # noinspection PyUnresolvedReferences +# async def test_set_temperature_setpoint(chiller): +# chiller._serial.fixed_reply = None +# await chiller.set_temperature("20 °C") +# print(chiller._serial.last_command) +# assert chiller._serial.last_command == b"{M0007D0\r\n" +# +# await chiller.set_temperature("-20 °C") +# assert chiller._serial.last_command == b"{M00F830\r\n" +# +# with pytest.warns(Warning): +# await chiller.set_temperature("-400 °C") +# assert chiller._serial.last_command == b"{M00EC78\r\n" +# +# with pytest.warns(Warning): +# await chiller.set_temperature("4000 °C") +# assert chiller._serial.last_command == b"{M003A98\r\n" +# +# +# # noinspection PyUnresolvedReferences +# async def test_internal_temperature(chiller): +# chiller._serial.fixed_reply = b"{S000000" +# await chiller.internal_temperature() +# assert chiller._serial.last_command == b"{M01****\r\n" +# +# +# # noinspection PyUnresolvedReferences +# async def 
test_return_temperature(chiller): +# chiller._serial.fixed_reply = b"{S000000" +# await chiller.return_temperature() +# assert chiller._serial.last_command == b"{M02****\r\n" +# + +# noinspection PyUnresolvedReferences +# async def test_process_temperature(chiller): +# chiller._serial.fixed_reply = b"{S000000" +# await chiller.process_temperature() +# assert chiller._serial.last_command == b"{M3A****\r\n" +# +# +# # noinspection PyUnresolvedReferences +# +# async def test_pump_pressure(chiller): +# chiller._serial.fixed_reply = None +# pressure = await chiller.pump_pressure() +# assert chiller._serial.last_command == b"{M03****\r\n" +# assert pressure == "2560 millibar" +# +# +# # noinspection PyUnresolvedReferences +# async def test_current_power(chiller): +# chiller._serial.fixed_reply = None +# power = await chiller.current_power() +# assert chiller._serial.last_command == b"{M04****\r\n" +# assert power == "10 watt" +# +# +# async def test_get_temperature_control(chiller): +# chiller._serial.fixed_reply = b"{S140000" +# t_ctl = await chiller.is_temperature_control_active() +# assert t_ctl is False +# chiller._serial.fixed_reply = b"{S140001" +# t_ctl = await chiller.is_temperature_control_active() +# assert t_ctl is True +# +# +# # noinspection PyUnresolvedReferences +# async def test_temperature_control(chiller): +# chiller._serial.fixed_reply = b"{S000000" +# await chiller.power_on() +# assert chiller._serial.last_command == b"{M140001\r\n" +# await chiller.power_off() +# assert chiller._serial.last_command == b"{M140000\r\n" +# +# +# async def test_get_circulation(chiller): +# chiller._serial.fixed_reply = b"{S160000" +# circulation = await chiller.is_circulation_active() +# assert circulation is False +# chiller._serial.fixed_reply = b"{S160001" +# circulation = await chiller.is_circulation_active() +# assert circulation is True +# +# +# # noinspection PyUnresolvedReferences +# async def test_circulation(chiller): +# chiller._serial.fixed_reply = 
b"{S000000" +# await chiller.start_circulation() +# assert chiller._serial.last_command == b"{M160001\r\n" +# await chiller.stop_circulation() +# assert chiller._serial.last_command == b"{M160000\r\n" +# +# +# # noinspection PyUnresolvedReferences +# async def test_pump_speed(chiller): +# chiller._serial.fixed_reply = None +# speed = await chiller.pump_speed() +# assert chiller._serial.last_command == b"{M26****\r\n" +# assert speed == "10 revolutions_per_minute" +# +# +# # noinspection PyUnresolvedReferences +# async def test_pump_speed_setpoint(chiller): +# chiller._serial.fixed_reply = b"{S480000" +# speed = await chiller.pump_speed_setpoint() +# assert chiller._serial.last_command == b"{M48****\r\n" +# assert speed == "0 revolutions_per_minute" +# +# +# # noinspection PyUnresolvedReferences +# async def test_set_pump_speed(chiller): +# chiller._serial.fixed_reply = b"{S480000" +# await chiller.set_pump_speed("10 rpm") +# assert chiller._serial.last_command == b"{M48000A\r\n" +# +# +# # noinspection PyUnresolvedReferences +# async def test_cooling_water_temp(chiller): +# chiller._serial.fixed_reply = b"{S000000" +# await chiller.cooling_water_temp() +# assert chiller._serial.last_command == b"{M2C****\r\n" +# +# +# # noinspection PyUnresolvedReferences +# async def test_cooling_water_pressure(chiller): +# chiller._serial.fixed_reply = b"{S000000" +# await chiller.cooling_water_pressure() +# assert chiller._serial.last_command == b"{M2D****\r\n" +# +# +# # noinspection PyUnresolvedReferences +# async def test_cooling_water_temp_out(chiller): +# chiller._serial.fixed_reply = b"{S000000" +# await chiller.cooling_water_temp_outflow() +# assert chiller._serial.last_command == b"{M4C****\r\n" +# +# +# # noinspection PyUnresolvedReferences +# async def test_alarm_max_internal_temp(chiller): +# chiller._serial.fixed_reply = b"{S000000" +# await chiller.alarm_max_internal_temp() +# assert chiller._serial.last_command == b"{M51****\r\n" +# +# +# # noinspection 
PyUnresolvedReferences +# async def test_alarm_min_internal_temp(chiller): +# chiller._serial.fixed_reply = b"{S000000" +# await chiller.alarm_min_internal_temp() +# assert chiller._serial.last_command == b"{M52****\r\n" +# +# +# # noinspection PyUnresolvedReferences +# async def test_alarm_max_process_temp(chiller): +# chiller._serial.fixed_reply = b"{S000000" +# await chiller.alarm_max_process_temp() +# assert chiller._serial.last_command == b"{M53****\r\n" +# +# +# # noinspection PyUnresolvedReferences +# async def test_alarm_min_process_temp(chiller): +# chiller._serial.fixed_reply = b"{S000000" +# await chiller.alarm_min_process_temp() +# assert chiller._serial.last_command == b"{M54****\r\n" +# +# +# # noinspection PyUnresolvedReferences +# async def test_set_alarm_max_internal_temp(chiller): +# chiller._serial.fixed_reply = b"{S000000" +# await chiller.set_alarm_max_internal_temp("10 °C") +# assert chiller._serial.last_command == b"{M5103E8\r\n" +# +# +# # noinspection PyUnresolvedReferences +# async def test_set_alarm_min_internal_temp(chiller): +# chiller._serial.fixed_reply = b"{S000000" +# await chiller.set_alarm_min_internal_temp("10 °C") +# assert chiller._serial.last_command == b"{M5203E8\r\n" +# +# +# # noinspection PyUnresolvedReferences +# async def test_set_alarm_max_process_temp(chiller): +# chiller._serial.fixed_reply = b"{S000000" +# await chiller.set_alarm_max_process_temp("10 °C") +# assert chiller._serial.last_command == b"{M5303E8\r\n" +# +# +# # noinspection PyUnresolvedReferences +# async def test_set_alarm_min_process_temp(chiller): +# chiller._serial.fixed_reply = b"{S000000" +# await chiller.set_alarm_min_process_temp("10 °C") +# assert chiller._serial.last_command == b"{M5403E8\r\n" diff --git a/tests/devices/test_device_type_finder.py b/tests/devices/test_device_type_finder.py new file mode 100644 index 00000000..5c4db29a --- /dev/null +++ b/tests/devices/test_device_type_finder.py @@ -0,0 +1,32 @@ +from 
flowchem.devices.list_known_device_type import autodiscover_device_classes + + +def test_device_finder(): + device_types = [ + "AzuraCompact", + "Clarity", + "Elite11", + "FakeDevice", + "HuberChiller", + "IcIR", + "KnauerDAD", + "KnauerValve", + "ML600", + "MansonPowerSupply", + "PhidgetPressureSensor", + "Spinsolve", + "ViciValve", + ] + + dev_found = autodiscover_device_classes() + # Check all expected devices are there + for device_name in device_types: + assert device_name in dev_found.keys() + + # Check all devices implement base API + for name, device in dev_found.items(): + if name == "KnauerDADCommands": + continue # not a real device + + assert hasattr(device, "components") + assert hasattr(device, "initialize") diff --git a/tests/server/test_server.py b/tests/server/test_server.py new file mode 100644 index 00000000..63bab187 --- /dev/null +++ b/tests/server/test_server.py @@ -0,0 +1,40 @@ +import asyncio +from pathlib import Path +from textwrap import dedent + +import pytest +from click.testing import CliRunner +from fastapi.testclient import TestClient + +from flowchem.server.api_server import create_server_from_file + + +@pytest.fixture(scope="function") +def app(): + runner = CliRunner() + with runner.isolated_filesystem(): + with open("test_configuration.toml", "w") as f: + f.write( + dedent( + """[device.test-device]\n + type = "FakeDevice"\n""" + ) + ) + server = asyncio.run( + create_server_from_file(Path("test_configuration.toml"), "127.0.0.1") + ) + yield server["api_server"] + + +def test_read_main(app): + client = TestClient(app) + response = client.get("/") + assert response.status_code == 200 + assert "Flowchem" in response.text + + response = client.get("/test-device/test-component/test") + assert response.status_code == 200 + assert response.text == "true" + + response = client.get("/test-device2") + assert response.status_code == 404 diff --git a/tests/test_devicegraph.py b/tests/test_devicegraph.py deleted file mode 100644 index 
091e3002..00000000 --- a/tests/test_devicegraph.py +++ /dev/null @@ -1,61 +0,0 @@ -import pytest -from flowchem.components.properties import Component -from flowchem import DeviceGraph - -a, b, c, d = [Component() for _ in range(4)] - - -@pytest.fixture -def device_graph(): - return DeviceGraph() - - -def test_add_single(device_graph): - device_graph.add_device(a) - # Contains - assert a in device_graph - # Length - assert len(device_graph) == 1 - # Get by name - assert device_graph[a.name] == a - # Get by type - assert len(device_graph[Component]) == 1 - # Get by value - assert device_graph[a] == a - - -def test_add_iterable(device_graph): - device_graph.add_device([a, b, c]) - assert len(device_graph) == 3 - assert a in device_graph - assert b in device_graph - assert c in device_graph - - -def test_add_errors(device_graph): - - # Not a component - with pytest.raises(AssertionError): - not_a_component = 5 - device_graph.add_device(not_a_component) - - # Class instead of instance - with pytest.raises(AssertionError): - device_graph.add_device(Component) - - -def test_add_edge(device_graph): - device_graph.add_device([a, b]) - device_graph.add_connection(a, b) - assert len(device_graph) == 2 - assert device_graph.validate() - - -def test_validation(device_graph): - device_graph.add_device([a, b, c]) - device_graph.add_connection(a, b) - # C is not connected to anything - assert device_graph.validate() is False - # Now DiGraph is weakly connected - device_graph.add_connection(b, c) - assert device_graph.validate() is True diff --git a/tests/test_execute_protocol.py b/tests/test_execute_protocol.py deleted file mode 100644 index 710b174b..00000000 --- a/tests/test_execute_protocol.py +++ /dev/null @@ -1,44 +0,0 @@ -from flowchem.components.dummy import DummyPump, DummySensor, BrokenDummySensor -from flowchem.components.stdlib import Vessel, Tube -from flowchem import Protocol, DeviceGraph - -# create components -from flowchem.units import flowchem_ureg - -# -# def 
test_execute(): -# a = Vessel(name="a", description="nothing") -# b = Vessel(name="b", description="nothing") -# c = Vessel(name="c", description="nothing") -# -# pump = DummyPump(name="Dummy pump") -# -# test = DummySensor(name="test") -# test2 = DummySensor(name="test2") -# test3 = DummySensor(name="test3") -# test4 = BrokenDummySensor(name="test4") -# -# tube = Tube("1 foot", "1/16 in", "2/16 in", "PVC") -# -# # create apparatus -# D = DeviceGraph() -# # A = Apparatus() -# A.add([a, b, c], pump, tube) -# A.add(pump, [test, test2, test3, test4], tube) -# -# P = Protocol(A, name="testing execution") -# P.add(pump, rate="5 mL/min", start="0 seconds", stop="1 secs") -# P.add([test, test2, test3, test4], rate="5 Hz", start="0 secs", stop="1 secs") -# -# # test both execution modes -# for dry_run in [True, False]: -# E = P.execute(confirm=True, dry_run=dry_run, log_file=None, data_file=None) -# -# assert len(E.data["test"]) >= 5 -# if dry_run: -# assert E.data["test"][0].data == "simulated read" -# assert pump.rate == flowchem_ureg.parse_expression(pump._base_state["rate"]) -# -# # test fast forward -# E = P.execute(confirm=True, dry_run=5, log_file=None, data_file=None) -# assert len(E.data["test"]) >= 1 diff --git a/tests/test_huber.py b/tests/test_huber.py deleted file mode 100644 index c05c0adf..00000000 --- a/tests/test_huber.py +++ /dev/null @@ -1,374 +0,0 @@ -""" Test HuberChiller object. Does not require physical connection to the device. 
""" -import asyncio - -import aioserial -import pytest - -from flowchem.components.devices.Huber.huberchiller import HuberChiller, PBCommand -from flowchem.exceptions import InvalidConfiguration - - -# TEST PBCommand parsers first -def test_pbcommand_parse_temp(): - assert PBCommand("{S00F2DF").parse_temperature() == "-33.61 degree_Celsius" - assert PBCommand("{S0004DA").parse_temperature() == "12.42 degree_Celsius" - - -def test_pbcommand_parse_int(): - assert PBCommand("{S000000").parse_integer() == 0 - assert PBCommand("{S00ffff").parse_integer() == 65535 - assert PBCommand("{S001234").parse_integer() == 4660 - - -def test_pbcommand_parse_bits(): - assert PBCommand("{S001234").parse_bits() == [ - False, - False, - False, - True, - False, - False, - True, - False, - False, - False, - True, - True, - False, - True, - False, - False, - ] - - -def test_pbcommand_parse_bool(): - assert PBCommand("{S000001").parse_boolean() is True - assert PBCommand("{S000000").parse_boolean() is False - - -def test_invalid_serial_port(): - with pytest.raises(InvalidConfiguration) as execinfo: - HuberChiller.from_config(port="COM99") - assert ( - str(execinfo.value) == "Cannot connect to the HuberChiller on the port " - ) - - -class FakeSerial(aioserial.AioSerial): - """Mock AioSerial.""" - - # noinspection PyMissingConstructor - def __init__(self): - self.fixed_reply = None - self.last_command = b"" - self.map_reply = { - b"{M0A****\r\n": b"{S0AFFFF\r\n", # Fake status reply - b"{M3C****\r\n": b"{S3CFFFF\r\n", # Fake status2 reply - b"{M00****\r\n": b"{S0004DA\r\n", # Fake setpoint reply - b"{M03****\r\n": b"{S030a00\r\n", # Fake pressure reply - b"{M04****\r\n": b"{S04000a\r\n", # Fake current power reply (10 W) - b"{M26****\r\n": b"{S26000a\r\n", # Fake current pump speed (10 rpm) - b"{M30****\r\n": b"{S30EC78\r\n", # Fake min temp -50.00 C - b"{M31****\r\n": b"{S303A98\r\n", # Fake max temp +150.00 C - b"{M00EC78\r\n": b"{S00EC78\r\n", # set temp to -50 - b"{M003A98\r\n": 
b"{S003A98\r\n", # set temp to 150 - b"{M0007D0\r\n": b"{S0007D0\r\n", # Reply to set temp 20 C - b"{M00F830\r\n": b"{S00F830\r\n", # Reply to set temp -20 C - } - - async def write_async(self, text: bytes): - """Override AioSerial method""" - self.last_command = text - - async def readline_async(self, size: int = -1) -> bytes: - """Override AioSerial method""" - if self.last_command == b"{MFFFFFF\r\n": - await asyncio.sleep(999) - if self.fixed_reply: - return self.fixed_reply - return self.map_reply[self.last_command] - - def __repr__(self): - return "FakeSerial" - - -@pytest.fixture(scope="session") -def chiller(): - """Chiller instance connected to FakeSerial""" - return HuberChiller(FakeSerial()) - - -@pytest.mark.asyncio -async def test_no_reply(chiller): - with pytest.warns(UserWarning): - reply = await chiller.send_command_and_read_reply("{MFFFFFF") - assert reply == "{SFFFFFF" - - -@pytest.mark.asyncio -async def test_status(chiller): - chiller._serial.fixed_reply = None - stat = await chiller.status() - stat_content = [x for x in stat.values()] - assert all(stat_content) - - # Set reply in FakeSerial - chiller._serial.fixed_reply = b"{S0A0000" - stat = await chiller.status() - stat_content = [x for x in stat.values()] - assert not any(stat_content) - - -@pytest.mark.asyncio -async def test_status2(chiller): - chiller._serial.fixed_reply = None - stat = await chiller.status2() - stat_content = [x for x in stat.values()] - assert all(stat_content) - - # Set reply in FakeSerial - chiller._serial.fixed_reply = b"{S0A0000" - stat = await chiller.status2() - stat_content = [x for x in stat.values()] - assert not any(stat_content) - - -@pytest.mark.asyncio -async def test_get_temperature_setpoint(chiller): - chiller._serial.fixed_reply = None - temp = await chiller.get_temperature_setpoint() - assert temp == "12.42 degree_Celsius" - - chiller._serial.fixed_reply = b"{S00F2DF" - temp = await chiller.get_temperature_setpoint() - assert temp == "-33.61 
degree_Celsius" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_set_temperature_setpoint(chiller): - chiller._serial.fixed_reply = None - await chiller.set_temperature_setpoint("20 °C") - print(chiller._serial.last_command) - assert chiller._serial.last_command == b"{M0007D0\r\n" - - await chiller.set_temperature_setpoint("-20 °C") - assert chiller._serial.last_command == b"{M00F830\r\n" - - with pytest.warns(Warning): - await chiller.set_temperature_setpoint("-400 °C") - assert chiller._serial.last_command == b"{M00EC78\r\n" - - with pytest.warns(Warning): - await chiller.set_temperature_setpoint("4000 °C") - assert chiller._serial.last_command == b"{M003A98\r\n" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_internal_temperature(chiller): - chiller._serial.fixed_reply = b"{S000000" - await chiller.internal_temperature() - assert chiller._serial.last_command == b"{M01****\r\n" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_return_temperature(chiller): - chiller._serial.fixed_reply = b"{S000000" - await chiller.return_temperature() - assert chiller._serial.last_command == b"{M02****\r\n" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_process_temperature(chiller): - chiller._serial.fixed_reply = b"{S000000" - await chiller.process_temperature() - assert chiller._serial.last_command == b"{M3A****\r\n" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_pump_pressure(chiller): - chiller._serial.fixed_reply = None - pressure = await chiller.pump_pressure() - assert chiller._serial.last_command == b"{M03****\r\n" - assert pressure == "2560 millibar" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_current_power(chiller): - chiller._serial.fixed_reply = None - power = await chiller.current_power() - assert chiller._serial.last_command == b"{M04****\r\n" - assert power == "10 
watt" - - -@pytest.mark.asyncio -async def test_get_temperature_control(chiller): - chiller._serial.fixed_reply = b"{S140000" - t_ctl = await chiller.is_temperature_control_active() - assert t_ctl is False - chiller._serial.fixed_reply = b"{S140001" - t_ctl = await chiller.is_temperature_control_active() - assert t_ctl is True - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_temperature_control(chiller): - chiller._serial.fixed_reply = b"{S000000" - await chiller.start_temperature_control() - assert chiller._serial.last_command == b"{M140001\r\n" - await chiller.stop_temperature_control() - assert chiller._serial.last_command == b"{M140000\r\n" - - -@pytest.mark.asyncio -async def test_get_circulation(chiller): - chiller._serial.fixed_reply = b"{S160000" - circulation = await chiller.is_circulation_active() - assert circulation is False - chiller._serial.fixed_reply = b"{S160001" - circulation = await chiller.is_circulation_active() - assert circulation is True - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_circulation(chiller): - chiller._serial.fixed_reply = b"{S000000" - await chiller.start_circulation() - assert chiller._serial.last_command == b"{M160001\r\n" - await chiller.stop_circulation() - assert chiller._serial.last_command == b"{M160000\r\n" - - -@pytest.mark.asyncio -async def test_min_setpoint(chiller): - chiller._serial.fixed_reply = None - min_t = await chiller.min_setpoint() - assert min_t == "-50.0 degree_Celsius" - - -@pytest.mark.asyncio -async def test_max_setpoint(chiller): - chiller._serial.fixed_reply = None - max_t = await chiller.max_setpoint() - assert max_t == "150.0 degree_Celsius" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_pump_speed(chiller): - chiller._serial.fixed_reply = None - speed = await chiller.pump_speed() - assert chiller._serial.last_command == b"{M26****\r\n" - assert speed == "10 revolutions_per_minute" - - -# 
noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_pump_speed_setpoint(chiller): - chiller._serial.fixed_reply = b"{S480000" - speed = await chiller.pump_speed_setpoint() - assert chiller._serial.last_command == b"{M48****\r\n" - assert speed == "0 revolutions_per_minute" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_set_pump_speed(chiller): - chiller._serial.fixed_reply = b"{S480000" - await chiller.set_pump_speed("10 rpm") - assert chiller._serial.last_command == b"{M48000A\r\n" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_cooling_water_temp(chiller): - chiller._serial.fixed_reply = b"{S000000" - await chiller.cooling_water_temp() - assert chiller._serial.last_command == b"{M2C****\r\n" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_cooling_water_pressure(chiller): - chiller._serial.fixed_reply = b"{S000000" - await chiller.cooling_water_pressure() - assert chiller._serial.last_command == b"{M2D****\r\n" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_cooling_water_temp_out(chiller): - chiller._serial.fixed_reply = b"{S000000" - await chiller.cooling_water_temp_outflow() - assert chiller._serial.last_command == b"{M4C****\r\n" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_alarm_max_internal_temp(chiller): - chiller._serial.fixed_reply = b"{S000000" - await chiller.alarm_max_internal_temp() - assert chiller._serial.last_command == b"{M51****\r\n" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_alarm_min_internal_temp(chiller): - chiller._serial.fixed_reply = b"{S000000" - await chiller.alarm_min_internal_temp() - assert chiller._serial.last_command == b"{M52****\r\n" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_alarm_max_process_temp(chiller): - chiller._serial.fixed_reply = b"{S000000" - await 
chiller.alarm_max_process_temp() - assert chiller._serial.last_command == b"{M53****\r\n" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_alarm_min_process_temp(chiller): - chiller._serial.fixed_reply = b"{S000000" - await chiller.alarm_min_process_temp() - assert chiller._serial.last_command == b"{M54****\r\n" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_set_alarm_max_internal_temp(chiller): - chiller._serial.fixed_reply = b"{S000000" - await chiller.set_alarm_max_internal_temp("10 °C") - assert chiller._serial.last_command == b"{M5103E8\r\n" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_set_alarm_min_internal_temp(chiller): - chiller._serial.fixed_reply = b"{S000000" - await chiller.set_alarm_min_internal_temp("10 °C") - assert chiller._serial.last_command == b"{M5203E8\r\n" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_set_alarm_max_process_temp(chiller): - chiller._serial.fixed_reply = b"{S000000" - await chiller.set_alarm_max_process_temp("10 °C") - assert chiller._serial.last_command == b"{M5303E8\r\n" - - -# noinspection PyUnresolvedReferences -@pytest.mark.asyncio -async def test_set_alarm_min_process_temp(chiller): - chiller._serial.fixed_reply = b"{S000000" - await chiller.set_alarm_min_process_temp("10 °C") - assert chiller._serial.last_command == b"{M5403E8\r\n" diff --git a/tests/test_hw_elite11.py b/tests/test_hw_elite11.py deleted file mode 100644 index 19423cc0..00000000 --- a/tests/test_hw_elite11.py +++ /dev/null @@ -1,178 +0,0 @@ -""" -HA Elite11 tests -Run with python -m pytest ./tests -m HApump and updates pump com port and address in pump below -""" -import asyncio -import math - -import pytest - -from flowchem.components.devices.Harvard_Apparatus.HA_elite11 import ( - Elite11InfuseWithdraw, - PumpStatus, -) -from flowchem.units import flowchem_ureg - - -async def move_infuse(pump): - await 
pump.set_syringe_diameter(10) - await pump.set_infusion_rate(1) - await pump.set_target_volume(1) - await pump.infuse_run() - - -@pytest.fixture(scope="session") -def event_loop(request): - loop = asyncio.get_event_loop_policy().new_event_loop() - yield loop - loop.close() - - -@pytest.fixture(scope="session") -async def pump(): - """Change to match your hardware ;)""" - pump = Elite11InfuseWithdraw.from_config( - port="COM11", syringe_volume=5, diameter=20 - ) - await pump.initialize() - return pump - - -@pytest.mark.HApump -@pytest.mark.asyncio -async def test_version(pump: Elite11InfuseWithdraw): - assert "11 ELITE" in await pump.version() - - -@pytest.mark.HApump -@pytest.mark.asyncio -async def test_status_idle(pump: Elite11InfuseWithdraw): - await pump.stop() - assert await pump.get_status() is PumpStatus.IDLE - - -@pytest.mark.HApump -@pytest.mark.asyncio -async def test_status_infusing(pump: Elite11InfuseWithdraw): - await move_infuse(pump) - assert await pump.get_status() is PumpStatus.INFUSING - await pump.stop() - - -@pytest.mark.HApump -@pytest.mark.asyncio -async def test_status_withdrawing(pump: Elite11InfuseWithdraw): - await pump.set_syringe_diameter(10) - await pump.set_withdraw_rate(1) - await pump.withdraw_run() - assert await pump.get_status() is PumpStatus.WITHDRAWING - await pump.stop() - - -@pytest.mark.HApump -@pytest.mark.asyncio -async def test_is_moving(pump: Elite11InfuseWithdraw): - assert await pump.is_moving() is False - await move_infuse(pump) - assert await pump.is_moving() is True - await pump.stop() - - -@pytest.mark.HApump -@pytest.mark.asyncio -async def test_syringe_volume(pump: Elite11InfuseWithdraw): - await pump.set_syringe_volume(10) - assert await pump.get_syringe_volume() == "10 ml" - await pump.set_syringe_volume(math.pi) - vol = flowchem_ureg.Quantity(await pump.get_syringe_volume()).magnitude - assert math.isclose(vol, math.pi, abs_tol=10e-4) - await pump.set_syringe_volume(3e-05) - vol = flowchem_ureg.Quantity(await 
pump.get_syringe_volume()).magnitude - assert math.isclose(vol, 3e-5) - await pump.set_syringe_volume( - 50 - ) # Leave a sensible value otherwise other tests will fail! - - -@pytest.mark.HApump -@pytest.mark.asyncio -async def test_infusion_rate(pump: Elite11InfuseWithdraw): - await pump.set_syringe_diameter(10) - await pump.set_infusion_rate(5) - assert await pump.get_infusion_rate() - with pytest.warns(UserWarning): - await pump.set_infusion_rate(121) - rate = flowchem_ureg.Quantity(await pump.get_infusion_rate()).magnitude - assert math.isclose(rate, 12.49, rel_tol=0.01) - with pytest.warns(UserWarning): - await pump.set_infusion_rate(0) - rate = flowchem_ureg.Quantity(await pump.get_infusion_rate()).magnitude - assert math.isclose(rate, 1e-05, abs_tol=1e-5) - await pump.set_infusion_rate(math.pi) - rate = flowchem_ureg.Quantity(await pump.get_infusion_rate()).magnitude - assert math.isclose(rate, math.pi, abs_tol=0.001) - - -@pytest.mark.HApump -@pytest.mark.asyncio -async def test_get_infused_volume(pump: Elite11InfuseWithdraw): - await pump.clear_volumes() - assert await pump.get_infused_volume() == "0 ul" - await pump.set_syringe_diameter(30) - await pump.set_infusion_rate(5) - await pump.set_target_volume(0.05) - await pump.infuse_run() - await asyncio.sleep(2) - vol = flowchem_ureg.Quantity(await pump.get_infused_volume()).to("ml").magnitude - assert math.isclose(vol, 0.05, abs_tol=1e-4) - - -@pytest.mark.HApump -@pytest.mark.asyncio -async def test_get_withdrawn_volume(pump: Elite11InfuseWithdraw): - await pump.clear_volumes() - await pump.set_withdraw_rate(10) - await pump.set_target_volume(0.1) - await pump.withdraw_run() - await asyncio.sleep(1) - vol = flowchem_ureg.Quantity(await pump.get_withdrawn_volume()).to("ml").magnitude - assert math.isclose(vol, 0.1, abs_tol=1e-4) - - -@pytest.mark.HApump -@pytest.mark.asyncio -async def test_force(pump: Elite11InfuseWithdraw): - await pump.set_force(10) - assert await pump.get_force() == 10 - await 
pump.set_force(50.2) - assert await pump.get_force() == 50 - assert await pump.get_force() == 50 - - -@pytest.mark.HApump -@pytest.mark.asyncio -async def test_diameter(pump: Elite11InfuseWithdraw): - await pump.set_syringe_diameter(10) - assert await pump.get_syringe_diameter() == "10.0000 mm" - - with pytest.warns(UserWarning): - await pump.set_syringe_diameter(34) - - with pytest.warns(UserWarning): - await pump.set_syringe_diameter(0.01) - - await pump.set_syringe_diameter(math.pi) - dia = flowchem_ureg.Quantity(await pump.get_syringe_diameter()).magnitude - math.isclose(dia, math.pi) - - -@pytest.mark.HApump -@pytest.mark.asyncio -async def test_target_volume(pump: Elite11InfuseWithdraw): - await pump.set_syringe_volume(10) - await pump.set_target_volume(math.pi) - vol = flowchem_ureg.Quantity(await pump.get_target_volume()).magnitude - assert math.isclose(vol, math.pi, abs_tol=10e-4) - await pump.set_target_volume(1e-04) - vol = flowchem_ureg.Quantity(await pump.get_target_volume()).magnitude - assert math.isclose(vol, 1e-4, abs_tol=10e-4) diff --git a/tests/test_protocol.py b/tests/test_protocol.py deleted file mode 100644 index d44e85f9..00000000 --- a/tests/test_protocol.py +++ /dev/null @@ -1,231 +0,0 @@ -from datetime import timedelta - -import pytest - - -from flowchem import DeviceGraph, Protocol -from flowchem.components.properties import Valve, Component -from flowchem.components.dummy import Dummy -from flowchem.components.stdlib import Pump, Vessel - - -@pytest.fixture -def device_graph(): - D = DeviceGraph() - a, b = [Component() for _ in range(2)] - pump = Pump("pump") - D.add_device([a, b, pump]) - D.add_connection(a, b) - D.add_connection(b, pump) - return D - - -def test_create_protocol(device_graph): - # test naming - assert Protocol(device_graph, name="testing").name == "testing" - assert Protocol(device_graph).name == "Protocol_0" - assert Protocol(device_graph).name == "Protocol_1" - - -def test_add(device_graph): - P = 
Protocol(device_graph) - - procedure = { - "component": device_graph["pump"], - "params": {"rate": "10 mL/min"}, - "start": 0, - "stop": 300, - } - - # test using duration - P.add(device_graph["pump"], rate="10 mL/min", duration="5 min") - assert P.procedures[0] == procedure - - # test adding with start and stop - P.procedures = [] - P.add(device_graph["pump"], rate="10 mL/min", start="0 min", stop="5 min") - assert P.procedures[0] == procedure - - # test adding with start and stop as timedeltas - P.procedures = [] - P.add( - device_graph["pump"], - rate="10 mL/min", - start=timedelta(seconds=0), - stop=timedelta(minutes=5), - ) - assert P.procedures[0] == procedure - - # test adding with duration as timedelta - P.procedures = [] - P.add(device_graph["pump"], rate="10 mL/min", duration=timedelta(minutes=5)) - assert P.procedures[0] == procedure - - P = Protocol(device_graph) - with pytest.raises(AssertionError): - P.add(Pump("not in apparatus"), rate="10 mL/min", duration="5 min") - - # adding a class, not an instance of it - with pytest.raises(ValueError): - P.add(Pump, rate="10 mL/min", duration="5 min") - - # Not adding keyword args - with pytest.raises(RuntimeError): - P.add(device_graph["pump"], duration="5 min") - - # Invalid keyword for component - with pytest.raises(ValueError): - P.add(device_graph["pump"], active=False, duration="5 min") - - # Invalid dimensionality for kwarg - with pytest.raises(ValueError): - P.add(device_graph["pump"], rate="5 mL", duration="5 min") - - # No unit - with pytest.raises(ValueError): - P.add(device_graph["pump"], rate="5", duration="5 min") - - # Just the raw value without a unit - with pytest.raises(ValueError): - P.add(device_graph["pump"], rate=5, duration="5 min") - - # Providing stop and duration should raise error - with pytest.raises(RuntimeError): - P.add(device_graph["pump"], rate="5 mL/min", stop="5 min", duration="5 min") - - # stop time before start time - with pytest.raises(ValueError): - P.add( - 
[device_graph["pump"], device_graph["pump"]], - rate="10 mL/min", - start="5 min", - stop="4 min", - ) - - -def test_add_dummy(device_graph): - dummy = Dummy(name="dummy") - device_graph.add_connection(dummy, "pump") - P = Protocol(device_graph) - with pytest.raises(ValueError): - P.add(dummy, active=1) # should be a bool! - - -# def test_add_valve(device_graph): -# valve = Valve(port={1, 2}) -# bad_valve = Valve(port={1, 2}) -# pump1 = Pump("pump1") -# pump2 = Pump("pump2") -# -# device_graph.add_connection(valve, "pump", 2) -# device_graph.add_connection(valve, pump1, 1) -# device_graph.add_connection(bad_valve, "pump", 1) -# device_graph.add_connection(bad_valve, pump2, 2) -# P = Protocol(device_graph) -# -# expected = [dict(start=0, stop=1, component=valve, params={"setting": 1})] -# -# # directly pass the pump object -# P.add(valve, setting=pump1, duration="1 sec") -# assert P.procedures == expected -# P.procedures = [] -# -# # using its name -# P.add(valve, setting="pump1", duration="1 sec") -# assert P.procedures == expected -# P.procedures = [] -# -# # using its port number -# P.add(valve, setting=1, duration="1 sec") -# assert P.procedures == expected -# -# with pytest.raises(ValueError): -# P.add(valve, setting=3, duration="1 sec") -# -# with pytest.raises(ValueError): -# P.add(bad_valve, setting=3, duration="1 sec") - - -# def test_compile(): -# P = Protocol(A) -# P.add([pump1, pump2], rate="10 mL/min", duration="5 min") -# assert P._compile() == { -# pump1: [ -# {"params": {"rate": "10 mL/min"}, "time": 0}, -# {"params": {"rate": "0 mL/min"}, "time": 300}, -# ], -# pump2: [ -# { -# "params": {"rate": "10 mL/min"}, -# "time": flowchem_ureg.parse_expression("0 seconds"), -# }, -# {"params": {"rate": "0 mL/min"}, "time": 300}, -# ], -# } -# -# -# def test_unused_component(): -# # raise warning if component not used -# P = Protocol(A) -# P.add(pump1, rate="10 mL/min", duration="5 min") -# with pytest.warns(UserWarning, match="not used"): -# P._compile() -# 
-# -# def test_switching_rates(): -# # check switching between rates -# P = Protocol(A) -# P.add([pump1, pump2], rate="10 mL/min", duration="5 min") -# P.add(pump1, rate="5 mL/min", start="5 min", stop="10 min") -# assert P._compile() == { -# pump1: [ -# {"params": {"rate": "10 mL/min"}, "time": 0}, -# {"params": {"rate": "5 mL/min"}, "time": 300}, -# {"params": {"rate": "0 mL/min"}, "time": 600}, -# ], -# pump2: [ -# {"params": {"rate": "10 mL/min"}, "time": 0}, -# {"params": {"rate": "0 mL/min"}, "time": 300}, -# ], -# } -# -# -# def test_overlapping_procedures(): -# P = Protocol(A) -# P.add(pump1, start="0 seconds", stop="5 seconds", rate="5 mL/min") -# P.add(pump1, start="2 seconds", stop="5 seconds", rate="2 mL/min") -# with pytest.raises(RuntimeError): -# P._compile() -# -# -# def test_conflicting_continuous_procedures(): -# P = Protocol(A) -# P.add(pump1, rate="5 mL/min", stop="1 sec") -# P.add(pump1, rate="2 mL/min", stop="1 sec") -# with pytest.raises(RuntimeError): -# P._compile() -# -# -# def test_json(): -# P = Protocol(A) -# P.add([pump1, pump2], rate="10 mL/min", duration="5 min") -# assert json.loads(P.json()) == [ -# { -# "start": 0, -# "stop": 300, -# "component": "pump1", -# "params": {"rate": "10 mL/min"}, -# }, -# { -# "start": 0, -# "stop": 300, -# "component": "pump2", -# "params": {"rate": "10 mL/min"}, -# }, -# ] -# -# -# def test_yaml(): -# P = Protocol(A) -# P.add([pump1, pump2], rate="10 mL/min", duration="5 min") -# assert yaml.safe_load(P.yaml()) == json.loads(P.json()) diff --git a/tests/test_tube.py b/tests/test_tube.py deleted file mode 100644 index c0a6273c..00000000 --- a/tests/test_tube.py +++ /dev/null @@ -1,21 +0,0 @@ -import pytest - -from flowchem.components.stdlib import Tube - - -def test_diameter_validation(): - # inner diameter is greater than outer diameter - with pytest.raises(ValueError): - Tube(length="5 cm", ID="2 cm", OD="1 cm", material="boyfriend material") - - -def test_length_units(): - with 
pytest.raises(ValueError): - Tube(length="5 L", ID="1 cm", OD="3 cm", material="boyfriend material") - - -def test_repr(): - assert ( - repr(Tube(length="5 cm", ID="1 cm", OD="2 cm", material="boyfriend material")) - == "Tube of length 5 centimeter, ID 1 centimeter, OD 2 centimeter" - ) diff --git a/tests/test_validate_component.py b/tests/test_validate_component.py deleted file mode 100644 index 38ff0cc7..00000000 --- a/tests/test_validate_component.py +++ /dev/null @@ -1,193 +0,0 @@ -import pytest - -from flowchem.components.properties import Component, ActiveComponent, Sensor -from flowchem.units import flowchem_ureg - - -def test_validate_component(): - # not a subclass of ActiveComponent, so it won't work - class Test(Component): - def __init__(self): - super().__init__() - - Test()._validate(dry_run=True) - with pytest.raises(RuntimeError): - Test()._validate(dry_run=False) - - -def test_empty_base_state(): - # base_state dictionary is not valid for the component because it's empty - class Test(ActiveComponent): - def __init__(self): - super().__init__(name=None) - self.active = False - self._base_state = {} - - async def _update(self): - return True - - with pytest.raises(ValueError): - Test()._validate(dry_run=True) - - -def test_invalid_base_state(): - # base_state dictionary is not valid for the component - class Test(ActiveComponent): - def __init__(self): - super().__init__(name=None) - self.active = False - self._base_state = dict(rate="10 mL") - - async def _update(self): - return True - - with pytest.raises(ValueError): - Test()._validate(dry_run=True) - - -def test_wrong_base_state_dimensionality(): - # base_state dictionary is wrong dimensionality - class Test(ActiveComponent): - def __init__(self): - super().__init__(name=None) - self.rate = flowchem_ureg.parse_expression("10 mL/min") - self._base_state = dict(rate="10 mL") - - async def _update(self): - return True - - with pytest.raises(ValueError): - Test()._validate(dry_run=True) - - -def 
test_passing_class(): - class Test(ActiveComponent): - def __init__(self, serial_port=None): - super().__init__(name=None) - self.active = False - self.serial_port = serial_port - self._base_state = dict(active=False) - - async def _update(self): - pass - - # should pass both as a dry run and as a real run (since update doesn't do anything) - Test()._validate(dry_run=True) - Test()._validate(dry_run=False) - - -def test_base_state_type(): - # not right base_state value type - class Test(ActiveComponent): - def __init__(self, serial_port=None): - super().__init__(name=None) - self.active = False - self.serial_port = serial_port - self._base_state = dict(active="10 mL") - - async def _update(self): - return True - - with pytest.raises(ValueError): - Test()._validate(dry_run=True) - - # not right base_state value type - class Test(ActiveComponent): - def __init__(self, serial_port=None): - super().__init__(name=None) - self.active = False - self.serial_port = serial_port - self._base_state = "not a dict" - - async def _update(self): - return True - - with pytest.raises(ValueError): - Test()._validate(dry_run=True) - - -def test_validate_sensor_without_read(): - class Test(Sensor): - def __init__(self, serial_port=None): - super().__init__(name=None) - self.serial_port = serial_port - - with pytest.raises(NotImplementedError): - Test()._validate(dry_run=False) - Test()._validate(dry_run=True) # should pass during a dry run - - -def test_update_must_return_none(): - class Test(ActiveComponent): - def __init__(self): - super().__init__(name=None) - self.active = False - self._base_state = dict(active=False) - - async def _update(self): - return False - - Test()._validate(dry_run=True) - with pytest.raises(ValueError): - Test()._validate(dry_run=False) - - -def test_default_update(): - class Test(ActiveComponent): - def __init__(self): - super().__init__(name=None) - self.active = False - self._base_state = dict(active=False) - - Test()._validate(dry_run=True) - with 
pytest.raises(NotImplementedError): - Test()._validate(dry_run=False) - - -def test_sync_update(): - class Test(ActiveComponent): - def __init__(self): - super().__init__(name=None) - self.active = False - self._base_state = dict(active=False) - - def _update(self): - pass - - Test()._validate(dry_run=True) - with pytest.raises(TypeError): - Test()._validate(dry_run=False) - - -def test_sync_read(): - class Test(Sensor): - def __init__(self, serial_port=None): - super().__init__(name=None) - self.serial_port = serial_port - - def _read(self): - return 1 - - async def _update(self): - pass - - Test()._validate(dry_run=True) - with pytest.raises(TypeError): - Test()._validate(dry_run=False) - - -def test_validate_sensor_with_failing_read(): - class Test(Sensor): - def __init__(self, serial_port=None): - super().__init__(name=None) - self.serial_port = serial_port - - async def _read(self): - raise RuntimeError("This component is broken!") - - async def _update(self): - pass - - Test()._validate(dry_run=True) - with pytest.raises(RuntimeError): - Test()._validate(dry_run=False)