Update to Pydantic2 and ophyd_async 0.5.2
DiamondJoseph committed Sep 4, 2024
1 parent 387ae51 commit 0047e4c
Showing 12 changed files with 102 additions and 81 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -15,7 +15,7 @@ description = "Ophyd devices and other utils that could be used across DLS beaml
dependencies = [
"click",
"ophyd",
"ophyd-async>=0.5.1",
"ophyd-async>=0.5.2",
"bluesky",
"pyepics",
"dataclasses-json",
8 changes: 5 additions & 3 deletions src/dodal/common/udc_directory_provider.py
@@ -20,8 +20,10 @@ class PandASubpathProvider(UpdatingPathProvider):

resource_dir = Path("panda")

def __init__(self, directory: Path | None = None, suffix: str = ""):
self._output_directory: Path | None = directory
def __init__(self, root_directory: Path | None = None, suffix: str = ""):
self._output_directory: Path | None = (
root_directory / self.resource_dir if root_directory else None
)
self._filename_provider = PandAFilenameProvider(suffix=suffix)
if self._output_directory is None:
LOGGER.debug(
@@ -44,7 +46,7 @@ async def update(self, *, directory: Path, suffix: str = "", **kwargs):
self._filename_provider.suffix = suffix

def __call__(self, device_name: str | None = None) -> PathInfo:
assert self._output_directory
assert self._output_directory, "Directory unknown for PandA to write into, update() needs to be called at least once"
return PathInfo(
directory_path=self._output_directory,
filename=self._filename_provider(device_name),
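
As a usage note, a minimal sketch of the constructor contract introduced above, assuming an arbitrary root path; PandASubpathProvider and its module come from this diff, while the example directory is made up:

from pathlib import Path

from dodal.common.udc_directory_provider import PandASubpathProvider

# With root_directory given up front, __call__ resolves immediately to
# <root>/panda; otherwise update() must be awaited before the provider is used.
provider = PandASubpathProvider(root_directory=Path("/data/visit"))
info = provider("panda")
assert info.directory_path == Path("/data/visit") / "panda"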
29 changes: 11 additions & 18 deletions src/dodal/devices/fast_grid_scan.py
@@ -20,7 +20,7 @@
epics_signal_rw_rbv,
epics_signal_x,
)
from pydantic import ConfigDict, Field, ValidationInfo, field_validator
from pydantic import field_validator
from pydantic.dataclasses import dataclass

from dodal.log import LOGGER
@@ -69,10 +69,6 @@ class GridScanParamsCommon(AbstractExperimentWithBeamParams):
y2_start: float = 0.1
z1_start: float = 0.1
z2_start: float = 0.1
x_axis: GridAxis = Field(default=GridAxis(0, 0, 0), exclude=True)
y_axis: GridAxis = Field(default=GridAxis(0, 0, 0), exclude=True)
z_axis: GridAxis = Field(default=GridAxis(0, 0, 0), exclude=True)
model_config = ConfigDict(arbitrary_types_allowed=True)

# Whether to set the stub offsets after centering
set_stub_offsets: bool = False
@@ -92,23 +88,20 @@ def get_param_positions(self) -> dict:
"z2_start": self.z2_start,
}

@field_validator("x_axis")
@classmethod
def _get_x_axis(cls, _: GridAxis, v: ValidationInfo) -> GridAxis:
return GridAxis(v.data["x_start"], v.data["x_step_size"], v.data["x_steps"])
@property
def x_axis(self) -> GridAxis:
return GridAxis(self.x_start, self.x_step_size, self.x_steps)

@field_validator("y_axis")
@classmethod
def _get_y_axis(cls, _: GridAxis, v: ValidationInfo) -> GridAxis:
return GridAxis(v.data["y1_start"], v.data["y_step_size"], v.data["y_steps"])
@property
def y_axis(self) -> GridAxis:
return GridAxis(self.y1_start, self.y_step_size, self.y_steps)

@field_validator("z_axis")
@classmethod
def _get_z_axis(cls, _: GridAxis, v: ValidationInfo) -> GridAxis:
return GridAxis(v.data["z2_start"], v.data["z_step_size"], v.data["z_steps"])
@property
def z_axis(self) -> GridAxis:
return GridAxis(self.z2_start, self.z_step_size, self.z_steps)

def get_num_images(self):
return self.x_steps * self.y_steps + self.x_steps * self.z_steps
return self.x_steps * (self.y_steps + self.z_steps)

@property
def is_3d_grid_scan(self):
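
The hunk above swaps excluded pydantic fields plus field_validators for plain read-only properties. A minimal sketch of that pattern, assuming a standalone GridAxis dataclass (its field names here are illustrative, not taken from the diff):

from pydantic.dataclasses import dataclass


@dataclass
class GridAxis:
    start: float
    step_size: float
    full_steps: int


@dataclass
class GridParamsSketch:
    x_start: float = 0.1
    x_step_size: float = 0.1
    x_steps: int = 1

    # Derived values are computed on access, so no excluded Field defaults,
    # arbitrary_types_allowed config or validators are needed under Pydantic 2.
    @property
    def x_axis(self) -> GridAxis:
        return GridAxis(self.x_start, self.x_step_size, self.x_steps)


assert GridParamsSketch().x_axis == GridAxis(0.1, 0.1, 1)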
16 changes: 13 additions & 3 deletions src/dodal/devices/i24/pmac.py
@@ -85,7 +85,12 @@ def __init__(
super().__init__(backend, timeout, name)

@AsyncStatus.wrap
async def set(self, value: LaserSettings, wait=True, timeout=CalculateTimeout):
async def set(
self,
value: LaserSettings,
wait=True,
timeout=CalculateTimeout,
):
await self.signal.set(value.value, wait, timeout)


@@ -103,7 +108,12 @@ def __init__(
super().__init__(backend, timeout, name)

@AsyncStatus.wrap
async def set(self, value: EncReset, wait=True, timeout=CalculateTimeout):
async def set(
self,
value: EncReset,
wait=True,
timeout=CalculateTimeout,
):
await self.signal.set(value.value, wait, timeout)


@@ -127,7 +137,7 @@ def __init__(
super().__init__(backend, timeout, name)

@AsyncStatus.wrap
async def set(self, value: int, wait: bool = True, timeout: float | None = None):
async def set(self, value: int, wait=True, timeout=None):
prog_str = f"&2b{value}r"
assert isinstance(timeout, SupportsFloat) or (
timeout is None
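
The PMAC signal devices above all follow the same ophyd_async pattern: an async set() body wrapped into a status-returning method. A self-contained sketch of that pattern on a soft signal (DemoSetter and its signal are illustrative, not part of this diff):

from ophyd_async.core import AsyncStatus, Device, soft_signal_rw


class DemoSetter(Device):
    def __init__(self, name: str = "") -> None:
        # A software-only signal stands in for the EPICS-backed pmac_string.
        self.signal = soft_signal_rw(str)
        super().__init__(name=name)

    @AsyncStatus.wrap
    async def set(self, value: str, wait=True, timeout=None):
        # AsyncStatus.wrap turns this coroutine into a set() that returns an
        # AsyncStatus, as bluesky's Movable protocol expects.
        await self.signal.set(value, wait, timeout)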
11 changes: 7 additions & 4 deletions src/dodal/devices/tetramm.py
@@ -4,11 +4,11 @@
from bluesky.protocols import Hints
from ophyd_async.core import (
AsyncStatus,
DatasetDescriber,
DetectorControl,
DetectorTrigger,
Device,
PathProvider,
ShapeProvider,
StandardDetector,
set_and_wait_for_value,
soft_signal_r_and_setter,
@@ -205,13 +205,16 @@ def _set_minimum_exposure(self, exposure: float):
)


class TetrammShapeProvider(ShapeProvider):
class TetrammDatasetDescriber(DatasetDescriber):
max_channels = 11

def __init__(self, controller: TetrammController) -> None:
self.controller = controller

async def __call__(self) -> tuple[int, int]:
async def np_datatype(self) -> str:
return "<f8" # IEEE 754 double precision floating point

async def shape(self) -> tuple[int, int]:
return (self.max_channels, self.controller.readings_per_frame)


@@ -244,7 +247,7 @@ def __init__(
self.hdf,
path_provider,
lambda: self.name,
TetrammShapeProvider(controller),
TetrammDatasetDescriber(controller),
**scalar_sigs,
),
config_signals,
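
For context, a hedged sketch of the ophyd_async 0.5.2 DatasetDescriber interface that replaces ShapeProvider above; only the two async methods and the "<f8" dtype string are taken from this diff, the fixed-shape class itself is illustrative:

from ophyd_async.core import DatasetDescriber


class FixedDatasetDescriber(DatasetDescriber):
    # Describes a fixed block of 11 channels x 400 readings stored as
    # IEEE 754 double precision floats.
    async def np_datatype(self) -> str:
        return "<f8"

    async def shape(self) -> tuple[int, int]:
        return (11, 400)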
3 changes: 2 additions & 1 deletion src/dodal/plans/data_session_metadata.py
@@ -33,7 +33,8 @@ def attach_data_session_metadata_wrapper(
if provider is None:
provider = beamline_utils.get_path_provider()

yield from bps.wait_for([provider.update])
data_session = (yield from bps.wait_for([provider.data_session]))[0].result()
ress = yield from bps.wait_for([provider.data_session])
data_session = ress[0].result()
# https://github.com/DiamondLightSource/dodal/issues/452
# As part of 452, write each dataCollection into their own folder, then can use resource_dir directly
yield from bpp.inject_md_wrapper(plan, md={DATA_SESSION: data_session})
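
The rewrite above splits the awaited lookup into two statements; the underlying bps.wait_for pattern is unchanged. A small sketch of that pattern, with an illustrative coroutine function standing in for provider.data_session:

import bluesky.plan_stubs as bps


async def lookup_session() -> str:
    # Stand-in for provider.data_session; any awaitable-returning callable works.
    return "cm12345-1"


def read_session_plan():
    # wait_for hands back completed tasks; each exposes .result(), as used above.
    tasks = yield from bps.wait_for([lookup_session])
    return tasks[0].result()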
5 changes: 3 additions & 2 deletions tests/common/test_coordination.py
@@ -1,4 +1,3 @@
import uuid
from inspect import Parameter, signature

import pytest
@@ -7,8 +6,10 @@
from dodal.common.coordination import group_uuid, inject
from dodal.common.types import MsgGenerator

static_uuid = "51aef931-33b4-4b33-b7ad-a8287f541202"

@pytest.mark.parametrize("group", ["foo", "bar", "baz", str(uuid.uuid4())])

@pytest.mark.parametrize("group", ["foo", "bar", "baz", static_uuid])
def test_group_uid(group: str):
gid = group_uuid(group)
assert gid.startswith(f"{group}-")
11 changes: 1 addition & 10 deletions tests/common/test_udc_directory_provider.py
@@ -25,7 +25,7 @@ def test_udc_path_provider_get_and_set(root, expected):
def test_udc_path_provider_excepts_before_update():
provider = PandASubpathProvider()
with pytest.raises(
ValueError,
AssertionError,
match=re.escape(
"Directory unknown for PandA to write into, update() needs to be called at least once"
),
@@ -63,12 +63,3 @@ async def test_udc_path_provider_with_suffix(tmp_path):
directory_info = provider()
assert directory_info.directory_path == root_path / "panda"
assert directory_info.filename.endswith("_123")


async def test_udc_path_provider_creates_subdirectory_if_not_exists(tmp_path):
root = tmp_path
subdir = root / Path("panda")
assert not subdir.exists()
provider = PandASubpathProvider(Path("initial"))
await provider.update(directory=root)
assert subdir.exists()
30 changes: 30 additions & 0 deletions tests/conftest.py
@@ -12,9 +12,18 @@
import pytest
from bluesky.run_engine import RunEngine
from ophyd.status import Status
from ophyd_async.core import (
PathInfo,
PathProvider,
)

from dodal.beamlines import i03
from dodal.common.beamlines import beamline_utils
from dodal.common.visit import (
DirectoryServiceClientBase,
LocalDirectoryServiceClient,
StaticVisitPathProvider,
)
from dodal.log import LOGGER, GELFTCPHandler, set_up_all_logging_handlers
from dodal.utils import make_all_devices

@@ -101,6 +110,27 @@ def vfm_mirror_voltages(RE: RunEngine):
environ["EPICS_CA_REPEATER_PORT"] = s03_epics_repeater_port
print(f"[EPICS_CA_REPEATER_PORT] = {s03_epics_repeater_port}")

PATH_INFO_FOR_TESTING: PathInfo = PathInfo(
directory_path=Path("/does/not/exist"),
filename="on_this_filesystem",
)


@pytest.fixture
def dummy_visit_client() -> DirectoryServiceClientBase:
return LocalDirectoryServiceClient()


@pytest.fixture
async def static_path_provider(
tmp_path: Path, dummy_visit_client: DirectoryServiceClientBase
) -> PathProvider:
svpp = StaticVisitPathProvider(
beamline="ixx", root=tmp_path, client=dummy_visit_client
)
await svpp.update()
return svpp


@pytest.fixture
async def RE():
30 changes: 0 additions & 30 deletions tests/devices/unit_tests/conftest.py
@@ -1,40 +1,10 @@
from pathlib import Path

import pytest
from bluesky.run_engine import RunEngine
from ophyd_async.core import (
PathInfo,
PathProvider,
)

from dodal.beamlines import i03
from dodal.common.beamlines.beamline_utils import clear_devices
from dodal.common.visit import (
DirectoryServiceClientBase,
LocalDirectoryServiceClient,
StaticVisitPathProvider,
)
from dodal.devices.util.test_utils import patch_motor

PATH_INFO_FOR_TESTING: PathInfo = PathInfo(
directory_path=Path("/does/not/exist"),
filename="on_this_filesystem",
)


@pytest.fixture
def dummy_visit_client() -> DirectoryServiceClientBase:
return LocalDirectoryServiceClient()


@pytest.fixture
def static_path_provider(
tmp_path: Path, dummy_visit_client: DirectoryServiceClientBase
) -> PathProvider:
return StaticVisitPathProvider(
beamline="ixx", root=tmp_path, client=dummy_visit_client
)


@pytest.fixture
def smargon(RE: RunEngine):
25 changes: 21 additions & 4 deletions tests/devices/unit_tests/test_tetramm.py
@@ -52,6 +52,17 @@ async def tetramm(static_path_provider: PathProvider) -> TetrammDetector:
return tetramm


@pytest.fixture
def supported_trigger_info() -> TriggerInfo:
return TriggerInfo(
number=1,
trigger=DetectorTrigger.constant_gate,
deadtime=1.0,
livetime=0.02,
frame_timeout=None,
)


async def test_max_frame_rate_is_calculated_correctly(
tetramm_controller: TetrammController,
):
@@ -164,6 +175,8 @@ async def test_sample_rate_scales_with_exposure_time(
exposure: float,
expected_values_per_reading: int,
):
set_mock_value(tetramm.hdf.file_path_exists, True)

await tetramm.prepare(
TriggerInfo(
number=100,
@@ -265,6 +278,7 @@ async def test_prepare_with_too_low_a_deadtime_raises_error(
async def test_prepare_arms_tetramm(
tetramm: TetrammDetector,
):
set_mock_value(tetramm.hdf.file_path_exists, True)
await tetramm.prepare(
TriggerInfo(
number=5,
@@ -277,11 +291,12 @@ async def test_prepare_arms_tetramm(
await assert_armed(tetramm.drv)


async def test_stage_sets_up_writer(
tetramm: TetrammDetector,
async def test_prepare_sets_up_writer(
tetramm: TetrammDetector, supported_trigger_info: TriggerInfo
):
set_mock_value(tetramm.hdf.file_path_exists, True)
await tetramm.stage()
await tetramm.prepare(supported_trigger_info)

assert (await tetramm.hdf.num_capture.get_value()) == 0
assert (await tetramm.hdf.num_extra_dims.get_value()) == 0
@@ -292,17 +307,19 @@ async def test_stage_sets_up_accurate_describe_output(


async def test_stage_sets_up_accurate_describe_output(
tetramm: TetrammDetector,
tetramm: TetrammDetector, supported_trigger_info: TriggerInfo
):
assert await tetramm.describe() == {}

set_mock_value(tetramm.hdf.file_path_exists, True)
await tetramm.stage()
await tetramm.prepare(supported_trigger_info)

assert await tetramm.describe() == {
TEST_TETRAMM_NAME: {
"source": "mock+ca://MY-TETRAMM:HDF5:FullFileName_RBV",
"shape": (11, 1000),
"shape": (11, 400),
"dtype_numpy": "<f8",
"dtype": "array",
"external": "STREAM:",
}
