From 84c9bf210729c6fcab398c85cc5baefb8cb25ad2 Mon Sep 17 00:00:00 2001 From: Paul Coe Date: Wed, 6 Aug 2025 14:42:45 +0100 Subject: [PATCH 01/23] dodal#1384 dodal#1007 access controlled attenuation motor demand * Creates AttenuatorPositionDemand in i19 access controlled dodal such that either EH can request a set of motor position demands using a dict to store axis, position pairings e.g. "x" : 12.7 for a continuous motor motion along the x-axis e.g. "w" : 3 for a discrete indexed filter wheel This APD provides a restful format of the dict which is POSTed to the i19 optics that will decide whether or not it is appropriate to honour the demand and either passes that on to the real motors or ignores ( on the basis of which active EH has control of i19 ) --- src/dodal/beamlines/i19_1.py | 6 +- src/dodal/beamlines/i19_2.py | 4 +- src/dodal/beamlines/i19_optics.py | 5 +- .../devices/i19/access_controlled/__init__.py | 0 .../access_controlled/attenuator_motor_set.py | 104 +++++++++++++ .../{ => access_controlled}/hutch_access.py | 2 +- .../optics_blueapi_device.py} | 10 +- .../i19/{ => access_controlled}/shutter.py | 17 ++- .../devices/i19/test_attenuator_motor_set.py | 137 ++++++++++++++++++ tests/devices/i19/test_shutter.py | 55 +++++-- 10 files changed, 307 insertions(+), 33 deletions(-) create mode 100644 src/dodal/devices/i19/access_controlled/__init__.py create mode 100644 src/dodal/devices/i19/access_controlled/attenuator_motor_set.py rename src/dodal/devices/i19/{ => access_controlled}/hutch_access.py (80%) rename src/dodal/devices/i19/{blueapi_device.py => access_controlled/optics_blueapi_device.py} (91%) rename src/dodal/devices/i19/{ => access_controlled}/shutter.py (82%) create mode 100644 tests/devices/i19/test_attenuator_motor_set.py diff --git a/src/dodal/beamlines/i19_1.py b/src/dodal/beamlines/i19_1.py index f8d13d9b335..abaf72c3172 100644 --- a/src/dodal/beamlines/i19_1.py +++ b/src/dodal/beamlines/i19_1.py @@ -4,12 +4,12 @@ from 
dodal.common.beamlines.beamline_utils import ( set_beamline as set_utils_beamline, ) -from dodal.devices.i19.beamstop import BeamStop -from dodal.devices.i19.blueapi_device import HutchState -from dodal.devices.i19.shutter import ( +from dodal.devices.i19.access_controlled.optics_blueapi_device import HutchState +from dodal.devices.i19.access_controlled.shutter import ( AccessControlledShutter, HutchState, ) +from dodal.devices.i19.beamstop import BeamStop from dodal.devices.oav.oav_detector import OAVBeamCentreFile from dodal.devices.oav.oav_parameters import OAVConfigBeamCentre from dodal.devices.synchrotron import Synchrotron diff --git a/src/dodal/beamlines/i19_2.py b/src/dodal/beamlines/i19_2.py index d7f1ea4be3e..f7a56b7876d 100644 --- a/src/dodal/beamlines/i19_2.py +++ b/src/dodal/beamlines/i19_2.py @@ -4,11 +4,11 @@ from dodal.common.beamlines.beamline_utils import ( set_beamline as set_utils_beamline, ) +from dodal.devices.i19.access_controlled.optics_blueapi_device import HutchState +from dodal.devices.i19.access_controlled.shutter import AccessControlledShutter from dodal.devices.i19.backlight import BacklightPosition from dodal.devices.i19.beamstop import BeamStop -from dodal.devices.i19.blueapi_device import HutchState from dodal.devices.i19.diffractometer import FourCircleDiffractometer -from dodal.devices.i19.shutter import AccessControlledShutter from dodal.devices.synchrotron import Synchrotron from dodal.devices.zebra.zebra import Zebra from dodal.devices.zebra.zebra_constants_mapping import ( diff --git a/src/dodal/beamlines/i19_optics.py b/src/dodal/beamlines/i19_optics.py index 9d34aa5773a..fe171575637 100644 --- a/src/dodal/beamlines/i19_optics.py +++ b/src/dodal/beamlines/i19_optics.py @@ -5,7 +5,10 @@ set_beamline as set_utils_beamline, ) from dodal.devices.hutch_shutter import HutchShutter -from dodal.devices.i19.hutch_access import ACCESS_DEVICE_NAME, HutchAccessControl +from dodal.devices.i19.access_controlled.hutch_access import ( + 
ACCESS_DEVICE_NAME, + HutchAccessControl, +) from dodal.log import set_beamline as set_log_beamline from dodal.utils import BeamlinePrefix diff --git a/src/dodal/devices/i19/access_controlled/__init__.py b/src/dodal/devices/i19/access_controlled/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/dodal/devices/i19/access_controlled/attenuator_motor_set.py b/src/dodal/devices/i19/access_controlled/attenuator_motor_set.py new file mode 100644 index 00000000000..12c9e95dfbb --- /dev/null +++ b/src/dodal/devices/i19/access_controlled/attenuator_motor_set.py @@ -0,0 +1,104 @@ +from typing import Any + +from ophyd_async.core import AsyncStatus + +from dodal.devices.i19.access_controlled.hutch_access import ACCESS_DEVICE_NAME +from dodal.devices.i19.access_controlled.optics_blueapi_device import ( + HutchState, + OpticsBlueApiDevice, +) + + +class AttenuatorPositionDemand: + @staticmethod + def _ensure_no_dict_k_is_none(d: dict[str, Any], msg: str) -> None: + if any(k is None for k in d.keys()): + raise ValueError(msg) + + @staticmethod + def _ensure_no_dict_v_is_none(d: dict[str, Any], msg: str) -> None: + if any(v is None for v in d.values()): + raise ValueError(msg) + + @staticmethod + def __validate_dict(d: dict[str, Any], motor_type: str): + k_msg_template = "Missing attenuator motor axis label for {}" + k_msg = str.format(k_msg_template, motor_type) + AttenuatorPositionDemand._ensure_no_dict_k_is_none(d, k_msg) + v_msg_template = "Missing attenuator position demand value for {}" + v_msg = str.format(v_msg_template, motor_type) + AttenuatorPositionDemand._ensure_no_dict_v_is_none(d, v_msg) + + @staticmethod + def _ensure_dicts_share_no_common_k(a: dict[str, float], b: dict[str, int]) -> None: + clashes_detected = filter(lambda k: k in a, b) + if any(clashes_detected): + msg_template: str = "Clashing keys in common between wheel and wedge for attenuation position demand: {}" + reiterated = filter(lambda k: k in a, b) + clashes = 
",".join(reiterated) + msg: str = msg_template.format(clashes) + raise ValueError(msg) + + def __init__( + self, + continuous_motor_position_demands: dict[str, float], + indexed_motor_position_demands: dict[str, int], + ) -> None: + self._validate_input( + continuous_motor_position_demands, indexed_motor_position_demands + ) + combined_demand: dict[str, Any] = {} + combined_demand.update(continuous_motor_position_demands) + combined_demand.update(indexed_motor_position_demands) + self.demands = combined_demand + + def restful_format(self) -> str: + return str(self.demands) + + def _validate_input( + self, + continuous_motor_position_demands: dict[str, float], + indexed_motor_position_demands: dict[str, int], + ): + AttenuatorPositionDemand.__validate_dict( + continuous_motor_position_demands, "continuous motor" + ) + AttenuatorPositionDemand.__validate_dict( + indexed_motor_position_demands, "indexed motor" + ) + AttenuatorPositionDemand._ensure_dicts_share_no_common_k( + continuous_motor_position_demands, indexed_motor_position_demands + ) + + +class AttenuatorMotorSet(OpticsBlueApiDevice): + """ I19-specific proxy device which requests absorber position changes in x-ray attenuator. + + Sends REST call to blueapi controlling optics on the I19 cluster. + The hutch in use is compared against the hutch which sent the REST call. + Only the hutch in use will be permitted to execute a plan (requesting motor moves). + As the two hutches are located in series, checking the hutch in use is necessary to \ + avoid accidentally operating optics devices from one hutch while the other has beam time. + + The name of the hutch that wants to operate the optics device is passed to the \ + access controlled device upon instantiation of the latter. + + For details see the architecture described in \ + https://github.com/DiamondLightSource/i19-bluesky/issues/30. 
+ """ + + def __init__(self, prefix: str, hutch: HutchState, name: str = "") -> None: + super().__init__(hutch, name) + + @AsyncStatus.wrap + async def set(self, value: AttenuatorPositionDemand): + invoking_hutch = self._get_invoking_hutch().value + request_params = { + "name": "operate_attenuator_plan", + "params": { + "experiement_hutch": invoking_hutch, + "access_device": ACCESS_DEVICE_NAME, + "attenuator_demands": value.restful_format(), + }, + } + await super().set(request_params) diff --git a/src/dodal/devices/i19/hutch_access.py b/src/dodal/devices/i19/access_controlled/hutch_access.py similarity index 80% rename from src/dodal/devices/i19/hutch_access.py rename to src/dodal/devices/i19/access_controlled/hutch_access.py index c9ceb803732..6291f762d7d 100644 --- a/src/dodal/devices/i19/hutch_access.py +++ b/src/dodal/devices/i19/access_controlled/hutch_access.py @@ -1,7 +1,7 @@ from ophyd_async.core import StandardReadable from ophyd_async.epics.core import epics_signal_r -ACCESS_DEVICE_NAME = "access_control" # Device name in i19-blueapi +ACCESS_DEVICE_NAME: str = "access_control" # Device name in i19-blueapi class HutchAccessControl(StandardReadable): diff --git a/src/dodal/devices/i19/blueapi_device.py b/src/dodal/devices/i19/access_controlled/optics_blueapi_device.py similarity index 91% rename from src/dodal/devices/i19/blueapi_device.py rename to src/dodal/devices/i19/access_controlled/optics_blueapi_device.py index 7c41b844778..6a966760171 100644 --- a/src/dodal/devices/i19/blueapi_device.py +++ b/src/dodal/devices/i19/access_controlled/optics_blueapi_device.py @@ -20,8 +20,8 @@ class HutchState(str, Enum): EH2 = "EH2" -class OpticsBlueAPIDevice(StandardReadable, Movable[D]): - """General device that a REST call to the blueapi instance controlling the optics \ +class OpticsBlueApiDevice(StandardReadable, Movable[D]): + """General device that sends a REST call to the blueapi instance controlling the optics \ hutch running on the I19 cluster, which will 
evaluate the current hutch in use vs \ the hutch sending the request and decide if the plan will be run or not. @@ -29,11 +29,15 @@ class OpticsBlueAPIDevice(StandardReadable, Movable[D]): https://github.com/DiamondLightSource/i19-bluesky/issues/30. """ - def __init__(self, name: str = "") -> None: + def __init__(self, hutch: HutchState, name: str = "") -> None: self.url = OPTICS_BLUEAPI_URL self.headers = HEADERS + self.invoking_hutch = hutch super().__init__(name) + def _get_invoking_hutch(self) -> HutchState: + return self.invoking_hutch + @AsyncStatus.wrap async def set(self, value: D): """ On set send a POST request to the optics blueapi with the name and \ diff --git a/src/dodal/devices/i19/shutter.py b/src/dodal/devices/i19/access_controlled/shutter.py similarity index 82% rename from src/dodal/devices/i19/shutter.py rename to src/dodal/devices/i19/access_controlled/shutter.py index 283b1fa5a9c..0d97e4f4467 100644 --- a/src/dodal/devices/i19/shutter.py +++ b/src/dodal/devices/i19/access_controlled/shutter.py @@ -2,11 +2,14 @@ from ophyd_async.epics.core import epics_signal_r from dodal.devices.hutch_shutter import ShutterDemand, ShutterState -from dodal.devices.i19.blueapi_device import HutchState, OpticsBlueAPIDevice -from dodal.devices.i19.hutch_access import ACCESS_DEVICE_NAME +from dodal.devices.i19.access_controlled.hutch_access import ACCESS_DEVICE_NAME +from dodal.devices.i19.access_controlled.optics_blueapi_device import ( + HutchState, + OpticsBlueApiDevice, +) -class AccessControlledShutter(OpticsBlueAPIDevice): +class AccessControlledShutter(OpticsBlueApiDevice): """ I19-specific device to operate the hutch shutter. 
This device will send a REST call to the blueapi instance controlling the optics \ @@ -31,18 +34,18 @@ def __init__( ) -> None: # For instrument session addition to request parameters # see https://github.com/DiamondLightSource/blueapi/issues/1187 + super().__init__(hutch, name) + self.instrument_session = instrument_session with self.add_children_as_readables(StandardReadableFormat.HINTED_SIGNAL): self.shutter_status = epics_signal_r(ShutterState, f"{prefix}STA") - self.hutch_request = hutch - self.instrument_session = instrument_session - super().__init__(name) @AsyncStatus.wrap async def set(self, value: ShutterDemand): + invoking_hutch = self._get_invoking_hutch().value REQUEST_PARAMS = { "name": "operate_shutter_plan", "params": { - "experiment_hutch": self.hutch_request.value, + "experiment_hutch": invoking_hutch, "access_device": ACCESS_DEVICE_NAME, "shutter_demand": value.value, }, diff --git a/tests/devices/i19/test_attenuator_motor_set.py b/tests/devices/i19/test_attenuator_motor_set.py new file mode 100644 index 00000000000..3ca4c8a021c --- /dev/null +++ b/tests/devices/i19/test_attenuator_motor_set.py @@ -0,0 +1,137 @@ +from dodal.devices.i19.access_controlled.attenuator_motor_set import ( + AttenuatorPositionDemand, +) + + +def test_that_attenuator_position_demand_can_be_created_with_only_one_wedge(): + wedge_position_demands = {"x": 0.5} + wheel_position_demands = {} + position_demand = AttenuatorPositionDemand( + wedge_position_demands, wheel_position_demands + ) + assert position_demand is not None + + +def test_that_attenuator_position_demand_with_only_one_wedge_provides_expected_rest_format(): + wedge_position_demands = {"y": 14.9} + wheel_position_demands = {} + position_demand = AttenuatorPositionDemand( + wedge_position_demands, wheel_position_demands + ) + restful_encoding = position_demand.restful_format() + assert restful_encoding == "{'y': 14.9}" + + +def test_that_attenuator_position_demand_can_be_created_with_only_one_wheel(): + 
wedge_position_demands = {} + wheel_position_demands = {"w": 2} + position_demand = AttenuatorPositionDemand( + wedge_position_demands, wheel_position_demands + ) + assert position_demand is not None + + +def test_that_attenuator_position_demand_with_only_one_wheel_provides_expected_rest_format(): + wedge_position_demands = {} + wheel_position_demands = {"w": 6} + position_demand = AttenuatorPositionDemand( + wedge_position_demands, wheel_position_demands + ) + rest = position_demand.restful_format() + assert rest == "{'w': 6}" + + +def test_that_empty_attenuator_position_demand_can_be_created(): + wedge_position_demands = {} + wheel_position_demands = {} + position_demand = AttenuatorPositionDemand( + wedge_position_demands, wheel_position_demands + ) + assert position_demand is not None + + +def test_that_empty_attenuator_position_demand_provides_empty_rest_format(): + wedge_position_demands = {} + wheel_position_demands = {} + position_demand = AttenuatorPositionDemand( + wedge_position_demands, wheel_position_demands + ) + rest = position_demand.restful_format() + assert rest == "{}" + + +def test_that_attenuator_position_demand_triplet_can_be_created(): + standard_wedge_position_demand = {"x": 25.9, "y": 5.0} + standard_wheel_position_demand = {"w": 4} + position_demand = AttenuatorPositionDemand( + standard_wedge_position_demand, standard_wheel_position_demand + ) + assert position_demand is not None + + +def test_that_attenuator_position_demand_triplet_provides_expected_rest_format(): + wedge_position_demands = {"x": 0.1, "y": 90.1} + wheel_position_demands = {"w": 6} + position_demand = AttenuatorPositionDemand( + wedge_position_demands, wheel_position_demands + ) + rest = position_demand.restful_format() + assert rest == "{'x': 0.1, 'y': 90.1, 'w': 6}" + + +# Happy path tests above + +# Unhappy path tests below + + +def test_that_attenuator_position_raises_error_when_discrete_and_continuous_demands_overload_axis_label(): + wedge_position_demands = {"x": 
0.1, "v": 90.1} + wheel_position_demands = {"w": 6, "v": 7} + try: + AttenuatorPositionDemand(wedge_position_demands, wheel_position_demands) + raise NotImplementedError("Did not observe expected error") + except ValueError as expected: + assert ( + str(expected) + == "Clashing keys in common between wheel and wedge for attenuation position demand: v" + ) + + +def test_that_attenuator_position_creation_raises_error_when_continuous_position_demand_is_none(): + wedge_position_demands = {"x": None, "y": 90.1} + wheel_position_demands = {} + try: + AttenuatorPositionDemand(wedge_position_demands, wheel_position_demands) + raise NotImplementedError("Did not observe expected error") + except ValueError as _expected: + pass + + +def test_that_attenuator_position_creation_raises_error_when_indexed_position_demand_is_none(): + wedge_position_demands = {"x": 14.88, "y": 90.1} + wheel_position_demands = {"w": None, "v": 3} + try: + AttenuatorPositionDemand(wedge_position_demands, wheel_position_demands) + raise NotImplementedError("Did not observe expected error") + except ValueError as _expected: + pass + + +def test_that_attenuator_position_creation_raises_error_when_continuous_position_key_is_none(): + wedge_position_demands = {"x": 32.65, None: 80.1} + wheel_position_demands = {"w": 8} + try: + AttenuatorPositionDemand(wedge_position_demands, wheel_position_demands) + raise NotImplementedError("Did not observe expected error") + except ValueError as _expected: + pass + + +def test_that_attenuator_position_creation_raises_error_when_indexed_position_key_is_none(): + wedge_position_demands = {"x": 24.08, "y": 71.4} + wheel_position_demands = {"w": 1, None: 2} + try: + AttenuatorPositionDemand(wedge_position_demands, wheel_position_demands) + raise NotImplementedError("Did not observe expected error") + except ValueError as _expected: + pass diff --git a/tests/devices/i19/test_shutter.py b/tests/devices/i19/test_shutter.py index 7bd414f618c..c14ff6716b1 100644 --- 
a/tests/devices/i19/test_shutter.py +++ b/tests/devices/i19/test_shutter.py @@ -9,8 +9,11 @@ ShutterDemand, ShutterState, ) -from dodal.devices.i19.blueapi_device import HEADERS, HutchState -from dodal.devices.i19.shutter import ( +from dodal.devices.i19.access_controlled.optics_blueapi_device import ( + HEADERS, + HutchState, +) +from dodal.devices.i19.access_controlled.shutter import ( AccessControlledShutter, ) @@ -66,7 +69,9 @@ async def test_set_raises_error_if_post_not_successful( eh2_shutter: AccessControlledShutter, ): with pytest.raises(ClientConnectionError): - with patch("dodal.devices.i19.blueapi_device.ClientSession.post") as mock_post: + with patch( + "dodal.devices.i19.access_controlled.optics_blueapi_device.ClientSession.post" + ) as mock_post: mock_post.return_value.__aenter__.return_value = ( mock_response := AsyncMock() ) @@ -76,13 +81,15 @@ async def test_set_raises_error_if_post_not_successful( await eh2_shutter.set(ShutterDemand.OPEN) -@patch("dodal.devices.i19.blueapi_device.LOGGER") +@patch("dodal.devices.i19.access_controlled.optics_blueapi_device.LOGGER") async def test_no_task_id_returned_from_post( mock_logger: MagicMock, eh1_shutter: AccessControlledShutter ): with pytest.raises(KeyError): with ( - patch("dodal.devices.i19.blueapi_device.ClientSession.post") as mock_post, + patch( + "dodal.devices.i19.access_controlled.optics_blueapi_device.ClientSession.post" + ) as mock_post, ): mock_post.return_value.__aenter__.return_value = ( mock_response := AsyncMock() @@ -110,9 +117,15 @@ async def test_set_corrently_makes_rest_calls( } test_request_json = json.dumps(test_request) with ( - patch("dodal.devices.i19.blueapi_device.ClientSession.post") as mock_post, - patch("dodal.devices.i19.blueapi_device.ClientSession.put") as mock_put, - patch("dodal.devices.i19.blueapi_device.ClientSession.get") as mock_get, + patch( + "dodal.devices.i19.access_controlled.optics_blueapi_device.ClientSession.post" + ) as mock_post, + patch( + 
"dodal.devices.i19.access_controlled.optics_blueapi_device.ClientSession.put" + ) as mock_put, + patch( + "dodal.devices.i19.access_controlled.optics_blueapi_device.ClientSession.get" + ) as mock_get, ): mock_post.return_value.__aenter__.return_value = (mock_response := AsyncMock()) mock_response.ok = True @@ -134,13 +147,17 @@ async def test_set_corrently_makes_rest_calls( ) -@patch("dodal.devices.i19.blueapi_device.LOGGER") +@patch("dodal.devices.i19.access_controlled.optics_blueapi_device.LOGGER") async def test_if_put_fails_log_error_and_return( mock_logger: MagicMock, eh1_shutter: AccessControlledShutter ): with ( - patch("dodal.devices.i19.blueapi_device.ClientSession.post") as mock_post, - patch("dodal.devices.i19.blueapi_device.ClientSession.put") as mock_put, + patch( + "dodal.devices.i19.access_controlled.optics_blueapi_device.ClientSession.post" + ) as mock_post, + patch( + "dodal.devices.i19.access_controlled.optics_blueapi_device.ClientSession.put" + ) as mock_put, ): mock_post.return_value.__aenter__.return_value = (mock_response := AsyncMock()) mock_response.ok = True @@ -155,16 +172,22 @@ async def test_if_put_fails_log_error_and_return( mock_logger.error.assert_called_once() -@patch("dodal.devices.i19.blueapi_device.LOGGER") -@patch("dodal.devices.i19.blueapi_device.asyncio.sleep") +@patch("dodal.devices.i19.access_controlled.optics_blueapi_device.LOGGER") +@patch("dodal.devices.i19.access_controlled.optics_blueapi_device.asyncio.sleep") async def test_if_plan_fails_raise_error_with_message( mock_sleep: MagicMock, mock_logger: MagicMock, eh2_shutter: AccessControlledShutter ): with pytest.raises(RuntimeError): with ( - patch("dodal.devices.i19.blueapi_device.ClientSession.post") as mock_post, - patch("dodal.devices.i19.blueapi_device.ClientSession.put") as mock_put, - patch("dodal.devices.i19.blueapi_device.ClientSession.get") as mock_get, + patch( + "dodal.devices.i19.access_controlled.optics_blueapi_device.ClientSession.post" + ) as mock_post, + 
patch( + "dodal.devices.i19.access_controlled.optics_blueapi_device.ClientSession.put" + ) as mock_put, + patch( + "dodal.devices.i19.access_controlled.optics_blueapi_device.ClientSession.get" + ) as mock_get, ): mock_post.return_value.__aenter__.return_value = ( mock_response := AsyncMock() From 52a62cb786755ad925c23a843c892d23d3089032 Mon Sep 17 00:00:00 2001 From: Paul Coe Date: Wed, 6 Aug 2025 18:17:10 +0100 Subject: [PATCH 02/23] dodal#1384 dodal#1007 improves testing of access controlled devices * Fixes typo ( en passant ) in test_shutter corrently -> correctly * Provides separate test scripts for i19 dodal python classes -- AttenuatorPositionDemand -- AttenuatorMotorSet * Adds previously absent tests for AttenuatorMotorSet --- .../devices/i19/access_controlled/__init__.py | 0 .../test_attenuator_motor_set.py | 101 ++++++++++++++++++ .../test_attenuator_position_demand.py} | 0 .../{ => access_controlled}/test_shutter.py | 4 +- 4 files changed, 103 insertions(+), 2 deletions(-) create mode 100644 tests/devices/i19/access_controlled/__init__.py create mode 100644 tests/devices/i19/access_controlled/test_attenuator_motor_set.py rename tests/devices/i19/{test_attenuator_motor_set.py => access_controlled/test_attenuator_position_demand.py} (100%) rename tests/devices/i19/{ => access_controlled}/test_shutter.py (98%) diff --git a/tests/devices/i19/access_controlled/__init__.py b/tests/devices/i19/access_controlled/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/devices/i19/access_controlled/test_attenuator_motor_set.py b/tests/devices/i19/access_controlled/test_attenuator_motor_set.py new file mode 100644 index 00000000000..5d2a8d03846 --- /dev/null +++ b/tests/devices/i19/access_controlled/test_attenuator_motor_set.py @@ -0,0 +1,101 @@ +from unittest.mock import AsyncMock, patch + +import pytest +from aiohttp.client import ClientConnectionError +from bluesky.run_engine import RunEngine + +from 
dodal.devices.i19.access_controlled.attenuator_motor_set import ( + AttenuatorMotorSet, + AttenuatorPositionDemand, +) +from dodal.devices.i19.access_controlled.optics_blueapi_device import HutchState + + +def given_a_position_demand() -> AttenuatorPositionDemand: + wedge_demands = {"x": 54.3, "y": 72.1} + wheel_demands = {"w": 4} + return AttenuatorPositionDemand(wedge_demands, wheel_demands) + + +def given_an_unhappy_restful_response() -> AsyncMock: + unhappy_response = AsyncMock() + unhappy_response.ok = False + unhappy_response.json.return_value = {"task_id": "alas_not"} + return unhappy_response + + +def given_a_blank_device_pv_prefix(): + return "" + + +async def given_set_of_attenuator_motors(hutch: HutchState) -> AttenuatorMotorSet: + prefix = given_a_blank_device_pv_prefix() + motor_set = AttenuatorMotorSet(prefix, hutch, name="fake_motor_set") + await motor_set.connect(mock=True) + + motor_set.url = "http://test-blueapi.url" + return motor_set + + +@pytest.fixture +async def eh1_motor_set(RE: RunEngine) -> AttenuatorMotorSet: + return await given_set_of_attenuator_motors(HutchState.EH1) + + +@pytest.fixture +async def eh2_motor_set(RE: RunEngine) -> AttenuatorMotorSet: + return await given_set_of_attenuator_motors(HutchState.EH2) + + +@pytest.mark.parametrize("invoking_hutch", [HutchState.EH1, HutchState.EH2]) +async def test_that_motor_set_can_be_instantiated(invoking_hutch: HutchState): + motor_set: AttenuatorMotorSet = await given_set_of_attenuator_motors(invoking_hutch) + assert isinstance(motor_set, AttenuatorMotorSet) # could have been None + + +@pytest.mark.parametrize("invoking_hutch", [HutchState.EH1, HutchState.EH2]) +async def test_when_rest_post_unsuccessful_that_error_raised( + invoking_hutch: HutchState, +): + motors: AttenuatorMotorSet = await given_set_of_attenuator_motors(invoking_hutch) + restful_post: str = ( + "dodal.devices.i19.access_controlled.optics_blueapi_device.ClientSession.post" + ) + with pytest.raises(ClientConnectionError): 
+ with patch(restful_post) as unsuccessful_post: + restful_response: AsyncMock = given_an_unhappy_restful_response() + unsuccessful_post.return_value.__aenter__.return_value = restful_response + + postion_demand = given_a_position_demand() + await motors.set(postion_demand) + + +@pytest.mark.parametrize("invoking_hutch", [HutchState.EH1, HutchState.EH2]) +async def test_that_error_is_logged_whenever_position_demand_fails(invoking_hutch): + blue_api_stem: str = "dodal.devices.i19.access_controlled.optics_blueapi_device" + logger: str = f"{blue_api_stem}.LOGGER" + restful_template: str = f"{blue_api_stem}.ClientSession" + restful_post: str = f"{restful_template}.post" + restful_put: str = f"{restful_template}.put" + with ( + patch(logger) as patched_logger, + patch(restful_post) as fake_post, + patch(restful_put) as fake_put, + ): + response_to_post: AsyncMock = AsyncMock() + response_to_post.ok = True + response_to_post.json.return_value = {"task_id": "0123"} + fake_post.return_value.__aenter__.return_value = response_to_post + + response_to_put = AsyncMock() + response_to_put.ok = False + fake_put.return_value.__aenter__.return_value = response_to_put + + motors: AttenuatorMotorSet = await given_set_of_attenuator_motors( + invoking_hutch + ) + demand: AttenuatorPositionDemand = given_a_position_demand() + + patched_logger.error.assert_not_called() + await motors.set(demand) + patched_logger.error.assert_called_once() diff --git a/tests/devices/i19/test_attenuator_motor_set.py b/tests/devices/i19/access_controlled/test_attenuator_position_demand.py similarity index 100% rename from tests/devices/i19/test_attenuator_motor_set.py rename to tests/devices/i19/access_controlled/test_attenuator_position_demand.py diff --git a/tests/devices/i19/test_shutter.py b/tests/devices/i19/access_controlled/test_shutter.py similarity index 98% rename from tests/devices/i19/test_shutter.py rename to tests/devices/i19/access_controlled/test_shutter.py index c14ff6716b1..a2e1dada007 
100644 --- a/tests/devices/i19/test_shutter.py +++ b/tests/devices/i19/access_controlled/test_shutter.py @@ -38,7 +38,7 @@ async def eh2_shutter() -> AccessControlledShutter: @pytest.mark.parametrize("hutch_name", [HutchState.EH1, HutchState.EH2]) -def shutter_can_be_created_without_raising_errors(hutch_name: HutchState): +def test_that_shutter_can_be_created(hutch_name: HutchState): test_shutter = AccessControlledShutter("", hutch_name, "cm12345-1", "test_shutter") assert isinstance(test_shutter, AccessControlledShutter) @@ -103,7 +103,7 @@ async def test_no_task_id_returned_from_post( @pytest.mark.parametrize("shutter_demand", [ShutterDemand.OPEN, ShutterDemand.CLOSE]) -async def test_set_corrently_makes_rest_calls( +async def test_set_correctly_makes_rest_calls( shutter_demand: ShutterDemand, eh2_shutter: AccessControlledShutter ): test_request = { From 54d4aec20650fc0447e2de53bdc5bff84ded79d2 Mon Sep 17 00:00:00 2001 From: Paul Coe Date: Thu, 14 Aug 2025 15:17:19 +0100 Subject: [PATCH 03/23] dodal#1384 dodal#1007 updates required from code review * Corrects typo for experiment_hutch key in dict * Applies pytest.with in place of ad-hoc error traps in unhappy path test * Production code class AttenuatorMotorSet renamed -> AttenuatorMotorClique ( since set is an overloaded term ) * Production code class AttenuatorPositionDemand now generates restful_format in dict form rather than as a string since the json parser can cope and it makes for simpler testing enerates a --- ...otor_set.py => attenuator_motor_clique.py} | 40 +++--- .../test_attenuator_motor_clique.py | 122 ++++++++++++++++++ .../test_attenuator_motor_set.py | 101 --------------- .../test_attenuator_position_demand.py | 49 +++---- 4 files changed, 162 insertions(+), 150 deletions(-) rename src/dodal/devices/i19/access_controlled/{attenuator_motor_set.py => attenuator_motor_clique.py} (75%) create mode 100644 tests/devices/i19/access_controlled/test_attenuator_motor_clique.py delete mode 100644 
tests/devices/i19/access_controlled/test_attenuator_motor_set.py diff --git a/src/dodal/devices/i19/access_controlled/attenuator_motor_set.py b/src/dodal/devices/i19/access_controlled/attenuator_motor_clique.py similarity index 75% rename from src/dodal/devices/i19/access_controlled/attenuator_motor_set.py rename to src/dodal/devices/i19/access_controlled/attenuator_motor_clique.py index 12c9e95dfbb..b7249900ee8 100644 --- a/src/dodal/devices/i19/access_controlled/attenuator_motor_set.py +++ b/src/dodal/devices/i19/access_controlled/attenuator_motor_clique.py @@ -4,7 +4,6 @@ from dodal.devices.i19.access_controlled.hutch_access import ACCESS_DEVICE_NAME from dodal.devices.i19.access_controlled.optics_blueapi_device import ( - HutchState, OpticsBlueApiDevice, ) @@ -21,12 +20,10 @@ def _ensure_no_dict_v_is_none(d: dict[str, Any], msg: str) -> None: raise ValueError(msg) @staticmethod - def __validate_dict(d: dict[str, Any], motor_type: str): - k_msg_template = "Missing attenuator motor axis label for {}" - k_msg = str.format(k_msg_template, motor_type) + def _validate_dict(d: dict[str, Any], motor_type: str): + k_msg = f"Missing attenuator motor axis label for {motor_type}" AttenuatorPositionDemand._ensure_no_dict_k_is_none(d, k_msg) - v_msg_template = "Missing attenuator position demand value for {}" - v_msg = str.format(v_msg_template, motor_type) + v_msg = f"Missing attenuator position demand value for {motor_type}" AttenuatorPositionDemand._ensure_no_dict_v_is_none(d, v_msg) @staticmethod @@ -52,18 +49,18 @@ def __init__( combined_demand.update(indexed_motor_position_demands) self.demands = combined_demand - def restful_format(self) -> str: - return str(self.demands) + def restful_format(self) -> dict[str, Any]: + return self.demands.copy() def _validate_input( self, continuous_motor_position_demands: dict[str, float], indexed_motor_position_demands: dict[str, int], ): - AttenuatorPositionDemand.__validate_dict( + AttenuatorPositionDemand._validate_dict( 
continuous_motor_position_demands, "continuous motor" ) - AttenuatorPositionDemand.__validate_dict( + AttenuatorPositionDemand._validate_dict( indexed_motor_position_demands, "indexed motor" ) AttenuatorPositionDemand._ensure_dicts_share_no_common_k( @@ -71,8 +68,8 @@ def _validate_input( ) -class AttenuatorMotorSet(OpticsBlueApiDevice): - """ I19-specific proxy device which requests absorber position changes in x-ray attenuator. +class AttenuatorMotorClique(OpticsBlueApiDevice): + """ I19-specific proxy device which requests absorber position changes in the x-ray attenuator. Sends REST call to blueapi controlling optics on the I19 cluster. The hutch in use is compared against the hutch which sent the REST call. @@ -83,20 +80,29 @@ class AttenuatorMotorSet(OpticsBlueApiDevice): The name of the hutch that wants to operate the optics device is passed to the \ access controlled device upon instantiation of the latter. + Nomenclature: + Note this class was originally called + -MotorSet + ( but objections were based on "set" being an overloaded word ) + + -Gang might have worked, + ( but a gang of mechanical devices are able to be mechanically coupled + and move together; whereas the motors here are completely mutually independent ) + + -Clique + ( as in a set of friends, is an accurate choice, is unlikely to be overloaded, just sounds pretentious). + For details see the architecture described in \ https://github.com/DiamondLightSource/i19-bluesky/issues/30. 
""" - def __init__(self, prefix: str, hutch: HutchState, name: str = "") -> None: - super().__init__(hutch, name) - @AsyncStatus.wrap async def set(self, value: AttenuatorPositionDemand): - invoking_hutch = self._get_invoking_hutch().value + invoking_hutch = str(self._get_invoking_hutch().value) request_params = { "name": "operate_attenuator_plan", "params": { - "experiement_hutch": invoking_hutch, + "experiment_hutch": invoking_hutch, "access_device": ACCESS_DEVICE_NAME, "attenuator_demands": value.restful_format(), }, diff --git a/tests/devices/i19/access_controlled/test_attenuator_motor_clique.py b/tests/devices/i19/access_controlled/test_attenuator_motor_clique.py new file mode 100644 index 00000000000..3f9e8cda504 --- /dev/null +++ b/tests/devices/i19/access_controlled/test_attenuator_motor_clique.py @@ -0,0 +1,122 @@ +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from aiohttp.client import ClientConnectionError +from bluesky.run_engine import RunEngine + +from dodal.devices.i19.access_controlled.attenuator_motor_clique import ( + AttenuatorMotorClique, + AttenuatorPositionDemand, +) +from dodal.devices.i19.access_controlled.optics_blueapi_device import HutchState + + +def given_a_position_demand() -> AttenuatorPositionDemand: + position_demand = MagicMock() + restful_payload = {"x": 54.3, "y": 72.1, "w": 4} + position_demand.restful_format = MagicMock(return_value=restful_payload) + return position_demand + + +def given_an_unhappy_restful_response() -> AsyncMock: + unhappy_response = AsyncMock() + unhappy_response.ok = False + unhappy_response.json.return_value = {"task_id": "alas_not"} + return unhappy_response + + +async def given_a_clique_of_attenuator_motors( + hutch: HutchState, +) -> AttenuatorMotorClique: + motor_clique = AttenuatorMotorClique(hutch, name="attenuator_motor_clique") + await motor_clique.connect(mock=True) + + motor_clique.url = "http://test-blueapi.url" + return motor_clique + + +@pytest.fixture +async def 
eh1_motor_clique(RE: RunEngine) -> AttenuatorMotorClique: + return await given_a_clique_of_attenuator_motors(HutchState.EH1) + + +@pytest.fixture +async def eh2_motor_clique(RE: RunEngine) -> AttenuatorMotorClique: + return await given_a_clique_of_attenuator_motors(HutchState.EH2) + + +@pytest.mark.parametrize("invoking_hutch", [HutchState.EH1, HutchState.EH2]) +async def test_that_motor_clique_can_be_instantiated(invoking_hutch): + motor_clique: AttenuatorMotorClique = await given_a_clique_of_attenuator_motors( + invoking_hutch + ) + assert isinstance(motor_clique, AttenuatorMotorClique) + + +@pytest.mark.parametrize("invoking_hutch", [HutchState.EH1, HutchState.EH2]) +@patch( + "dodal.devices.i19.access_controlled.optics_blueapi_device.OpticsBlueApiDevice.set" +) +async def test_when_motor_clique_is_set_that_expected_request_params_are_passed( + patched_setter, invoking_hutch +): + motors: AttenuatorMotorClique = await given_a_clique_of_attenuator_motors( + invoking_hutch + ) + position_demand: AttenuatorPositionDemand = given_a_position_demand() + motors.set(position_demand) # when motor position demand is set + expected_hutch: str = invoking_hutch.value + expected_device: str = "access_control" + expected_request_name: str = "operate_motor_clique_plan" + expected_parameters: dict = { + "experiment_hutch": expected_hutch, + "access_device": expected_device, + "attenuator_demands": {"x": 54.3, "y": 72.1, "w": 4}, + } + expected_request: dict = { + "name": expected_request_name, + "params": expected_parameters, + } + assert patched_setter.called_once_with(expected_request) + + +@pytest.mark.parametrize("invoking_hutch", [HutchState.EH1, HutchState.EH2]) +@patch("dodal.devices.i19.access_controlled.optics_blueapi_device.ClientSession.post") +async def test_when_rest_post_unsuccessful_that_error_raised( + unsuccessful_post, invoking_hutch +): + motors: AttenuatorMotorClique = await given_a_clique_of_attenuator_motors( + invoking_hutch + ) + with 
pytest.raises(ClientConnectionError): + restful_response: AsyncMock = given_an_unhappy_restful_response() + unsuccessful_post.return_value.__aenter__.return_value = restful_response + + postion_demand = given_a_position_demand() + await motors.set(postion_demand) + + +@pytest.mark.parametrize("invoking_hutch", [HutchState.EH1, HutchState.EH2]) +@patch("dodal.devices.i19.access_controlled.optics_blueapi_device.LOGGER") +@patch("dodal.devices.i19.access_controlled.optics_blueapi_device.ClientSession.put") +@patch("dodal.devices.i19.access_controlled.optics_blueapi_device.ClientSession.post") +async def test_that_error_is_logged_whenever_position_demand_fails( + restful_post, restful_put, logger, invoking_hutch +): + response_to_post: AsyncMock = AsyncMock() + response_to_post.ok = True + response_to_post.json.return_value = {"task_id": "0123"} + restful_post.return_value.__aenter__.return_value = response_to_post + + response_to_put = AsyncMock() + response_to_put.ok = False + restful_put.return_value.__aenter__.return_value = response_to_put + + motors: AttenuatorMotorClique = await given_a_clique_of_attenuator_motors( + invoking_hutch + ) + demand: AttenuatorPositionDemand = given_a_position_demand() + + logger.error.assert_not_called() + await motors.set(demand) + logger.error.assert_called_once() diff --git a/tests/devices/i19/access_controlled/test_attenuator_motor_set.py b/tests/devices/i19/access_controlled/test_attenuator_motor_set.py deleted file mode 100644 index 5d2a8d03846..00000000000 --- a/tests/devices/i19/access_controlled/test_attenuator_motor_set.py +++ /dev/null @@ -1,101 +0,0 @@ -from unittest.mock import AsyncMock, patch - -import pytest -from aiohttp.client import ClientConnectionError -from bluesky.run_engine import RunEngine - -from dodal.devices.i19.access_controlled.attenuator_motor_set import ( - AttenuatorMotorSet, - AttenuatorPositionDemand, -) -from dodal.devices.i19.access_controlled.optics_blueapi_device import HutchState - - -def 
given_a_position_demand() -> AttenuatorPositionDemand: - wedge_demands = {"x": 54.3, "y": 72.1} - wheel_demands = {"w": 4} - return AttenuatorPositionDemand(wedge_demands, wheel_demands) - - -def given_an_unhappy_restful_response() -> AsyncMock: - unhappy_response = AsyncMock() - unhappy_response.ok = False - unhappy_response.json.return_value = {"task_id": "alas_not"} - return unhappy_response - - -def given_a_blank_device_pv_prefix(): - return "" - - -async def given_set_of_attenuator_motors(hutch: HutchState) -> AttenuatorMotorSet: - prefix = given_a_blank_device_pv_prefix() - motor_set = AttenuatorMotorSet(prefix, hutch, name="fake_motor_set") - await motor_set.connect(mock=True) - - motor_set.url = "http://test-blueapi.url" - return motor_set - - -@pytest.fixture -async def eh1_motor_set(RE: RunEngine) -> AttenuatorMotorSet: - return await given_set_of_attenuator_motors(HutchState.EH1) - - -@pytest.fixture -async def eh2_motor_set(RE: RunEngine) -> AttenuatorMotorSet: - return await given_set_of_attenuator_motors(HutchState.EH2) - - -@pytest.mark.parametrize("invoking_hutch", [HutchState.EH1, HutchState.EH2]) -async def test_that_motor_set_can_be_instantiated(invoking_hutch: HutchState): - motor_set: AttenuatorMotorSet = await given_set_of_attenuator_motors(invoking_hutch) - assert isinstance(motor_set, AttenuatorMotorSet) # could have been None - - -@pytest.mark.parametrize("invoking_hutch", [HutchState.EH1, HutchState.EH2]) -async def test_when_rest_post_unsuccessful_that_error_raised( - invoking_hutch: HutchState, -): - motors: AttenuatorMotorSet = await given_set_of_attenuator_motors(invoking_hutch) - restful_post: str = ( - "dodal.devices.i19.access_controlled.optics_blueapi_device.ClientSession.post" - ) - with pytest.raises(ClientConnectionError): - with patch(restful_post) as unsuccessful_post: - restful_response: AsyncMock = given_an_unhappy_restful_response() - unsuccessful_post.return_value.__aenter__.return_value = restful_response - - 
postion_demand = given_a_position_demand() - await motors.set(postion_demand) - - -@pytest.mark.parametrize("invoking_hutch", [HutchState.EH1, HutchState.EH2]) -async def test_that_error_is_logged_whenever_position_demand_fails(invoking_hutch): - blue_api_stem: str = "dodal.devices.i19.access_controlled.optics_blueapi_device" - logger: str = f"{blue_api_stem}.LOGGER" - restful_template: str = f"{blue_api_stem}.ClientSession" - restful_post: str = f"{restful_template}.post" - restful_put: str = f"{restful_template}.put" - with ( - patch(logger) as patched_logger, - patch(restful_post) as fake_post, - patch(restful_put) as fake_put, - ): - response_to_post: AsyncMock = AsyncMock() - response_to_post.ok = True - response_to_post.json.return_value = {"task_id": "0123"} - fake_post.return_value.__aenter__.return_value = response_to_post - - response_to_put = AsyncMock() - response_to_put.ok = False - fake_put.return_value.__aenter__.return_value = response_to_put - - motors: AttenuatorMotorSet = await given_set_of_attenuator_motors( - invoking_hutch - ) - demand: AttenuatorPositionDemand = given_a_position_demand() - - patched_logger.error.assert_not_called() - await motors.set(demand) - patched_logger.error.assert_called_once() diff --git a/tests/devices/i19/access_controlled/test_attenuator_position_demand.py b/tests/devices/i19/access_controlled/test_attenuator_position_demand.py index 3ca4c8a021c..9e4902c9fd8 100644 --- a/tests/devices/i19/access_controlled/test_attenuator_position_demand.py +++ b/tests/devices/i19/access_controlled/test_attenuator_position_demand.py @@ -1,4 +1,6 @@ -from dodal.devices.i19.access_controlled.attenuator_motor_set import ( +import pytest + +from dodal.devices.i19.access_controlled.attenuator_motor_clique import ( AttenuatorPositionDemand, ) @@ -18,8 +20,8 @@ def test_that_attenuator_position_demand_with_only_one_wedge_provides_expected_r position_demand = AttenuatorPositionDemand( wedge_position_demands, wheel_position_demands ) - 
restful_encoding = position_demand.restful_format() - assert restful_encoding == "{'y': 14.9}" + restful_payload = position_demand.restful_format() + assert restful_payload["y"] == 14.9 def test_that_attenuator_position_demand_can_be_created_with_only_one_wheel(): @@ -37,8 +39,8 @@ def test_that_attenuator_position_demand_with_only_one_wheel_provides_expected_r position_demand = AttenuatorPositionDemand( wedge_position_demands, wheel_position_demands ) - rest = position_demand.restful_format() - assert rest == "{'w': 6}" + restful_payload = position_demand.restful_format() + assert restful_payload["w"] == 6 def test_that_empty_attenuator_position_demand_can_be_created(): @@ -56,8 +58,8 @@ def test_that_empty_attenuator_position_demand_provides_empty_rest_format(): position_demand = AttenuatorPositionDemand( wedge_position_demands, wheel_position_demands ) - rest = position_demand.restful_format() - assert rest == "{}" + restful_payload = position_demand.restful_format() + assert restful_payload == {} def test_that_attenuator_position_demand_triplet_can_be_created(): @@ -75,8 +77,8 @@ def test_that_attenuator_position_demand_triplet_provides_expected_rest_format() position_demand = AttenuatorPositionDemand( wedge_position_demands, wheel_position_demands ) - rest = position_demand.restful_format() - assert rest == "{'x': 0.1, 'y': 90.1, 'w': 6}" + restful_payload = position_demand.restful_format() + assert restful_payload == {"x": 0.1, "y": 90.1, "w": 6} # Happy path tests above @@ -87,51 +89,34 @@ def test_that_attenuator_position_demand_triplet_provides_expected_rest_format() def test_that_attenuator_position_raises_error_when_discrete_and_continuous_demands_overload_axis_label(): wedge_position_demands = {"x": 0.1, "v": 90.1} wheel_position_demands = {"w": 6, "v": 7} - try: + anticipated_message: str = "Clashing keys in common between wheel and wedge for attenuation position demand: v" + with pytest.raises(expected_exception=ValueError, match=anticipated_message): 
AttenuatorPositionDemand(wedge_position_demands, wheel_position_demands) - raise NotImplementedError("Did not observe expected error") - except ValueError as expected: - assert ( - str(expected) - == "Clashing keys in common between wheel and wedge for attenuation position demand: v" - ) def test_that_attenuator_position_creation_raises_error_when_continuous_position_demand_is_none(): wedge_position_demands = {"x": None, "y": 90.1} wheel_position_demands = {} - try: + with pytest.raises(expected_exception=ValueError): AttenuatorPositionDemand(wedge_position_demands, wheel_position_demands) - raise NotImplementedError("Did not observe expected error") - except ValueError as _expected: - pass def test_that_attenuator_position_creation_raises_error_when_indexed_position_demand_is_none(): wedge_position_demands = {"x": 14.88, "y": 90.1} wheel_position_demands = {"w": None, "v": 3} - try: + with pytest.raises(expected_exception=ValueError): AttenuatorPositionDemand(wedge_position_demands, wheel_position_demands) - raise NotImplementedError("Did not observe expected error") - except ValueError as _expected: - pass def test_that_attenuator_position_creation_raises_error_when_continuous_position_key_is_none(): wedge_position_demands = {"x": 32.65, None: 80.1} wheel_position_demands = {"w": 8} - try: + with pytest.raises(expected_exception=ValueError): AttenuatorPositionDemand(wedge_position_demands, wheel_position_demands) - raise NotImplementedError("Did not observe expected error") - except ValueError as _expected: - pass def test_that_attenuator_position_creation_raises_error_when_indexed_position_key_is_none(): wedge_position_demands = {"x": 24.08, "y": 71.4} wheel_position_demands = {"w": 1, None: 2} - try: + with pytest.raises(expected_exception=ValueError): AttenuatorPositionDemand(wedge_position_demands, wheel_position_demands) - raise NotImplementedError("Did not observe expected error") - except ValueError as _expected: - pass From 
7fd5cb81a5656c309d2761e94915ed3c3e0c6802 Mon Sep 17 00:00:00 2001 From: Paul Coe Date: Thu, 14 Aug 2025 18:10:40 +0100 Subject: [PATCH 04/23] * Fixes broken tests in attenuator motor class as detected by latest failures in CI --- .../i19/access_controlled/attenuator_motor_clique.py | 2 +- .../access_controlled/test_attenuator_motor_clique.py | 10 ++++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/src/dodal/devices/i19/access_controlled/attenuator_motor_clique.py b/src/dodal/devices/i19/access_controlled/attenuator_motor_clique.py index b7249900ee8..557b8310d01 100644 --- a/src/dodal/devices/i19/access_controlled/attenuator_motor_clique.py +++ b/src/dodal/devices/i19/access_controlled/attenuator_motor_clique.py @@ -100,7 +100,7 @@ class AttenuatorMotorClique(OpticsBlueApiDevice): async def set(self, value: AttenuatorPositionDemand): invoking_hutch = str(self._get_invoking_hutch().value) request_params = { - "name": "operate_attenuator_plan", + "name": "operate_motor_clique_plan", "params": { "experiment_hutch": invoking_hutch, "access_device": ACCESS_DEVICE_NAME, diff --git a/tests/devices/i19/access_controlled/test_attenuator_motor_clique.py b/tests/devices/i19/access_controlled/test_attenuator_motor_clique.py index 3f9e8cda504..dba14daeef1 100644 --- a/tests/devices/i19/access_controlled/test_attenuator_motor_clique.py +++ b/tests/devices/i19/access_controlled/test_attenuator_motor_clique.py @@ -55,16 +55,18 @@ async def test_that_motor_clique_can_be_instantiated(invoking_hutch): @pytest.mark.parametrize("invoking_hutch", [HutchState.EH1, HutchState.EH2]) @patch( - "dodal.devices.i19.access_controlled.optics_blueapi_device.OpticsBlueApiDevice.set" + "dodal.devices.i19.access_controlled.attenuator_motor_clique.OpticsBlueApiDevice.set", + new_callable=AsyncMock, ) async def test_when_motor_clique_is_set_that_expected_request_params_are_passed( - patched_setter, invoking_hutch + internal_setter, invoking_hutch ): motors: AttenuatorMotorClique = 
await given_a_clique_of_attenuator_motors( invoking_hutch ) position_demand: AttenuatorPositionDemand = given_a_position_demand() - motors.set(position_demand) # when motor position demand is set + await motors.set(position_demand) # when motor position demand is set + expected_hutch: str = invoking_hutch.value expected_device: str = "access_control" expected_request_name: str = "operate_motor_clique_plan" @@ -77,7 +79,7 @@ async def test_when_motor_clique_is_set_that_expected_request_params_are_passed( "name": expected_request_name, "params": expected_parameters, } - assert patched_setter.called_once_with(expected_request) + internal_setter.assert_called_once_with(expected_request) @pytest.mark.parametrize("invoking_hutch", [HutchState.EH1, HutchState.EH2]) From 05790fd4e0df5495a2b1a56efe930fe387f51f89 Mon Sep 17 00:00:00 2001 From: Paul Coe Date: Fri, 19 Sep 2025 23:32:44 +0100 Subject: [PATCH 05/23] dodal#1384 adds pydantic validation * replaces ad hoc validation with pydantic usage * renames badly named motor clique -> motor squad where the new name retains the sense of motor independence yet working towards a common purpose ( even if not a single point in 3D space : compare "stage" ) * tests adapted to the above changes where necessary --- .../attenuator_motor_clique.py | 110 ------------------ .../attenuator_motor_squad.py | 76 ++++++++++++ ...ique.py => test_attenuator_motor_squad.py} | 44 +++---- .../test_attenuator_position_demand.py | 69 +++++++---- 4 files changed, 144 insertions(+), 155 deletions(-) delete mode 100644 src/dodal/devices/i19/access_controlled/attenuator_motor_clique.py create mode 100644 src/dodal/devices/i19/access_controlled/attenuator_motor_squad.py rename tests/devices/i19/access_controlled/{test_attenuator_motor_clique.py => test_attenuator_motor_squad.py} (70%) diff --git a/src/dodal/devices/i19/access_controlled/attenuator_motor_clique.py b/src/dodal/devices/i19/access_controlled/attenuator_motor_clique.py deleted file mode 100644 
index 557b8310d01..00000000000 --- a/src/dodal/devices/i19/access_controlled/attenuator_motor_clique.py +++ /dev/null @@ -1,110 +0,0 @@ -from typing import Any - -from ophyd_async.core import AsyncStatus - -from dodal.devices.i19.access_controlled.hutch_access import ACCESS_DEVICE_NAME -from dodal.devices.i19.access_controlled.optics_blueapi_device import ( - OpticsBlueApiDevice, -) - - -class AttenuatorPositionDemand: - @staticmethod - def _ensure_no_dict_k_is_none(d: dict[str, Any], msg: str) -> None: - if any(k is None for k in d.keys()): - raise ValueError(msg) - - @staticmethod - def _ensure_no_dict_v_is_none(d: dict[str, Any], msg: str) -> None: - if any(v is None for v in d.values()): - raise ValueError(msg) - - @staticmethod - def _validate_dict(d: dict[str, Any], motor_type: str): - k_msg = f"Missing attenuator motor axis label for {motor_type}" - AttenuatorPositionDemand._ensure_no_dict_k_is_none(d, k_msg) - v_msg = f"Missing attenuator position demand value for {motor_type}" - AttenuatorPositionDemand._ensure_no_dict_v_is_none(d, v_msg) - - @staticmethod - def _ensure_dicts_share_no_common_k(a: dict[str, float], b: dict[str, int]) -> None: - clashes_detected = filter(lambda k: k in a, b) - if any(clashes_detected): - msg_template: str = "Clashing keys in common between wheel and wedge for attenuation position demand: {}" - reiterated = filter(lambda k: k in a, b) - clashes = ",".join(reiterated) - msg: str = msg_template.format(clashes) - raise ValueError(msg) - - def __init__( - self, - continuous_motor_position_demands: dict[str, float], - indexed_motor_position_demands: dict[str, int], - ) -> None: - self._validate_input( - continuous_motor_position_demands, indexed_motor_position_demands - ) - combined_demand: dict[str, Any] = {} - combined_demand.update(continuous_motor_position_demands) - combined_demand.update(indexed_motor_position_demands) - self.demands = combined_demand - - def restful_format(self) -> dict[str, Any]: - return 
self.demands.copy() - - def _validate_input( - self, - continuous_motor_position_demands: dict[str, float], - indexed_motor_position_demands: dict[str, int], - ): - AttenuatorPositionDemand._validate_dict( - continuous_motor_position_demands, "continuous motor" - ) - AttenuatorPositionDemand._validate_dict( - indexed_motor_position_demands, "indexed motor" - ) - AttenuatorPositionDemand._ensure_dicts_share_no_common_k( - continuous_motor_position_demands, indexed_motor_position_demands - ) - - -class AttenuatorMotorClique(OpticsBlueApiDevice): - """ I19-specific proxy device which requests absorber position changes in the x-ray attenuator. - - Sends REST call to blueapi controlling optics on the I19 cluster. - The hutch in use is compared against the hutch which sent the REST call. - Only the hutch in use will be permitted to execute a plan (requesting motor moves). - As the two hutches are located in series, checking the hutch in use is necessary to \ - avoid accidentally operating optics devices from one hutch while the other has beam time. - - The name of the hutch that wants to operate the optics device is passed to the \ - access controlled device upon instantiation of the latter. - - Nomenclature: - Note this class was originally called - -MotorSet - ( but objections were based on "set" being an overloaded word ) - - -Gang might have worked, - ( but a gang of mechanical devices are able to be mechanically coupled - and move together; whereas the motors here are completely mutually independent ) - - -Clique - ( as in a set of friends, is an accurate choice, is unlikely to be overloaded, just sounds pretentious). - - For details see the architecture described in \ - https://github.com/DiamondLightSource/i19-bluesky/issues/30. 
- """ - - @AsyncStatus.wrap - async def set(self, value: AttenuatorPositionDemand): - invoking_hutch = str(self._get_invoking_hutch().value) - request_params = { - "name": "operate_motor_clique_plan", - "params": { - "experiment_hutch": invoking_hutch, - "access_device": ACCESS_DEVICE_NAME, - "attenuator_demands": value.restful_format(), - }, - } - await super().set(request_params) diff --git a/src/dodal/devices/i19/access_controlled/attenuator_motor_squad.py b/src/dodal/devices/i19/access_controlled/attenuator_motor_squad.py new file mode 100644 index 00000000000..7928a24f7ec --- /dev/null +++ b/src/dodal/devices/i19/access_controlled/attenuator_motor_squad.py @@ -0,0 +1,76 @@ +from typing import Annotated, Any + +from ophyd_async.core import AsyncStatus +from pydantic import BaseModel, field_validator +from pydantic.types import PositiveInt, StringConstraints + +from dodal.devices.i19.access_controlled.hutch_access import ACCESS_DEVICE_NAME +from dodal.devices.i19.access_controlled.optics_blueapi_device import ( + OpticsBlueApiDevice, +) + +PermittedKeyStr = Annotated[str, StringConstraints(pattern="^[A-Za-z0-9-_]*$")] + + +class AttenuatorMotorPositionDemands(BaseModel): + continuous_demands: dict[PermittedKeyStr, float] = {} + indexed_demands: dict[PermittedKeyStr, PositiveInt] = {} + + @field_validator("indexed_demands") + def no_keys_clash(cls, indexed_demands, values, **kwargs): + if len(indexed_demands) < 1: + return indexed_demands + other_demands = values["continuous_demands"] + for key in indexed_demands: + if key in other_demands: + message = f"Attenuator Position Demand Key clash: {key}. Require distinct keys for axis names on continuous motors and indexed positions." 
+ raise ValueError(message) + return indexed_demands + + def restful_format(self) -> dict[PermittedKeyStr, Any]: + combined: dict[PermittedKeyStr, Any] = {} + combined.update(self.continuous_demands) + combined.update(self.indexed_demands) + return combined + + +class AttenuatorMotorSquad(OpticsBlueApiDevice): + """ I19-specific proxy device which requests absorber position changes in the x-ray attenuator. + + Sends REST call to blueapi controlling optics on the I19 cluster. + The hutch in use is compared against the hutch which sent the REST call. + Only the hutch in use will be permitted to execute a plan (requesting motor moves). + As the two hutches are located in series, checking the hutch in use is necessary to \ + avoid accidentally operating optics devices from one hutch while the other has beam time. + + The name of the hutch that wants to operate the optics device is passed to the \ + access controlled device upon instantiation of the latter. + + Nomenclature: + Note this class was originally called + -MotorSet + ( but objections were based on "set" being an overloaded word ) + + -Gang might have worked, + ( but a gang of mechanical devices are able to be mechanically coupled + and move together; whereas the motors here are completely mutually independent ) + + -Clique + ( as in a set of friends, is an accurate choice, is unlikely to be overloaded, just sounds pretentious). + + For details see the architecture described in \ + https://github.com/DiamondLightSource/i19-bluesky/issues/30. 
+ """ + + @AsyncStatus.wrap + async def set(self, value: AttenuatorMotorPositionDemands): + invoking_hutch = str(self._get_invoking_hutch().value) + request_params = { + "name": "operate_motor_clique_plan", + "params": { + "experiment_hutch": invoking_hutch, + "access_device": ACCESS_DEVICE_NAME, + "attenuator_demands": value.restful_format(), + }, + } + await super().set(request_params) diff --git a/tests/devices/i19/access_controlled/test_attenuator_motor_clique.py b/tests/devices/i19/access_controlled/test_attenuator_motor_squad.py similarity index 70% rename from tests/devices/i19/access_controlled/test_attenuator_motor_clique.py rename to tests/devices/i19/access_controlled/test_attenuator_motor_squad.py index dba14daeef1..b93a9f6cf21 100644 --- a/tests/devices/i19/access_controlled/test_attenuator_motor_clique.py +++ b/tests/devices/i19/access_controlled/test_attenuator_motor_squad.py @@ -4,14 +4,14 @@ from aiohttp.client import ClientConnectionError from bluesky.run_engine import RunEngine -from dodal.devices.i19.access_controlled.attenuator_motor_clique import ( - AttenuatorMotorClique, - AttenuatorPositionDemand, +from dodal.devices.i19.access_controlled.attenuator_motor_squad import ( + AttenuatorMotorPositionDemands, + AttenuatorMotorSquad, ) from dodal.devices.i19.access_controlled.optics_blueapi_device import HutchState -def given_a_position_demand() -> AttenuatorPositionDemand: +def given_position_demands() -> AttenuatorMotorPositionDemands: position_demand = MagicMock() restful_payload = {"x": 54.3, "y": 72.1, "w": 4} position_demand.restful_format = MagicMock(return_value=restful_payload) @@ -25,10 +25,10 @@ def given_an_unhappy_restful_response() -> AsyncMock: return unhappy_response -async def given_a_clique_of_attenuator_motors( +async def given_a_squad_of_attenuator_motors( hutch: HutchState, -) -> AttenuatorMotorClique: - motor_clique = AttenuatorMotorClique(hutch, name="attenuator_motor_clique") +) -> AttenuatorMotorSquad: + motor_clique = 
AttenuatorMotorSquad(hutch, name="attenuator_motor_clique") await motor_clique.connect(mock=True) motor_clique.url = "http://test-blueapi.url" @@ -36,21 +36,21 @@ async def given_a_clique_of_attenuator_motors( @pytest.fixture -async def eh1_motor_clique(RE: RunEngine) -> AttenuatorMotorClique: - return await given_a_clique_of_attenuator_motors(HutchState.EH1) +async def eh1_motor_clique(RE: RunEngine) -> AttenuatorMotorSquad: + return await given_a_squad_of_attenuator_motors(HutchState.EH1) @pytest.fixture -async def eh2_motor_clique(RE: RunEngine) -> AttenuatorMotorClique: - return await given_a_clique_of_attenuator_motors(HutchState.EH2) +async def eh2_motor_clique(RE: RunEngine) -> AttenuatorMotorSquad: + return await given_a_squad_of_attenuator_motors(HutchState.EH2) @pytest.mark.parametrize("invoking_hutch", [HutchState.EH1, HutchState.EH2]) async def test_that_motor_clique_can_be_instantiated(invoking_hutch): - motor_clique: AttenuatorMotorClique = await given_a_clique_of_attenuator_motors( + motor_clique: AttenuatorMotorSquad = await given_a_squad_of_attenuator_motors( invoking_hutch ) - assert isinstance(motor_clique, AttenuatorMotorClique) + assert isinstance(motor_clique, AttenuatorMotorSquad) @pytest.mark.parametrize("invoking_hutch", [HutchState.EH1, HutchState.EH2]) @@ -61,11 +61,11 @@ async def test_that_motor_clique_can_be_instantiated(invoking_hutch): async def test_when_motor_clique_is_set_that_expected_request_params_are_passed( internal_setter, invoking_hutch ): - motors: AttenuatorMotorClique = await given_a_clique_of_attenuator_motors( + motors: AttenuatorMotorSquad = await given_a_squad_of_attenuator_motors( invoking_hutch ) - position_demand: AttenuatorPositionDemand = given_a_position_demand() - await motors.set(position_demand) # when motor position demand is set + position_demands: AttenuatorMotorPositionDemands = given_position_demands() + await motors.set(position_demands) # when motor position demand is set expected_hutch: str = 
invoking_hutch.value expected_device: str = "access_control" @@ -87,15 +87,15 @@ async def test_when_motor_clique_is_set_that_expected_request_params_are_passed( async def test_when_rest_post_unsuccessful_that_error_raised( unsuccessful_post, invoking_hutch ): - motors: AttenuatorMotorClique = await given_a_clique_of_attenuator_motors( + motors: AttenuatorMotorSquad = await given_a_squad_of_attenuator_motors( invoking_hutch ) with pytest.raises(ClientConnectionError): restful_response: AsyncMock = given_an_unhappy_restful_response() unsuccessful_post.return_value.__aenter__.return_value = restful_response - postion_demand = given_a_position_demand() - await motors.set(postion_demand) + postion_demands = given_position_demands() + await motors.set(postion_demands) @pytest.mark.parametrize("invoking_hutch", [HutchState.EH1, HutchState.EH2]) @@ -114,11 +114,11 @@ async def test_that_error_is_logged_whenever_position_demand_fails( response_to_put.ok = False restful_put.return_value.__aenter__.return_value = response_to_put - motors: AttenuatorMotorClique = await given_a_clique_of_attenuator_motors( + motors: AttenuatorMotorSquad = await given_a_squad_of_attenuator_motors( invoking_hutch ) - demand: AttenuatorPositionDemand = given_a_position_demand() + position_demands: AttenuatorMotorPositionDemands = given_position_demands() logger.error.assert_not_called() - await motors.set(demand) + await motors.set(position_demands) logger.error.assert_called_once() diff --git a/tests/devices/i19/access_controlled/test_attenuator_position_demand.py b/tests/devices/i19/access_controlled/test_attenuator_position_demand.py index 9e4902c9fd8..1afbb87a3e2 100644 --- a/tests/devices/i19/access_controlled/test_attenuator_position_demand.py +++ b/tests/devices/i19/access_controlled/test_attenuator_position_demand.py @@ -1,15 +1,16 @@ import pytest -from dodal.devices.i19.access_controlled.attenuator_motor_clique import ( - AttenuatorPositionDemand, +from 
dodal.devices.i19.access_controlled.attenuator_motor_squad import ( + AttenuatorMotorPositionDemands, ) def test_that_attenuator_position_demand_can_be_created_with_only_one_wedge(): wedge_position_demands = {"x": 0.5} wheel_position_demands = {} - position_demand = AttenuatorPositionDemand( - wedge_position_demands, wheel_position_demands + position_demand = AttenuatorMotorPositionDemands( + continuous_demands=wedge_position_demands, + indexed_demands=wheel_position_demands, ) assert position_demand is not None @@ -17,8 +18,9 @@ def test_that_attenuator_position_demand_can_be_created_with_only_one_wedge(): def test_that_attenuator_position_demand_with_only_one_wedge_provides_expected_rest_format(): wedge_position_demands = {"y": 14.9} wheel_position_demands = {} - position_demand = AttenuatorPositionDemand( - wedge_position_demands, wheel_position_demands + position_demand = AttenuatorMotorPositionDemands( + continuous_demands=wedge_position_demands, + indexed_demands=wheel_position_demands, ) restful_payload = position_demand.restful_format() assert restful_payload["y"] == 14.9 @@ -27,8 +29,9 @@ def test_that_attenuator_position_demand_with_only_one_wedge_provides_expected_r def test_that_attenuator_position_demand_can_be_created_with_only_one_wheel(): wedge_position_demands = {} wheel_position_demands = {"w": 2} - position_demand = AttenuatorPositionDemand( - wedge_position_demands, wheel_position_demands + position_demand = AttenuatorMotorPositionDemands( + continuous_demands=wedge_position_demands, + indexed_demands=wheel_position_demands, ) assert position_demand is not None @@ -36,8 +39,9 @@ def test_that_attenuator_position_demand_can_be_created_with_only_one_wheel(): def test_that_attenuator_position_demand_with_only_one_wheel_provides_expected_rest_format(): wedge_position_demands = {} wheel_position_demands = {"w": 6} - position_demand = AttenuatorPositionDemand( - wedge_position_demands, wheel_position_demands + position_demand = 
AttenuatorMotorPositionDemands( + continuous_demands=wedge_position_demands, + indexed_demands=wheel_position_demands, ) restful_payload = position_demand.restful_format() assert restful_payload["w"] == 6 @@ -46,8 +50,9 @@ def test_that_attenuator_position_demand_with_only_one_wheel_provides_expected_r def test_that_empty_attenuator_position_demand_can_be_created(): wedge_position_demands = {} wheel_position_demands = {} - position_demand = AttenuatorPositionDemand( - wedge_position_demands, wheel_position_demands + position_demand = AttenuatorMotorPositionDemands( + continuous_demands=wedge_position_demands, + indexed_demands=wheel_position_demands, ) assert position_demand is not None @@ -55,8 +60,9 @@ def test_that_empty_attenuator_position_demand_can_be_created(): def test_that_empty_attenuator_position_demand_provides_empty_rest_format(): wedge_position_demands = {} wheel_position_demands = {} - position_demand = AttenuatorPositionDemand( - wedge_position_demands, wheel_position_demands + position_demand = AttenuatorMotorPositionDemands( + continuous_demands=wedge_position_demands, + indexed_demands=wheel_position_demands, ) restful_payload = position_demand.restful_format() assert restful_payload == {} @@ -65,8 +71,9 @@ def test_that_empty_attenuator_position_demand_provides_empty_rest_format(): def test_that_attenuator_position_demand_triplet_can_be_created(): standard_wedge_position_demand = {"x": 25.9, "y": 5.0} standard_wheel_position_demand = {"w": 4} - position_demand = AttenuatorPositionDemand( - standard_wedge_position_demand, standard_wheel_position_demand + position_demand = AttenuatorMotorPositionDemands( + continuous_demands=standard_wedge_position_demand, + indexed_demands=standard_wheel_position_demand, ) assert position_demand is not None @@ -74,8 +81,9 @@ def test_that_attenuator_position_demand_triplet_can_be_created(): def test_that_attenuator_position_demand_triplet_provides_expected_rest_format(): wedge_position_demands = {"x": 0.1, "y": 
90.1} wheel_position_demands = {"w": 6} - position_demand = AttenuatorPositionDemand( - wedge_position_demands, wheel_position_demands + position_demand = AttenuatorMotorPositionDemands( + continuous_demands=wedge_position_demands, + indexed_demands=wheel_position_demands, ) restful_payload = position_demand.restful_format() assert restful_payload == {"x": 0.1, "y": 90.1, "w": 6} @@ -91,32 +99,47 @@ def test_that_attenuator_position_raises_error_when_discrete_and_continuous_dema wheel_position_demands = {"w": 6, "v": 7} anticipated_message: str = "Clashing keys in common between wheel and wedge for attenuation position demand: v" with pytest.raises(expected_exception=ValueError, match=anticipated_message): - AttenuatorPositionDemand(wedge_position_demands, wheel_position_demands) + AttenuatorMotorPositionDemands( + continuous_demands=wedge_position_demands, + indexed_demands=wheel_position_demands, + ) def test_that_attenuator_position_creation_raises_error_when_continuous_position_demand_is_none(): wedge_position_demands = {"x": None, "y": 90.1} wheel_position_demands = {} with pytest.raises(expected_exception=ValueError): - AttenuatorPositionDemand(wedge_position_demands, wheel_position_demands) + AttenuatorMotorPositionDemands( + continuous_demands=wedge_position_demands, + indexed_demands=wheel_position_demands, + ) def test_that_attenuator_position_creation_raises_error_when_indexed_position_demand_is_none(): wedge_position_demands = {"x": 14.88, "y": 90.1} wheel_position_demands = {"w": None, "v": 3} with pytest.raises(expected_exception=ValueError): - AttenuatorPositionDemand(wedge_position_demands, wheel_position_demands) + AttenuatorMotorPositionDemands( + continuous_demands=wedge_position_demands, + indexed_demands=wheel_position_demands, + ) def test_that_attenuator_position_creation_raises_error_when_continuous_position_key_is_none(): wedge_position_demands = {"x": 32.65, None: 80.1} wheel_position_demands = {"w": 8} with 
pytest.raises(expected_exception=ValueError): - AttenuatorPositionDemand(wedge_position_demands, wheel_position_demands) + AttenuatorMotorPositionDemands( + continuous_demands=wedge_position_demands, + indexed_demands=wheel_position_demands, + ) def test_that_attenuator_position_creation_raises_error_when_indexed_position_key_is_none(): wedge_position_demands = {"x": 24.08, "y": 71.4} wheel_position_demands = {"w": 1, None: 2} with pytest.raises(expected_exception=ValueError): - AttenuatorPositionDemand(wedge_position_demands, wheel_position_demands) + AttenuatorMotorPositionDemands( + continuous_demands=wedge_position_demands, + indexed_demands=wheel_position_demands, + ) From 95bd7f7800f0bc1f86e8e6fa116b3e3a77484db3 Mon Sep 17 00:00:00 2001 From: Paul Coe Date: Fri, 19 Sep 2025 23:32:44 +0100 Subject: [PATCH 06/23] dodal#1384 adds pydantic validation * replaces ad hoc validation with pydantic usage -- includes regex restriction of keys (motor axes or indexed positions to alphanumeric, underscore or dash) * renames badly named motor clique -> motor squad where the new name retains the sense of motor independence yet working towards a common purpose ( even if not a single point in 3D space : compare "stage" ) * tests adapted to the above changes where necessary --- .../access_controlled/attenuator_motor_squad.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/dodal/devices/i19/access_controlled/attenuator_motor_squad.py b/src/dodal/devices/i19/access_controlled/attenuator_motor_squad.py index 7928a24f7ec..4f92db09665 100644 --- a/src/dodal/devices/i19/access_controlled/attenuator_motor_squad.py +++ b/src/dodal/devices/i19/access_controlled/attenuator_motor_squad.py @@ -17,15 +17,17 @@ class AttenuatorMotorPositionDemands(BaseModel): indexed_demands: dict[PermittedKeyStr, PositiveInt] = {} @field_validator("indexed_demands") - def no_keys_clash(cls, indexed_demands, values, **kwargs): - if len(indexed_demands) < 1: - return 
indexed_demands - other_demands = values["continuous_demands"] - for key in indexed_demands: - if key in other_demands: + + def no_keys_clash(cls, values, **kwargs): + continuous = values["continuous_demands"] + indexed = values["indexed_demands"] + if len(indexed) < 1: + return values + for key in indexed: + if key in continuous: message = f"Attenuator Position Demand Key clash: {key}. Require distinct keys for axis names on continuous motors and indexed positions." raise ValueError(message) - return indexed_demands + return values def restful_format(self) -> dict[PermittedKeyStr, Any]: combined: dict[PermittedKeyStr, Any] = {} From 1bc8c3d6f7086308c0d0a75d4e488289867106a2 Mon Sep 17 00:00:00 2001 From: olliesilvester <122091460+olliesilvester@users.noreply.github.com> Date: Mon, 22 Sep 2025 10:32:56 +0100 Subject: [PATCH 07/23] Fix bug in watcher utils (#1557) * Fix watcher utils bug and add test --- src/dodal/common/watcher_utils.py | 2 +- tests/common/test_watcher_utils.py | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/src/dodal/common/watcher_utils.py b/src/dodal/common/watcher_utils.py index 4befa9c7379..5fbb95eb5d7 100644 --- a/src/dodal/common/watcher_utils.py +++ b/src/dodal/common/watcher_utils.py @@ -12,7 +12,6 @@ def __init__( message_prefix: str, percent_interval: Number = 25, ): - status.watch(self) self.percent_interval = percent_interval self._current_percent_interval = 0 self.message_prefix = message_prefix @@ -20,6 +19,7 @@ def __init__( raise ValueError( f"Percent interval on class _LogOnPercentageProgressWatcher must be a positive number, but received {self.percent_interval}" ) + status.watch(self) def __call__( self, diff --git a/tests/common/test_watcher_utils.py b/tests/common/test_watcher_utils.py index 36c7d869e52..4b386bcae66 100644 --- a/tests/common/test_watcher_utils.py +++ b/tests/common/test_watcher_utils.py @@ -61,3 +61,17 @@ async def test_log_on_percentage_complete_value_error_on_bad_input(): 
match="Percent interval on class _LogOnPercentageProgressWatcher must be a positive number, but received 0", ): log_on_percentage_complete(status, "", 0) + + +async def test_log_on_percentage_complete_for_already_updating_status(): + test_watchable = MockWatchable() + status = test_watchable.get_watchable_status() + + async def update_signal(): + for i in range(10): + await test_watchable.signal.set(i) + + async def do_log(): + log_on_percentage_complete(status, "") + + await asyncio.gather(update_signal(), do_log()) From f4485d2aa88f2804d50a4871ad0d6fc8a0d46557 Mon Sep 17 00:00:00 2001 From: Raymond Fan Date: Mon, 22 Sep 2025 14:55:31 +0100 Subject: [PATCH 08/23] 1404 add base lakeshore temperature controllers (#1408) * added lakeshore * grouping vector into function * reorder structure * added lakeshore 340 an 336 * undo spaces change in pyproject * complete test * more tests * make set wait in test * correct lakeshore 340 name * correct some docstring * add lakeshore docstring * add i16 lakeshore * add i06 lakeshores * correct default for lakeshore340 * add docstring for base signals * correct endstation name * correct docstring * remove private methods from docstring and add explanation to control_channel * correct docsting * add user_ to readback and setpoint * change zero offset to custom offset * remove lakeshore on beamlines * remove sub class * remove sub class * rename single control channel to no_pv_suffix_index * signal only * update test to check off_set * add test to check pv for no_pv_index * create channels for group of signal instead of device vector each signal. 
* test signal channel control * move pid into control channel * add docstring * corrected snake_case and added docstring to clarify channel is readback channel * add channel_rw f * correct ramp_enable channel type * dropping channel in readback * update branch and make lakeshore work * correct temperature control test with lakeshoreIO * remove add readable in set channel * make hints_channel for changing hint signal in plan * move class docs string to collect place * correct enum * remove else and use map instead for suffix * add docstings * correct grammar Co-authored-by: Dominic Oram * correct import * correct logic and test * move docstring * rename channel to current_hints_channel * fix wrong pv * Apply suggestions from code review Co-authored-by: oliwenmandiamond <136330507+oliwenmandiamond@users.noreply.github.com> * correct tests * Create lakeshoreHint to avoid calling ophyd function * revert lakeshore hashint back to _HintsFromName --------- Co-authored-by: Dominic Oram Co-authored-by: oliwenmandiamond <136330507+oliwenmandiamond@users.noreply.github.com> --- .../devices/temperture_controller/__init__.py | 3 + .../lakeshore/lakeshore.py | 155 +++++++++++++++++ .../lakeshore/lakeshore_io.py | 2 +- .../temperature_controller/test_lakeshore.py | 158 ++++++++++++++++++ 4 files changed, 317 insertions(+), 1 deletion(-) create mode 100644 src/dodal/devices/temperture_controller/lakeshore/lakeshore.py create mode 100644 tests/devices/unit_tests/temperature_controller/test_lakeshore.py diff --git a/src/dodal/devices/temperture_controller/__init__.py b/src/dodal/devices/temperture_controller/__init__.py index e69de29bb2d..4e32083a9a9 100644 --- a/src/dodal/devices/temperture_controller/__init__.py +++ b/src/dodal/devices/temperture_controller/__init__.py @@ -0,0 +1,3 @@ +from .lakeshore.lakeshore import Lakeshore + +__all__ = ["Lakeshore"] diff --git a/src/dodal/devices/temperture_controller/lakeshore/lakeshore.py 
b/src/dodal/devices/temperture_controller/lakeshore/lakeshore.py new file mode 100644 index 00000000000..27e5b8bb40a --- /dev/null +++ b/src/dodal/devices/temperture_controller/lakeshore/lakeshore.py @@ -0,0 +1,155 @@ +from asyncio import gather + +from bluesky.protocols import Movable +from ophyd_async.core import ( + AsyncStatus, + SignalDatatypeT, + StandardReadable, + StandardReadableFormat, + derived_signal_rw, + soft_signal_rw, +) +from ophyd_async.core._readable import _HintsFromName + +from .lakeshore_io import ( + LakeshoreBaseIO, +) + + +class Lakeshore(LakeshoreBaseIO, StandardReadable, Movable[float]): + """ + Device for controlling and reading from a Lakeshore temperature controller. + It supports multiple channels and PID control. + Attributes + ---------- + temperature : LakeshoreBaseIO + Temperature IO interface. + PID : PIDBaseIO + PID IO interface. + control_channel : derived_signal_rw + Signal for selecting the control channel, + optional readback as hinted signal + (default readback channel is the same as control channel). + + temperature_high_limit: soft_signal_rw + Signal to store the soft high temperature limit. + temperature_low_limit: soft_signal_rw + Signal to store the soft low temperature limit. + + + Methods + ------- + set(value: float) + Set the temperature setpoint for the selected control channel. + """ + + def __init__( + self, + prefix: str, + num_readback_channel: int, + heater_setting: type[SignalDatatypeT], + control_channel: int = 1, + single_control_channel: bool = False, + name: str = "", + ): + """ + Parameters + ---------- + prefix : str + The EPICS prefix for the device. + no_channels : int + Number of temperature channels. + heater_setting : type[SignalDatatypeT] + Enum type for heater settings. + control_channel : int, optional + The initial control channel (default is 1). + single_control_channel : bool, optional + Whether to use a single control channel (default is False). + name : str, optional + Name of the device. 
+ """ + self._control_channel = soft_signal_rw(int, initial_value=control_channel) + self.temperature_high_limit = soft_signal_rw(float, initial_value=400) + self.temperature_low_limit = soft_signal_rw(float, initial_value=0) + + self.control_channel = derived_signal_rw( + raw_to_derived=self._get_control_channel, + set_derived=self._set_control_channel, + current_channel=self._control_channel, + ) + + self._hints_channel = soft_signal_rw(int, initial_value=control_channel) + + self.hints_channel = derived_signal_rw( + raw_to_derived=self._get_hints_channel, + set_derived=self._set_hints_channel, + current_hints_channel=self._hints_channel, + ) + + super().__init__( + prefix=prefix, + num_readback_channel=num_readback_channel, + heater_setting=heater_setting, + name=name, + single_control_channel=single_control_channel, + ) + + self.add_readables( + [setpoint.user_setpoint for setpoint in self.control_channels.values()] + + list(self.readback.values()) + ) + self._has_hints = (_HintsFromName(self.readback[control_channel]),) + + self.add_readables( + [ + self._control_channel, + self.control_channels[control_channel].p, + self.control_channels[control_channel].i, + self.control_channels[control_channel].d, + self.control_channels[control_channel].heater_output_range, + ], + StandardReadableFormat.CONFIG_SIGNAL, + ) + + @AsyncStatus.wrap + async def set(self, value: float) -> None: + """ + Set the temperature setpoint for the active control channel. 
+ """ + high, low = await gather( + self.temperature_high_limit.get_value(), + self.temperature_low_limit.get_value(), + ) + if high >= value >= low: + await self.control_channels[ + await self.control_channel.get_value() + ].user_setpoint.set(value) + else: + raise ValueError( + f"{self.name} requested temperature {value} is outside limits: {low}, {high}" + ) + + def _get_control_channel(self, current_channel: int) -> int: + return current_channel + + async def _set_control_channel(self, value: int) -> None: + if value < 1 or value > len(self.control_channels): + raise ValueError( + f"Control channel must be between 1 and {len(self.control_channels)}." + ) + await self._control_channel.set(value) + self._read_config_funcs = ( + self._control_channel.read, + self.control_channels[value].user_setpoint.read, + self.control_channels[value].p.read, + self.control_channels[value].i.read, + self.control_channels[value].d.read, + self.control_channels[value].heater_output_range.read, + ) + + async def _set_hints_channel(self, readback_channel: int) -> None: + self._has_hints = (_HintsFromName(self.readback[readback_channel]),) + await self._hints_channel.set(readback_channel) + + def _get_hints_channel(self, current_hints_channel: int) -> int: + return current_hints_channel diff --git a/src/dodal/devices/temperture_controller/lakeshore/lakeshore_io.py b/src/dodal/devices/temperture_controller/lakeshore/lakeshore_io.py index 6cd1ecf5162..c83eaa3f871 100644 --- a/src/dodal/devices/temperture_controller/lakeshore/lakeshore_io.py +++ b/src/dodal/devices/temperture_controller/lakeshore/lakeshore_io.py @@ -40,12 +40,12 @@ def channel_rw(channel_type, pv_name): self.user_setpoint = channel_rw(channel_type=float, pv_name="SETP") self.ramp_rate = channel_rw(channel_type=float, pv_name="RAMP") self.ramp_enable = channel_rw(channel_type=int, pv_name="RAMPST") - self.heater_output = channel_rw(channel_type=float, pv_name="HTR") self.heater_output_range = 
channel_rw(channel_type=heater_type, pv_name="RANGE") self.p = channel_rw(channel_type=float, pv_name="P") self.i = channel_rw(channel_type=float, pv_name="I") self.d = channel_rw(channel_type=float, pv_name="D") self.manual_output = channel_rw(channel_type=float, pv_name="MOUT") + self.heater_output = epics_signal_r(float, f"{prefix}{'HTR'}{suffix}") super().__init__(name=name) diff --git a/tests/devices/unit_tests/temperature_controller/test_lakeshore.py b/tests/devices/unit_tests/temperature_controller/test_lakeshore.py new file mode 100644 index 00000000000..1a738bfbf55 --- /dev/null +++ b/tests/devices/unit_tests/temperature_controller/test_lakeshore.py @@ -0,0 +1,158 @@ +from collections import defaultdict +from unittest.mock import ANY + +import numpy as np +import pytest +from bluesky.plan_stubs import abs_set +from bluesky.plans import count +from bluesky.run_engine import RunEngine +from ophyd_async.core import StrictEnum, init_devices +from ophyd_async.testing import assert_configuration, assert_reading + +from dodal.devices.temperture_controller import ( + Lakeshore, +) + + +class HeaterSettings(StrictEnum): + OFF = "Off" + LOW = "Low" + MEDIUM = "Medium" + HIGH = "High" + + +@pytest.fixture +async def lakeshore() -> Lakeshore: + async with init_devices(mock=True): + lakeshore = Lakeshore( + prefix="007", num_readback_channel=4, heater_setting=HeaterSettings + ) + return lakeshore + + +@pytest.mark.parametrize( + "control_channel", + [ + 1, + 2, + 3, + 4, + ], +) +async def test_lakeshore_set_success( + lakeshore: Lakeshore, RE: RunEngine, control_channel: int +): + RE(abs_set(lakeshore.control_channel, control_channel, wait=True)) + temperature = np.random.uniform(10, 400) + RE(abs_set(lakeshore, temperature, wait=True)) + + assert ( + await lakeshore.control_channels[ + await lakeshore.control_channel.get_value() + ].user_setpoint.get_value() + == temperature + ) + + +@pytest.mark.parametrize( + "temperature", + [ + -2, + 500, + ], +) +async def 
test_lakeshore_set_fail_outside_limit( + lakeshore: Lakeshore, RE: RunEngine, temperature: float +): + low = await lakeshore.temperature_low_limit.get_value() + high = await lakeshore.temperature_high_limit.get_value() + with pytest.raises( + ValueError, + match=f"{lakeshore.name} requested temperature {temperature} is outside limits: {low}, {high}", + ): + await lakeshore.set(temperature) + + +async def test_lakeshore_set_fail_unavailable_channel( + lakeshore: Lakeshore, RE: RunEngine +): + with pytest.raises( + ValueError, + match=f"Control channel must be between 1 and {len(lakeshore.control_channels)}.", + ): + await lakeshore._set_control_channel(0) + + +@pytest.mark.parametrize( + "control_channel", + [1, 2, 3, 4], +) +async def test_lakeshore_set_control_channel_correctly_set_up_config( + lakeshore: Lakeshore, + RE: RunEngine, + control_channel: int, +): + RE(abs_set(lakeshore.control_channel, control_channel, wait=True)) + + config = ["user_setpoint", "p", "i", "d", "heater_output_range"] + expected_config = { + f"lakeshore-control_channels-{control_channel}-{pv}": {"value": ANY} + for pv in config + } + expected_config["lakeshore-_control_channel"] = { + "value": control_channel, + } + await assert_configuration(lakeshore, expected_config, full_match=False) + + +async def test_lakeshore_read( + lakeshore: Lakeshore, +): + expected_reading = {} + + for control_channel in range(1, 5): + expected_reading[ + f"lakeshore-control_channels-{control_channel}-user_setpoint" + ] = {"value": ANY} + + expected_reading[f"lakeshore-readback-{control_channel}"] = { + "value": ANY, + } + await assert_reading(lakeshore, expected_reading, full_match=False) + + +@pytest.mark.parametrize( + "readback_channel", + [3, 2, 1, 4], +) +async def test_lakeshore_count_with_hints( + lakeshore: Lakeshore, RE: RunEngine, readback_channel: int +): + docs = defaultdict(list) + + def capture_emitted(name, doc): + docs[name].append(doc) + + RE(abs_set(lakeshore.hints_channel, 
readback_channel, wait=True)) + RE(count([lakeshore]), capture_emitted) + assert ( + docs["descriptor"][0]["hints"]["lakeshore"]["fields"][0] + == f"lakeshore-readback-{readback_channel}" + ) + + +@pytest.mark.parametrize( + "readback_channel", + [3, 2, 1, 4], +) +async def test_lakeshore_hints_read( + lakeshore: Lakeshore, RE: RunEngine, readback_channel: int +): + docs = defaultdict(list) + + def capture_emitted(name, doc): + docs[name].append(doc) + + RE(abs_set(lakeshore.hints_channel, readback_channel), wait=True) + RE(count([lakeshore.hints_channel]), capture_emitted) + assert docs["event"][0]["data"]["lakeshore-hints_channel"] == readback_channel From 20e2a7d8f5d17661acd20837a7922028e9e89511 Mon Sep 17 00:00:00 2001 From: Richard Dixey <185198552+RJCD-Diamond@users.noreply.github.com> Date: Mon, 22 Sep 2025 17:50:47 +0100 Subject: [PATCH 09/23] Add data PV's to Tetramm so the values can be used for alignment scans, read etc (#1561) * add data to tetramm * Fix naming of sum_y Co-authored-by: Dominic Oram --------- Co-authored-by: Dominic Oram --- src/dodal/devices/tetramm.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/src/dodal/devices/tetramm.py b/src/dodal/devices/tetramm.py index 962a5841be0..19a5839efff 100644 --- a/src/dodal/devices/tetramm.py +++ b/src/dodal/devices/tetramm.py @@ -23,7 +23,7 @@ NDFileHDFIO, NDPluginBaseIO, ) -from ophyd_async.epics.core import PvSuffix, stop_busy_record +from ophyd_async.epics.core import PvSuffix, epics_signal_r, stop_busy_record class TetrammRange(StrictEnum): @@ -225,6 +225,19 @@ def __init__( self.file_io = NDFileHDFIO(prefix + fileio_suffix) controller = TetrammController(self.driver) + self.current1 = epics_signal_r(float, prefix + "Cur1:MeanValue_RBV") + self.current2 = epics_signal_r(float, prefix + "Cur2:MeanValue_RBV") + self.current3 = epics_signal_r(float, prefix + "Cur3:MeanValue_RBV") + self.current4 = epics_signal_r(float, prefix + "Cur4:MeanValue_RBV") + + self.sum_x 
= epics_signal_r(float, prefix + "SumX:MeanValue_RBV") + self.sum_y = epics_signal_r(float, prefix + "SumY:MeanValue_RBV") + self.sum_all = epics_signal_r(float, prefix + "SumAll:MeanValue_RBV") + self.diff_x = epics_signal_r(float, prefix + "DiffX:MeanValue_RBV") + self.diff_y = epics_signal_r(float, prefix + "DiffY:MeanValue_RBV") + self.pos_x = epics_signal_r(float, prefix + "PosX:MeanValue_RBV") + self.pos_y = epics_signal_r(float, prefix + "PosY:MeanValue_RBV") + writer = ADHDFWriter( fileio=self.file_io, path_provider=path_provider, From ff3736028e63c3e0838d4fc1101fff1b1e03d5f0 Mon Sep 17 00:00:00 2001 From: Teo Ching Date: Mon, 22 Sep 2025 20:55:26 +0100 Subject: [PATCH 10/23] Remove get xyz limits (#1528) * Remove get_xyz_limits * Remove test in test_gridscan as test already in smargon * Remove i17.py --------- Co-authored-by: Dominic Oram --- src/dodal/devices/smargon.py | 56 ---------------------------------- tests/devices/test_gridscan.py | 25 --------------- 2 files changed, 81 deletions(-) diff --git a/src/dodal/devices/smargon.py b/src/dodal/devices/smargon.py index 9e7049745a1..adee4b5fb5d 100644 --- a/src/dodal/devices/smargon.py +++ b/src/dodal/devices/smargon.py @@ -1,13 +1,9 @@ import asyncio -from collections.abc import Collection, Generator -from dataclasses import dataclass from enum import Enum from math import isclose from typing import TypedDict, cast -from bluesky import plan_stubs as bps from bluesky.protocols import Movable -from bluesky.utils import Msg from ophyd_async.core import ( AsyncStatus, Device, @@ -66,40 +62,6 @@ async def set(self, value: StubPosition): await self.to_robot_load.set(1) -@dataclass -class AxisLimit: - """Represents the minimum and maximum allowable values on an axis""" - - min_value: float - max_value: float - - def contains(self, pos: float): - """Determine if the specified value is within limits. 
- - Args: - pos: the value to check - - Returns: - True if the value does not exceed the limits - """ - return self.min_value <= pos <= self.max_value - - -@dataclass -class XYZLimits: - """The limits of the smargon x, y, z axes.""" - - x: AxisLimit - y: AxisLimit - z: AxisLimit - - def position_valid(self, pos: Collection[float]) -> bool: - return all( - axis_limits.contains(value) - for axis_limits, value in zip([self.x, self.y, self.z], pos, strict=False) - ) - - class DeferMoves(StrictEnum): ON = "Defer On" OFF = "Defer Off" @@ -144,24 +106,6 @@ def __init__(self, prefix: str, name: str = ""): super().__init__(prefix, name) - def get_xyz_limits(self) -> Generator[Msg, None, XYZLimits]: - """Obtain a plan stub that returns the smargon XYZ axis limits - - Yields: - Bluesky messages - - Returns: - the axis limits - """ - limits = {} - for name, pv in [ - (attr_name, getattr(self, attr_name)) for attr_name in ["x", "y", "z"] - ]: - min_value = yield from bps.rd(pv.low_limit_travel) - max_value = yield from bps.rd(pv.high_limit_travel) - limits[name] = AxisLimit(min_value, max_value) - return XYZLimits(**limits) - @AsyncStatus.wrap async def set(self, value: CombinedMove): """This will move all motion together in a deferred move. 
diff --git a/tests/devices/test_gridscan.py b/tests/devices/test_gridscan.py index 86fc8345ecb..78fac3f9d86 100644 --- a/tests/devices/test_gridscan.py +++ b/tests/devices/test_gridscan.py @@ -194,31 +194,6 @@ def composite_with_smargon(motor_bundle_with_limits): return CompositeWithSmargon(motor_bundle_with_limits) -@pytest.mark.parametrize( - "position, expected_in_limit", - [ - (-1, False), - (20, False), - (5, True), - ], -) -async def test_within_limits_check( - RE: RunEngine, motor_bundle_with_limits, position, expected_in_limit -): - def check_limits_plan(): - limits = yield from motor_bundle_with_limits.get_xyz_limits() - assert limits.x.contains(position) == expected_in_limit - - RE(check_limits_plan()) - - -PASSING_LINE_1 = (1, 5, 1) -PASSING_LINE_2 = (0, 10, 0.5) -FAILING_LINE_1 = (-1, 20, 0.5) -PASSING_CONST = 6 -FAILING_CONST = 15 - - def check_parameter_validation(params, composite, expected_in_limits): if expected_in_limits: yield from params.validate_against_hardware(composite) From ce34360998ac232f4708adae2f2d5e31bd47d533 Mon Sep 17 00:00:00 2001 From: Shreelakshmi Iyengar Date: Tue, 23 Sep 2025 08:55:24 +0100 Subject: [PATCH 11/23] feat: instantiate panda on i19_2.py (#1551) Co-authored-by: Dominic Oram --- src/dodal/beamlines/i19_2.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/dodal/beamlines/i19_2.py b/src/dodal/beamlines/i19_2.py index f7a56b7876d..bb95dc5ec9e 100644 --- a/src/dodal/beamlines/i19_2.py +++ b/src/dodal/beamlines/i19_2.py @@ -1,5 +1,8 @@ +from ophyd_async.fastcs.panda import HDFPanda + from dodal.common.beamlines.beamline_utils import ( device_factory, + get_path_provider, ) from dodal.common.beamlines.beamline_utils import ( set_beamline as set_utils_beamline, @@ -84,3 +87,11 @@ def backlight() -> BacklightPosition: If this is called when already instantiated in i19-2, it will return the existing object. 
""" return BacklightPosition(prefix=f"{PREFIX.beamline_prefix}-EA-IOC-12:") + + +@device_factory() +def panda() -> HDFPanda: + return HDFPanda( + prefix=f"{PREFIX.beamline_prefix}-EA-PANDA-01:", + path_provider=get_path_provider(), + ) From c5528017d60a17b23b4b95ebd4fced05d9874e7b Mon Sep 17 00:00:00 2001 From: Dominic Oram Date: Tue, 23 Sep 2025 11:17:38 +0100 Subject: [PATCH 12/23] Add basic outlier rejection for murko (#1503) * Add basic outlier rejection for murko * Address PR comments * Clean up test structure (#1502) * Removed unit_tests dir from tests/devices/unit_tests, tests/unit_tests * Removed unit_tests folder from tests/beamlines/unit_tests * Updated test_data paths * Convert all remaining rst docs to md (#1487) * Add docs for handling devices shared between multiple endstations (#1483) * Create the file * Add link * Add decision * Fix typo * Wait for motors to start moving when doing deferred moves (#1401) * Wait for motors to start moving when doing deferred moves * Fix tests and linting * Check setpoints rather than dmov * Fix test * Fix typing * Fix tests * move patch_motors to dodal.testing (#1513) * move patch_motors to dodal.testing * Added arguments for default values with doc string * Updated docs --------- Co-authored-by: Dominic Oram * Fix test motor limits with new ophyd async (#1516) * move patch_motors to dodal.testing * Added arguments for default values with doc string * Updated docs * Fixed i10 id and undulator_dcm tests * fixed bimorph test * Removed unused import * Update more tests to use proper motor patch --------- Co-authored-by: Dominic Oram * When thawer is triggered again, reset the timer instead of raising an exception (#1510) * Change thawer behaviour so that starting the timer again does not throw * Ophyd async motor limit check in smargon (#1511) * Wait for motors to start moving when doing deferred moves * Fix tests and linting * Check setpoints rather than dmov * Fix test * Fix typing * Fix tests * Add motor limit 
checking from ophyd_async * Add mock values for motor limit * Add test for MotorLimitsException to be correctly raised * Fix linting * move patch_motors to dodal.testing * Fix wrong comparision between signal and float * Change ophyd-async requirement to test motor limit * Revert "Change ophyd-async requirement to test motor limit" This reverts commit 0a064f2494d435ce0bf67ec060f8a25d122c18dd. * Add dev-requirements.txt * Try changing pyproject.toml * Revert "Try changing pyproject.toml" This reverts commit e8fa65f687ee7fd599f00298301f3fd1ff93fc2e. * Move dev-requirements.txt around * Revert "Move dev-requirements.txt around" This reverts commit 9d3dd246978c427a2e38418733ecf9a6195659f9. * Test * Fix spelling * Chnge pyproject.toml * test * test * Changed pyproject.toml for ophyd-async * Fix typo * Added arguments for default values with doc string * Updated docs * Changing test * Fix test in i10apple2 * Test * Fixed i10 id and undulator_dcm tests * test * test * test * Add mock value for mock_id_pgm * Add mock value for slits in bimorph mirror * Fix bimorph test high limit sign typo * Fix typo * fixed bimorph test * Removed unused import * Update more tests to use proper motor patch * Change test value due to change in mock limit * Change limit for testing outside limit * Remove test_utils.py in the wrong place * Remove motor limit setting in test in i10apple2 * Removing the pin to ophyd-async as there is new release * Removing the pin to ophyd-async as there is new release * set mock value in test outside motor limit in smaron * Revert "Change limit for testing outside limit" This reverts commit c97e07e73bc03d55a2edb56d87f9b950b434a320. 
--------- Co-authored-by: Dominic Oram Co-authored-by: Oli Wenman Co-authored-by: oliwenmandiamond <136330507+oliwenmandiamond@users.noreply.github.com> * Change tetramm statistic plugin to stop tetramms on i22 failing to connect (#1506) * remove tet plugins * removal of unused import * plugin changed --------- Co-authored-by: root * add i17 to beamline with pgm. (#1525) * add i17 to beamline with pgm. * Fix tests * Address review comments --------- Co-authored-by: Jacob Williamson Co-authored-by: oliwenmandiamond <136330507+oliwenmandiamond@users.noreply.github.com> Co-authored-by: Noemi Frisina <54588199+noemifrisina@users.noreply.github.com> Co-authored-by: rtuck99 Co-authored-by: Teo Ching Co-authored-by: Oli Wenman Co-authored-by: Richard Dixey <185198552+RJCD-Diamond@users.noreply.github.com> Co-authored-by: root Co-authored-by: Raymond Fan --- src/dodal/devices/i04/murko_results.py | 72 ++++++++++++++++++------- tests/devices/i04/test_murko_results.py | 62 +++++++++++++++++---- 2 files changed, 105 insertions(+), 29 deletions(-) diff --git a/src/dodal/devices/i04/murko_results.py b/src/dodal/devices/i04/murko_results.py index 3918564577a..d5fbdae64f1 100644 --- a/src/dodal/devices/i04/murko_results.py +++ b/src/dodal/devices/i04/murko_results.py @@ -1,5 +1,6 @@ import json import pickle +from dataclasses import dataclass from enum import Enum from typing import TypedDict @@ -21,9 +22,6 @@ NO_MURKO_RESULT = (-1, -1) -MurkoResult = dict -FullMurkoResults = dict[str, list[MurkoResult]] - class MurkoMetadata(TypedDict): zoom_percentage: float @@ -42,6 +40,14 @@ class Coord(Enum): z = 2 +@dataclass +class MurkoResult: + x_dist_mm: float + y_dist_mm: float + omega: float + uuid: str + + class MurkoResultsDevice(StandardReadable, Triggerable, Stageable): """Device that takes crystal centre values from Murko and uses them to set the x, y, z coordinate of the sample to be in line with the beam centre. 
@@ -59,6 +65,8 @@ class MurkoResultsDevice(StandardReadable, Triggerable, Stageable): """ TIMEOUT_S = 2 + PERCENTAGE_TO_USE = 25 + NUMBER_OF_WRONG_RESULTS_TO_LOG = 5 def __init__( self, @@ -77,9 +85,7 @@ def __init__( self._last_omega = 0 self.sample_id = soft_signal_rw(str) # Should get from redis self.stop_angle = stop_angle - self.x_dists_mm = [] - self.y_dists_mm = [] - self.omegas = [] + self.results: list[MurkoResult] = [] with self.add_children_as_readables(): # Diffs from current x/y/z @@ -110,15 +116,19 @@ async def trigger(self): break await self.process_batch(message, sample_id) - for i in range(len(self.omegas)): - LOGGER.debug( - f"omega: {round(self.omegas[i], 2)}, x: {round(self.x_dists_mm[i], 2)}, y: {round(self.y_dists_mm[i], 2)}" - ) + for result in self.results: + LOGGER.debug(result) + + self.filter_outliers() + + x_dists_mm = [result.x_dist_mm for result in self.results] + y_dists_mm = [result.y_dist_mm for result in self.results] + omegas = [result.omega for result in self.results] - LOGGER.info(f"Using average of x beam distances: {self.x_dists_mm}") - avg_x = float(np.mean(self.x_dists_mm)) - LOGGER.info(f"Finding least square y and z from y distances: {self.y_dists_mm}") - best_y, best_z = get_yz_least_squares(self.y_dists_mm, self.omegas) + LOGGER.info(f"Using average of x beam distances: {x_dists_mm}") + avg_x = float(np.mean(x_dists_mm)) + LOGGER.info(f"Finding least square y and z from y distances: {y_dists_mm}") + best_y, best_z = get_yz_least_squares(y_dists_mm, omegas) # x, y, z are relative to beam centre. Need to move negative these values to get centred. 
self._x_mm_setter(-avg_x) self._y_mm_setter(-best_y) @@ -163,15 +173,37 @@ def process_result(self, result: dict, metadata: MurkoMetadata): centre_px[0], centre_px[1], ) - self.x_dists_mm.append( - beam_dist_px[0] * metadata["microns_per_x_pixel"] / 1000 + self.results.append( + MurkoResult( + x_dist_mm=beam_dist_px[0] * metadata["microns_per_x_pixel"] / 1000, + y_dist_mm=beam_dist_px[1] * metadata["microns_per_y_pixel"] / 1000, + omega=omega, + uuid=metadata["uuid"], + ) ) - self.y_dists_mm.append( - beam_dist_px[1] * metadata["microns_per_y_pixel"] / 1000 - ) - self.omegas.append(omega) self._last_omega = omega + def filter_outliers(self): + """Whilst murko is not fully trained it often gives us poor results. + When it is wrong it usually picks up the base of the pin, rather than the tip, + meaning that by keeping only a percentage of the results with the smallest X we + remove many of the outliers. + """ + LOGGER.info(f"Number of results before filtering: {len(self.results)}") + sorted_results = sorted(self.results, key=lambda item: item.x_dist_mm) + + worst_results = [ + r.uuid for r in sorted_results[-self.NUMBER_OF_WRONG_RESULTS_TO_LOG :] + ] + + LOGGER.info( + f"Worst {self.NUMBER_OF_WRONG_RESULTS_TO_LOG} murko results were {worst_results}" + ) + cutoff = max(1, int(len(sorted_results) * self.PERCENTAGE_TO_USE / 100)) + smallest_x = sorted_results[:cutoff] + self.results = smallest_x + LOGGER.info(f"Number of results after filtering: {len(self.results)}") + def get_yz_least_squares(vertical_dists: list, omegas: list) -> tuple[float, float]: """Get the least squares solution for y and z from the vertical distances and omega angles. 
diff --git a/tests/devices/i04/test_murko_results.py b/tests/devices/i04/test_murko_results.py index 5264ace19ab..1df518560be 100644 --- a/tests/devices/i04/test_murko_results.py +++ b/tests/devices/i04/test_murko_results.py @@ -10,6 +10,7 @@ from dodal.devices.i04.murko_results import ( MurkoMetadata, + MurkoResult, MurkoResultsDevice, get_yz_least_squares, ) @@ -148,6 +149,7 @@ def json_metadata( microns_pyp: float = 10, beam_centre_i: int = 50, beam_centre_j: int = 50, + uuid: str = "UUID", ) -> dict: metadatas = {} for i in range(n): @@ -157,6 +159,7 @@ def json_metadata( "microns_per_y_pixel": microns_pyp, "beam_centre_i": beam_centre_i, "beam_centre_j": beam_centre_j, + "uuid": uuid, } metadatas[i] = json.dumps(metadata) return metadatas @@ -196,13 +199,12 @@ def test_process_result_appends_lists_with_correct_values( sample_id="test", ) - assert murko_results.x_dists_mm == [] - assert murko_results.y_dists_mm == [] - assert murko_results.omegas == [] + assert murko_results.results == [] murko_results.process_result(result, metadata) - assert murko_results.x_dists_mm == [0.2 * 100 * 5 / 1000] - assert murko_results.y_dists_mm == [0] - assert murko_results.omegas == [60] + assert len(murko_results.results) == 1 + assert murko_results.results[0].x_dist_mm == 0.2 * 100 * 5 / 1000 + assert murko_results.results[0].y_dist_mm == 0 + assert murko_results.results[0].omega == 60 @patch("dodal.devices.i04.murko_results.calculate_beam_distance") @@ -229,9 +231,7 @@ def test_process_result_skips_when_no_result_from_murko( with caplog.at_level("INFO"): murko_results.process_result(result, metadata) - assert murko_results.x_dists_mm == [] - assert murko_results.y_dists_mm == [] - assert murko_results.omegas == [] + assert murko_results.results == [] assert mock_calculate_beam_distance.call_count == 0 assert "Murko didn't produce a result, moving on" in caplog.text @@ -283,6 +283,7 @@ async def test_process_batch_makes_correct_calls( "microns_per_y_pixel": 10, 
"beam_centre_i": 50, "beam_centre_j": 50, + "uuid": "UUID", }, ) @@ -350,6 +351,7 @@ async def test_correct_movement_given_90_180_degrees( x = 0.5 y = 0.6 z = 0.3 + murko_results.PERCENTAGE_TO_USE = 100 # type:ignore mock_x_setter, mock_y_setter, mock_z_setter = mock_setters messages, metadata = get_messages( xyz=(x, y, z), beam_centre_i=90, beam_centre_j=40, shape_x=100, shape_y=100 @@ -370,6 +372,7 @@ async def test_correct_movement_given_45_and_135_angles( murko_results: MurkoResultsDevice, mock_setters: tuple[MagicMock, MagicMock, MagicMock], ): + murko_results.PERCENTAGE_TO_USE = 100 # type:ignore x = 0.5 y = 0.3 z = 0.4 @@ -394,6 +397,7 @@ async def test_correct_movement_given_multiple_angles_and_x_drift( murko_results: MurkoResultsDevice, mock_setters: tuple[MagicMock, MagicMock, MagicMock], ): + murko_results.PERCENTAGE_TO_USE = 100 # type:ignore x = 0.1 y = 0.2 z = 0.3 @@ -493,3 +497,43 @@ async def test_assert_unsubscribes_to_queue_on_unstage( await murko_results.unstage() mock_pubsub.unsubscribe.assert_called_once() + + +@pytest.mark.parametrize( + "total_from_murko, percentage_to_keep, expected_left", + [(100, 25, 25), (10, 25, 2), (1000, 1, 10), (8, 100, 8), (5, 50, 2)], +) +def test_given_n_results_filter_outliers_will_reduce_down_to_smaller_amount( + total_from_murko: int, + percentage_to_keep: int, + expected_left: int, + murko_results: MurkoResultsDevice, +): + murko_results.results = [ + MurkoResult(x_dist_mm=i, y_dist_mm=i, omega=i, uuid=str(i)) + for i in range(total_from_murko) + ] + + murko_results.PERCENTAGE_TO_USE = percentage_to_keep # type:ignore + + murko_results.filter_outliers() + + assert isinstance(murko_results.results, list) + assert len(murko_results.results) == expected_left + + +def test_when_results_filtered_then_smallest_x_kept(murko_results: MurkoResultsDevice): + murko_results.results = [ + MurkoResult(x_dist_mm=4, y_dist_mm=8, omega=0, uuid="a"), + MurkoResult(x_dist_mm=0, y_dist_mm=90, omega=10, uuid="b"), + 
MurkoResult(x_dist_mm=6, y_dist_mm=63, omega=20, uuid="c"), + MurkoResult(x_dist_mm=7, y_dist_mm=8, omega=30, uuid="d"), + ] + + murko_results.filter_outliers() + assert len(murko_results.results) == 1 + results = murko_results.results[0] + assert results.x_dist_mm == 0 + assert results.y_dist_mm == 90 + assert results.omega == 10 + assert results.uuid == "b" From 25fc7f6dee5aa295da0c4222cabb0186bd561aec Mon Sep 17 00:00:00 2001 From: oliwenmandiamond <136330507+oliwenmandiamond@users.noreply.github.com> Date: Tue, 23 Sep 2025 14:18:36 +0100 Subject: [PATCH 13/23] Create energy source devices (#1446) * Create SelectedSource enum and remove used region parameter logic * Removed test to check for invalid source as no longer needed with enums * Updated this to only include addition of new devices, will update exisitng classes use in different branch * Added comment on removing pytest.fixture * Update energy_sources fixture * Fixed tests * Created common enums and updated devices to use (#1415) * Created common enums and updated devices to use * Corrected backlight OnOff * Switched ordering of enums * Correct spelling * Updated remaining enums that have extra options * Renamed InOut to InState * Updated to use enums from ophyd-async * Renamed enums to Upper * test * Restore previous whitespace formatting * Updated enums reusing ones from ophyd-async to use .value, same as ophyd-async --------- Co-authored-by: Joseph Ware <53935796+DiamondJoseph@users.noreply.github.com> * Add i11 beamline module, Mythen3 detector and basic devices (#1377) * added i11.py, i11 specific devices * renamed stages * added diff stages * made cryostream into standardreadable * cryostreams 1+2 working * mythen3 baseclass added * mythen3 connecting * removed mac, renamed detector to mythen * changed cstrm to ocs * added deadtime to mythen3 * added sample spinner * added eurotherm + cyberstar blower * add with children as readbles to classes * changed eurotherm signals to rbv * change blower 
names to csb1/2 * fixed error on mythen3 docstring * added basic methods to cyberstar blower * fixed signal types in CryoStream * added series to cryostream * changed methods to bluesky protocol * added initial robot class * i11 robot connecting * changed sample_env name to spinner * fixed spinner in bl module, change start signal to x * spin signal to rw * i11 robot signals added * super added to mythen, modified robot * added mythen3 test * added mythen connect test * added docstrings for blower and eurotherm * i11 robot added load_sample and methods * changed jaws to grip * stop and start methods added * added robot test * added robot test * modify cyberstarblower and add test, updated mythen deadtime * import motors from ophyd_async * improve Eurotherm * impproved robot and spinner inheretance * fixed naming * fixes to tests * json * updates to tests * added blower tests * added more robot tests * parameterised tests * fixed type checking * add full coverage for i11robot * fixed type * added test to mythen3 * restored settings * tidied up mythen and added more tests * improved spinner, added set speed and test * edits to improve typechecking and other small improvements * added robot at position logging test * removed _get_i11_robot * changed i11robot to NX100 * reorganise imports * lowercase file * seperated spinner pause resume tests * removed eurotherm infixes * minor update to further paramterise nx100 test * Update test_i11devices.py * added missing type hints to eurotherm, blower and mythen3 * made eurothermal rbv suffix constant * made robot states more descriptive * fixed carousel spelling on robot * add readables to cryo * made blowers and eurotherms compositional * added docs strings to i11 robot * updated cryostream imports * using ophyd_async enums on/off enabled/disabled * type checking fixed for cyberstar blower, and Eurotherms * Add default image mode setting for analysers (#1443) * Add default image mode setting for analysers and clean up 
imports * Set default image mode to SINGLE in ElectronAnalyserController and clean up test assertions * set image mode before arm - may be we should improve doc to say the order explicitly here. * move image_mode set to stage method * Refactor staging methods to set image mode directly * Remove image mode setting from trigger method in AbstractElectronAnalyserDetector * Enhance staging methods to ensure detector readiness and set default image mode to SINGLE * Fixed tests * Fixed flakey test, added test for unstage * Removed debugging flakey test function * Removed unneeded tests * Move Stageable to ElectronAnalyserDetector * Reorded doc string of stage so it matches order of functions. --------- Co-authored-by: Oli Wenman Co-authored-by: oliwenmandiamond <136330507+oliwenmandiamond@users.noreply.github.com> * Move test_data to where it is used and standardise (#1419) * Moved test_data to where it is used * Add back topup tests * Moved test_beamline_parameters.txt back to tests/test_data as used by more than one test * Move test_beamline_parameters.txt back to tests/test_data as used by multiple tests * Move beamline parameter files back to tests/test_data as they are used by several tests * Created constant.py files to strore file paths * Defined all file paths in local constants.py and import part * Move oav images to test_data * Changed test_data to be a python module where you can then import static paths * Removed spaces between variables * Removed hard coded paths * Removed paths.py and added the paths directly to the __init__.py * Fixed topup tests * Updated imports to be from test_data * Applied same standard to test_daq_configuration * Updated i10 ID tests * Fixed test on python 3.11 * Update i10 lookupTables to be test_data * applied feedback * Corrected text_oav_zoom_level file name * I24: Update pmac pvs (#1344) Fixes Issue #1319 Update PMAC PVs for I24 including * Fix pmac prefix * Fix pmac homing string * Fix coordinate system index * Update tests for 
string constants * Update prefix for stages * Modify devices for i03's default UDC state (#1364) * Allow reading when the aperture scatterguard is parked * If ap_sg is parked then unpark it correctly * Added ability to park the scintillator * Rename and tidy up collimation table * Correctly move beamstop out when parked * Add ability to check cryostream and move fluorescence detector * Add new devices to i03 * Fix test and address reviews * Update tests/devices/unit_tests/test_scintillator.py Co-authored-by: olliesilvester <122091460+olliesilvester@users.noreply.github.com> * Address review comments * Fix test * Do not allow aperture scatterguard to be parked --------- Co-authored-by: olliesilvester <122091460+olliesilvester@users.noreply.github.com> * Update electron analyser helper util for tests to be modular (#1400) * initial commit * Corrected import for device_factory * Updated device_factory to have create_detector and create_driver with doc string * Update device_factory.py create_detector doc string * Updated factory methods to remove prefix and have supplied by tests * Added RunEngine to detector driver fixtures * Removed connect=True as it is True by default * Tidy up allvalvescontrol (#1058) * Tidy up allvalvescontrol * Simplify with Generics --------- Co-authored-by: Joseph Ware <53935796+DiamondJoseph@users.noreply.github.com> * Fix pressure cell controller PVs (#1444) * Pressure cell controller: Updated to use the wrapped PV declaration format, renamed duplicate attributes found during testing, and added docstring * Lint formatting fix * Updated PV's to be read only and to use _RBV (#1452) * Create SelectedSource enum and remove used region parameter logic (#1448) * Create SelectedSource enum and remove used region parameter logic * Removed test to check for invalid source as no longer needed with enums * Update energy_sources fixture * Replaced talisman with gitleaks as pre-commit hook for detecting potential credentials (#1454) * Updated sequence 
to use files defined from __init__ * Update test_undulator_dcm.py --------- Co-authored-by: Paul Hathaway <7045365+phathaway@users.noreply.github.com> Co-authored-by: Dominic Oram Co-authored-by: olliesilvester <122091460+olliesilvester@users.noreply.github.com> Co-authored-by: barnettwilliam <32113205+barnettwilliam@users.noreply.github.com> Co-authored-by: Joseph Ware <53935796+DiamondJoseph@users.noreply.github.com> Co-authored-by: wajidzahoor-dls * Created AbstractEnergySource to create base signal * Updated doc string * Correct type hint for test * Updated source to be clearly a reference * Updated tests to be more dyanmic * Updated other electron analyser tests from rename * chore: Add support for python 3.13 (#1466) * chore: Add support for python 3.13 * fix: Fix problems caused by tighter type checking around StrictEnum * fix: Replace mocked behaviour in test_snapshots * Updated reads to use real energy signals to keep metadata * Updated docs * Updated DualEnergySource to allow SignalR in __init__ * Update src/dodal/devices/electron_analyser/energy_sources.py Co-authored-by: Joseph Ware <53935796+DiamondJoseph@users.noreply.github.com> --------- Co-authored-by: Joseph Ware <53935796+DiamondJoseph@users.noreply.github.com> Co-authored-by: Richard Dixey <185198552+RJCD-Diamond@users.noreply.github.com> Co-authored-by: Fajin Yuan Co-authored-by: Paul Hathaway <7045365+phathaway@users.noreply.github.com> Co-authored-by: Dominic Oram Co-authored-by: olliesilvester <122091460+olliesilvester@users.noreply.github.com> Co-authored-by: barnettwilliam <32113205+barnettwilliam@users.noreply.github.com> Co-authored-by: wajidzahoor-dls --- .../devices/electron_analyser/__init__.py | 4 + .../electron_analyser/energy_sources.py | 101 ++++++++++++++++++ tests/devices/electron_analyser/conftest.py | 49 +++++++-- .../electron_analyser/test_energy_sources.py | 100 +++++++++++++++++ 4 files changed, 243 insertions(+), 11 deletions(-) create mode 100644 
src/dodal/devices/electron_analyser/energy_sources.py create mode 100644 tests/devices/electron_analyser/test_energy_sources.py diff --git a/src/dodal/devices/electron_analyser/__init__.py b/src/dodal/devices/electron_analyser/__init__.py index d801957ed48..f3151eec29b 100644 --- a/src/dodal/devices/electron_analyser/__init__.py +++ b/src/dodal/devices/electron_analyser/__init__.py @@ -4,6 +4,7 @@ TElectronAnalyserDetector, TElectronAnalyserRegionDetector, ) +from .energy_sources import DualEnergySource, EnergySource from .enums import EnergyMode, SelectedSource from .types import ( ElectronAnalyserDetectorImpl, @@ -16,6 +17,9 @@ __all__ = [ "to_binding_energy", "to_kinetic_energy", + "DualEnergySource", + "SelectedSource", + "EnergySource", "EnergyMode", "SelectedSource", "ElectronAnalyserDetector", diff --git a/src/dodal/devices/electron_analyser/energy_sources.py b/src/dodal/devices/electron_analyser/energy_sources.py new file mode 100644 index 00000000000..4b6affeb72b --- /dev/null +++ b/src/dodal/devices/electron_analyser/energy_sources.py @@ -0,0 +1,101 @@ +from abc import abstractmethod + +from ophyd_async.core import ( + Reference, + SignalR, + StandardReadable, + StandardReadableFormat, + derived_signal_r, + soft_signal_r_and_setter, + soft_signal_rw, +) + +from dodal.devices.electron_analyser.enums import SelectedSource + + +class AbstractEnergySource(StandardReadable): + """ + Abstract device that wraps an energy source signal and provides common interface via + a energy signal. + """ + + def __init__(self, name: str = "") -> None: + super().__init__(name) + + @property + @abstractmethod + def energy(self) -> SignalR[float]: + """ + Signal to provide the excitation energy value in eV. + """ + + +class EnergySource(AbstractEnergySource): + """ + Wraps a signal that relates to energy and provides common interface via energy + signal. 
It provides the name of the wrapped signal as a child signal in the + read_configuration via wrapped_device_name and adds the signal as a readable. + """ + + def __init__(self, source: SignalR[float], name: str = "") -> None: + self.add_readables([source]) + with self.add_children_as_readables(StandardReadableFormat.CONFIG_SIGNAL): + self.wrapped_device_name, _ = soft_signal_r_and_setter( + str, initial_value=source.name + ) + self._source_ref = Reference(source) + super().__init__(name) + + @property + def energy(self) -> SignalR[float]: + return self._source_ref() + + +class DualEnergySource(AbstractEnergySource): + """ + Holds two EnergySource devices and provides a signal to read energy depending on + which source is selected. This is controlled by a selected_source signal which can + switch source using SelectedSource enum. Both sources energy is recorded in the + read, the energy signal is used as a helper signal to know which source is being + used. + """ + + def __init__( + self, source1: SignalR[float], source2: SignalR[float], name: str = "" + ): + """ + Args: + source1: Default energy signal to select. + source2: Secondary energy signal to select. + name: name of this device. 
+ """ + + with self.add_children_as_readables(): + self.selected_source = soft_signal_rw( + SelectedSource, initial_value=SelectedSource.SOURCE1 + ) + self.source1 = EnergySource(source1) + self.source2 = EnergySource(source2) + + self._selected_energy = derived_signal_r( + self._get_excitation_energy, + "eV", + selected_source=self.selected_source, + source1=self.source1.energy, + source2=self.source2.energy, + ) + + super().__init__(name) + + def _get_excitation_energy( + self, selected_source: SelectedSource, source1: float, source2: float + ) -> float: + match selected_source: + case SelectedSource.SOURCE1: + return source1 + case SelectedSource.SOURCE2: + return source2 + + @property + def energy(self) -> SignalR[float]: + return self._selected_energy diff --git a/tests/devices/electron_analyser/conftest.py b/tests/devices/electron_analyser/conftest.py index 76314cc8778..d756c7d9901 100644 --- a/tests/devices/electron_analyser/conftest.py +++ b/tests/devices/electron_analyser/conftest.py @@ -1,11 +1,13 @@ from typing import Any import pytest -from ophyd_async.core import SignalR -from ophyd_async.sim import SimMotor +from bluesky.run_engine import RunEngine +from ophyd_async.core import SignalR, init_devices from dodal.devices.electron_analyser import ( + DualEnergySource, ElectronAnalyserDetector, + EnergySource, SelectedSource, ) from dodal.devices.electron_analyser.abstract import ( @@ -22,28 +24,53 @@ VGScientaAnalyserDriverIO, VGScientaSequence, ) +from dodal.devices.i09 import DCM, Grating +from dodal.devices.pgm import PGM +from dodal.testing import patch_motor from tests.devices.electron_analyser.helper_util import ( get_test_sequence, ) @pytest.fixture -async def pgm_energy() -> SimMotor: - return SimMotor("pgm_energy") +def dcm(RE: RunEngine) -> DCM: + with init_devices(mock=True): + dcm = DCM("DCM:") + patch_motor(dcm.energy_in_kev, initial_position=2.2) + return dcm @pytest.fixture -async def dcm_energy() -> SimMotor: - return 
SimMotor("dcm_energy") +def pgm(RE: RunEngine) -> PGM: + with init_devices(mock=True): + pgm = PGM("PGM:", Grating) + patch_motor(pgm.energy, initial_position=500) + return pgm @pytest.fixture -async def energy_sources( - dcm_energy: SimMotor, pgm_energy: SimMotor -) -> dict[str, SignalR[float]]: +async def single_energy_source(dcm: DCM, RE: RunEngine) -> EnergySource: + with init_devices(mock=True): + single_energy_source = EnergySource(dcm.energy_in_ev) + return single_energy_source + + +@pytest.fixture +async def dual_energy_source(dcm: DCM, pgm: PGM, RE: RunEngine) -> DualEnergySource: + async with init_devices(mock=True): + dual_energy_source = DualEnergySource( + source1=dcm.energy_in_ev, source2=pgm.energy.user_readback + ) + return dual_energy_source + + +# ToDo - This will be removed once existing devices use the energy source device rather +# than dict. +@pytest.fixture +async def energy_sources(dcm: DCM, pgm: PGM) -> dict[str, SignalR[float]]: return { - SelectedSource.SOURCE1: dcm_energy.user_readback, - SelectedSource.SOURCE2: pgm_energy.user_readback, + SelectedSource.SOURCE1: dcm.energy_in_ev, + SelectedSource.SOURCE2: pgm.energy.user_readback, } diff --git a/tests/devices/electron_analyser/test_energy_sources.py b/tests/devices/electron_analyser/test_energy_sources.py new file mode 100644 index 00000000000..de41e4fd24e --- /dev/null +++ b/tests/devices/electron_analyser/test_energy_sources.py @@ -0,0 +1,100 @@ +from ophyd_async.testing import ( + assert_configuration, + assert_reading, + assert_value, + partial_reading, +) + +from dodal.devices.electron_analyser import ( + DualEnergySource, + EnergySource, + SelectedSource, +) +from dodal.devices.i09 import DCM +from dodal.devices.pgm import PGM + + +async def test_single_energy_source_read( + single_energy_source: EnergySource, + dcm: DCM, +) -> None: + await assert_reading( + single_energy_source, + { + f"{dcm.energy_in_ev.name}": partial_reading( + await dcm.energy_in_ev.get_value() + ), + }, + ) 
+ + +async def test_single_energy_souce_read_configuration( + single_energy_source: EnergySource, + dcm: DCM, +) -> None: + await assert_configuration( + single_energy_source, + { + f"{single_energy_source.name}-wrapped_device_name": partial_reading( + dcm.energy_in_ev.name + ), + }, + ) + + +async def test_dual_energy_source_energy_is_correct_when_switching_between_sources( + dual_energy_source: DualEnergySource, + dcm: DCM, + pgm: PGM, +) -> None: + dcm_energy_val = await dcm.energy_in_ev.get_value() + pgm_energy_val = await pgm.energy.user_readback.get_value() + + # Make sure energy sources values are different for this test so we can tell them a + # part when switching + assert dcm_energy_val != pgm_energy_val + + await dual_energy_source.selected_source.set(SelectedSource.SOURCE1) + await assert_value(dual_energy_source.energy, dcm_energy_val) + await dual_energy_source.selected_source.set(SelectedSource.SOURCE2) + await assert_value(dual_energy_source.energy, pgm_energy_val) + + +async def test_dual_energy_souce_read( + dual_energy_source: DualEnergySource, + dcm: DCM, + pgm: PGM, +) -> None: + await dual_energy_source.selected_source.set(SelectedSource.SOURCE1) + prefix = dual_energy_source.name + await assert_reading( + dual_energy_source, + { + f"{prefix}-selected_source": partial_reading(SelectedSource.SOURCE1), + f"{dcm.energy_in_ev.name}": partial_reading( + await dcm.energy_in_ev.get_value() + ), + f"{pgm.energy.user_readback.name}": partial_reading( + await pgm.energy.user_readback.get_value() + ), + }, + ) + + +async def test_dual_energy_souce_read_configuration( + dual_energy_source: DualEnergySource, + dcm: DCM, + pgm: PGM, +) -> None: + prefix = dual_energy_source.name + await assert_configuration( + dual_energy_source, + { + f"{prefix}-source1-wrapped_device_name": partial_reading( + dcm.energy_in_ev.name + ), + f"{prefix}-source2-wrapped_device_name": partial_reading( + pgm.energy.user_readback.name + ), + }, + ) From 
7a4252476ee02e05e8231bdf692ab7f9d4a27cb0 Mon Sep 17 00:00:00 2001 From: oliwenmandiamond <136330507+oliwenmandiamond@users.noreply.github.com> Date: Tue, 23 Sep 2025 14:38:09 +0100 Subject: [PATCH 14/23] Update electron analyser to use new energy source devices (#1453) * Create SelectedSource enum and remove used region parameter logic * Removed test to check for invalid source as no longer needed with enums * Updated this to only include addition of new devices, will update exisitng classes use in different branch * Added comment on removing pytest.fixture * Updated electron analyser devices to use new energy_source devices * Update energy_sources fixture * Fixed tests * Fix linting issue * Created common enums and updated devices to use (#1415) * Created common enums and updated devices to use * Corrected backlight OnOff * Switched ordering of enums * Correct spelling * Updated remaining enums that have extra options * Renamed InOut to InState * Updated to use enums from ophyd-async * Renamed enums to Upper * test * Restore previous whitespace formatting * Updated enums reusing ones from ophyd-async to use .value, same as ophyd-async --------- Co-authored-by: Joseph Ware <53935796+DiamondJoseph@users.noreply.github.com> * Add i11 beamline module, Mythen3 detector and basic devices (#1377) * added i11.py, i11 specific devices * renamed stages * added diff stages * made cryostream into standardreadable * cryostreams 1+2 working * mythen3 baseclass added * mythen3 connecting * removed mac, renamed detector to mythen * changed cstrm to ocs * added deadtime to mythen3 * added sample spinner * added eurotherm + cyberstar blower * add with children as readbles to classes * changed eurotherm signals to rbv * change blower names to csb1/2 * fixed error on mythen3 docstring * added basic methods to cyberstar blower * fixed signal types in CryoStream * added series to cryostream * changed methods to bluesky protocol * added initial robot class * i11 robot connecting * 
changed sample_env name to spinner * fixed spinner in bl module, change start signal to x * spin signal to rw * i11 robot signals added * super added to mythen, modified robot * added mythen3 test * added mythen connect test * added docstrings for blower and eurotherm * i11 robot added load_sample and methods * changed jaws to grip * stop and start methods added * added robot test * added robot test * modify cyberstarblower and add test, updated mythen deadtime * import motors from ophyd_async * improve Eurotherm * impproved robot and spinner inheretance * fixed naming * fixes to tests * json * updates to tests * added blower tests * added more robot tests * parameterised tests * fixed type checking * add full coverage for i11robot * fixed type * added test to mythen3 * restored settings * tidied up mythen and added more tests * improved spinner, added set speed and test * edits to improve typechecking and other small improvements * added robot at position logging test * removed _get_i11_robot * changed i11robot to NX100 * reorganise imports * lowercase file * seperated spinner pause resume tests * removed eurotherm infixes * minor update to further paramterise nx100 test * Update test_i11devices.py * added missing type hints to eurotherm, blower and mythen3 * made eurothermal rbv suffix constant * made robot states more descriptive * fixed carousel spelling on robot * add readables to cryo * made blowers and eurotherms compositional * added docs strings to i11 robot * updated cryostream imports * using ophyd_async enums on/off enabled/disabled * type checking fixed for cyberstar blower, and Eurotherms * Add default image mode setting for analysers (#1443) * Add default image mode setting for analysers and clean up imports * Set default image mode to SINGLE in ElectronAnalyserController and clean up test assertions * set image mode before arm - may be we should improve doc to say the order explicitly here. 
* move image_mode set to stage method * Refactor staging methods to set image mode directly * Remove image mode setting from trigger method in AbstractElectronAnalyserDetector * Enhance staging methods to ensure detector readiness and set default image mode to SINGLE * Fixed tests * Fixed flakey test, added test for unstage * Removed debugging flakey test function * Removed unneeded tests * Move Stageable to ElectronAnalyserDetector * Reorded doc string of stage so it matches order of functions. --------- Co-authored-by: Oli Wenman Co-authored-by: oliwenmandiamond <136330507+oliwenmandiamond@users.noreply.github.com> * Move test_data to where it is used and standardise (#1419) * Moved test_data to where it is used * Add back topup tests * Moved test_beamline_parameters.txt back to tests/test_data as used by more than one test * Move test_beamline_parameters.txt back to tests/test_data as used by multiple tests * Move beamline parameter files back to tests/test_data as they are used by several tests * Created constant.py files to strore file paths * Defined all file paths in local constants.py and import part * Move oav images to test_data * Changed test_data to be a python module where you can then import static paths * Removed spaces between variables * Removed hard coded paths * Removed paths.py and added the paths directly to the __init__.py * Fixed topup tests * Updated imports to be from test_data * Applied same standard to test_daq_configuration * Updated i10 ID tests * Fixed test on python 3.11 * Update i10 lookupTables to be test_data * applied feedback * Corrected text_oav_zoom_level file name * I24: Update pmac pvs (#1344) Fixes Issue #1319 Update PMAC PVs for I24 including * Fix pmac prefix * Fix pmac homing string * Fix coordinate system index * Update tests for string constants * Update prefix for stages * Modify devices for i03's default UDC state (#1364) * Allow reading when the aperture scatterguard is parked * If ap_sg is parked then unpark it 
correctly * Added ability to park the scintillator * Rename and tidy up collimation table * Correctly move beamstop out when parked * Add ability to check cryostream and move fluorescence detector * Add new devices to i03 * Fix test and address reviews * Update tests/devices/unit_tests/test_scintillator.py Co-authored-by: olliesilvester <122091460+olliesilvester@users.noreply.github.com> * Address review comments * Fix test * Do not allow aperture scatterguard to be parked --------- Co-authored-by: olliesilvester <122091460+olliesilvester@users.noreply.github.com> * Update electron analyser helper util for tests to be modular (#1400) * initial commit * Corrected import for device_factory * Updated device_factory to have create_detector and create_driver with doc string * Update device_factory.py create_detector doc string * Updated factory methods to remove prefix and have supplied by tests * Added RunEngine to detector driver fixtures * Removed connect=True as it is True by default * Tidy up allvalvescontrol (#1058) * Tidy up allvalvescontrol * Simplify with Generics --------- Co-authored-by: Joseph Ware <53935796+DiamondJoseph@users.noreply.github.com> * Fix pressure cell controller PVs (#1444) * Pressure cell controller: Updated to use the wrapped PV declaration format, renamed duplicate attributes found during testing, and added docstring * Lint formatting fix * Updated PV's to be read only and to use _RBV (#1452) * Create SelectedSource enum and remove used region parameter logic (#1448) * Create SelectedSource enum and remove used region parameter logic * Removed test to check for invalid source as no longer needed with enums * Update energy_sources fixture * Replaced talisman with gitleaks as pre-commit hook for detecting potential credentials (#1454) * Updated sequence to use files defined from __init__ * Update test_undulator_dcm.py --------- Co-authored-by: Paul Hathaway <7045365+phathaway@users.noreply.github.com> Co-authored-by: Dominic Oram 
Co-authored-by: olliesilvester <122091460+olliesilvester@users.noreply.github.com> Co-authored-by: barnettwilliam <32113205+barnettwilliam@users.noreply.github.com> Co-authored-by: Joseph Ware <53935796+DiamondJoseph@users.noreply.github.com> Co-authored-by: wajidzahoor-dls * Created AbstractEnergySource to create base signal * Updated doc string * Correct type hint for test * Updated source to be clearly a reference * Updated tests to be more dyanmic * Updated other electron analyser tests from rename * chore: Add support for python 3.13 (#1466) * chore: Add support for python 3.13 * fix: Fix problems caused by tighter type checking around StrictEnum * fix: Replace mocked behaviour in test_snapshots * Added missing stage method from merge * Updated driver energy_source func name to select_energy_source_from_region * Renamed select_energy_source_from_region to set_source_from_region_and_get_energy * Updated reads to use real energy signals to keep metadata * Updated docs * Correct test_energy_source * Update excitation_energy to be energy * Fix tests * Updated beamlines * Fix linting issue * Added doc string for set method * Removed commented out code from test * Fixed linting issue for p60 * Updated DualEnergySource to allow SignalR in __init__ * Remove unused import * Fixed missing device_factory --------- Co-authored-by: Joseph Ware <53935796+DiamondJoseph@users.noreply.github.com> Co-authored-by: Richard Dixey <185198552+RJCD-Diamond@users.noreply.github.com> Co-authored-by: Fajin Yuan Co-authored-by: Paul Hathaway <7045365+phathaway@users.noreply.github.com> Co-authored-by: Dominic Oram Co-authored-by: olliesilvester <122091460+olliesilvester@users.noreply.github.com> Co-authored-by: barnettwilliam <32113205+barnettwilliam@users.noreply.github.com> Co-authored-by: wajidzahoor-dls --- src/dodal/beamlines/b07.py | 9 +++- src/dodal/beamlines/b07_1.py | 9 +++- src/dodal/beamlines/i09.py | 13 ++--- src/dodal/beamlines/i09_1.py | 9 +++- src/dodal/beamlines/p60.py | 
13 ++--- .../abstract/base_driver_io.py | 48 ++++++++++++------- .../electron_analyser/specs/detector.py | 12 ++--- .../electron_analyser/specs/driver_io.py | 22 +++------ .../electron_analyser/vgscienta/detector.py | 12 ++--- .../electron_analyser/vgscienta/driver_io.py | 21 +++----- .../abstract/test_base_detector.py | 7 +-- .../abstract/test_base_driver_io.py | 32 +++++++++---- tests/devices/electron_analyser/conftest.py | 38 +++++---------- .../electron_analyser/specs/test_detector.py | 9 ++-- .../electron_analyser/specs/test_driver_io.py | 35 ++++++-------- .../electron_analyser/test_detector.py | 7 +-- .../electron_analyser/test_energy_sources.py | 39 +++++++-------- .../vgscienta/test_detector.py | 9 ++-- .../vgscienta/test_driver_io.py | 43 ++++++++--------- 19 files changed, 197 insertions(+), 190 deletions(-) diff --git a/src/dodal/beamlines/b07.py b/src/dodal/beamlines/b07.py index fba3dc60a01..7afbb6992bf 100644 --- a/src/dodal/beamlines/b07.py +++ b/src/dodal/beamlines/b07.py @@ -3,7 +3,7 @@ ) from dodal.common.beamlines.beamline_utils import set_beamline as set_utils_beamline from dodal.devices.b07 import Grating, LensMode, PsuMode -from dodal.devices.electron_analyser import SelectedSource +from dodal.devices.electron_analyser import EnergySource from dodal.devices.electron_analyser.specs import SpecsDetector from dodal.devices.pgm import PGM from dodal.devices.synchrotron import Synchrotron @@ -26,6 +26,11 @@ def pgm() -> PGM: return PGM(prefix=f"{PREFIX.beamline_prefix}-OP-PGM-01:", grating=Grating) +@device_factory() +def energy_source() -> EnergySource: + return EnergySource(pgm().energy.user_readback) + + # Connect will work again after this work completed # https://jira.diamond.ac.uk/browse/B07-1104 @device_factory() @@ -34,5 +39,5 @@ def analyser() -> SpecsDetector[LensMode, PsuMode]: prefix=f"{PREFIX.beamline_prefix}-EA-DET-01:CAM:", lens_mode_type=LensMode, psu_mode_type=PsuMode, - energy_sources={SelectedSource.SOURCE1: 
pgm().energy.user_readback}, + energy_source=energy_source(), ) diff --git a/src/dodal/beamlines/b07_1.py b/src/dodal/beamlines/b07_1.py index ae582e0f892..91e1784a5ff 100644 --- a/src/dodal/beamlines/b07_1.py +++ b/src/dodal/beamlines/b07_1.py @@ -6,7 +6,7 @@ Grating, LensMode, ) -from dodal.devices.electron_analyser import SelectedSource +from dodal.devices.electron_analyser import EnergySource from dodal.devices.electron_analyser.specs import SpecsDetector from dodal.devices.pgm import PGM from dodal.devices.synchrotron import Synchrotron @@ -36,11 +36,16 @@ def ccmc() -> ChannelCutMonochromator: return ChannelCutMonochromator(prefix=f"{PREFIX.beamline_prefix}-OP-CCM-01:") +@device_factory() +def energy_source() -> EnergySource: + return EnergySource(pgm().energy.user_readback) + + @device_factory() def analyser() -> SpecsDetector[LensMode, PsuMode]: return SpecsDetector[LensMode, PsuMode]( prefix=f"{PREFIX.beamline_prefix}-EA-DET-01:CAM:", lens_mode_type=LensMode, psu_mode_type=PsuMode, - energy_sources={SelectedSource.SOURCE1: pgm().energy.user_readback}, + energy_source=energy_source(), ) diff --git a/src/dodal/beamlines/i09.py b/src/dodal/beamlines/i09.py index 86bdd1c897c..10cd60147f1 100644 --- a/src/dodal/beamlines/i09.py +++ b/src/dodal/beamlines/i09.py @@ -2,7 +2,7 @@ device_factory, ) from dodal.common.beamlines.beamline_utils import set_beamline as set_utils_beamline -from dodal.devices.electron_analyser import SelectedSource +from dodal.devices.electron_analyser import DualEnergySource from dodal.devices.electron_analyser.vgscienta import VGScientaDetector from dodal.devices.i09 import DCM, Grating, LensMode, PassEnergy, PsuMode from dodal.devices.pgm import PGM @@ -34,18 +34,19 @@ def dcm() -> DCM: return DCM(prefix=f"{PREFIX.beamline_prefix}-MO-DCM-01:") +@device_factory() +def energy_source() -> DualEnergySource: + return DualEnergySource(dcm().energy_in_ev, pgm().energy.user_readback) + + # Connect will work again after this work completed # 
https://jira.diamond.ac.uk/browse/I09-651 @device_factory() def ew4000() -> VGScientaDetector[LensMode, PsuMode, PassEnergy]: - energy_sources = { - SelectedSource.SOURCE1: pgm().energy.user_readback, - SelectedSource.SOURCE2: dcm().energy_in_ev, - } return VGScientaDetector[LensMode, PsuMode, PassEnergy]( prefix=f"{PREFIX.beamline_prefix}-EA-DET-01:CAM:", lens_mode_type=LensMode, psu_mode_type=PsuMode, pass_energy_type=PassEnergy, - energy_sources=energy_sources, + energy_source=energy_source(), ) diff --git a/src/dodal/beamlines/i09_1.py b/src/dodal/beamlines/i09_1.py index e9b08acb2f6..055d2f6fd84 100644 --- a/src/dodal/beamlines/i09_1.py +++ b/src/dodal/beamlines/i09_1.py @@ -2,7 +2,7 @@ device_factory, ) from dodal.common.beamlines.beamline_utils import set_beamline as set_utils_beamline -from dodal.devices.electron_analyser import SelectedSource +from dodal.devices.electron_analyser import EnergySource from dodal.devices.electron_analyser.specs import SpecsDetector from dodal.devices.i09.dcm import DCM from dodal.devices.i09_1 import LensMode, PsuMode @@ -26,6 +26,11 @@ def dcm() -> DCM: return DCM(prefix=f"{PREFIX.beamline_prefix}-MO-DCM-01:") +@device_factory() +def energy_source() -> EnergySource: + return EnergySource(dcm().energy_in_ev) + + # Connect will work again after this work completed # https://jira.diamond.ac.uk/browse/I09-651 @device_factory() @@ -34,5 +39,5 @@ def analyser() -> SpecsDetector[LensMode, PsuMode]: prefix=f"{PREFIX.beamline_prefix}-EA-DET-02:CAM:", lens_mode_type=LensMode, psu_mode_type=PsuMode, - energy_sources={SelectedSource.SOURCE1: dcm().energy_in_ev}, + energy_source=energy_source(), ) diff --git a/src/dodal/beamlines/p60.py b/src/dodal/beamlines/p60.py index 20a4e07c832..34e1a513baa 100644 --- a/src/dodal/beamlines/p60.py +++ b/src/dodal/beamlines/p60.py @@ -2,7 +2,7 @@ device_factory, ) from dodal.common.beamlines.beamline_utils import set_beamline as set_utils_beamline -from dodal.devices.electron_analyser import 
SelectedSource +from dodal.devices.electron_analyser import DualEnergySource from dodal.devices.electron_analyser.vgscienta import VGScientaDetector from dodal.devices.p60 import ( LabXraySource, @@ -30,18 +30,19 @@ def mg_kalpha_source() -> LabXraySourceReadable: return LabXraySourceReadable(LabXraySource.MG_KALPHA) +@device_factory() +def energy_source() -> DualEnergySource: + return DualEnergySource(al_kalpha_source().energy_ev, mg_kalpha_source().energy_ev) + + # Connect will work again after this work completed # https://jira.diamond.ac.uk/browse/P60-13 @device_factory() def r4000() -> VGScientaDetector[LensMode, PsuMode, PassEnergy]: - energy_sources = { - SelectedSource.SOURCE1: al_kalpha_source().energy_ev, - SelectedSource.SOURCE2: mg_kalpha_source().energy_ev, - } return VGScientaDetector[LensMode, PsuMode, PassEnergy]( prefix=f"{PREFIX.beamline_prefix}-EA-DET-01:CAM:", lens_mode_type=LensMode, psu_mode_type=PsuMode, pass_energy_type=PassEnergy, - energy_sources=energy_sources, + energy_source=energy_source(), ) diff --git a/src/dodal/devices/electron_analyser/abstract/base_driver_io.py b/src/dodal/devices/electron_analyser/abstract/base_driver_io.py index 285adaa2eb7..8b81c7fc255 100644 --- a/src/dodal/devices/electron_analyser/abstract/base_driver_io.py +++ b/src/dodal/devices/electron_analyser/abstract/base_driver_io.py @@ -1,5 +1,4 @@ from abc import ABC, abstractmethod -from collections.abc import Mapping from typing import Generic, TypeVar import numpy as np @@ -25,7 +24,11 @@ TPassEnergy, TPsuMode, ) -from dodal.devices.electron_analyser.enums import EnergyMode, SelectedSource +from dodal.devices.electron_analyser.energy_sources import ( + DualEnergySource, + EnergySource, +) +from dodal.devices.electron_analyser.enums import EnergyMode from dodal.devices.electron_analyser.util import to_binding_energy @@ -49,7 +52,7 @@ def __init__( lens_mode_type: type[TLensMode], psu_mode_type: type[TPsuMode], pass_energy_type: type[TPassEnergy], - 
energy_sources: Mapping[SelectedSource, SignalR[float]], + energy_source: EnergySource | DualEnergySource, name: str = "", ) -> None: """ @@ -65,11 +68,11 @@ def __init__( pass_energy_type: Can be enum or float, depends on electron analyser model. If enum, it determines the available pass energies for this device. - energy_sources: Map that pairs a source name to an energy value signal + energy_source: Device that can give us the correct excitation energy and + switch sources if applicable. (in eV). name: Name of the device. """ - self.energy_sources = energy_sources self.acquisition_mode_type = acquisition_mode_type self.lens_mode_type = lens_mode_type self.psu_mode_type = psu_mode_type @@ -84,7 +87,7 @@ def __init__( self.total_intensity = derived_signal_r( self._calculate_total_intensity, spectrum=self.spectrum ) - self.excitation_energy = soft_signal_rw(float, initial_value=0, units="eV") + self.energy_source = energy_source with self.add_children_as_readables(StandardReadableFormat.CONFIG_SIGNAL): # Read once per scan after data acquired @@ -104,7 +107,6 @@ def __init__( self.acquisition_mode = epics_signal_rw( acquisition_mode_type, prefix + "ACQ_MODE" ) - self.excitation_energy_source = soft_signal_rw(str, initial_value="") # This is used by each electron analyser, however it depends on the electron # analyser type to know if is moved with region settings. 
self.psu_mode = epics_signal_rw(psu_mode_type, prefix + "PSU_MODE") @@ -119,7 +121,7 @@ def __init__( self._calculate_binding_energy_axis, "eV", energy_axis=self.energy_axis, - excitation_energy=self.excitation_energy, + excitation_energy=self.energy_source.energy, energy_mode=self.energy_mode, ) self.angle_axis = self._create_angle_axis_signal(prefix) @@ -137,9 +139,28 @@ async def stage(self) -> None: await self.image_mode.set(ADImageMode.SINGLE) await super().stage() - @abstractmethod @AsyncStatus.wrap async def set(self, region: TAbstractBaseRegion): + """ + Take a region object and setup the driver with it. If using a DualEnergySource, + set it to use the source selected by the region. It also converts the region to + kinetic mode before we move the driver signals to the region parameter values: + + Args: + region: Contains the parameters to setup the driver for a scan. + """ + if isinstance(self.energy_source, DualEnergySource): + self.energy_source.selected_source.set(region.excitation_energy_source) + excitation_energy = await self.energy_source.energy.get_value() + + # Copy region so doesn't alter the actual region and switch to kinetic energy + ke_region = region.model_copy() + ke_region.switch_energy_mode(EnergyMode.KINETIC, excitation_energy) + + await self._set_region(ke_region) + + @abstractmethod + async def _set_region(self, ke_region: TAbstractBaseRegion): """ Move a group of signals defined in a region. Each implementation of this class is responsible for implementing this method correctly. @@ -148,15 +169,6 @@ async def set(self, region: TAbstractBaseRegion): region: Contains the parameters to setup the driver for a scan. """ - def _get_energy_source(self, alias_name: SelectedSource) -> SignalR[float]: - energy_source = self.energy_sources.get(alias_name) - if energy_source is None: - raise KeyError( - f"'{energy_source}' is an invalid energy source. 
Avaliable energy " - + f"sources are '{list(self.energy_sources.keys())}'" - ) - return energy_source - @abstractmethod def _create_angle_axis_signal(self, prefix: str) -> SignalR[Array1D[np.float64]]: """ diff --git a/src/dodal/devices/electron_analyser/specs/detector.py b/src/dodal/devices/electron_analyser/specs/detector.py index 38a52e07f03..d41ad4bf747 100644 --- a/src/dodal/devices/electron_analyser/specs/detector.py +++ b/src/dodal/devices/electron_analyser/specs/detector.py @@ -1,13 +1,13 @@ -from collections.abc import Mapping from typing import Generic -from ophyd_async.core import SignalR - from dodal.devices.electron_analyser.abstract.types import TLensMode, TPsuMode from dodal.devices.electron_analyser.detector import ( ElectronAnalyserDetector, ) -from dodal.devices.electron_analyser.enums import SelectedSource +from dodal.devices.electron_analyser.energy_sources import ( + DualEnergySource, + EnergySource, +) from dodal.devices.electron_analyser.specs.driver_io import SpecsAnalyserDriverIO from dodal.devices.electron_analyser.specs.region import SpecsRegion, SpecsSequence @@ -25,10 +25,10 @@ def __init__( prefix: str, lens_mode_type: type[TLensMode], psu_mode_type: type[TPsuMode], - energy_sources: Mapping[SelectedSource, SignalR[float]], + energy_source: DualEnergySource | EnergySource, name: str = "", ): driver = SpecsAnalyserDriverIO[TLensMode, TPsuMode]( - prefix, lens_mode_type, psu_mode_type, energy_sources + prefix, lens_mode_type, psu_mode_type, energy_source ) super().__init__(SpecsSequence[lens_mode_type, psu_mode_type], driver, name) diff --git a/src/dodal/devices/electron_analyser/specs/driver_io.py b/src/dodal/devices/electron_analyser/specs/driver_io.py index c06ee65140f..59603110ce0 100644 --- a/src/dodal/devices/electron_analyser/specs/driver_io.py +++ b/src/dodal/devices/electron_analyser/specs/driver_io.py @@ -1,11 +1,9 @@ import asyncio -from collections.abc import Mapping from typing import Generic import numpy as np from 
ophyd_async.core import ( Array1D, - AsyncStatus, SignalR, StandardReadableFormat, derived_signal_r, @@ -16,7 +14,10 @@ AbstractAnalyserDriverIO, ) from dodal.devices.electron_analyser.abstract.types import TLensMode, TPsuMode -from dodal.devices.electron_analyser.enums import EnergyMode, SelectedSource +from dodal.devices.electron_analyser.energy_sources import ( + DualEnergySource, + EnergySource, +) from dodal.devices.electron_analyser.specs.enums import AcquisitionMode from dodal.devices.electron_analyser.specs.region import SpecsRegion @@ -36,7 +37,7 @@ def __init__( prefix: str, lens_mode_type: type[TLensMode], psu_mode_type: type[TPsuMode], - energy_sources: Mapping[SelectedSource, SignalR[float]], + energy_source: EnergySource | DualEnergySource, name: str = "", ) -> None: with self.add_children_as_readables(StandardReadableFormat.CONFIG_SIGNAL): @@ -58,18 +59,11 @@ def __init__( lens_mode_type=lens_mode_type, psu_mode_type=psu_mode_type, pass_energy_type=float, - energy_sources=energy_sources, + energy_source=energy_source, name=name, ) - @AsyncStatus.wrap - async def set(self, region: SpecsRegion[TLensMode, TPsuMode]): - source = self._get_energy_source(region.excitation_energy_source) - excitation_energy = await source.get_value() # eV - # Copy region so doesn't alter the actual region and switch to kinetic energy - ke_region = region.model_copy() - ke_region.switch_energy_mode(EnergyMode.KINETIC, excitation_energy) - + async def _set_region(self, ke_region: SpecsRegion[TLensMode, TPsuMode]): await asyncio.gather( self.region_name.set(ke_region.name), self.energy_mode.set(ke_region.energy_mode), @@ -81,8 +75,6 @@ async def set(self, region: SpecsRegion[TLensMode, TPsuMode]): self.pass_energy.set(ke_region.pass_energy), self.iterations.set(ke_region.iterations), self.acquisition_mode.set(ke_region.acquisition_mode), - self.excitation_energy.set(excitation_energy), - self.excitation_energy_source.set(source.name), 
self.snapshot_values.set(ke_region.values), self.psu_mode.set(ke_region.psu_mode), ) diff --git a/src/dodal/devices/electron_analyser/vgscienta/detector.py b/src/dodal/devices/electron_analyser/vgscienta/detector.py index 9c64edc24c3..a65f3f4f8be 100644 --- a/src/dodal/devices/electron_analyser/vgscienta/detector.py +++ b/src/dodal/devices/electron_analyser/vgscienta/detector.py @@ -1,8 +1,5 @@ -from collections.abc import Mapping from typing import Generic -from ophyd_async.core import SignalR - from dodal.devices.electron_analyser.abstract.types import ( TLensMode, TPassEnergyEnum, @@ -11,7 +8,10 @@ from dodal.devices.electron_analyser.detector import ( ElectronAnalyserDetector, ) -from dodal.devices.electron_analyser.enums import SelectedSource +from dodal.devices.electron_analyser.energy_sources import ( + DualEnergySource, + EnergySource, +) from dodal.devices.electron_analyser.vgscienta.driver_io import ( VGScientaAnalyserDriverIO, ) @@ -35,11 +35,11 @@ def __init__( lens_mode_type: type[TLensMode], psu_mode_type: type[TPsuMode], pass_energy_type: type[TPassEnergyEnum], - energy_sources: Mapping[SelectedSource, SignalR[float]], + energy_source: DualEnergySource | EnergySource, name: str = "", ): driver = VGScientaAnalyserDriverIO[TLensMode, TPsuMode, TPassEnergyEnum]( - prefix, lens_mode_type, psu_mode_type, pass_energy_type, energy_sources + prefix, lens_mode_type, psu_mode_type, pass_energy_type, energy_source ) super().__init__( VGScientaSequence[lens_mode_type, psu_mode_type, pass_energy_type], diff --git a/src/dodal/devices/electron_analyser/vgscienta/driver_io.py b/src/dodal/devices/electron_analyser/vgscienta/driver_io.py index 77506e2a142..19c878f1785 100644 --- a/src/dodal/devices/electron_analyser/vgscienta/driver_io.py +++ b/src/dodal/devices/electron_analyser/vgscienta/driver_io.py @@ -1,11 +1,9 @@ import asyncio -from collections.abc import Mapping from typing import Generic import numpy as np from ophyd_async.core import ( Array1D, - 
AsyncStatus, SignalR, StandardReadableFormat, ) @@ -19,7 +17,10 @@ TPassEnergyEnum, TPsuMode, ) -from dodal.devices.electron_analyser.enums import EnergyMode, SelectedSource +from dodal.devices.electron_analyser.energy_sources import ( + DualEnergySource, + EnergySource, +) from dodal.devices.electron_analyser.vgscienta.enums import ( AcquisitionMode, DetectorMode, @@ -45,7 +46,7 @@ def __init__( lens_mode_type: type[TLensMode], psu_mode_type: type[TPsuMode], pass_energy_type: type[TPassEnergyEnum], - energy_sources: Mapping[SelectedSource, SignalR[float]], + energy_source: EnergySource | DualEnergySource, name: str = "", ) -> None: with self.add_children_as_readables(StandardReadableFormat.CONFIG_SIGNAL): @@ -66,17 +67,11 @@ def __init__( lens_mode_type, psu_mode_type, pass_energy_type, - energy_sources, + energy_source, name, ) - @AsyncStatus.wrap - async def set(self, region: VGScientaRegion[TLensMode, TPassEnergyEnum]): - source = self._get_energy_source(region.excitation_energy_source) - excitation_energy = await source.get_value() # eV - # Copy region so doesn't alter the actual region and switch to kinetic energy - ke_region = region.model_copy() - ke_region.switch_energy_mode(EnergyMode.KINETIC, excitation_energy) + async def _set_region(self, ke_region: VGScientaRegion[TLensMode, TPassEnergyEnum]): await asyncio.gather( self.region_name.set(ke_region.name), self.energy_mode.set(ke_region.energy_mode), @@ -89,8 +84,6 @@ async def set(self, region: VGScientaRegion[TLensMode, TPassEnergyEnum]): self.iterations.set(ke_region.iterations), self.acquire_time.set(ke_region.acquire_time), self.acquisition_mode.set(ke_region.acquisition_mode), - self.excitation_energy.set(excitation_energy), - self.excitation_energy_source.set(source.name), self.energy_step.set(ke_region.energy_step), self.detector_mode.set(ke_region.detector_mode), self.region_min_x.set(ke_region.min_x), diff --git a/tests/devices/electron_analyser/abstract/test_base_detector.py 
b/tests/devices/electron_analyser/abstract/test_base_detector.py index 5d9ba107cea..7bdcae7b4c7 100644 --- a/tests/devices/electron_analyser/abstract/test_base_detector.py +++ b/tests/devices/electron_analyser/abstract/test_base_detector.py @@ -3,7 +3,7 @@ import pytest from bluesky import plan_stubs as bps from bluesky.run_engine import RunEngine -from ophyd_async.core import SignalR, init_devices +from ophyd_async.core import init_devices from ophyd_async.testing import ( assert_configuration, assert_reading, @@ -16,6 +16,7 @@ AbstractAnalyserDriverIO, AbstractElectronAnalyserDetector, ) +from dodal.devices.electron_analyser.energy_sources import EnergySource from dodal.devices.electron_analyser.specs import SpecsDetector from dodal.devices.electron_analyser.vgscienta import VGScientaDetector from dodal.testing.electron_analyser import create_detector @@ -29,14 +30,14 @@ ) async def sim_detector( request: pytest.FixtureRequest, - energy_sources: dict[str, SignalR[float]], + single_energy_source: EnergySource, RE: RunEngine, ) -> AbstractElectronAnalyserDetector[AbstractAnalyserDriverIO]: async with init_devices(mock=True): sim_detector = await create_detector( request.param, prefix="TEST:", - energy_sources=energy_sources, + energy_source=single_energy_source, ) # Needed for specs so we don't get division by zero error. 
set_mock_value(sim_detector.driver.slices, 1) diff --git a/tests/devices/electron_analyser/abstract/test_base_driver_io.py b/tests/devices/electron_analyser/abstract/test_base_driver_io.py index 2b9611b86fe..afcae67a43b 100644 --- a/tests/devices/electron_analyser/abstract/test_base_driver_io.py +++ b/tests/devices/electron_analyser/abstract/test_base_driver_io.py @@ -1,13 +1,15 @@ -from unittest.mock import AsyncMock, patch +from typing import get_origin +from unittest.mock import AsyncMock, MagicMock, patch import pytest from bluesky import plan_stubs as bps from bluesky.run_engine import RunEngine from bluesky.utils import FailedStatus -from ophyd_async.core import SignalR, StrictEnum, init_devices +from ophyd_async.core import StrictEnum, init_devices from ophyd_async.epics.adcore import ADImageMode from dodal.devices import b07, i09 +from dodal.devices.electron_analyser import DualEnergySource, EnergySource from dodal.devices.electron_analyser.abstract import ( AbstractAnalyserDriverIO, AbstractBaseRegion, @@ -32,24 +34,38 @@ ) async def sim_driver( request: pytest.FixtureRequest, - energy_sources: dict[str, SignalR[float]], + single_energy_source: EnergySource, + dual_energy_source: DualEnergySource, + RE: RunEngine, ) -> AbstractAnalyserDriverIO: + source = single_energy_source + if get_origin(request.param) is VGScientaAnalyserDriverIO: + source = dual_energy_source async with init_devices(mock=True): sim_driver = await create_driver( - request.param, prefix="TEST:", energy_sources=energy_sources + request.param, prefix="TEST:", energy_source=source ) return sim_driver @pytest.mark.parametrize("region", TEST_SEQUENCE_REGION_NAMES, indirect=True) -def test_analyser_correctly_selects_energy_source_from_region_input( +def test_driver_set( sim_driver: AbstractAnalyserDriverIO, region: AbstractBaseRegion, + RE: RunEngine, ) -> None: - source_alias_name = region.excitation_energy_source - energy_source = sim_driver._get_energy_source(source_alias_name) + 
sim_driver._set_region = AsyncMock() + + if isinstance(sim_driver.energy_source, DualEnergySource): + sim_driver.energy_source.selected_source.set = MagicMock() - assert energy_source == sim_driver.energy_sources[source_alias_name] + RE(bps.mv(sim_driver, region)) + + if isinstance(sim_driver.energy_source, DualEnergySource): + sim_driver.energy_source.selected_source.set.assert_called_once_with( # type: ignore + region.excitation_energy_source + ) + sim_driver._set_region.assert_called_once() def test_driver_throws_error_with_wrong_lens_mode( diff --git a/tests/devices/electron_analyser/conftest.py b/tests/devices/electron_analyser/conftest.py index d756c7d9901..9b985e7edb1 100644 --- a/tests/devices/electron_analyser/conftest.py +++ b/tests/devices/electron_analyser/conftest.py @@ -2,13 +2,12 @@ import pytest from bluesky.run_engine import RunEngine -from ophyd_async.core import SignalR, init_devices +from ophyd_async.core import init_devices from dodal.devices.electron_analyser import ( DualEnergySource, ElectronAnalyserDetector, EnergySource, - SelectedSource, ) from dodal.devices.electron_analyser.abstract import ( AbstractAnalyserDriverIO, @@ -33,30 +32,25 @@ @pytest.fixture -def dcm(RE: RunEngine) -> DCM: +async def single_energy_source(RE: RunEngine) -> EnergySource: with init_devices(mock=True): dcm = DCM("DCM:") patch_motor(dcm.energy_in_kev, initial_position=2.2) - return dcm + async with init_devices(mock=True): + dcm_energy_source = EnergySource(dcm.energy_in_ev) + return dcm_energy_source @pytest.fixture -def pgm(RE: RunEngine) -> PGM: - with init_devices(mock=True): +async def dual_energy_source(RE: RunEngine) -> DualEnergySource: + async with init_devices(mock=True): + dcm = DCM("DCM:") + patch_motor(dcm.energy_in_kev, initial_position=2.2) + + async with init_devices(mock=True): pgm = PGM("PGM:", Grating) patch_motor(pgm.energy, initial_position=500) - return pgm - -@pytest.fixture -async def single_energy_source(dcm: DCM, RE: RunEngine) -> 
EnergySource: - with init_devices(mock=True): - single_energy_source = EnergySource(dcm.energy_in_ev) - return single_energy_source - - -@pytest.fixture -async def dual_energy_source(dcm: DCM, pgm: PGM, RE: RunEngine) -> DualEnergySource: async with init_devices(mock=True): dual_energy_source = DualEnergySource( source1=dcm.energy_in_ev, source2=pgm.energy.user_readback @@ -64,16 +58,6 @@ async def dual_energy_source(dcm: DCM, pgm: PGM, RE: RunEngine) -> DualEnergySou return dual_energy_source -# ToDo - This will be removed once existing devices use the energy source device rather -# than dict. -@pytest.fixture -async def energy_sources(dcm: DCM, pgm: PGM) -> dict[str, SignalR[float]]: - return { - SelectedSource.SOURCE1: dcm.energy_in_ev, - SelectedSource.SOURCE2: pgm.energy.user_readback, - } - - @pytest.fixture def sequence_class( sim_driver: AbstractAnalyserDriverIO, diff --git a/tests/devices/electron_analyser/specs/test_detector.py b/tests/devices/electron_analyser/specs/test_detector.py index e04e9a8b4a2..516a6b6c1aa 100644 --- a/tests/devices/electron_analyser/specs/test_detector.py +++ b/tests/devices/electron_analyser/specs/test_detector.py @@ -1,21 +1,24 @@ import pytest -from ophyd_async.core import SignalR, init_devices +from bluesky import RunEngine +from ophyd_async.core import init_devices from ophyd_async.testing import set_mock_value from dodal.devices.b07 import LensMode, PsuMode +from dodal.devices.electron_analyser import EnergySource from dodal.devices.electron_analyser.specs import SpecsDetector from dodal.testing.electron_analyser import create_detector @pytest.fixture async def sim_detector( - energy_sources: dict[str, SignalR[float]], + single_energy_source: EnergySource, + RE: RunEngine, ) -> SpecsDetector[LensMode, PsuMode]: async with init_devices(mock=True): sim_driver = await create_detector( SpecsDetector[LensMode, PsuMode], prefix="TEST:", - energy_sources=energy_sources, + energy_source=single_energy_source, ) return sim_driver 
diff --git a/tests/devices/electron_analyser/specs/test_driver_io.py b/tests/devices/electron_analyser/specs/test_driver_io.py index 9ae2adc89c2..25475401076 100644 --- a/tests/devices/electron_analyser/specs/test_driver_io.py +++ b/tests/devices/electron_analyser/specs/test_driver_io.py @@ -4,7 +4,7 @@ import pytest from bluesky import plan_stubs as bps from bluesky.run_engine import RunEngine -from ophyd_async.core import SignalR, init_devices +from ophyd_async.core import init_devices from ophyd_async.testing import ( assert_configuration, assert_reading, @@ -17,6 +17,7 @@ from dodal.devices.b07 import LensMode, PsuMode from dodal.devices.electron_analyser import ( EnergyMode, + EnergySource, ) from dodal.devices.electron_analyser.enums import EnergyMode from dodal.devices.electron_analyser.specs import ( @@ -32,13 +33,13 @@ @pytest.fixture async def sim_driver( - energy_sources: dict[str, SignalR[float]], + single_energy_source: EnergySource, ) -> SpecsAnalyserDriverIO[LensMode, PsuMode]: async with init_devices(mock=True): sim_driver = await create_driver( SpecsAnalyserDriverIO[LensMode, PsuMode], prefix="TEST:", - energy_sources=energy_sources, + energy_source=single_energy_source, ) return sim_driver @@ -51,10 +52,9 @@ async def test_analyser_sets_region_correctly( ) -> None: RE(bps.mv(sim_driver, region), wait=True) - energy_source = sim_driver._get_energy_source(region.excitation_energy_source) - expected_source = energy_source.name + excitation_energy = await sim_driver.energy_source.energy.get_value() + region.switch_energy_mode(EnergyMode.KINETIC, excitation_energy) - region.switch_energy_mode(EnergyMode.KINETIC, await energy_source.get_value()) get_mock_put(sim_driver.region_name).assert_called_once_with(region.name, wait=True) get_mock_put(sim_driver.energy_mode).assert_called_once_with( region.energy_mode, wait=True @@ -82,9 +82,6 @@ async def test_analyser_sets_region_correctly( get_mock_put(sim_driver.pass_energy).assert_called_once_with( 
region.pass_energy, wait=True ) - get_mock_put(sim_driver.excitation_energy_source).assert_called_once_with( - expected_source, wait=True - ) get_mock_put(sim_driver.slices).assert_called_once_with(region.slices, wait=True) get_mock_put(sim_driver.acquire_time).assert_called_once_with( region.acquire_time, wait=True @@ -118,8 +115,8 @@ async def test_analyser_sets_region_and_read_configuration_is_correct( prefix = sim_driver.name + "-" - energy_source = sim_driver._get_energy_source(region.excitation_energy_source) - region.switch_energy_mode(EnergyMode.KINETIC, await energy_source.get_value()) + excitation_energy = await sim_driver.energy_source.energy.get_value() + region.switch_energy_mode(EnergyMode.KINETIC, excitation_energy) await assert_configuration( sim_driver, @@ -133,7 +130,6 @@ async def test_analyser_sets_region_and_read_configuration_is_correct( f"{prefix}high_energy": partial_reading(region.high_energy), f"{prefix}energy_step": partial_reading(ANY), f"{prefix}pass_energy": partial_reading(region.pass_energy), - f"{prefix}excitation_energy_source": partial_reading(energy_source.name), f"{prefix}slices": partial_reading(region.slices), f"{prefix}acquire_time": partial_reading(region.acquire_time), f"{prefix}iterations": partial_reading(region.iterations), @@ -144,11 +140,12 @@ async def test_analyser_sets_region_and_read_configuration_is_correct( f"{prefix}angle_axis": partial_reading(ANY), f"{prefix}snapshot_values": partial_reading(region.values), f"{prefix}psu_mode": partial_reading(region.psu_mode), - }, + } + | await sim_driver.energy_source.read_configuration(), ) -@pytest.fixture +@pytest.mark.parametrize("region", TEST_SEQUENCE_REGION_NAMES, indirect=True) async def test_analyser_sets_region_and_read_is_correct( sim_driver: SpecsAnalyserDriverIO[LensMode, PsuMode], region: SpecsRegion[LensMode, PsuMode], @@ -156,9 +153,6 @@ async def test_analyser_sets_region_and_read_is_correct( ) -> None: RE(bps.mv(sim_driver, region), wait=True) - 
energy_source = sim_driver._get_energy_source(region.excitation_energy_source) - excitation_energy = await energy_source.get_value() - spectrum = np.array([1, 2, 3, 4, 5], dtype=float) expected_total_intensity = np.sum(spectrum) set_mock_value(sim_driver.spectrum, spectrum) @@ -167,11 +161,11 @@ async def test_analyser_sets_region_and_read_is_correct( await assert_reading( sim_driver, { - f"{prefix}excitation_energy": partial_reading(excitation_energy), f"{prefix}image": partial_reading([]), f"{prefix}spectrum": partial_reading(spectrum), f"{prefix}total_intensity": partial_reading(expected_total_intensity), - }, + } + | await sim_driver.energy_source.read(), ) @@ -183,8 +177,7 @@ async def test_specs_analyser_binding_energy_axis( ) -> None: RE(bps.mv(sim_driver, region)) - source = sim_driver._get_energy_source(region.excitation_energy_source) - excitation_energy = await source.get_value() + excitation_energy = await sim_driver.energy_source.energy.get_value() # Check binding energy is correct is_binding = await sim_driver.energy_mode.get_value() == EnergyMode.BINDING diff --git a/tests/devices/electron_analyser/test_detector.py b/tests/devices/electron_analyser/test_detector.py index 7b20b9cb43f..fa8b6e7367e 100644 --- a/tests/devices/electron_analyser/test_detector.py +++ b/tests/devices/electron_analyser/test_detector.py @@ -3,11 +3,12 @@ import pytest from bluesky import plan_stubs as bps from bluesky.run_engine import RunEngine -from ophyd_async.core import SignalR, init_devices +from ophyd_async.core import init_devices import dodal.devices.b07 as b07 import dodal.devices.i09 as i09 from dodal.devices.electron_analyser import ( + EnergySource, GenericElectronAnalyserDetector, ) from dodal.devices.electron_analyser.specs import SpecsDetector @@ -24,14 +25,14 @@ ) async def sim_detector( request: pytest.FixtureRequest, - energy_sources: dict[str, SignalR[float]], + single_energy_source: EnergySource, RE: RunEngine, ) -> GenericElectronAnalyserDetector: async 
with init_devices(mock=True): sim_detector = await create_detector( request.param, prefix="TEST:", - energy_sources=energy_sources, + energy_source=single_energy_source, ) return sim_detector diff --git a/tests/devices/electron_analyser/test_energy_sources.py b/tests/devices/electron_analyser/test_energy_sources.py index de41e4fd24e..beb5e9bf1a8 100644 --- a/tests/devices/electron_analyser/test_energy_sources.py +++ b/tests/devices/electron_analyser/test_energy_sources.py @@ -10,19 +10,16 @@ EnergySource, SelectedSource, ) -from dodal.devices.i09 import DCM -from dodal.devices.pgm import PGM async def test_single_energy_source_read( single_energy_source: EnergySource, - dcm: DCM, ) -> None: await assert_reading( single_energy_source, { - f"{dcm.energy_in_ev.name}": partial_reading( - await dcm.energy_in_ev.get_value() + f"{single_energy_source._source_ref().name}": partial_reading( + await single_energy_source._source_ref().get_value() ), }, ) @@ -30,13 +27,12 @@ async def test_single_energy_source_read( async def test_single_energy_souce_read_configuration( single_energy_source: EnergySource, - dcm: DCM, ) -> None: await assert_configuration( single_energy_source, { f"{single_energy_source.name}-wrapped_device_name": partial_reading( - dcm.energy_in_ev.name + single_energy_source._source_ref().name ), }, ) @@ -44,11 +40,9 @@ async def test_single_energy_souce_read_configuration( async def test_dual_energy_source_energy_is_correct_when_switching_between_sources( dual_energy_source: DualEnergySource, - dcm: DCM, - pgm: PGM, ) -> None: - dcm_energy_val = await dcm.energy_in_ev.get_value() - pgm_energy_val = await pgm.energy.user_readback.get_value() + dcm_energy_val = await dual_energy_source.source1.energy.get_value() + pgm_energy_val = await dual_energy_source.source2.energy.get_value() # Make sure energy sources values are different for this test so we can tell them a # part when switching @@ -62,39 +56,38 @@ async def 
test_dual_energy_source_energy_is_correct_when_switching_between_sourc async def test_dual_energy_souce_read( dual_energy_source: DualEnergySource, - dcm: DCM, - pgm: PGM, ) -> None: await dual_energy_source.selected_source.set(SelectedSource.SOURCE1) prefix = dual_energy_source.name + + source1_name = await dual_energy_source.source1.wrapped_device_name.get_value() + source1_energy_value = await dual_energy_source.source1.energy.get_value() + + source2_name = await dual_energy_source.source2.wrapped_device_name.get_value() + source2_energy_value = await dual_energy_source.source2.energy.get_value() + await assert_reading( dual_energy_source, { f"{prefix}-selected_source": partial_reading(SelectedSource.SOURCE1), - f"{dcm.energy_in_ev.name}": partial_reading( - await dcm.energy_in_ev.get_value() - ), - f"{pgm.energy.user_readback.name}": partial_reading( - await pgm.energy.user_readback.get_value() - ), + f"{source1_name}": partial_reading(source1_energy_value), + f"{source2_name}": partial_reading(source2_energy_value), }, ) async def test_dual_energy_souce_read_configuration( dual_energy_source: DualEnergySource, - dcm: DCM, - pgm: PGM, ) -> None: prefix = dual_energy_source.name await assert_configuration( dual_energy_source, { f"{prefix}-source1-wrapped_device_name": partial_reading( - dcm.energy_in_ev.name + dual_energy_source.source1._source_ref().name ), f"{prefix}-source2-wrapped_device_name": partial_reading( - pgm.energy.user_readback.name + dual_energy_source.source2._source_ref().name ), }, ) diff --git a/tests/devices/electron_analyser/vgscienta/test_detector.py b/tests/devices/electron_analyser/vgscienta/test_detector.py index 2158a81dd40..70fcd9af9c3 100644 --- a/tests/devices/electron_analyser/vgscienta/test_detector.py +++ b/tests/devices/electron_analyser/vgscienta/test_detector.py @@ -1,8 +1,10 @@ import numpy as np import pytest -from ophyd_async.core import SignalR, init_devices +from bluesky import RunEngine +from ophyd_async.core import 
init_devices from ophyd_async.testing import set_mock_value +from dodal.devices.electron_analyser import DualEnergySource from dodal.devices.electron_analyser.vgscienta import ( VGScientaDetector, ) @@ -12,13 +14,14 @@ @pytest.fixture async def sim_detector( - energy_sources: dict[str, SignalR[float]], + dual_energy_source: DualEnergySource, + RE: RunEngine, ) -> VGScientaDetector[LensMode, PsuMode, PassEnergy]: async with init_devices(mock=True): sim_driver = await create_detector( VGScientaDetector[LensMode, PsuMode, PassEnergy], prefix="TEST:", - energy_sources=energy_sources, + energy_source=dual_energy_source, ) return sim_driver diff --git a/tests/devices/electron_analyser/vgscienta/test_driver_io.py b/tests/devices/electron_analyser/vgscienta/test_driver_io.py index e33d4981377..cb8bb90d6b8 100644 --- a/tests/devices/electron_analyser/vgscienta/test_driver_io.py +++ b/tests/devices/electron_analyser/vgscienta/test_driver_io.py @@ -5,7 +5,7 @@ from bluesky import plan_stubs as bps from bluesky.run_engine import RunEngine from bluesky.utils import FailedStatus -from ophyd_async.core import SignalR, StrictEnum, init_devices +from ophyd_async.core import StrictEnum, init_devices from ophyd_async.testing import ( assert_configuration, assert_reading, @@ -15,7 +15,7 @@ set_mock_value, ) -from dodal.devices.electron_analyser import EnergyMode +from dodal.devices.electron_analyser import DualEnergySource, EnergyMode from dodal.devices.electron_analyser.vgscienta import ( VGScientaAnalyserDriverIO, VGScientaRegion, @@ -29,13 +29,14 @@ @pytest.fixture async def sim_driver( - energy_sources: dict[str, SignalR[float]], + dual_energy_source: DualEnergySource, + RE: RunEngine, ) -> VGScientaAnalyserDriverIO[LensMode, PsuMode, PassEnergy]: async with init_devices(mock=True): sim_driver = await create_driver( VGScientaAnalyserDriverIO[LensMode, PsuMode, PassEnergy], prefix="TEST:", - energy_sources=energy_sources, + energy_source=dual_energy_source, ) return sim_driver @@ 
-48,8 +49,8 @@ async def test_analyser_sets_region_correctly( ) -> None: RE(bps.mv(sim_driver, region), wait=True) - energy_source = sim_driver._get_energy_source(region.excitation_energy_source) - region.switch_energy_mode(EnergyMode.KINETIC, await energy_source.get_value()) + excitation_energy = await sim_driver.energy_source.energy.get_value() + region.switch_energy_mode(EnergyMode.KINETIC, excitation_energy) get_mock_put(sim_driver.region_name).assert_called_once_with(region.name, wait=True) get_mock_put(sim_driver.energy_mode).assert_called_once_with( @@ -73,9 +74,6 @@ async def test_analyser_sets_region_correctly( get_mock_put(sim_driver.pass_energy).assert_called_once_with( region.pass_energy, wait=True ) - get_mock_put(sim_driver.excitation_energy_source).assert_called_once_with( - energy_source.name, wait=True - ) get_mock_put(sim_driver.slices).assert_called_once_with(region.slices, wait=True) get_mock_put(sim_driver.acquire_time).assert_called_once_with( region.acquire_time, wait=True @@ -112,8 +110,8 @@ async def test_analyser_sets_region_and_read_configuration_is_correct( RE(bps.mv(sim_driver, region), wait=True) prefix = sim_driver.name + "-" - energy_source = sim_driver._get_energy_source(region.excitation_energy_source) - region.switch_energy_mode(EnergyMode.KINETIC, await energy_source.get_value()) + excitation_energy = await sim_driver.energy_source.energy.get_value() + region.switch_energy_mode(EnergyMode.KINETIC, excitation_energy) await assert_configuration( sim_driver, @@ -127,7 +125,6 @@ async def test_analyser_sets_region_and_read_configuration_is_correct( f"{prefix}high_energy": partial_reading(region.high_energy), f"{prefix}energy_step": partial_reading(region.energy_step), f"{prefix}pass_energy": partial_reading(region.pass_energy), - f"{prefix}excitation_energy_source": partial_reading(energy_source.name), f"{prefix}slices": partial_reading(region.slices), f"{prefix}iterations": partial_reading(region.iterations), f"{prefix}total_steps": 
partial_reading(ANY), @@ -144,11 +141,12 @@ async def test_analyser_sets_region_and_read_configuration_is_correct( f"{prefix}region_size_y": partial_reading(region.size_y), f"{prefix}sensor_max_size_y": partial_reading(ANY), f"{prefix}psu_mode": partial_reading(ANY), - }, + } + | await sim_driver.energy_source.read_configuration(), ) -@pytest.fixture +@pytest.mark.parametrize("region", TEST_SEQUENCE_REGION_NAMES, indirect=True) async def test_analyser_sets_region_and_read_is_correct( sim_driver: VGScientaAnalyserDriverIO[LensMode, PsuMode, PassEnergy], region: VGScientaRegion[LensMode, PassEnergy], @@ -156,22 +154,23 @@ async def test_analyser_sets_region_and_read_is_correct( ) -> None: RE(bps.mv(sim_driver, region), wait=True) - energy_source = sim_driver._get_energy_source(region.excitation_energy_source) - excitation_energy = await energy_source.get_value() - spectrum = np.array([1, 2, 3, 4, 5], dtype=float) expected_total_intensity = np.sum(spectrum) set_mock_value(sim_driver.spectrum, spectrum) prefix = sim_driver.name + "-" + await assert_reading( sim_driver, { - f"{prefix}excitation_energy": partial_reading(excitation_energy), + f"{prefix}energy_source-selected_source": partial_reading( + region.excitation_energy_source + ), f"{prefix}image": partial_reading(ANY), f"{prefix}spectrum": partial_reading(spectrum), f"{prefix}total_intensity": partial_reading(expected_total_intensity), - }, + } + | await sim_driver.energy_source.read(), ) @@ -179,10 +178,10 @@ async def test_analyser_sets_region_and_read_is_correct( async def test_analayser_binding_energy_is_correct( sim_driver: VGScientaAnalyserDriverIO[LensMode, PsuMode, PassEnergy], region: VGScientaRegion[LensMode, PassEnergy], + RE: RunEngine, ) -> None: - excitation_energy = await sim_driver._get_energy_source( - region.excitation_energy_source - ).get_value() + RE(bps.mv(sim_driver, region), wait=True) + excitation_energy = await sim_driver.energy_source.energy.get_value() # Check binding energy is correct 
energy_axis = [1, 2, 3, 4, 5] From b929e5b3b9ecd6f345dc5be101fa3c7ef1c3f65d Mon Sep 17 00:00:00 2001 From: Richard Dixey <185198552+RJCD-Diamond@users.noreply.github.com> Date: Tue, 23 Sep 2025 15:04:59 +0100 Subject: [PATCH 15/23] Remove i11 Static Path Provider to use Numtracker (#1569) * remove i11 static path provider * remove i11 static path comments * fix lint --- src/dodal/beamlines/i11.py | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/src/dodal/beamlines/i11.py b/src/dodal/beamlines/i11.py index b51ade4043a..c9446937ca7 100644 --- a/src/dodal/beamlines/i11.py +++ b/src/dodal/beamlines/i11.py @@ -1,13 +1,9 @@ -from pathlib import Path - from dodal.common.beamlines.beamline_utils import ( device_factory, get_path_provider, - set_path_provider, ) from dodal.common.beamlines.beamline_utils import set_beamline as set_utils_beamline from dodal.common.beamlines.device_helpers import DET_SUFFIX -from dodal.common.visit import RemoteDirectoryServiceClient, StaticVisitPathProvider from dodal.devices.cryostream import OxfordCryoStream from dodal.devices.eurotherm import ( EurothermGeneral, @@ -34,19 +30,6 @@ set_log_beamline(BL) set_utils_beamline(BL) -# Currently we must hard-code the visit, determining the visit at runtime requires -# infrastructure that is still WIP. -# Communication with GDA is also WIP so for now we determine an arbitrary scan number -# locally and write the commissioning directory. The scan number is not guaranteed to -# be unique and the data is at risk - this configuration is for testing only. 
-set_path_provider( - StaticVisitPathProvider( - BL, - Path(f"/dls/{BL}/data/2025/cm40625-3/bluesky"), - client=RemoteDirectoryServiceClient(f"http://{BL}-control:8088/api"), - ) -) - @device_factory() def mythen3() -> Mythen3: From c8c55c84ec4949aa2057700ef30d7af3a546cf49 Mon Sep 17 00:00:00 2001 From: Emily Arnold <222046505+EmsArnold@users.noreply.github.com> Date: Wed, 24 Sep 2025 09:46:02 +0100 Subject: [PATCH 16/23] add stats plugin to SAXS and WAXS detectors (#1572) --- src/dodal/beamlines/i22.py | 12 +++++++++++- src/dodal/devices/i22/nxsas.py | 2 ++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/src/dodal/beamlines/i22.py b/src/dodal/beamlines/i22.py index 6d1206cb29a..de7b93b014d 100644 --- a/src/dodal/beamlines/i22.py +++ b/src/dodal/beamlines/i22.py @@ -1,7 +1,7 @@ from pathlib import Path from ophyd_async.epics.adaravis import AravisDetector -from ophyd_async.epics.adcore import NDPluginBaseIO +from ophyd_async.epics.adcore import NDPluginBaseIO, NDPluginStatsIO from ophyd_async.epics.adpilatus import PilatusDetector from ophyd_async.fastcs.panda import HDFPanda @@ -68,6 +68,11 @@ def saxs() -> PilatusDetector: drv_suffix=CAM_SUFFIX, fileio_suffix=HDF5_SUFFIX, metadata_holder=metadata_holder, + plugins={ + "stats": NDPluginStatsIO( + prefix=f"{PREFIX.beamline_prefix}-EA-PILAT-01:STAT:" + ) + }, ) @@ -93,6 +98,11 @@ def waxs() -> PilatusDetector: drv_suffix=CAM_SUFFIX, fileio_suffix=HDF5_SUFFIX, metadata_holder=metadata_holder, + plugins={ + "stats": NDPluginStatsIO( + prefix=f"{PREFIX.beamline_prefix}-EA-PILAT-03:STAT:" + ) + }, ) diff --git a/src/dodal/devices/i22/nxsas.py b/src/dodal/devices/i22/nxsas.py index 5078ff83f98..eb906f91d30 100644 --- a/src/dodal/devices/i22/nxsas.py +++ b/src/dodal/devices/i22/nxsas.py @@ -7,6 +7,7 @@ from event_model.documents.event_descriptor import DataKey from ophyd_async.core import PathProvider from ophyd_async.epics.adaravis import AravisDetector +from ophyd_async.epics.adcore import NDPluginBaseIO 
from ophyd_async.epics.adpilatus import PilatusDetector ValueAndUnits = tuple[float, str] @@ -106,6 +107,7 @@ def __init__( fileio_suffix: str, metadata_holder: NXSasMetadataHolder, name: str = "", + plugins: dict[str, NDPluginBaseIO] | None = None, ): """Extends detector with configuration metadata required or desired to comply with the NXsas application definition. From 351ea93cfbde14211396daeba956d4a6d37e436b Mon Sep 17 00:00:00 2001 From: Raymond Fan Date: Wed, 24 Sep 2025 15:27:21 +0100 Subject: [PATCH 17/23] 1405 added lakeshore to i10 (#1558) * added lakeshore * grouping vector into function * reorder structure * added lakeshore 340 an 336 * undo spaces change in pyproject * complete test * more tests * make set wait in test * correct lakeshore 340 name * correct some docstring * add lakeshore docstring * add i16 lakeshore * add i06 lakeshores * correct default for lakeshore340 * add docstring for base signals * correct endstation name * correct docstring * remove private methods from docstring and add explanation to control_channel * correct docsting * add user_ to readback and setpoint * change zero offset to custom offset * remove lakeshore on beamlines * remove sub class * remove sub class * rename single control channel to no_pv_suffix_index * signal only * update test to check off_set * add test to check pv for no_pv_index * create channels for group of signal instead of device vector each signal. 
* test signal channel control * move pid into control channel * add docstring * corrected snake_case and added docstring to clarify channel is readback channel * add channel_rw f * correct ramp_enable channel type * dropping channel in readback * update branch and make lakeshore work * correct temperature control test with lakeshoreIO * remove add readable in set channel * make hints_channel for changing hint signal in plan * move class docs string to collect place * correct enum * remove else and use map instead for suffix * add docstings * correct grammar Co-authored-by: Dominic Oram * correct import * correct logic and test * move docstring * rename channel to current_hints_channel * add lakeshore to i10 and i10-1 * add lakeshore to beamline * Lakeshore added to init * revert un-indented changes * Update src/dodal/devices/temperture_controller/lakeshore/lakeshore.py Co-authored-by: oliwenmandiamond <136330507+oliwenmandiamond@users.noreply.github.com> * add docstrings * fix typo * remove hint_signal all together * move test to correct folder to mirror src * fix typos --------- Co-authored-by: Dominic Oram Co-authored-by: oliwenmandiamond <136330507+oliwenmandiamond@users.noreply.github.com> --- src/dodal/beamlines/i10-1.py | 25 ++++++ src/dodal/beamlines/i10.py | 10 +++ .../devices/temperture_controller/__init__.py | 4 +- .../lakeshore/lakeshore.py | 83 +++++++++++++++---- .../temperature_controller/__init__.py | 0 .../lakeshore/__init__.py | 0 .../lakeshore}/test_lakeshore.py | 35 ++------ .../lakeshore}/test_lakeshore_io.py | 0 8 files changed, 109 insertions(+), 48 deletions(-) create mode 100644 src/dodal/beamlines/i10-1.py rename tests/devices/{unit_tests => }/temperature_controller/__init__.py (100%) create mode 100644 tests/devices/temperature_controller/lakeshore/__init__.py rename tests/devices/{unit_tests/temperature_controller => temperature_controller/lakeshore}/test_lakeshore.py (79%) rename tests/devices/{unit_tests/temperature_controller => 
temperature_controller/lakeshore}/test_lakeshore_io.py (100%) diff --git a/src/dodal/beamlines/i10-1.py b/src/dodal/beamlines/i10-1.py new file mode 100644 index 00000000000..c84c890a556 --- /dev/null +++ b/src/dodal/beamlines/i10-1.py @@ -0,0 +1,25 @@ +from dodal.common.beamlines.beamline_utils import ( + device_factory, +) +from dodal.common.beamlines.beamline_utils import set_beamline as set_utils_beamline +from dodal.devices.synchrotron import Synchrotron +from dodal.devices.temperture_controller import Lakeshore336 +from dodal.log import set_beamline as set_log_beamline +from dodal.utils import BeamlinePrefix, get_beamline_name + +BL = get_beamline_name("i10-1") +PREFIX = BeamlinePrefix(BL, suffix="J") +set_log_beamline(BL) +set_utils_beamline(BL) + + +@device_factory() +def synchrotron() -> Synchrotron: + return Synchrotron() + + +@device_factory() +def em_temperature_controller() -> Lakeshore336: + return Lakeshore336( + prefix=f"{PREFIX.beamline_prefix}-EA-TCTRL-41:", + ) diff --git a/src/dodal/beamlines/i10.py b/src/dodal/beamlines/i10.py index 2a234274a62..ca1905f6d35 100644 --- a/src/dodal/beamlines/i10.py +++ b/src/dodal/beamlines/i10.py @@ -25,6 +25,9 @@ from dodal.devices.i10.slits import I10Slits, I10SlitsDrainCurrent from dodal.devices.motors import XYStage, XYZStage from dodal.devices.pgm import PGM +from dodal.devices.temperture_controller import ( + Lakeshore340, +) from dodal.log import set_beamline as set_log_beamline from dodal.utils import BeamlinePrefix, get_beamline_name @@ -153,6 +156,13 @@ def sample_stage() -> XYZStage: return XYZStage(prefix="ME01D-MO-CRYO-01:") +@device_factory() +def rasor_temperature_controller() -> Lakeshore340: + return Lakeshore340( + prefix="ME01D-EA-TCTRL-01:", + ) + + @device_factory() def rasor_femto() -> RasorFemto: return RasorFemto( diff --git a/src/dodal/devices/temperture_controller/__init__.py b/src/dodal/devices/temperture_controller/__init__.py index 4e32083a9a9..5391f25dd66 100644 --- 
a/src/dodal/devices/temperture_controller/__init__.py +++ b/src/dodal/devices/temperture_controller/__init__.py @@ -1,3 +1,3 @@ -from .lakeshore.lakeshore import Lakeshore +from .lakeshore.lakeshore import Lakeshore, Lakeshore336, Lakeshore340 -__all__ = ["Lakeshore"] +__all__ = ["Lakeshore336", "Lakeshore340", "Lakeshore"] diff --git a/src/dodal/devices/temperture_controller/lakeshore/lakeshore.py b/src/dodal/devices/temperture_controller/lakeshore/lakeshore.py index 27e5b8bb40a..8f283bdd7df 100644 --- a/src/dodal/devices/temperture_controller/lakeshore/lakeshore.py +++ b/src/dodal/devices/temperture_controller/lakeshore/lakeshore.py @@ -6,20 +6,28 @@ SignalDatatypeT, StandardReadable, StandardReadableFormat, + StrictEnum, derived_signal_rw, soft_signal_rw, ) -from ophyd_async.core._readable import _HintsFromName from .lakeshore_io import ( LakeshoreBaseIO, ) +class Heater336Settings(StrictEnum): + OFF = "Off" + LOW = "Low" + MEDIUM = "Medium" + HIGH = "High" + + class Lakeshore(LakeshoreBaseIO, StandardReadable, Movable[float]): """ Device for controlling and reading from a Lakeshore temperature controller. It supports multiple channels and PID control. 
+ Attributes ---------- temperature : LakeshoreBaseIO @@ -77,15 +85,6 @@ def __init__( set_derived=self._set_control_channel, current_channel=self._control_channel, ) - - self._hints_channel = soft_signal_rw(int, initial_value=control_channel) - - self.hints_channel = derived_signal_rw( - raw_to_derived=self._get_hints_channel, - set_derived=self._set_hints_channel, - current_hints_channel=self._hints_channel, - ) - super().__init__( prefix=prefix, num_readback_channel=num_readback_channel, @@ -96,9 +95,10 @@ def __init__( self.add_readables( [setpoint.user_setpoint for setpoint in self.control_channels.values()] - + list(self.readback.values()) ) - self._has_hints = (_HintsFromName(self.readback[control_channel]),) + self.add_readables( + list(self.readback.values()), format=StandardReadableFormat.HINTED_SIGNAL + ) self.add_readables( [ @@ -147,9 +147,58 @@ async def _set_control_channel(self, value: int) -> None: self.control_channels[value].heater_output_range.read, ) - async def _set_hints_channel(self, readback_channel: int) -> None: - self._has_hints = (_HintsFromName(self.readback[readback_channel]),) - await self._hints_channel.set(readback_channel) - def _get_hints_channel(self, current_hints_channel: int) -> int: - return current_hints_channel +class Lakeshore336(Lakeshore): + def __init__( + self, + prefix: str, + control_channel: int = 1, + name: str = "", + ): + """ + Lakeshore 336 temperature controller. With 4 readback and control channels. + Heater settings are: Off, Low, Medium, High. + Parameters + ---------- + prefix : str + The EPICS prefix for the device. + control_channel : int, optional + The initial control channel (default is 1). 
+ """ + super().__init__( + prefix=prefix, + num_readback_channel=4, + heater_setting=Heater336Settings, + control_channel=control_channel, + single_control_channel=False, + name=name, + ) + + +class Lakeshore340(Lakeshore): + def __init__( + self, + prefix: str, + control_channel: int = 1, + name: str = "", + ): + """Lakeshore 340 temperature controller. With 4 readback channels and a single + control channel. + Heater settings are in power from 0 to 5. 0 is 0 watt, 5 is 50 watt. + + Parameters + ---------- + prefix : str + The EPICS prefix for the device. + control_channel : int, optional + The initial control channel (default is 1). + """ + + super().__init__( + prefix=prefix, + num_readback_channel=4, + heater_setting=float, + control_channel=control_channel, + single_control_channel=True, + name=name, + ) diff --git a/tests/devices/unit_tests/temperature_controller/__init__.py b/tests/devices/temperature_controller/__init__.py similarity index 100% rename from tests/devices/unit_tests/temperature_controller/__init__.py rename to tests/devices/temperature_controller/__init__.py diff --git a/tests/devices/temperature_controller/lakeshore/__init__.py b/tests/devices/temperature_controller/lakeshore/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/devices/unit_tests/temperature_controller/test_lakeshore.py b/tests/devices/temperature_controller/lakeshore/test_lakeshore.py similarity index 79% rename from tests/devices/unit_tests/temperature_controller/test_lakeshore.py rename to tests/devices/temperature_controller/lakeshore/test_lakeshore.py index 1a738bfbf55..170611914ed 100644 --- a/tests/devices/unit_tests/temperature_controller/test_lakeshore.py +++ b/tests/devices/temperature_controller/lakeshore/test_lakeshore.py @@ -121,38 +121,15 @@ async def test_lakeshore_read( await assert_reading(lakeshore, expected_reading, full_match=False) -@pytest.mark.parametrize( - "readback_channel", - [3, 2, 1, 4], -) -async def 
test_lakeshore_count_with_hints( - lakeshore: Lakeshore, RE: RunEngine, readback_channel: int -): +async def test_lakeshore_hints_with_count(lakeshore: Lakeshore, RE: RunEngine): docs = defaultdict(list) def capture_emitted(name, doc): docs[name].append(doc) - RE(abs_set(lakeshore.hints_channel, readback_channel, wait=True)) RE(count([lakeshore]), capture_emitted) - assert ( - docs["descriptor"][0]["hints"]["lakeshore"]["fields"][0] - == f"lakeshore-readback-{readback_channel}" - ) - - -@pytest.mark.parametrize( - "readback_channel", - [3, 2, 1, 4], -) -async def test_lakeshore_hints_read( - lakeshore: Lakeshore, RE: RunEngine, readback_channel: int -): - docs = defaultdict(list) - - def capture_emitted(name, doc): - docs[name].append(doc) - - RE(abs_set(lakeshore.hints_channel, readback_channel), wait=True) - RE(count([lakeshore.hints_channel]), capture_emitted) - assert docs["event"][0]["data"]["lakeshore-hints_channel"] == readback_channel + for i in range(1, 5): + assert ( + docs["descriptor"][0]["hints"]["lakeshore"]["fields"][i - 1] + == f"lakeshore-readback-{i}" + ) diff --git a/tests/devices/unit_tests/temperature_controller/test_lakeshore_io.py b/tests/devices/temperature_controller/lakeshore/test_lakeshore_io.py similarity index 100% rename from tests/devices/unit_tests/temperature_controller/test_lakeshore_io.py rename to tests/devices/temperature_controller/lakeshore/test_lakeshore_io.py From 6e1d9145e88213ab935f43636866674f69a6ca3c Mon Sep 17 00:00:00 2001 From: olliesilvester <122091460+olliesilvester@users.noreply.github.com> Date: Wed, 24 Sep 2025 15:45:45 +0100 Subject: [PATCH 18/23] Add commissioning jungfrau device with temporary filewriter (#1548) * Add commissioning jungfrau device with temporary filewriter --- src/dodal/beamlines/i24.py | 24 ++++ .../devices/i24/commissioning_jungfrau.py | 114 ++++++++++++++++++ .../i24/test_commissioning_jungfrau.py | 66 ++++++++++ 3 files changed, 204 insertions(+) create mode 100644 
src/dodal/devices/i24/commissioning_jungfrau.py create mode 100644 tests/devices/i24/test_commissioning_jungfrau.py diff --git a/src/dodal/beamlines/i24.py b/src/dodal/beamlines/i24.py index c2a07c52688..af6b37d5330 100644 --- a/src/dodal/beamlines/i24.py +++ b/src/dodal/beamlines/i24.py @@ -1,3 +1,7 @@ +from pathlib import PurePath + +from ophyd_async.core import AutoIncrementingPathProvider, StaticFilenameProvider + from dodal.common.beamlines.beamline_utils import ( BL, device_factory, @@ -8,6 +12,7 @@ from dodal.devices.i24.aperture import Aperture from dodal.devices.i24.beam_center import DetectorBeamCenter from dodal.devices.i24.beamstop import Beamstop +from dodal.devices.i24.commissioning_jungfrau import CommissioningJungfrau from dodal.devices.i24.dcm import DCM from dodal.devices.i24.dual_backlight import DualBacklight from dodal.devices.i24.focus_mirrors import FocusMirrorsMode @@ -187,3 +192,22 @@ def eiger_beam_center() -> DetectorBeamCenter: f"{PREFIX.beamline_prefix}-EA-EIGER-01:CAM:", "eiger_bc", ) + + +@device_factory() +def commissioning_jungfrau( + path_to_dir: str = "/tmp/jf", # Device factory doesn't allow for required args, + filename: str = "jf_output", # but these should be manually entered when commissioning +) -> CommissioningJungfrau: + """Get the commissioning Jungfrau 9M device, which uses a temporary filewriter + device in place of Odin while the detector is in commissioning.
+ If this is called when already instantiated, it will return the existing object.""" + + return CommissioningJungfrau( + f"{PREFIX.beamline_prefix}-EA-JFRAU-01:", + f"{PREFIX.beamline_prefix}-JUNGFRAU-META:FD:", + AutoIncrementingPathProvider( + StaticFilenameProvider(filename), PurePath(path_to_dir) + ), + ) diff --git a/src/dodal/devices/i24/commissioning_jungfrau.py b/src/dodal/devices/i24/commissioning_jungfrau.py new file mode 100644 index 00000000000..caab896ed11 --- /dev/null +++ b/src/dodal/devices/i24/commissioning_jungfrau.py @@ -0,0 +1,114 @@ +import asyncio +from collections.abc import AsyncGenerator, AsyncIterator +from pathlib import Path + +from bluesky.protocols import StreamAsset +from event_model import DataKey # type: ignore +from ophyd_async.core import ( + AutoIncrementingPathProvider, + DetectorWriter, + StandardDetector, + StandardReadable, + StaticPathProvider, + observe_value, + wait_for_value, +) +from ophyd_async.epics.core import epics_signal_r, epics_signal_rw, epics_signal_rw_rbv +from ophyd_async.fastcs.jungfrau._controller import JungfrauController +from ophyd_async.fastcs.jungfrau._signals import JungfrauDriverIO + +from dodal.log import LOGGER + + +class JunfrauCommissioningWriter(DetectorWriter, StandardReadable): + """Implementation of the temporary filewriter used for Jungfrau commissioning on i24. 
+ + The PVs on this device are responsible for writing files of a specified name + to a specified path, marking itself as "ready to write", and having a counter of + frames written, which must be zero'd at the ophyd level + """ + + def __init__( + self, + prefix, + path_provider: AutoIncrementingPathProvider | StaticPathProvider, + name="", + ) -> None: + with self.add_children_as_readables(): + self._path_info = path_provider + self.frame_counter = epics_signal_rw(int, f"{prefix}NumCaptured") + self.file_name = epics_signal_rw_rbv(str, f"{prefix}FileName") + self.file_path = epics_signal_rw_rbv(str, f"{prefix}FilePath") + self.writer_ready = epics_signal_r(int, f"{prefix}Ready_RBV") + super().__init__(name) + + async def open(self, name: str, exposures_per_event: int = 1) -> dict[str, DataKey]: + self._exposures_per_event = exposures_per_event + _path_info = self._path_info() + + # Commissioning Jungfrau plans allow you to override path, so check to see if file exists + requested_filepath = Path(_path_info.directory_path) / _path_info.filename + if requested_filepath.exists(): + raise FileExistsError( + f"Jungfrau was requested to write to {requested_filepath}, but this file already exists!" 
+ ) + + await asyncio.gather( + self.file_name.set(_path_info.filename), + self.file_path.set(str(_path_info.directory_path)), + self.frame_counter.set(0), + ) + LOGGER.info( + f"Jungfrau writing to folder {_path_info.directory_path} with filename {_path_info.filename}" + ) + await wait_for_value(self.writer_ready, 1, timeout=10) + return await self._describe() + + async def _describe(self) -> dict[str, DataKey]: + # Dummy function, doesn't actually describe the dataset + + return { + "data": DataKey( + source="Commissioning writer", + shape=[-1], + dtype="array", + dtype_numpy=" AsyncGenerator[int, None]: + timeout = timeout * 2 # This filewriter is slow + async for num_captured in observe_value(self.frame_counter, timeout): + yield num_captured // (self._exposures_per_event) + + async def get_indices_written(self) -> int: + return await self.frame_counter.get_value() // self._exposures_per_event + + def collect_stream_docs( + self, name: str, indices_written: int + ) -> AsyncIterator[StreamAsset]: + raise NotImplementedError() + + async def close(self) -> None: ... 
+ + +class CommissioningJungfrau( + StandardDetector[JungfrauController, JunfrauCommissioningWriter] +): + """Ophyd-async implementation of a Jungfrau 9M Detector, using a temporary + filewriter in place of Odin""" + + def __init__( + self, + prefix: str, + writer_prefix: str, + path_provider: AutoIncrementingPathProvider | StaticPathProvider, + name="", + ): + self.drv = JungfrauDriverIO(prefix) + writer = JunfrauCommissioningWriter(writer_prefix, path_provider) + controller = JungfrauController(self.drv) + super().__init__(controller, writer, name=name) diff --git a/tests/devices/i24/test_commissioning_jungfrau.py b/tests/devices/i24/test_commissioning_jungfrau.py new file mode 100644 index 00000000000..fd1a7035013 --- /dev/null +++ b/tests/devices/i24/test_commissioning_jungfrau.py @@ -0,0 +1,66 @@ +import asyncio +from pathlib import Path, PurePath +from unittest.mock import MagicMock + +import pytest +from ophyd_async.core import ( + AutoIncrementingPathProvider, + StaticFilenameProvider, + StaticPathProvider, + TriggerInfo, + init_devices, +) +from ophyd_async.testing import ( + set_mock_value, +) + +from dodal.devices.i24.commissioning_jungfrau import CommissioningJungfrau + + +@pytest.fixture +def jungfrau(tmpdir: Path) -> CommissioningJungfrau: + with init_devices(mock=True): + name = StaticFilenameProvider("jf_out") + path = AutoIncrementingPathProvider(name, PurePath(tmpdir)) + detector = CommissioningJungfrau("", "", path) + + return detector + + +async def test_jungfrau_with_temporary_writer( + jungfrau: CommissioningJungfrau, +): + set_mock_value(jungfrau._writer.writer_ready, 1) + set_mock_value(jungfrau._writer.frame_counter, 10) + jungfrau._writer._path_info = MagicMock() + await jungfrau.prepare(TriggerInfo(livetime=1e-3, exposures_per_event=5)) + assert await jungfrau._writer.frame_counter.get_value() == 0 + await jungfrau.kickoff() + status = jungfrau.complete() + + async def _do_fake_writing(): + for frame in range(1, 5): + 
set_mock_value(jungfrau._writer.frame_counter, frame) + assert not status.done + set_mock_value(jungfrau._writer.frame_counter, 5) + + await asyncio.gather(status, _do_fake_writing()) + jungfrau._writer._path_info.assert_called_once() + + +async def test_jungfrau_error_when_writing_to_existing_file(tmp_path: Path): + file_name = "test_file" + empty_file = tmp_path / file_name + empty_file.touch() + name = StaticFilenameProvider(file_name) + path = StaticPathProvider(name, PurePath(tmp_path)) + with init_devices(mock=True): + jungfrau = CommissioningJungfrau("", "", path) + set_mock_value(jungfrau._writer.writer_ready, 1) + with pytest.raises(FileExistsError): + await jungfrau.prepare(TriggerInfo(livetime=1e-3, exposures_per_event=5)) + + +def test_collect_stream_docs_raises_error(jungfrau: CommissioningJungfrau): + with pytest.raises(NotImplementedError): + jungfrau._writer.collect_stream_docs("jungfrau", 0) From 8899f1038d051c792f7018759a83bd8a673de789 Mon Sep 17 00:00:00 2001 From: olliesilvester <122091460+olliesilvester@users.noreply.github.com> Date: Wed, 24 Sep 2025 17:10:18 +0100 Subject: [PATCH 19/23] Add Vmxm FGS devices (#1458) * Correct mapping for vmxm zebra * Create 2d FGS device * Update FGS device, remove backlight --- src/dodal/beamlines/i02_1.py | 81 ++++++++++++++ src/dodal/devices/i02_1/__init__.py | 0 src/dodal/devices/i02_1/fast_grid_scan.py | 104 ++++++++++++++++++ src/dodal/devices/i02_1/sample_motors.py | 19 ++++ .../devices/zebra/zebra_constants_mapping.py | 2 +- tests/devices/i02_1/test_fast_grid_scan.py | 23 ++++ 6 files changed, 228 insertions(+), 1 deletion(-) create mode 100644 src/dodal/devices/i02_1/__init__.py create mode 100644 src/dodal/devices/i02_1/fast_grid_scan.py create mode 100644 src/dodal/devices/i02_1/sample_motors.py create mode 100644 tests/devices/i02_1/test_fast_grid_scan.py diff --git a/src/dodal/beamlines/i02_1.py b/src/dodal/beamlines/i02_1.py index 2464a2c0045..0cacb7a490c 100644 --- a/src/dodal/beamlines/i02_1.py 
+++ b/src/dodal/beamlines/i02_1.py @@ -2,6 +2,7 @@ from dodal.common.beamlines.beamline_utils import ( device_factory, + device_instantiation, ) from dodal.common.beamlines.beamline_utils import set_beamline as set_utils_beamline from dodal.devices.attenuator.attenuator import EnumFilterAttenuator @@ -11,6 +12,17 @@ I02_1FilterThreeSelections, I02_1FilterTwoSelections, ) +from dodal.devices.eiger import EigerDetector +from dodal.devices.i02_1.fast_grid_scan import TwoDFastGridScan +from dodal.devices.i02_1.sample_motors import SampleMotors +from dodal.devices.synchrotron import Synchrotron +from dodal.devices.zebra.zebra import Zebra +from dodal.devices.zebra.zebra_constants_mapping import ( + ZebraMapping, + ZebraSources, + ZebraTTLOutputs, +) +from dodal.devices.zocalo import ZocaloResults from dodal.log import set_beamline as set_log_beamline from dodal.utils import BeamlinePrefix, get_beamline_name @@ -18,6 +30,75 @@ PREFIX = BeamlinePrefix(BL, suffix="J") set_log_beamline(BL) set_utils_beamline(BL) +DAQ_CONFIGURATION_PATH = "/dls_sw/i02-1/software/daq_configuration" + +I02_1_ZEBRA_MAPPING = ZebraMapping( + outputs=ZebraTTLOutputs( + TTL_EIGER=4, TTL_XSPRESS3=3, TTL_FAST_SHUTTER=1, TTL_PILATUS=2 + ), + sources=ZebraSources(), +) + + +@device_factory() +def eiger(mock: bool = False) -> EigerDetector: + """Get the i02-1 Eiger device, instantiate it if it hasn't already been. + If this is called when already instantiated in i02-1, it will return the existing object. + """ + return device_instantiation( + device_factory=EigerDetector, + prefix=f"{PREFIX.beamline_prefix}-EA-EIGER-01:", + bl_prefix=False, + wait=False, + fake=mock, + name="eiger", + ) + + +@device_factory() +def zebra_fast_grid_scan() -> TwoDFastGridScan: + """Get the i02-1 zebra_fast_grid_scan device, instantiate it if it hasn't already been. + If this is called when already instantiated in i02-1, it will return the existing object. 
+ """ + return TwoDFastGridScan( + prefix=f"{PREFIX.beamline_prefix}-MO-SAMP-11:FGS:", + ) + + +@device_factory() +def synchrotron() -> Synchrotron: + """Get the i02-1 synchrotron device, instantiate it if it hasn't already been. + If this is called when already instantiated in i02-1, it will return the existing object. + """ + return Synchrotron() + + +@device_factory() +def zebra() -> Zebra: + """Get the i02-1 zebra device, instantiate it if it hasn't already been. + If this is called when already instantiated in i02-1, it will return the existing object. + """ + return Zebra( + prefix=f"{PREFIX.beamline_prefix}-EA-ZEBRA-01:", + mapping=I02_1_ZEBRA_MAPPING, + ) + + +# Device not needed after https://github.com/DiamondLightSource/mx-bluesky/issues/1299 +@device_factory() +def zocalo() -> ZocaloResults: + """Get the i02-1 ZocaloResults device, instantiate it if it hasn't already been. + If this is called when already instantiated in i02-1, it will return the existing object. + """ + return ZocaloResults() + + +@device_factory() +def goniometer() -> SampleMotors: + """Get the i02-1 goniometer device, instantiate it if it hasn't already been. + If this is called when already instantiated in i02-1, it will return the existing object. 
+ """ + return SampleMotors(f"{PREFIX.beamline_prefix}-MO-SAMP-01:") @device_factory() diff --git a/src/dodal/devices/i02_1/__init__.py b/src/dodal/devices/i02_1/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/dodal/devices/i02_1/fast_grid_scan.py b/src/dodal/devices/i02_1/fast_grid_scan.py new file mode 100644 index 00000000000..03813581ad9 --- /dev/null +++ b/src/dodal/devices/i02_1/fast_grid_scan.py @@ -0,0 +1,104 @@ +from ophyd_async.core import ( + Device, + Signal, + StandardReadable, + derived_signal_r, + soft_signal_r_and_setter, +) +from ophyd_async.epics.core import ( + epics_signal_r, + epics_signal_rw, + epics_signal_rw_rbv, + epics_signal_x, +) + +from dodal.devices.fast_grid_scan import ( + ZebraFastGridScan, +) +from dodal.log import LOGGER + + +class MotionProgram(Device): + def __init__(self, prefix: str, name: str = "") -> None: + self.running = epics_signal_r(int, prefix + "PROGBITS") + # Prog number PV doesn't exist for i02-1, but it's currently only used for logging + self.program_number, _ = soft_signal_r_and_setter(float, -1) + super().__init__(name) + + +class TwoDFastGridScan(ZebraFastGridScan): + """The EPICS interface for the 2D FGS differs slightly from the standard + version: + - No Z steps, Z step sizes, or Y2 start positions, + - Use exposure_time instead of dwell_time, + - No scan valid PV - see https://github.com/DiamondLightSource/mx-bluesky/issues/1203 + + This device abstracts away the differences by adding empty signals to the missing PV's. + Plans which expect the 3D grid scan device can then also use this. 
+ + See https://github.com/DiamondLightSource/mx-bluesky/issues/1112 for long term solution + """ + + def __init__(self, prefix: str, name: str = "") -> None: + self.x_steps = epics_signal_rw_rbv(int, f"{prefix}X_NUM_STEPS") + self.y_steps = epics_signal_rw_rbv(int, f"{prefix}Y_NUM_STEPS") + self.x_step_size = epics_signal_rw_rbv(float, f"{prefix}X_STEP_SIZE") + self.y_step_size = epics_signal_rw_rbv(float, f"{prefix}Y_STEP_SIZE") + self.x_start = epics_signal_rw_rbv(float, f"{prefix}X_START") + self.y1_start = epics_signal_rw_rbv(float, f"{prefix}Y_START") + self.z1_start = epics_signal_rw_rbv(float, f"{prefix}Z_START") + self.motion_program = MotionProgram("BL02J-MO-STEP-11:") + + # While the prefix is called exposure time, the motion script uses this value + # to dwell on each trigger point - the names are interchangeable here. + self.dwell_time_ms = epics_signal_rw_rbv(float, f"{prefix}EXPOSURE_TIME") + + # Z movement and second start positions don't exist in EPICS for 2D scan. + # Create soft signals for these so the class is structured like the common device. 
+ self.z_steps, _ = soft_signal_r_and_setter(int, 0) + self.z_step_size, _ = soft_signal_r_and_setter(float, 0) + self.z2_start, _ = soft_signal_r_and_setter(float, 0) + self.y2_start, _ = soft_signal_r_and_setter(int, 0) + + # VMXm will trigger the grid scan through GDA, which has its own validity check, + # but this PV is being added: https://github.com/DiamondLightSource/mx-bluesky/issues/1203 + self.scan_invalid = soft_signal_r_and_setter(float, 0) + + self.run_cmd = epics_signal_x(f"{prefix}RUN.PROC") + self.stop_cmd = epics_signal_x(f"{prefix}STOP.PROC") + self.status = epics_signal_r(int, f"{prefix}SCAN_STATUS") + self.expected_images = derived_signal_r( + self._calculate_expected_images, x=self.x_steps, y=self.y_steps, z=0 + ) + + self.x_counter = epics_signal_r(int, f"{prefix}X_COUNTER") + self.y_counter = epics_signal_r(int, f"{prefix}Y_COUNTER") + + # Kickoff timeout in seconds + self.KICKOFF_TIMEOUT: float = 5.0 + + self.COMPLETE_STATUS: float = 60.0 + + self.movable_params: dict[str, Signal] = { + "x_steps": self.x_steps, + "y_steps": self.y_steps, + "x_step_size_mm": self.x_step_size, + "y_step_size_mm": self.y_step_size, + "x_start_mm": self.x_start, + "y1_start_mm": self.y1_start, + "z1_start_mm": self.z1_start, + } + + self.position_counter = self._create_position_counter(prefix) + + # Skip the FGSCommon init function as we have already overriden all the signals + StandardReadable.__init__(self, name) + + def _create_position_counter(self, prefix: str): + return epics_signal_rw( + int, f"{prefix}POS_COUNTER_RBV", write_pv=f"{prefix}POS_COUNTER" + ) + + def _calculate_expected_images(self, x: int, y: int, z: int) -> int: + LOGGER.info(f"Reading num of images found {x, y} images in each axis") + return x * y diff --git a/src/dodal/devices/i02_1/sample_motors.py b/src/dodal/devices/i02_1/sample_motors.py new file mode 100644 index 00000000000..7380cb5827b --- /dev/null +++ b/src/dodal/devices/i02_1/sample_motors.py @@ -0,0 +1,19 @@ +from 
ophyd_async.core import StandardReadable +from ophyd_async.epics.motor import Motor + + +class SampleMotors(StandardReadable): + """Virtual Smaract motors on i02-1 (VMXm)""" + + def __init__( + self, + prefix: str, + name: str = "", + ): + # See https://github.com/DiamondLightSource/mx-bluesky/issues/1212 + # regarding a potential motion issue with omega + with self.add_children_as_readables(): + self.x = Motor(f"{prefix}X") + self.z = Motor(f"{prefix}Z") + self.omega = Motor(f"{prefix}OMEGA") + super().__init__(name=name) diff --git a/src/dodal/devices/zebra/zebra_constants_mapping.py b/src/dodal/devices/zebra/zebra_constants_mapping.py index 1f6a9512108..ed64a6996fb 100644 --- a/src/dodal/devices/zebra/zebra_constants_mapping.py +++ b/src/dodal/devices/zebra/zebra_constants_mapping.py @@ -77,7 +77,7 @@ class ZebraMapping(ZebraMappingValidations): Zebra's hardware configuration and wiring. """ - # Zebra ophyd signal for connection can be accessed + # Zebra ophyd signal for output can be accessed # with, eg, zebra.output.out_pvs[zebra.mapping.outputs.TTL_DETECTOR] outputs: ZebraTTLOutputs = ZebraTTLOutputs() diff --git a/tests/devices/i02_1/test_fast_grid_scan.py b/tests/devices/i02_1/test_fast_grid_scan.py new file mode 100644 index 00000000000..d051d8583b2 --- /dev/null +++ b/tests/devices/i02_1/test_fast_grid_scan.py @@ -0,0 +1,23 @@ +from ophyd_async.core import init_devices + +from dodal.devices.fast_grid_scan import ZebraFastGridScan +from dodal.devices.i02_1.fast_grid_scan import TwoDFastGridScan + + +def test_fast_grid_scan_has_same_attributes_as_parent(RE): + # We never call super().__init__ on the FGS device, since it needs to override + # a lot of the attributes as soft signals. 
Useful to check that the device + # looks the same + with init_devices(mock=True): + vmxm_fgs = TwoDFastGridScan("vmxm") + parent_fgs = ZebraFastGridScan("parent") + vmxm_fgs_attrs = set(vars(vmxm_fgs).keys()) + parent_fgs_attrs = set(vars(parent_fgs).keys()) + + assert vmxm_fgs_attrs == parent_fgs_attrs + + +def test_calc_expected_images(RE): + with init_devices(mock=True): + vmxm_fgs = TwoDFastGridScan("vmxm") + assert vmxm_fgs._calculate_expected_images(5, 10, 100) == 50 From 0862c2f5b909eaa5953b9078d0797e559828c474 Mon Sep 17 00:00:00 2001 From: olliesilvester <122091460+olliesilvester@users.noreply.github.com> Date: Wed, 24 Sep 2025 17:47:46 +0100 Subject: [PATCH 20/23] Refactor common gridscan (#1479) * Refactor common gridscan --- src/dodal/beamlines/i02_1.py | 13 +- src/dodal/beamlines/i03.py | 8 +- src/dodal/beamlines/i04.py | 8 +- src/dodal/devices/fast_grid_scan.py | 194 ++++++++++++------ src/dodal/devices/i02_1/fast_grid_scan.py | 123 ++++------- .../parameters/experiment_parameter_base.py | 6 +- tests/devices/i02_1/__init__.py | 0 tests/devices/i02_1/test_fast_grid_scan.py | 34 +-- tests/devices/test_gridscan.py | 115 ++++++----- 9 files changed, 269 insertions(+), 232 deletions(-) create mode 100644 tests/devices/i02_1/__init__.py diff --git a/src/dodal/beamlines/i02_1.py b/src/dodal/beamlines/i02_1.py index 0cacb7a490c..427dad19d7c 100644 --- a/src/dodal/beamlines/i02_1.py +++ b/src/dodal/beamlines/i02_1.py @@ -13,7 +13,7 @@ I02_1FilterTwoSelections, ) from dodal.devices.eiger import EigerDetector -from dodal.devices.i02_1.fast_grid_scan import TwoDFastGridScan +from dodal.devices.i02_1.fast_grid_scan import ZebraFastGridScanTwoD from dodal.devices.i02_1.sample_motors import SampleMotors from dodal.devices.synchrotron import Synchrotron from dodal.devices.zebra.zebra import Zebra @@ -33,9 +33,7 @@ DAQ_CONFIGURATION_PATH = "/dls_sw/i02-1/software/daq_configuration" I02_1_ZEBRA_MAPPING = ZebraMapping( - outputs=ZebraTTLOutputs( - TTL_EIGER=4, 
TTL_XSPRESS3=3, TTL_FAST_SHUTTER=1, TTL_PILATUS=2 - ), + outputs=ZebraTTLOutputs(TTL_EIGER=2, TTL_XSPRESS3=3, TTL_FAST_SHUTTER=1), sources=ZebraSources(), ) @@ -56,12 +54,13 @@ def eiger(mock: bool = False) -> EigerDetector: @device_factory() -def zebra_fast_grid_scan() -> TwoDFastGridScan: +def zebra_fast_grid_scan() -> ZebraFastGridScanTwoD: """Get the i02-1 zebra_fast_grid_scan device, instantiate it if it hasn't already been. If this is called when already instantiated in i02-1, it will return the existing object. """ - return TwoDFastGridScan( - prefix=f"{PREFIX.beamline_prefix}-MO-SAMP-11:FGS:", + return ZebraFastGridScanTwoD( + prefix=f"{PREFIX.beamline_prefix}-MO-SAMP-11:", + motion_controller_prefix="BL02J-MO-STEP-11:", ) diff --git a/src/dodal/beamlines/i03.py b/src/dodal/beamlines/i03.py index d237bade83c..ddfb03d5b6d 100644 --- a/src/dodal/beamlines/i03.py +++ b/src/dodal/beamlines/i03.py @@ -25,7 +25,7 @@ from dodal.devices.detector.detector_motion import DetectorMotion from dodal.devices.diamond_filter import DiamondFilter, I03Filters from dodal.devices.eiger import EigerDetector -from dodal.devices.fast_grid_scan import PandAFastGridScan, ZebraFastGridScan +from dodal.devices.fast_grid_scan import PandAFastGridScan, ZebraFastGridScanThreeD from dodal.devices.fluorescence_detector_motion import FluorescenceDetector from dodal.devices.flux import Flux from dodal.devices.focusing_mirror import FocusingMirrorWithStripes, MirrorVoltages @@ -191,11 +191,13 @@ def fastcs_eiger() -> FastEiger: @device_factory() -def zebra_fast_grid_scan() -> ZebraFastGridScan: +def zebra_fast_grid_scan() -> ZebraFastGridScanThreeD: """Get the i03 zebra_fast_grid_scan device, instantiate it if it hasn't already been. If this is called when already instantiated in i03, it will return the existing object. 
""" - return ZebraFastGridScan(prefix=f"{PREFIX.beamline_prefix}-MO-SGON-01:") + return ZebraFastGridScanThreeD( + prefix=f"{PREFIX.beamline_prefix}-MO-SGON-01:", + ) @device_factory() diff --git a/src/dodal/beamlines/i04.py b/src/dodal/beamlines/i04.py index 6a033632978..ee83d7f005f 100644 --- a/src/dodal/beamlines/i04.py +++ b/src/dodal/beamlines/i04.py @@ -17,7 +17,7 @@ from dodal.devices.detector.detector_motion import DetectorMotion from dodal.devices.diamond_filter import DiamondFilter, I04Filters from dodal.devices.eiger import EigerDetector -from dodal.devices.fast_grid_scan import ZebraFastGridScan +from dodal.devices.fast_grid_scan import ZebraFastGridScanThreeD from dodal.devices.flux import Flux from dodal.devices.i03.dcm import DCM from dodal.devices.i04.constants import RedisConstants @@ -210,11 +210,13 @@ def set_params(eiger: EigerDetector): @device_factory() -def zebra_fast_grid_scan() -> ZebraFastGridScan: +def zebra_fast_grid_scan() -> ZebraFastGridScanThreeD: """Get the i04 zebra_fast_grid_scan device, instantiate it if it hasn't already been. If this is called when already instantiated in i04, it will return the existing object. 
""" - return ZebraFastGridScan(f"{PREFIX.beamline_prefix}-MO-SGON-01:") + return ZebraFastGridScanThreeD( + prefix=f"{PREFIX.beamline_prefix}-MO-SGON-01:", + ) @device_factory() diff --git a/src/dodal/devices/fast_grid_scan.py b/src/dodal/devices/fast_grid_scan.py index 8d9c22861f6..57abc4b4d07 100644 --- a/src/dodal/devices/fast_grid_scan.py +++ b/src/dodal/devices/fast_grid_scan.py @@ -9,9 +9,11 @@ AsyncStatus, Device, Signal, + SignalR, SignalRW, StandardReadable, derived_signal_r, + soft_signal_r_and_setter, wait_for_value, ) from ophyd_async.epics.core import ( @@ -20,7 +22,7 @@ epics_signal_rw_rbv, epics_signal_x, ) -from pydantic import field_validator +from pydantic import BaseModel, field_validator from pydantic.dataclasses import dataclass from dodal.log import LOGGER @@ -63,7 +65,7 @@ class GridScanParamsCommon(AbstractExperimentWithBeamParams): """ Common holder class for the parameters of a grid scan in a similar layout to EPICS. The parameters and functions of this class are common - to both the zebra and panda triggered fast grid scans. + to both the zebra and panda triggered fast grid scans in 2d or 3d. The grid specified is where data is taken e.g. it can be assumed the first frame is at x_start, y1_start, z1_start and subsequent frames are N*step_size away. 
@@ -71,15 +73,11 @@ class GridScanParamsCommon(AbstractExperimentWithBeamParams): x_steps: int = 1 y_steps: int = 1 - z_steps: int = 0 x_step_size_mm: float = 0.1 y_step_size_mm: float = 0.1 - z_step_size_mm: float = 0.1 x_start_mm: float = 0.1 y1_start_mm: float = 0.1 - y2_start_mm: float = 0.1 z1_start_mm: float = 0.1 - z2_start_mm: float = 0.1 # Whether to set the stub offsets after centering set_stub_offsets: bool = False @@ -92,16 +90,10 @@ def x_axis(self) -> GridAxis: def y_axis(self) -> GridAxis: return GridAxis(self.y1_start_mm, self.y_step_size_mm, self.y_steps) + # In 2D grid scans, z axis is just the start position @property def z_axis(self) -> GridAxis: - return GridAxis(self.z2_start_mm, self.z_step_size_mm, self.z_steps) - - def get_num_images(self): - return self.x_steps * (self.y_steps + self.z_steps) - - @property - def is_3d_grid_scan(self): - return self.z_steps > 0 + return GridAxis(self.z1_start_mm, 0, 1) def grid_position_to_motor_position(self, grid_position: ndarray) -> ndarray: """Converts a grid position, given as steps in the x, y, z grid, @@ -130,15 +122,33 @@ def grid_position_to_motor_position(self, grid_position: ndarray) -> ndarray: ) -ParamType = TypeVar("ParamType", bound=GridScanParamsCommon) - +class GridScanParamsThreeD(GridScanParamsCommon): + """Additional parameters required to do a 3 dimensional gridscan. -class ZebraGridScanParams(GridScanParamsCommon): - """ - Params for standard Zebra FGS. Adds on the dwell time + A 3D gridscan works by doing two 2D gridscans. The first of these grids is x_steps by + y_steps. The sample is then rotated by 90 degrees, and then the second grid is + x_steps by z_steps. 
""" - dwell_time_ms: float = 10 + # Start position for z and y during the second gridscan + z2_start_mm: float = 0.1 + y2_start_mm: float = 0.1 + + z_step_size_mm: float = 0.1 + + # Number of vertical steps during the second grid scan, after the rotation in omega + z_steps: int = 1 + + @property + def z_axis(self) -> GridAxis: + return GridAxis(self.z2_start_mm, self.z_step_size_mm, self.z_steps) + + +ParamType = TypeVar("ParamType", bound=GridScanParamsCommon, covariant=True) + + +class WithDwellTime(BaseModel): + dwell_time_ms: float = 213 @field_validator("dwell_time_ms") @classmethod @@ -154,7 +164,14 @@ def non_integer_dwell_time(cls, dwell_time_ms: float) -> float: return dwell_time_ms -class PandAGridScanParams(GridScanParamsCommon): +class ZebraGridScanParamsThreeD(GridScanParamsThreeD, WithDwellTime): + """ + Params for standard Zebra FGS. Adds on the dwell time, which is really the time + between trigger positions. + """ + + +class PandAGridScanParams(GridScanParamsThreeD): """ Params for panda constant-motion scan. Adds on the goniometer run-up distance """ @@ -163,53 +180,50 @@ class PandAGridScanParams(GridScanParamsCommon): class MotionProgram(Device): - def __init__(self, prefix: str, name: str = "") -> None: + def __init__(self, prefix: str, name: str = "", has_prog_num=True) -> None: super().__init__(name) self.running = epics_signal_r(int, prefix + "PROGBITS") - self.program_number = epics_signal_r(float, prefix + "CS1:PROG_NUM") + if has_prog_num: + self.program_number = epics_signal_r(float, prefix + "CS1:PROG_NUM") + else: + # Prog number PV doesn't currently exist for i02-1 + self.program_number = soft_signal_r_and_setter(float, -1)[0] class FastGridScanCommon(StandardReadable, Flyable, ABC, Generic[ParamType]): - """Device for a general fast grid scan + """Device containing the minimal signals for a general fast grid scan. 
When the motion program is started, the goniometer will move in a snake-like grid trajectory, - with X as the fast axis and Y as the slow axis. If Z steps isn't 0, the goniometer will - then rotate in the omega direction such that it moves from the X-Y, to the X-Z plane then - do a second grid scan. The detector is triggered after every x step. - See https://github.com/DiamondLightSource/hyperion/wiki/Coordinate-Systems for more + with X as the fast axis and Y as the slow axis. + + See ZebraFastGridScanThreeD as an example of how to implement. """ - def __init__(self, prefix: str, smargon_prefix: str, name: str = "") -> None: + def __init__( + self, prefix: str, motion_controller_prefix: str, name: str = "" + ) -> None: + super().__init__(name) self.x_steps = epics_signal_rw_rbv(int, f"{prefix}X_NUM_STEPS") self.y_steps = epics_signal_rw_rbv( int, f"{prefix}Y_NUM_STEPS" ) # Number of vertical steps during the first grid scan - self.z_steps = epics_signal_rw_rbv( - int, f"{prefix}Z_NUM_STEPS" - ) # Number of vertical steps during the second grid scan, after the rotation in omega self.x_step_size = epics_signal_rw_rbv(float, f"{prefix}X_STEP_SIZE") self.y_step_size = epics_signal_rw_rbv(float, f"{prefix}Y_STEP_SIZE") - self.z_step_size = epics_signal_rw_rbv(float, f"{prefix}Z_STEP_SIZE") self.x_start = epics_signal_rw_rbv(float, f"{prefix}X_START") self.y1_start = epics_signal_rw_rbv(float, f"{prefix}Y_START") - self.y2_start = epics_signal_rw_rbv(float, f"{prefix}Y2_START") self.z1_start = epics_signal_rw_rbv(float, f"{prefix}Z_START") - self.z2_start = epics_signal_rw_rbv(float, f"{prefix}Z2_START") - self.scan_invalid = epics_signal_r(float, f"{prefix}SCAN_INVALID") + # This can be created like a regular signal instead of an abstract method + # once https://github.com/DiamondLightSource/mx-bluesky/issues/1203 is done + self.scan_invalid = self._create_scan_invalid_signal(prefix) self.run_cmd = epics_signal_x(f"{prefix}RUN.PROC") self.stop_cmd = 
epics_signal_x(f"{prefix}STOP.PROC") self.status = epics_signal_r(int, f"{prefix}SCAN_STATUS") - self.expected_images = derived_signal_r( - self._calculate_expected_images, - x=self.x_steps, - y=self.y_steps, - z=self.z_steps, - ) + self.expected_images = self._create_expected_images_signal() - self.motion_program = MotionProgram(smargon_prefix) + self.motion_program = self._create_motion_program(motion_controller_prefix) self.position_counter = self._create_position_counter(prefix) @@ -221,23 +235,12 @@ def __init__(self, prefix: str, smargon_prefix: str, name: str = "") -> None: self.movable_params: dict[str, Signal] = { "x_steps": self.x_steps, "y_steps": self.y_steps, - "z_steps": self.z_steps, "x_step_size_mm": self.x_step_size, "y_step_size_mm": self.y_step_size, - "z_step_size_mm": self.z_step_size, "x_start_mm": self.x_start, "y1_start_mm": self.y1_start, - "y2_start_mm": self.y2_start, "z1_start_mm": self.z1_start, - "z2_start_mm": self.z2_start, } - super().__init__(name) - - def _calculate_expected_images(self, x: int, y: int, z: int) -> int: - LOGGER.info(f"Reading num of images found {x, y, z} images in each axis") - first_grid = x * y - second_grid = x * z - return first_grid + second_grid @AsyncStatus.wrap async def kickoff(self): @@ -266,25 +269,84 @@ async def complete(self): raise @abstractmethod - def _create_position_counter(self, prefix: str) -> SignalRW[int]: - pass + def _create_expected_images_signal(self) -> SignalR[int]: ... + @abstractmethod + def _create_position_counter(self, prefix: str) -> SignalRW[int]: ... -class ZebraFastGridScan(FastGridScanCommon[ZebraGridScanParams]): - """Device for standard Zebra FGS. In this scan, the goniometer's velocity profile follows a parabolic shape between X steps, - with the slowest points occuring at each X step. 
+ # This can be created within init rather than as a separate method after https://github.com/DiamondLightSource/mx-bluesky/issues/1203 + @abstractmethod + def _create_scan_invalid_signal(self, prefix: str) -> SignalR[float]: ... + + # This can be created within init rather than as a separate method after https://github.com/DiamondLightSource/mx-bluesky/issues/1203 + @abstractmethod + def _create_motion_program( + self, motion_controller_prefix: str + ) -> MotionProgram: ... + + +class FastGridScanThreeD(FastGridScanCommon[ParamType]): + """Device for standard 3D FGS. + + After completing the first grid, if Z steps isn't 0, the goniometer will + rotate in the omega direction such that it moves from the X-Y, to the X-Z plane then + do a second grid scan. The detector is triggered after every x step. + See https://github.com/DiamondLightSource/hyperion/wiki/Coordinate-Systems for more. + + Subclasses must implement _create_position_counter. """ def __init__(self, prefix: str, name: str = "") -> None: full_prefix = prefix + "FGS:" - # Time taken to travel between X steps - self.dwell_time_ms = epics_signal_rw_rbv(float, f"{full_prefix}DWELL_TIME") + # Number of vertical steps during the second grid scan, after the rotation in omega + self.z_steps = epics_signal_rw_rbv(int, f"{prefix}Z_NUM_STEPS") + self.z_step_size = epics_signal_rw_rbv(float, f"{prefix}Z_STEP_SIZE") + self.z2_start = epics_signal_rw_rbv(float, f"{prefix}Z2_START") + self.y2_start = epics_signal_rw_rbv(float, f"{prefix}Y2_START") self.x_counter = epics_signal_r(int, f"{full_prefix}X_COUNTER") self.y_counter = epics_signal_r(int, f"{full_prefix}Y_COUNTER") super().__init__(full_prefix, prefix, name) + self.movable_params["z_step_size_mm"] = self.z_step_size + self.movable_params["z2_start_mm"] = self.z2_start + self.movable_params["y2_start_mm"] = self.y2_start + self.movable_params["z_steps"] = self.z_steps + + def _create_expected_images_signal(self): + return derived_signal_r( 
self._calculate_expected_images, + x=self.x_steps, + y=self.y_steps, + z=self.z_steps, + ) + + def _calculate_expected_images(self, x: int, y: int, z: int) -> int: + LOGGER.info(f"Reading num of images found {x, y, z} images in each axis") + first_grid = x * y + second_grid = x * z + return first_grid + second_grid + + def _create_scan_invalid_signal(self, prefix: str) -> SignalR[float]: + return epics_signal_r(float, f"{prefix}SCAN_INVALID") + + def _create_motion_program(self, motion_controller_prefix: str): + return MotionProgram(motion_controller_prefix) + + +class ZebraFastGridScanThreeD(FastGridScanThreeD[ZebraGridScanParamsThreeD]): + """Device for standard Zebra 3D FGS. + + In this scan, the goniometer's velocity profile follows a parabolic shape between X steps, + with the slowest points occurring at each X step. + """ + + def __init__(self, prefix: str, name: str = "") -> None: + full_prefix = prefix + "FGS:" + # Time taken to travel between X steps + self.dwell_time_ms = epics_signal_rw_rbv(float, f"{full_prefix}DWELL_TIME") + super().__init__(prefix, name) self.movable_params["dwell_time_ms"] = self.dwell_time_ms def _create_position_counter(self, prefix: str): @@ -293,8 +355,12 @@ -class PandAFastGridScan(FastGridScanCommon[PandAGridScanParams]): - """Device for panda constant-motion scan""" +class PandAFastGridScan(FastGridScanThreeD[PandAGridScanParams]): + """Device for panda constant-motion scan. + + In this scan, the goniometer's velocity + is constant through each row. It doesn't slow down when going through trigger points. 
+ """ def __init__(self, prefix: str, name: str = "") -> None: full_prefix = prefix + "PGS:" @@ -309,7 +375,7 @@ def __init__(self, prefix: str, name: str = "") -> None: self.run_up_distance_mm = epics_signal_rw_rbv( float, f"{full_prefix}RUNUP_DISTANCE" ) - super().__init__(full_prefix, prefix, name) + super().__init__(prefix, name) self.movable_params["run_up_distance_mm"] = self.run_up_distance_mm diff --git a/src/dodal/devices/i02_1/fast_grid_scan.py b/src/dodal/devices/i02_1/fast_grid_scan.py index 03813581ad9..f9834d4f24d 100644 --- a/src/dodal/devices/i02_1/fast_grid_scan.py +++ b/src/dodal/devices/i02_1/fast_grid_scan.py @@ -1,104 +1,61 @@ -from ophyd_async.core import ( - Device, - Signal, - StandardReadable, - derived_signal_r, - soft_signal_r_and_setter, -) -from ophyd_async.epics.core import ( - epics_signal_r, - epics_signal_rw, - epics_signal_rw_rbv, - epics_signal_x, -) +from ophyd_async.core import SignalR, derived_signal_r, soft_signal_r_and_setter +from ophyd_async.epics.core import epics_signal_rw_rbv from dodal.devices.fast_grid_scan import ( - ZebraFastGridScan, + FastGridScanCommon, + GridScanParamsCommon, + MotionProgram, + WithDwellTime, ) from dodal.log import LOGGER -class MotionProgram(Device): - def __init__(self, prefix: str, name: str = "") -> None: - self.running = epics_signal_r(int, prefix + "PROGBITS") - # Prog number PV doesn't exist for i02-1, but it's currently only used for logging - self.program_number, _ = soft_signal_r_and_setter(float, -1) - super().__init__(name) +class ZebraGridScanParamsTwoD(GridScanParamsCommon, WithDwellTime): + """ + Params for 2D Zebra FGS. Adds on the dwell time, which is really the time + between trigger positions. 
+ """ -class TwoDFastGridScan(ZebraFastGridScan): - """The EPICS interface for the 2D FGS differs slightly from the standard +class ZebraFastGridScanTwoD(FastGridScanCommon[ZebraGridScanParamsTwoD]): + """i02-1's EPICS interface for the 2D FGS differs slightly from the standard 3D version: - - No Z steps, Z step sizes, or Y2 start positions, - - Use exposure_time instead of dwell_time, + - No Z steps, Z step sizes, or Y2 start positions, or Z2 start - No scan valid PV - see https://github.com/DiamondLightSource/mx-bluesky/issues/1203 - - This device abstracts away the differences by adding empty signals to the missing PV's. - Plans which expect the 3D grid scan device can then also use this. - - See https://github.com/DiamondLightSource/mx-bluesky/issues/1112 for long term solution + - No program_number - see https://github.com/DiamondLightSource/mx-bluesky/issues/1203 """ - def __init__(self, prefix: str, name: str = "") -> None: - self.x_steps = epics_signal_rw_rbv(int, f"{prefix}X_NUM_STEPS") - self.y_steps = epics_signal_rw_rbv(int, f"{prefix}Y_NUM_STEPS") - self.x_step_size = epics_signal_rw_rbv(float, f"{prefix}X_STEP_SIZE") - self.y_step_size = epics_signal_rw_rbv(float, f"{prefix}Y_STEP_SIZE") - self.x_start = epics_signal_rw_rbv(float, f"{prefix}X_START") - self.y1_start = epics_signal_rw_rbv(float, f"{prefix}Y_START") - self.z1_start = epics_signal_rw_rbv(float, f"{prefix}Z_START") - self.motion_program = MotionProgram("BL02J-MO-STEP-11:") + def __init__( + self, prefix: str, motion_controller_prefix: str, name: str = "" + ) -> None: + full_prefix = prefix + "FGS:" + super().__init__(full_prefix, motion_controller_prefix, name) - # While the prefix is called exposure time, the motion script uses this value - # to dwell on each trigger point - the names are interchangable here. - self.dwell_time_ms = epics_signal_rw_rbv(float, f"{prefix}EXPOSURE_TIME") + # This signal could be put in the common device if the prefix gets standardised. 
+ # See https://github.com/DiamondLightSource/mx-bluesky/issues/1203 + self.dwell_time_ms = epics_signal_rw_rbv(float, f"{full_prefix}EXPOSURE_TIME") - # Z movement and second start positions don't exist in EPICS for 2D scan. - # Create soft signals for these so the class is structured like the common device. - self.z_steps, _ = soft_signal_r_and_setter(int, 0) - self.z_step_size, _ = soft_signal_r_and_setter(float, 0) - self.z2_start, _ = soft_signal_r_and_setter(float, 0) - self.y2_start, _ = soft_signal_r_and_setter(int, 0) + self.movable_params["dwell_time_ms"] = self.dwell_time_ms - # VMXm will trigger the grid scan through GDA, which has its own validity check, - # but this PV is being added: https://github.com/DiamondLightSource/mx-bluesky/issues/1203 - self.scan_invalid = soft_signal_r_and_setter(float, 0) - - self.run_cmd = epics_signal_x(f"{prefix}RUN.PROC") - self.stop_cmd = epics_signal_x(f"{prefix}STOP.PROC") - self.status = epics_signal_r(int, f"{prefix}SCAN_STATUS") - self.expected_images = derived_signal_r( - self._calculate_expected_images, x=self.x_steps, y=self.y_steps, z=0 + def _create_expected_images_signal(self): + return derived_signal_r( + self._calculate_expected_images, + x=self.x_steps, + y=self.y_steps, ) - self.x_counter = epics_signal_r(int, f"{prefix}X_COUNTER") - self.y_counter = epics_signal_r(int, f"{prefix}Y_COUNTER") - - # Kickoff timeout in seconds - self.KICKOFF_TIMEOUT: float = 5.0 - - self.COMPLETE_STATUS: float = 60.0 - - self.movable_params: dict[str, Signal] = { - "x_steps": self.x_steps, - "y_steps": self.y_steps, - "x_step_size_mm": self.x_step_size, - "y_step_size_mm": self.y_step_size, - "x_start_mm": self.x_start, - "y1_start_mm": self.y1_start, - "z1_start_mm": self.z1_start, - } + def _calculate_expected_images(self, x: int, y: int) -> int: + LOGGER.info(f"Reading num of images found {x, y} images in each axis") + return x * y - self.position_counter = self._create_position_counter(prefix) + # VMXm triggers the 
grid scan through GDA, which has its own validity check + # so whilst this PV is being added, it isn't essential + def _create_scan_invalid_signal(self, prefix: str) -> SignalR[float]: + return soft_signal_r_and_setter(float, 0)[0] - # Skip the FGSCommon init function as we have already overriden all the signals - StandardReadable.__init__(self, name) + def _create_motion_program(self, motion_controller_prefix): + return MotionProgram(motion_controller_prefix, has_prog_num=False) + # To be standardised in https://github.com/DiamondLightSource/mx-bluesky/issues/1203 def _create_position_counter(self, prefix: str): - return epics_signal_rw( - int, f"{prefix}POS_COUNTER_RBV", write_pv=f"{prefix}POS_COUNTER" - ) - - def _calculate_expected_images(self, x: int, y: int, z: int) -> int: - LOGGER.info(f"Reading num of images found {x, y} images in each axis") - return x * y + return epics_signal_rw_rbv(int, f"{prefix}POS_COUNTER") diff --git a/src/dodal/parameters/experiment_parameter_base.py b/src/dodal/parameters/experiment_parameter_base.py index 604453fd63f..c23fa043159 100644 --- a/src/dodal/parameters/experiment_parameter_base.py +++ b/src/dodal/parameters/experiment_parameter_base.py @@ -1,4 +1,4 @@ -from abc import ABC, abstractmethod +from abc import ABC from pydantic import BaseModel @@ -9,7 +9,3 @@ class AbstractExperimentParameterBase(BaseModel, ABC): class AbstractExperimentWithBeamParams(AbstractExperimentParameterBase): transmission_fraction: float - - @abstractmethod - def get_num_images(self) -> int: - pass diff --git a/tests/devices/i02_1/__init__.py b/tests/devices/i02_1/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/devices/i02_1/test_fast_grid_scan.py b/tests/devices/i02_1/test_fast_grid_scan.py index d051d8583b2..b4c536ec3e3 100644 --- a/tests/devices/i02_1/test_fast_grid_scan.py +++ b/tests/devices/i02_1/test_fast_grid_scan.py @@ -1,23 +1,23 @@ +import pytest from ophyd_async.core import init_devices +from 
ophyd_async.testing import set_mock_value -from dodal.devices.fast_grid_scan import ZebraFastGridScan -from dodal.devices.i02_1.fast_grid_scan import TwoDFastGridScan +from dodal.devices.i02_1.fast_grid_scan import ( + ZebraFastGridScanTwoD, +) -def test_fast_grid_scan_has_same_attributes_as_parent(RE): - # We never call super().__init__ on the FGS device, since it needs to override - # a lot of the attributes as soft signals. Useful to check that the device - # looks the same - with init_devices(mock=True): - vmxm_fgs = TwoDFastGridScan("vmxm") - parent_fgs = ZebraFastGridScan("parent") - vmxm_fgs_attrs = set(vars(vmxm_fgs).keys()) - parent_fgs_attrs = set(vars(parent_fgs).keys()) +@pytest.fixture +async def fast_grid_scan(): + async with init_devices(mock=True): + fast_grid_scan = ZebraFastGridScanTwoD(prefix="", motion_controller_prefix="") + return fast_grid_scan - assert vmxm_fgs_attrs == parent_fgs_attrs - -def test_calc_expected_images(RE): - with init_devices(mock=True): - vmxm_fgs = TwoDFastGridScan("vmxm") - assert vmxm_fgs._calculate_expected_images(5, 10, 100) == 50 +async def test_i02_1_gridscan_has_2d_behaviour(fast_grid_scan: ZebraFastGridScanTwoD): + three_d_movables = ["z_step_size_mm", "z2_start_mm", "y2_start_mm", "z_steps"] + for movable in three_d_movables: + assert movable not in fast_grid_scan.movable_params.keys() + set_mock_value(fast_grid_scan.x_steps, 5) + set_mock_value(fast_grid_scan.y_steps, 4) + assert await fast_grid_scan.expected_images.get_value() == 20 diff --git a/tests/devices/test_gridscan.py b/tests/devices/test_gridscan.py index 78fac3f9d86..9b56be77367 100644 --- a/tests/devices/test_gridscan.py +++ b/tests/devices/test_gridscan.py @@ -17,10 +17,11 @@ GridScanParamsCommon, PandAFastGridScan, PandAGridScanParams, - ZebraFastGridScan, - ZebraGridScanParams, + ZebraFastGridScanThreeD, + ZebraGridScanParamsThreeD, set_fast_grid_scan_params, ) +from dodal.devices.i02_1.fast_grid_scan import ZebraFastGridScanTwoD from 
dodal.devices.smargon import Smargon from dodal.testing import patch_all_motors @@ -35,7 +36,7 @@ def discard_status(st: Status | DeviceStatus): @pytest.fixture async def zebra_fast_grid_scan(): async with init_devices(mock=True): - zebra_fast_grid_scan = ZebraFastGridScan(name="fake_FGS", prefix="FGS") + zebra_fast_grid_scan = ZebraFastGridScanThreeD(name="fake_FGS", prefix="FGS") return zebra_fast_grid_scan @@ -48,6 +49,13 @@ async def panda_fast_grid_scan(): return panda_fast_grid_scan +@pytest.fixture +async def zebra_fast_grid_scan_2d(): + async with init_devices(mock=True): + fast_grid_scan = ZebraFastGridScanTwoD(prefix="", motion_controller_prefix="") + return fast_grid_scan + + @pytest.fixture async def smargon(): async with init_devices(mock=True): @@ -57,18 +65,17 @@ async def smargon(): yield smargon -@pytest.mark.parametrize( - "use_pgs", - [(False), (True)], +@pytest.fixture( + params=["zebra_fast_grid_scan", "panda_fast_grid_scan", "zebra_fast_grid_scan_2d"] ) +def grid_scan(request: pytest.FixtureRequest) -> FastGridScanCommon: + instance = request.getfixturevalue(request.param) + return instance + + async def test_given_settings_valid_when_kickoff_then_run_started( - use_pgs, - zebra_fast_grid_scan: ZebraFastGridScan, - panda_fast_grid_scan: PandAFastGridScan, + grid_scan: FastGridScanCommon, ): - grid_scan: ZebraFastGridScan | PandAFastGridScan = ( - panda_fast_grid_scan if use_pgs else zebra_fast_grid_scan - ) set_mock_value(grid_scan.scan_invalid, False) set_mock_value(grid_scan.position_counter, 0) set_mock_value(grid_scan.status, 1) @@ -78,18 +85,7 @@ async def test_given_settings_valid_when_kickoff_then_run_started( get_mock_put(grid_scan.run_cmd).assert_called_once() -@pytest.mark.parametrize( - "use_pgs", - [(False), (True)], -) -async def test_waits_for_running_motion( - use_pgs, - zebra_fast_grid_scan: ZebraFastGridScan, - panda_fast_grid_scan: PandAFastGridScan, -): - grid_scan: ZebraFastGridScan | PandAFastGridScan = ( - 
panda_fast_grid_scan if use_pgs else zebra_fast_grid_scan - ) +async def test_waits_for_running_motion(grid_scan: FastGridScanCommon): set_mock_value(grid_scan.motion_program.running, 1) grid_scan.KICKOFF_TIMEOUT = 0.01 @@ -114,7 +110,7 @@ async def test_waits_for_running_motion( ], ) async def test_given_different_step_numbers_then_expected_images_correct( - zebra_fast_grid_scan: ZebraFastGridScan, steps, expected_images + zebra_fast_grid_scan: ZebraFastGridScanThreeD, steps, expected_images ): set_mock_value(zebra_fast_grid_scan.x_steps, steps[0]) set_mock_value(zebra_fast_grid_scan.y_steps, steps[1]) @@ -127,19 +123,42 @@ async def test_given_different_step_numbers_then_expected_images_correct( assert result.plan_result == expected_images # type: ignore +@pytest.mark.parametrize( + "steps, expected_images", + [ + ((10, 10), 100), + ((30, 5), 150), + ((7, 0), 0), + ], +) +async def test_given_different_2d_step_numbers_then_expected_images_correct( + zebra_fast_grid_scan_2d: ZebraFastGridScanTwoD, steps, expected_images +): + set_mock_value(zebra_fast_grid_scan_2d.x_steps, steps[0]) + set_mock_value(zebra_fast_grid_scan_2d.y_steps, steps[1]) + + RE = RunEngine(call_returns_result=True) + + result = RE(bps.rd(zebra_fast_grid_scan_2d.expected_images)) + + assert result.plan_result == expected_images # type: ignore + + @pytest.mark.parametrize( "use_pgs", [(False), (True)], ) async def test_running_finished_with_all_images_done_then_complete_status_finishes_not_in_error( use_pgs, - zebra_fast_grid_scan: ZebraFastGridScan, + zebra_fast_grid_scan: ZebraFastGridScanThreeD, panda_fast_grid_scan: PandAFastGridScan, RE: RunEngine, ): + grid_scan: ZebraFastGridScanThreeD | PandAFastGridScan = ( + panda_fast_grid_scan if use_pgs else zebra_fast_grid_scan + ) num_pos_1d = 2 if use_pgs: - grid_scan = panda_fast_grid_scan RE( set_fast_grid_scan_params( grid_scan, @@ -149,11 +168,10 @@ async def test_running_finished_with_all_images_done_then_complete_status_finish ) ) else: - 
grid_scan = zebra_fast_grid_scan RE( set_fast_grid_scan_params( grid_scan, - ZebraGridScanParams( + ZebraGridScanParamsThreeD( transmission_fraction=0.01, x_steps=num_pos_1d, y_steps=num_pos_1d ), ) @@ -204,7 +222,7 @@ def check_parameter_validation(params, composite, expected_in_limits): @pytest.fixture def zebra_grid_scan_params(): - yield ZebraGridScanParams( + yield ZebraGridScanParamsThreeD( transmission_fraction=0.01, x_steps=10, y_steps=15, @@ -296,13 +314,9 @@ def test_given_x_y_z_out_of_range_then_converting_to_motor_coords_raises( assert np.allclose(motor_position, expected_value) -@pytest.mark.parametrize( - "pgs", - [(False), (True)], -) def test_can_run_fast_grid_scan_in_run_engine( - pgs, - zebra_fast_grid_scan: ZebraFastGridScan, + grid_scan: FastGridScanCommon, + zebra_fast_grid_scan: ZebraFastGridScanThreeD, panda_fast_grid_scan: PandAFastGridScan, RE: RunEngine, ): @@ -315,20 +329,10 @@ def kickoff_and_complete(device: FastGridScanCommon): set_mock_value(device.status, 0) yield from bps.wait("complete") - ( - RE(kickoff_and_complete(panda_fast_grid_scan)) - if pgs - else RE(kickoff_and_complete(zebra_fast_grid_scan)) - ) + (RE(kickoff_and_complete(grid_scan))) assert RE.state == "idle" -def test_given_x_y_z_steps_when_full_number_calculated_then_answer_is_as_expected( - common_grid_scan_params: GridScanParamsCommon, -): - assert common_grid_scan_params.get_num_images() == 350 - - @pytest.mark.parametrize( "test_dwell_times, expected_dwell_time_is_integer", [ @@ -352,14 +356,14 @@ def test_given_x_y_z_steps_when_full_number_calculated_then_answer_is_as_expecte ) def test_non_test_integer_dwell_time(test_dwell_times, expected_dwell_time_is_integer): if expected_dwell_time_is_integer: - params = ZebraGridScanParams( + params = ZebraGridScanParamsThreeD( dwell_time_ms=test_dwell_times, transmission_fraction=0.01, ) assert params.dwell_time_ms == test_dwell_times else: with pytest.raises(ValueError): - ZebraGridScanParams( + ZebraGridScanParamsThreeD( 
dwell_time_ms=test_dwell_times, transmission_fraction=0.01, ) @@ -368,7 +372,7 @@ def test_non_test_integer_dwell_time(test_dwell_times, expected_dwell_time_is_in @patch("dodal.devices.fast_grid_scan.LOGGER.error") async def test_timeout_on_complete_triggers_stop_and_logs_error( mock_log_error: MagicMock, - zebra_fast_grid_scan: ZebraFastGridScan, + zebra_fast_grid_scan: ZebraFastGridScanThreeD, ): zebra_fast_grid_scan.COMPLETE_STATUS = 0.01 zebra_fast_grid_scan.stop_cmd = AsyncMock() @@ -377,3 +381,14 @@ async def test_timeout_on_complete_triggers_stop_and_logs_error( await zebra_fast_grid_scan.complete() mock_log_error.assert_called_once() zebra_fast_grid_scan.stop_cmd.trigger.assert_awaited_once() + + +async def test_i02_1_gridscan_has_2d_behaviour( + zebra_fast_grid_scan_2d: ZebraFastGridScanTwoD, +): + three_d_movables = ["z_step_size_mm", "z2_start_mm", "y2_start_mm", "z_steps"] + for movable in three_d_movables: + assert movable not in zebra_fast_grid_scan_2d.movable_params.keys() + set_mock_value(zebra_fast_grid_scan_2d.x_steps, 5) + set_mock_value(zebra_fast_grid_scan_2d.y_steps, 4) + assert await zebra_fast_grid_scan_2d.expected_images.get_value() == 20 From abcca9f7a12e398b516df4bd1b3d35705ce944e6 Mon Sep 17 00:00:00 2001 From: Richard Dixey <185198552+RJCD-Diamond@users.noreply.github.com> Date: Thu, 25 Sep 2025 15:53:40 +0100 Subject: [PATCH 21/23] Update Tetramm for n TriggerInfo triggers and modern ophyd_async, to stop freezing on i22 (#1527) * skipped unused device * allow tetramm to receive n triggers * use ophyd_async version of total_triggers * fixed most tests * removed assert from assert armed based on averaging time because averaging time now dependent on file_io * remove redundant set_avergaeing_time and mock exposure in test * added variable gate to tetramm * added unstage * make tetramm more reliable by setting an unstage * fix tests * stop acquisition before change trigger mode * set end to driver acquire false * tidy up * no timeout on 
wait_for_idle * added epics_signal_r back * added tests for freerunning disarm * minor edits * correctly asserted averging time == livetime in tests --------- Co-authored-by: root --- src/dodal/beamlines/i22.py | 2 +- src/dodal/devices/tetramm.py | 41 ++++++++++++++++++------------ tests/devices/test_tetramm.py | 48 ++++++++++++++++++++++++++++++----- 3 files changed, 67 insertions(+), 24 deletions(-) diff --git a/src/dodal/beamlines/i22.py b/src/dodal/beamlines/i22.py index de7b93b014d..6c91ba3c432 100644 --- a/src/dodal/beamlines/i22.py +++ b/src/dodal/beamlines/i22.py @@ -282,7 +282,7 @@ def linkam() -> Linkam3: return Linkam3(prefix=f"{PREFIX.beamline_prefix}-EA-TEMPC-05:") -@device_factory() +@device_factory(skip=True) def ppump() -> WatsonMarlow323Pump: """Sample Environment Peristaltic Pump""" return WatsonMarlow323Pump(f"{PREFIX.beamline_prefix}-EA-PUMP-01:") diff --git a/src/dodal/devices/tetramm.py b/src/dodal/devices/tetramm.py index 19a5839efff..e27b170864e 100644 --- a/src/dodal/devices/tetramm.py +++ b/src/dodal/devices/tetramm.py @@ -16,6 +16,7 @@ TriggerInfo, set_and_wait_for_value, soft_signal_r_and_setter, + wait_for_value, ) from ophyd_async.epics.adcore import ( ADHDFWriter, @@ -23,7 +24,7 @@ NDFileHDFIO, NDPluginBaseIO, ) -from ophyd_async.epics.core import PvSuffix, epics_signal_r, stop_busy_record +from ophyd_async.epics.core import PvSuffix, epics_signal_r class TetrammRange(StrictEnum): @@ -77,6 +78,7 @@ class TetrammController(DetectorController): _supported_trigger_types = { DetectorTrigger.EDGE_TRIGGER: TetrammTrigger.EXT_TRIGGER, DetectorTrigger.CONSTANT_GATE: TetrammTrigger.EXT_TRIGGER, + DetectorTrigger.VARIABLE_GATE: TetrammTrigger.EXT_TRIGGER, } """"On the TetrAMM ASCII mode requires a minimum value of ValuesPerRead of 500, [...] binary mode the minimum value of ValuesPerRead is 5." 
@@ -86,11 +88,9 @@ class TetrammController(DetectorController): """The TetrAMM always digitizes at 100 kHz""" _base_sample_rate: int = 100_000 - def __init__( - self, - driver: TetrammDriver, - ) -> None: + def __init__(self, driver: TetrammDriver, file_io: NDFileHDFIO) -> None: self.driver = driver + self._file_io = file_io self._arm_status: AsyncStatus | None = None def get_deadtime(self, exposure: float | None) -> float: @@ -107,13 +107,19 @@ async def prepare(self, trigger_info: TriggerInfo) -> None: if trigger_info.livetime is None: raise ValueError(f"{self.__class__.__name__} requires that livetime is set") + current_trig_status = await self.driver.trigger_mode.get_value() + + if current_trig_status == TetrammTrigger.FREE_RUN: # if freerun turn off first + await self.disarm() + # trigger mode must be set first and on its own! await self.driver.trigger_mode.set( self._supported_trigger_types[trigger_info.trigger] ) + await asyncio.gather( - self.driver.averaging_time.set(trigger_info.livetime), self.set_exposure(trigger_info.livetime), + self._file_io.num_capture.set(trigger_info.total_number_of_exposures), ) # raise an error if asked to trigger faster than the max. 
@@ -133,14 +139,18 @@ async def arm(self): self._arm_status = await self.start_acquiring_driver_and_ensure_status() async def wait_for_idle(self): - if self._arm_status and not self._arm_status.done: - await self._arm_status - self._arm_status = None + # tetramm never goes idle really, actually it is always acquiring + # so need to wait for the capture to finish instead + await wait_for_value(self._file_io.acquire, False, timeout=None) + + async def unstage(self): + await self.disarm() + await self._file_io.acquire.set(False) async def disarm(self): # We can't use caput callback as we already used it in arm() and we can't have # 2 or they will deadlock - await stop_busy_record(self.driver.acquire, False, timeout=1) + await set_and_wait_for_value(self.driver.acquire, False, timeout=1) async def set_exposure(self, exposure: float) -> None: """Set the exposure time and acquire period. @@ -164,7 +174,9 @@ async def set_exposure(self, exposure: float) -> None: "Tetramm exposure time must be at least " f"{minimum_samples * sample_time}s, asked to set it to {exposure}s" ) - await self.driver.averaging_time.set(samples_per_reading * sample_time) + await self.driver.averaging_time.set( + samples_per_reading * sample_time + ) # correct async def start_acquiring_driver_and_ensure_status(self) -> AsyncStatus: """Start acquiring driver, raising ValueError if the detector is in a bad state. 
@@ -202,10 +214,7 @@ async def np_datatype(self) -> str: async def shape(self) -> tuple[int, int]: return ( int(await self._driver.num_channels.get_value()), - int( - await self._driver.averaging_time.get_value() - / await self._driver.sample_time.get_value(), - ), + int(await self._driver.to_average.get_value()), ) @@ -223,7 +232,7 @@ def __init__( ): self.driver = TetrammDriver(prefix + drv_suffix) self.file_io = NDFileHDFIO(prefix + fileio_suffix) - controller = TetrammController(self.driver) + controller = TetrammController(self.driver, self.file_io) self.current1 = epics_signal_r(float, prefix + "Cur1:MeanValue_RBV") self.current2 = epics_signal_r(float, prefix + "Cur2:MeanValue_RBV") diff --git a/tests/devices/test_tetramm.py b/tests/devices/test_tetramm.py index 7209a56b53d..8333ff570ea 100644 --- a/tests/devices/test_tetramm.py +++ b/tests/devices/test_tetramm.py @@ -50,13 +50,17 @@ def supported_trigger_info() -> TriggerInfo: return TriggerInfo( number_of_events=1, trigger=DetectorTrigger.CONSTANT_GATE, - deadtime=1.0, - livetime=0.02, + deadtime=1e-4, + livetime=1, exposure_timeout=None, ) -VALID_TEST_EXPOSURE_TIME = 1 / 19 +VALID_TEST_TOTAL_TRIGGERS = 5 +VALID_TEST_EXPOSURE_TIME_PER_COLLECTION = 1 / 6 +VALID_TEST_EXPOSURE_TIME = ( + VALID_TEST_EXPOSURE_TIME_PER_COLLECTION / VALID_TEST_TOTAL_TRIGGERS +) VALID_TEST_DEADTIME = 1 / 100 @@ -97,7 +101,6 @@ async def test_set_invalid_exposure_for_number_of_values_per_reading( "trigger_type", [ DetectorTrigger.INTERNAL, - DetectorTrigger.VARIABLE_GATE, ], ) async def test_arm_raises_value_error_for_invalid_trigger_type( @@ -107,6 +110,7 @@ async def test_arm_raises_value_error_for_invalid_trigger_type( accepted_types = [ "EDGE_TRIGGER", "CONSTANT_GATE", + "VARIABLE_GATE", ] with pytest.raises( TypeError, @@ -225,10 +229,15 @@ async def test_stage_sets_up_accurate_describe_output( await tetramm.stage() await tetramm.prepare(supported_trigger_info) + averaging_time = await tetramm.driver.averaging_time.get_value() 
+ averaging_time = round(averaging_time, 3) # avoid floating point issues + + assert averaging_time == 1.0 + assert await tetramm.describe() == { "tetramm": { "source": "mock+ca://MY-TETRAMM:HDF5:FullFileName_RBV", - "shape": [1, 4, 1999], + "shape": [1, 4, averaging_time], "dtype_numpy": " None: sample_time = await driver.sample_time.get_value() samples_per_reading = int(VALID_TEST_EXPOSURE_TIME / sample_time) - exposure = samples_per_reading * sample_time + averaging_time = samples_per_reading * sample_time assert (await driver.trigger_mode.get_value()) is TetrammTrigger.EXT_TRIGGER - assert (await driver.averaging_time.get_value()) == exposure assert (await driver.values_per_reading.get_value()) == 5 assert (await driver.acquire.get_value()) == 1 + + assert (await driver.averaging_time.get_value()) == averaging_time From 9a1aeeacec5d4151a2489e323374eb8ac8990b67 Mon Sep 17 00:00:00 2001 From: rtuck99 Date: Fri, 26 Sep 2025 10:51:16 +0100 Subject: [PATCH 22/23] Add event loop fuzzer unit test fixture (#1577) * Add event loop fuzzer --- docs/how-to/write-tests.md | 30 +++++++++++++++++++++ tests/conftest.py | 55 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 85 insertions(+) diff --git a/docs/how-to/write-tests.md b/docs/how-to/write-tests.md index fcc3a6d5480..7fdbb6513a4 100644 --- a/docs/how-to/write-tests.md +++ b/docs/how-to/write-tests.md @@ -120,3 +120,33 @@ async def test_my_device_read(sim_my_device: MyDevice, RE: RunEngine) -> None: } ) ``` + +## Test performance and reliability + +Dodal has well over 1000 unit tests and developers will run the full unit test suite frequently on their local +machines, therefore it is imperative that the unit tests are both reliable and run quickly. Ideally the test suite +should run in about a minute, so if your test takes more than a fraction of a second to complete then consider +making it faster. The GitHub CI will fail if any test takes longer than 1 second. 
+ +Tests that involve concurrency can sometimes be unreliable due to subtle race conditions and variations in timing +caused by caching, garbage collection and other factors. +Often these manifest only on certain machines or in CI and are difficult to reproduce. Whilst the odd test failure +can be worked around by re-running the tests, this is annoying and a build up of such flaky tests is undesirable so +it is preferable to fix the test. + +### Event loop fuzzer + +To assist in the reproduction of concurrency-related test failures, there is an event loop fuzzer available as a +pytest fixture. The fuzzer introduces random delays into the ``asyncio`` event loop. You can use it by +requesting ``event_loop_fuzzing`` as a fixture. It is also recommended when debugging to parametrize the test to +introduce a good number of iterations in order to ensure the problem has a good chance to show up, but remember to +remove the parametrization afterwards. + +```Python + import pytest + # repeat the test a number of times + @pytest.mark.parametrize("i", range(0, 100)) + async def my_unreliable_test(i, event_loop_fuzzing): + # Do some stuff in here + +``` diff --git a/tests/conftest.py b/tests/conftest.py index 60cb1ca982c..aaf9b193ca2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,7 +1,12 @@ +import asyncio import importlib import os +import threading +import time from collections.abc import AsyncGenerator from pathlib import Path +from random import random +from threading import Thread from types import ModuleType from unittest.mock import patch @@ -76,3 +81,53 @@ async def baton_in_commissioning_mode() -> AsyncGenerator[Baton]: set_mock_value(baton.commissioning, True) yield baton set_commissioning_signal(None) + + +@pytest.fixture +async def event_loop_fuzzing(): + """ + This fixture can be used to try and detect / reproduce intermittent test failures + caused by race conditions and timing issues, which are often difficult to replicate + due to caching etc. 
causing timing to be different on a development machine compared + to when the test runs in CI. + + It works by attaching a fuzzer to the current event loop which randomly schedules + a fixed delay into the event loop thread every few milliseconds. The idea is that + over a number of iterations, there should be sufficient timing variation introduced + that the failure can be reproduced. + + Examples: + Example usage: + >>> import pytest + >>> # repeat the test a number of times + >>> @pytest.mark.parametrize("i", range(0, 100)) + ... async def my_unreliable_test(i, event_loop_fuzzing): + ... # Do some stuff in here + ... ... + """ + FUZZ_PROBABILITY = 0.05 + FUZZ_DELAY_S = 0.05 + FUZZ_PERIOD_S = 0.001 + stop_running = threading.Event() + event_loop = asyncio.get_running_loop() + + def delay(finished_event: threading.Event): + time.sleep(FUZZ_DELAY_S) + finished_event.set() + + def fuzz(): + while not stop_running.is_set(): + if random() < FUZZ_PROBABILITY: + delay_is_finished = threading.Event() + event_loop.call_soon_threadsafe(delay, delay_is_finished) + delay_is_finished.wait() + + time.sleep(FUZZ_PERIOD_S) + + fuzzer_thread = Thread(group=None, target=fuzz, name="Event loop fuzzer") + fuzzer_thread.start() + try: + yield None + finally: + stop_running.set() + fuzzer_thread.join() From 3fe8d16ff80ebb979a360098616f9ba40b900cb9 Mon Sep 17 00:00:00 2001 From: Paul Coe Date: Mon, 29 Sep 2025 12:44:27 +0100 Subject: [PATCH 23/23] dodal#1384 applies pydantic model validation to attenuator motor positions --- .../attenuator_motor_squad.py | 49 +++++++------------ .../test_attenuator_motor_squad.py | 24 ++++----- .../test_attenuator_position_demand.py | 7 ++- 3 files changed, 36 insertions(+), 44 deletions(-) diff --git a/src/dodal/devices/i19/access_controlled/attenuator_motor_squad.py b/src/dodal/devices/i19/access_controlled/attenuator_motor_squad.py index 4f92db09665..b3ebef4f749 100644 --- a/src/dodal/devices/i19/access_controlled/attenuator_motor_squad.py +++ 
b/src/dodal/devices/i19/access_controlled/attenuator_motor_squad.py @@ -1,7 +1,7 @@ -from typing import Annotated, Any +from typing import Annotated, Any, Self from ophyd_async.core import AsyncStatus -from pydantic import BaseModel, field_validator +from pydantic import BaseModel, model_validator from pydantic.types import PositiveInt, StringConstraints from dodal.devices.i19.access_controlled.hutch_access import ACCESS_DEVICE_NAME @@ -16,24 +16,23 @@ class AttenuatorMotorPositionDemands(BaseModel): continuous_demands: dict[PermittedKeyStr, float] = {} indexed_demands: dict[PermittedKeyStr, PositiveInt] = {} - @field_validator("indexed_demands") - - def no_keys_clash(cls, values, **kwargs): - continuous = values["continuous_demands"] - indexed = values["indexed_demands"] - if len(indexed) < 1: - return values - for key in indexed: - if key in continuous: - message = f"Attenuator Position Demand Key clash: {key}. Require distinct keys for axis names on continuous motors and indexed positions." 
- raise ValueError(message) - return values + @model_validator(mode="after") + def no_keys_clash(self) -> Self: + common_key_filter = filter( + lambda k: k in self.continuous_demands, self.indexed_demands + ) + common_key_count = sum(1 for _ in common_key_filter) + if common_key_count < 1: + return self + else: + ks: str = "key" if common_key_count == 1 else "keys" + error_msg = ( + f"{common_key_count} common {ks} found in distinct motor demands" + ) + raise ValueError(error_msg) def restful_format(self) -> dict[PermittedKeyStr, Any]: - combined: dict[PermittedKeyStr, Any] = {} - combined.update(self.continuous_demands) - combined.update(self.indexed_demands) - return combined + return self.continuous_demands | self.indexed_demands class AttenuatorMotorSquad(OpticsBlueApiDevice): @@ -48,18 +47,6 @@ class AttenuatorMotorSquad(OpticsBlueApiDevice): The name of the hutch that wants to operate the optics device is passed to the \ access controlled device upon instantiation of the latter. - Nomenclature: - Note this class was originally called - -MotorSet - ( but objections were based on "set" being an overloaded word ) - - -Gang might have worked, - ( but a gang of mechanical devices are able to be mechanically coupled - and move together; whereas the motors here are completely mutually independent ) - - -Clique - ( as in a set of friends, is an accurate choice, is unlikely to be overloaded, just sounds pretentious). - For details see the architecture described in \ https://github.com/DiamondLightSource/i19-bluesky/issues/30. 
""" @@ -68,7 +55,7 @@ class AttenuatorMotorSquad(OpticsBlueApiDevice): async def set(self, value: AttenuatorMotorPositionDemands): invoking_hutch = str(self._get_invoking_hutch().value) request_params = { - "name": "operate_motor_clique_plan", + "name": "operate_motor_squad_plan", "params": { "experiment_hutch": invoking_hutch, "access_device": ACCESS_DEVICE_NAME, diff --git a/tests/devices/i19/access_controlled/test_attenuator_motor_squad.py b/tests/devices/i19/access_controlled/test_attenuator_motor_squad.py index b93a9f6cf21..83655a23474 100644 --- a/tests/devices/i19/access_controlled/test_attenuator_motor_squad.py +++ b/tests/devices/i19/access_controlled/test_attenuator_motor_squad.py @@ -28,37 +28,37 @@ def given_an_unhappy_restful_response() -> AsyncMock: async def given_a_squad_of_attenuator_motors( hutch: HutchState, ) -> AttenuatorMotorSquad: - motor_clique = AttenuatorMotorSquad(hutch, name="attenuator_motor_clique") - await motor_clique.connect(mock=True) + motor_squad = AttenuatorMotorSquad(hutch, name="attenuator_motor_squad") + await motor_squad.connect(mock=True) - motor_clique.url = "http://test-blueapi.url" - return motor_clique + motor_squad.url = "http://test-blueapi.url" + return motor_squad @pytest.fixture -async def eh1_motor_clique(RE: RunEngine) -> AttenuatorMotorSquad: +async def eh1_motor_squad(RE: RunEngine) -> AttenuatorMotorSquad: return await given_a_squad_of_attenuator_motors(HutchState.EH1) @pytest.fixture -async def eh2_motor_clique(RE: RunEngine) -> AttenuatorMotorSquad: +async def eh2_motor_squad(RE: RunEngine) -> AttenuatorMotorSquad: return await given_a_squad_of_attenuator_motors(HutchState.EH2) @pytest.mark.parametrize("invoking_hutch", [HutchState.EH1, HutchState.EH2]) -async def test_that_motor_clique_can_be_instantiated(invoking_hutch): - motor_clique: AttenuatorMotorSquad = await given_a_squad_of_attenuator_motors( +async def test_that_motor_squad_can_be_instantiated(invoking_hutch): + motor_squad: AttenuatorMotorSquad = 
await given_a_squad_of_attenuator_motors( invoking_hutch ) - assert isinstance(motor_clique, AttenuatorMotorSquad) + assert isinstance(motor_squad, AttenuatorMotorSquad) @pytest.mark.parametrize("invoking_hutch", [HutchState.EH1, HutchState.EH2]) @patch( - "dodal.devices.i19.access_controlled.attenuator_motor_clique.OpticsBlueApiDevice.set", + "dodal.devices.i19.access_controlled.attenuator_motor_squad.OpticsBlueApiDevice.set", new_callable=AsyncMock, ) -async def test_when_motor_clique_is_set_that_expected_request_params_are_passed( +async def test_when_motor_squad_is_set_that_expected_request_params_are_passed( internal_setter, invoking_hutch ): motors: AttenuatorMotorSquad = await given_a_squad_of_attenuator_motors( @@ -69,7 +69,7 @@ async def test_when_motor_clique_is_set_that_expected_request_params_are_passed( expected_hutch: str = invoking_hutch.value expected_device: str = "access_control" - expected_request_name: str = "operate_motor_clique_plan" + expected_request_name: str = "operate_motor_squad_plan" expected_parameters: dict = { "experiment_hutch": expected_hutch, "access_device": expected_device, diff --git a/tests/devices/i19/access_controlled/test_attenuator_position_demand.py b/tests/devices/i19/access_controlled/test_attenuator_position_demand.py index 1afbb87a3e2..fe51ea96670 100644 --- a/tests/devices/i19/access_controlled/test_attenuator_position_demand.py +++ b/tests/devices/i19/access_controlled/test_attenuator_position_demand.py @@ -97,7 +97,12 @@ def test_that_attenuator_position_demand_triplet_provides_expected_rest_format() def test_that_attenuator_position_raises_error_when_discrete_and_continuous_demands_overload_axis_label(): wedge_position_demands = {"x": 0.1, "v": 90.1} wheel_position_demands = {"w": 6, "v": 7} - anticipated_message: str = "Clashing keys in common between wheel and wedge for attenuation position demand: v" + preamble: str = ( + "1 validation error for AttenuatorMotorPositionDemands\n Value error," + ) + 
anticipated_message: str = ( + f"{preamble} 1 common key found in distinct motor demands" + ) with pytest.raises(expected_exception=ValueError, match=anticipated_message): AttenuatorMotorPositionDemands( continuous_demands=wedge_position_demands,