softioc
Advanced tools
| import atexit | ||
| import sys | ||
| import threading | ||
| import traceback | ||
| from datetime import datetime | ||
| from os import rename | ||
| from pathlib import Path | ||
| from shutil import copy2 | ||
| import numpy | ||
| import yaml | ||
# Suffix of the live autosave backup file.
SAV_SUFFIX = "softsav"
# Suffix of the temporary file written first, then atomically renamed in place.
SAVB_SUFFIX = "softsavB"
# Default interval, in seconds, between periodic autosave backups.
DEFAULT_SAVE_PERIOD = 30.0
| def _ndarray_representer(dumper, array): | ||
| return dumper.represent_sequence( | ||
| "tag:yaml.org,2002:seq", array.tolist(), flow_style=True | ||
| ) | ||
# Register the representer so yaml.dump can serialise numpy arrays directly.
yaml.add_representer(numpy.ndarray, _ndarray_representer, Dumper=yaml.Dumper)
def configure(
    directory,
    name,
    save_period=DEFAULT_SAVE_PERIOD,
    timestamped_backups=True,
    enabled=True,
):
    """Set up the autosave thread; must be called before initialising the IOC.

    Args:
        directory: string or Path of an existing directory where autosave
            backup files are written and read.
        name: root name used for the backup files, usually the device name.
        save_period: seconds between periodic backups; a backup is only
            written when tracked PV values have changed.
        timestamped_backups: if True (the default) the existing autosave
            file is backed up with a timestamp on IOC restart; if False a
            single backup file is overwritten on each restart.
        enabled: turns autosave on (True, the default here) or off; autosave
            stays off if configure is never called.

    Raises:
        FileNotFoundError: if ``directory`` is not an existing directory.
    """
    target = Path(directory)
    if not target.is_dir():
        raise FileNotFoundError(
            f"{directory} is not a valid autosave directory"
        )
    AutosaveConfig.directory = target
    AutosaveConfig.device_name = name
    AutosaveConfig.save_period = save_period
    AutosaveConfig.timestamped_backups = timestamped_backups
    AutosaveConfig.enabled = enabled
class AutosaveConfig:
    """Module-wide autosave settings, populated by configure()."""

    # Path of the directory holding backup files; None until configure().
    directory = None
    # Root name used for backup files, usually the device name.
    device_name = None
    # Whether restart backups are timestamped or overwrite a single file.
    timestamped_backups = True
    # Seconds between periodic backups.
    save_period = DEFAULT_SAVE_PERIOD
    # Autosave stays disabled unless configure() is called.
    enabled = False
def start_autosave_thread():
    """Start the periodic autosave loop in a background thread.

    Registers an atexit hook that signals the loop to stop and joins the
    worker so the save thread winds down at interpreter exit.
    """
    # NOTE(review): the worker is not a daemon thread, so clean interpreter
    # shutdown depends on the atexit hook firing -- confirm shutdown ordering.
    worker = threading.Thread(
        target=Autosave._loop,
    )
    worker.start()
    atexit.register(_shutdown_autosave_thread, worker)
def _shutdown_autosave_thread(worker):
    """Signal the autosave loop to stop, then wait for the worker to exit."""
    Autosave._stop()
    worker.join()
| def _parse_autosave_fields(fields): | ||
| if not fields: | ||
| return [] | ||
| elif fields is True: | ||
| return ["VAL"] | ||
| elif isinstance(fields, list): | ||
| return fields | ||
| elif isinstance(fields, str): | ||
| return [fields] | ||
| else: | ||
| raise ValueError(f"Could not parse autosave fields argument: {fields}") | ||
def add_pv_to_autosave(pv, name, fields):
    """Configures a PV for autosave

    Args:
        pv: a PV object inheriting ProcessDeviceSupportCore
        name: the key by which the PV value is saved to and loaded from a
            backup. This is typically the same as the PV name.
        fields: used to determine which fields of a PV are tracked by autosave.
            The allowed options are a single string such as "VAL" or "EGU",
            a list of strings such as ["VAL", "EGU"], a boolean True which
            evaluates to ["VAL"] or False to track no fields. If the PV is
            created inside an Autosave context manager, the fields passed to the
            context manager are also tracked by autosave.
    """
    if fields is False:
        # if autosave=False explicitly set, override context manager
        return
    fields = set(_parse_autosave_fields(fields))
    # instantiate context to get thread local class variables via instance
    context = _AutosaveContext()
    if context._in_cm:  # _fields should always be a list if in context manager
        fields.update(context._fields)
    for field in fields:
        # "VAL" is keyed by the bare PV name; other fields as "<name>.<field>"
        field_name = name if field == "VAL" else f"{name}.{field}"
        Autosave._pvs[field_name] = _AutosavePV(pv, field)
def load_autosave():
    """Restore tracked PV values from the autosave backup file, if present."""
    Autosave._load()
| class _AutosavePV: | ||
| def __init__(self, pv, field): | ||
| if field == "VAL": | ||
| self.get = pv.get | ||
| self.set = pv.set | ||
| else: | ||
| self.get = lambda: pv.get_field(field) | ||
| self.set = lambda val: setattr(pv, field, val) | ||
def _get_current_sav_path():
    """Return the path of the live autosave file for the configured device."""
    filename = f"{AutosaveConfig.device_name}.{SAV_SUFFIX}"
    return AutosaveConfig.directory / filename
def _get_tmp_sav_path():
    """Return the temporary path written before the atomic rename in _save."""
    filename = f"{AutosaveConfig.device_name}.{SAVB_SUFFIX}"
    return AutosaveConfig.directory / filename
def _get_timestamped_backup_sav_path(timestamp):
    """Return a backup path suffixed with the timestamp as _yymmdd-HHMMSS."""
    current = _get_current_sav_path()
    suffix = timestamp.strftime("_%y%m%d-%H%M%S")
    return current.parent / (current.name + suffix)
def _get_backup_sav_path():
    """Return the fixed ".bu" backup path used for untimestamped backups."""
    current = _get_current_sav_path()
    return current.parent / (current.name + ".bu")
class _AutosaveContext(threading.local):
    """Singleton carrying Autosave context-manager state.

    Deriving from threading.local makes per-instance attribute storage
    thread-local, so fields set while inside a ``with Autosave(...)`` block
    on one thread are not visible to PVs created on other threads.
    """

    _instance = None
    _lock = threading.Lock()
    # Field names tracked by the enclosing context manager, or None.
    _fields = None
    # True while inside an Autosave context manager on this thread.
    _in_cm = False

    def __new__(cls, fields=None):
        # Double-checked locking so only one instance is ever created,
        # even with concurrent first calls.
        if cls._instance is None:
            with cls._lock:
                if not cls._instance:
                    cls._instance = super().__new__(cls)
        # Fields passed while inside a context manager replace the tracked
        # list; a falsy value resets it to an empty list.
        if cls._instance._in_cm and fields is not None:
            cls._instance._fields = fields or []
        return cls._instance

    def reset(self):
        # Leave the context manager and forget any tracked fields.
        self._fields = None
        self._in_cm = False
class Autosave:
    """Tracks autosaved PVs and periodically backs their values up to disk.

    Also usable as a context manager: PVs created inside the ``with`` block
    have the given fields added to autosave (see __init__).
    """

    # Mapping of backup key (PV name or "<name>.<field>") to _AutosavePV.
    _pvs = {}
    # State dict as last written to disk; used to skip unchanged saves.
    _last_saved_state = {}
    _last_saved_time = datetime.now()
    # Set via _stop() to request the save loop to exit.
    _stop_event = threading.Event()
    _loop_started = False

    def __init__(self, fields=True):
        """
        When called as a context manager, any PVs created in the context have
        the fields provided by the fields argument added to autosave backups.

        Args:
            fields: a list of string field names to be periodically saved to a
                backup file, which are loaded from on IOC restart.
                The allowed options are a single string such as "VAL" or "EGU",
                a list of strings such as ["VAL", "EGU"], a boolean True which
                evaluates to ["VAL"] or False to track no additional fields.
                If the autosave keyword is already specified in a PV's
                initialisation, the list of fields to track are combined.
        """
        context = _AutosaveContext()
        if context._in_cm:
            raise RuntimeError(
                "Can not instantiate Autosave when already in context manager"
            )
        fields = _parse_autosave_fields(fields)
        context._fields = fields

    def __enter__(self):
        context = _AutosaveContext()
        context._in_cm = True

    def __exit__(self, A, B, C):
        context = _AutosaveContext()
        context.reset()

    @classmethod
    def __backup_sav_file(cls):
        # Copy the existing autosave file aside (timestamped or ".bu")
        # before new saves overwrite it; failures only print to stderr.
        if (
            not AutosaveConfig.directory
            or not AutosaveConfig.directory.is_dir()
        ):
            print(
                f"Could not back up autosave as {AutosaveConfig.directory} is"
                " not a valid directory",
                file=sys.stderr,
            )
            return
        sav_path = _get_current_sav_path()
        if AutosaveConfig.timestamped_backups:
            backup_path = _get_timestamped_backup_sav_path(cls._last_saved_time)
        else:
            backup_path = _get_backup_sav_path()
        if sav_path.is_file():
            copy2(sav_path, backup_path)
        else:
            print(
                f"Could not back up autosave, {sav_path} is not a file",
                file=sys.stderr,
            )

    @classmethod
    def __get_state(cls):
        # Read the current value of every tracked PV field; a failing field
        # is logged and omitted from this snapshot.
        state = {}
        for pv_field, pv in cls._pvs.items():
            try:
                state[pv_field] = pv.get()
            except Exception:
                print(f"Exception getting {pv_field}", file=sys.stderr)
                traceback.print_exc()
        return state

    @classmethod
    def __set_pvs_from_saved_state(cls):
        # Push loaded values back into the tracked PVs; failures are logged
        # so one bad value does not abort the whole restore.
        for pv_field, value in cls._last_saved_state.items():
            try:
                pv = cls._pvs[pv_field]
                pv.set(value)
            except Exception:
                print(
                    f"Exception setting {pv_field} to {value}",
                    file=sys.stderr,
                )
                traceback.print_exc()

    @classmethod
    def __state_changed(cls, state):
        # True if the tracked key set or any value differs from last save.
        return cls._last_saved_state.keys() != state.keys() or any(
            # checks equality for builtins and numpy arrays
            not numpy.array_equal(state[key], cls._last_saved_state[key])
            for key in state
        )

    @classmethod
    def _save(cls):
        # Snapshot all tracked PVs and write to disk only when changed.
        state = cls.__get_state()
        if cls.__state_changed(state):
            sav_path = _get_current_sav_path()
            tmp_path = _get_tmp_sav_path()
            # write to temporary file first then use atomic os.rename
            # to safely update stored state
            with open(tmp_path, "w") as backup:
                yaml.dump(state, backup, indent=4)
            rename(tmp_path, sav_path)
            cls._last_saved_state = state
            cls._last_saved_time = datetime.now()

    @classmethod
    def _load(cls):
        """Restore PV values from the autosave file, backing up the old file.

        No-op when autosave is disabled or no PVs are tracked.

        Raises:
            RuntimeError: if configure() never supplied a name or directory.
            FileNotFoundError: if the configured directory does not exist.
        """
        if not AutosaveConfig.enabled or not cls._pvs:
            return
        if not AutosaveConfig.device_name:
            raise RuntimeError(
                "Device name is not known to autosave thread, "
                "call autosave.configure() with keyword argument name"
            )
        if not AutosaveConfig.directory:
            raise RuntimeError(
                "Autosave directory is not known, call "
                "autosave.configure() with keyword argument directory"
            )
        if not AutosaveConfig.directory.is_dir():
            raise FileNotFoundError(
                f"{AutosaveConfig.directory} is not a valid autosave directory"
            )
        cls.__backup_sav_file()
        sav_path = _get_current_sav_path()
        if not sav_path or not sav_path.is_file():
            print(
                f"Could not load autosave values from file {sav_path}",
                file=sys.stderr,
            )
            return
        with open(sav_path, "r") as f:
            cls._last_saved_state = yaml.full_load(f)
        cls.__set_pvs_from_saved_state()

    @classmethod
    def _stop(cls):
        # Ask the save loop to exit; it performs one final save first.
        cls._stop_event.set()

    @classmethod
    def _loop(cls):
        """Periodic save loop; runs until _stop() sets the stop event.

        Saves once per save_period, and once more after stop is requested.
        """
        if not AutosaveConfig.enabled or not cls._pvs or cls._loop_started:
            return
        cls._loop_started = True
        while True:
            try:
                cls._stop_event.wait(timeout=AutosaveConfig.save_period)
                cls._save()
                if cls._stop_event.is_set():  # Stop requested
                    return
            except Exception:
                traceback.print_exc()
| from conftest import get_multiprocessing_context, select_and_recv | ||
| from softioc import autosave, builder, softioc, device_core, asyncio_dispatcher | ||
| from unittest.mock import patch | ||
| import pytest | ||
| import threading | ||
| import numpy | ||
| import re | ||
| import yaml | ||
| import time | ||
| DEVICE_NAME = "MY-DEVICE" | ||
@pytest.fixture(autouse=True)
def reset_autosave_setup_teardown():
    """Snapshot autosave module state before each test and restore it after."""
    default_save_period = autosave.AutosaveConfig.save_period
    default_device_name = autosave.AutosaveConfig.device_name
    default_directory = autosave.AutosaveConfig.directory
    default_enabled = autosave.AutosaveConfig.enabled
    default_tb = autosave.AutosaveConfig.timestamped_backups
    default_pvs = autosave.Autosave._pvs.copy()
    default_state = autosave.Autosave._last_saved_state.copy()
    default_cm_save_fields = autosave._AutosaveContext._fields
    default_instance = autosave._AutosaveContext._instance
    yield
    autosave.AutosaveConfig.save_period = default_save_period
    autosave.AutosaveConfig.device_name = default_device_name
    autosave.AutosaveConfig.directory = default_directory
    autosave.AutosaveConfig.enabled = default_enabled
    autosave.AutosaveConfig.timestamped_backups = default_tb
    autosave.Autosave._pvs = default_pvs
    autosave.Autosave._last_saved_state = default_state
    # a fresh Event so a set stop flag never leaks into the next test
    autosave.Autosave._stop_event = threading.Event()
    autosave._AutosaveContext._fields = default_cm_save_fields
    autosave._AutosaveContext._instance = default_instance
@pytest.fixture
def existing_autosave_dir(tmp_path):
    """Build a directory holding a populated autosave file for every record
    type under test, plus a stale .bu backup to check overwriting."""
    state = {
        "SAVED-AO": 20.0,
        "SAVED-AI": 20.0,
        "SAVED-BO": 1,
        "SAVED-BI": 1,
        "SAVED-LONGIN": 20,
        "SAVED-LONGOUT": 20,
        "SAVED-INT64IN": 100,
        "SAVED-INT64OUT": 100,
        "SAVED-MBBI": 15,
        "SAVED-MBBO": 15,
        "SAVED-STRINGIN": "test string in",
        "SAVED-STRINGOUT": "test string out",
        "SAVED-LONGSTRINGIN": "test long string in",
        "SAVED-LONGSTRINGOUT": "test long string out",
        "SAVED-ACTION": 1,
        "SAVED-WAVEFORMIN": [1, 2, 3, 4],
        "SAVED-WAVEFORMOUT": [1, 2, 3, 4],
        "SAVED-WAVEFORMIN-STRINGS": ["test", "waveform", "strings"],
        "SAVED-WAVEFORMOUT-STRINGS": ["test", "waveform", "strings"],
    }
    with open(tmp_path / f"{DEVICE_NAME}.softsav", "w") as f:
        yaml.dump(state, f, indent=4)
    with open(tmp_path / f"{DEVICE_NAME}.softsav.bu", "w") as f:
        yaml.dump({"OUT-OF-DATE-KEY": "out of date value"}, f, indent=4)
    return tmp_path
def test_configure(tmp_path):
    """configure() populates AutosaveConfig and enables autosave."""
    assert autosave.AutosaveConfig.enabled is False
    autosave.configure(tmp_path, DEVICE_NAME)
    assert autosave.AutosaveConfig.device_name == DEVICE_NAME
    assert autosave.AutosaveConfig.directory == tmp_path
    assert autosave.AutosaveConfig.enabled is True
    assert autosave.AutosaveConfig.timestamped_backups is True
def test_autosave_defaults():
    """Without configure(), all autosave state is in its default condition."""
    assert autosave.Autosave._pvs == {}
    assert autosave.Autosave._last_saved_state == {}
    assert isinstance(autosave.Autosave._stop_event, threading.Event)
    assert not autosave.Autosave._stop_event.is_set()
    assert autosave.AutosaveConfig.save_period == 30.0
    assert autosave.AutosaveConfig.device_name is None
    assert autosave.AutosaveConfig.directory is None
    assert autosave.AutosaveConfig.enabled is False
    assert autosave.AutosaveConfig.timestamped_backups is True
def test_configure_dir_doesnt_exist(tmp_path):
    """configure() rejects a directory path that does not exist."""
    # NOTE(review): this local name shadows the module-level DEVICE_NAME.
    DEVICE_NAME = "MY_DEVICE"
    builder.aOut("MY-RECORD", autosave=True)
    with pytest.raises(FileNotFoundError):
        autosave.configure(tmp_path / "subdir-doesnt-exist", DEVICE_NAME)
def test_returns_if_init_called_before_configure():
    """Instantiating Autosave before configure() must not enable autosave."""
    autosave.Autosave()
    assert autosave.AutosaveConfig.enabled is False
def test_all_record_types_saveable(tmp_path):
    """Every builder record type can be registered and written to a backup."""
    autosave.configure(tmp_path, DEVICE_NAME)
    number_types = [
        "aIn",
        "aOut",
        "boolIn",
        "boolOut",
        "longIn",
        "longOut",
        "int64In",
        "int64Out",
        "mbbIn",
        "mbbOut",
        "Action",
    ]
    string_types = ["stringIn", "stringOut", "longStringIn", "longStringOut"]
    waveform_types = ["WaveformIn", "WaveformOut"]
    for pv_type in number_types:
        pv = getattr(builder, pv_type)(pv_type, autosave=True)
    for pv_type in string_types:
        pv = getattr(builder, pv_type)(pv_type, autosave=True)
        pv.set("test string")
    for pv_type in waveform_types:
        # numeric, char, and string flavoured waveforms
        getattr(builder, pv_type)(pv_type, numpy.zeros((100)), autosave=True)
        getattr(builder, pv_type)(
            f"{pv_type}_of_chars", "test waveform string", autosave=True
        )
        getattr(builder, pv_type)(
            f"{pv_type}_of_strings", ["array", "of", "strings"], autosave=True
        )
    autosaver = autosave.Autosave()
    autosaver._save()
    with open(tmp_path / f"{DEVICE_NAME}.softsav", "r") as f:
        saved = yaml.full_load(f)
    for pv_type in number_types + string_types + waveform_types:
        assert pv_type in saved
def test_can_save_fields(tmp_path):
    """Field-level autosave keys appear alongside (or instead of) VAL keys."""
    builder.aOut("SAVEVAL", autosave=["VAL", "DISA"])
    builder.aOut("DONTSAVEVAL", autosave=["SCAN"])
    # we need to patch get_field as we can't call builder.LoadDatabase()
    # and softioc.iocInit() in unit tests
    with patch(
        "softioc.device.ProcessDeviceSupportCore.get_field", return_value="0"
    ):
        autosave.configure(tmp_path, DEVICE_NAME)
        autosaver = autosave.Autosave()
        assert "SAVEVAL" in autosaver._pvs
        assert "SAVEVAL.DISA" in autosaver._pvs
        assert "DONTSAVEVAL" not in autosaver._pvs
        assert "DONTSAVEVAL.SCAN" in autosaver._pvs
        autosaver._save()
    with open(tmp_path / f"{DEVICE_NAME}.softsav", "r") as f:
        saved = yaml.full_load(f)
    assert "SAVEVAL" in saved
    assert "SAVEVAL.DISA" in saved
    assert "DONTSAVEVAL" not in saved
    assert "DONTSAVEVAL.SCAN" in saved
def test_stop_event(tmp_path):
    """_stop() sets the stop event and actually terminates a running loop.

    The original version only joined with a timeout and never checked the
    thread had exited, so a hung loop would still pass silently.
    """
    autosave.configure(tmp_path, DEVICE_NAME)
    builder.aOut("DUMMYRECORD", autosave=True)
    worker = threading.Thread(
        target=autosave.Autosave._loop,
    )
    try:
        worker.daemon = True
        worker.start()
        assert not autosave.Autosave._stop_event.is_set()
        assert worker.is_alive()
        autosave.Autosave._stop()
        assert autosave.Autosave._stop_event.is_set()
    finally:
        worker.join(timeout=1)
        # join(timeout=...) does not raise on timeout, so verify the loop
        # really exited rather than still running in the background
        assert not worker.is_alive()
@pytest.mark.parametrize(
    "timestamped,regex",
    [
        # timestamped backups look like <name>.softsav_yymmdd-HHMMSS
        (True, r"^" + DEVICE_NAME + r"\.softsav_[0-9]{6}-[0-9]{6}$"),
        # untimestamped backups overwrite a single <name>.softsav.bu file
        (False, r"^" + DEVICE_NAME + r"\.softsav\.bu$"),
    ],
)
def test_backup_on_load(existing_autosave_dir, timestamped, regex):
    """Loading autosave backs up the existing file, timestamped or as .bu.

    Fixes three defects in the original test: the parametrize cases paired
    each regex with the wrong ``timestamped`` flag, the glob pattern could
    never match the ``.softsav.bu`` backup, and the ``any(...)`` result was
    discarded instead of asserted, so the check never failed.
    """
    autosave.configure(
        existing_autosave_dir, DEVICE_NAME, timestamped_backups=timestamped
    )
    # backup only performed if there are any pvs to save
    builder.aOut("SAVED-AO", autosave=True)
    autosave.load_autosave()
    # collect both styles of backup file created next to the autosave file
    backup_files = list(existing_autosave_dir.glob("*.softsav_*")) + list(
        existing_autosave_dir.glob("*.softsav.bu")
    )
    # assert backup is <name>.softsav_yymmdd-HHMMSS or <name>.softsav.bu
    assert any(re.match(regex, file.name) for file in backup_files)
    if not timestamped:
        # test that existing .bu file gets overwritten
        with open(existing_autosave_dir / f"{DEVICE_NAME}.softsav.bu") as f:
            state = yaml.full_load(f)
        assert "OUT-OF-DATE-KEY" not in state
        assert "SAVED-AO" in state
def test_autosave_key_names(tmp_path):
    """Records created before configure() still save under their own names."""
    builder.aOut("DEFAULTNAME", autosave=True)
    builder.aOut("DEFAULTNAMEAFTERPREFIXSET", autosave=True)
    autosave.configure(tmp_path, DEVICE_NAME)
    autosaver = autosave.Autosave()
    autosaver._save()
    with open(tmp_path / f"{DEVICE_NAME}.softsav", "r") as f:
        saved = yaml.full_load(f)
    assert "DEFAULTNAME" in saved
    assert "DEFAULTNAMEAFTERPREFIXSET" in saved
def test_context_manager(tmp_path):
    """Fields from the Autosave context manager combine with per-PV fields."""
    builder.aOut("MANUAL", autosave=["VAL", "EGU"])
    with autosave.Autosave(["VAL", "PINI"]):
        builder.aOut("AUTOMATIC")
        builder.aOut(
            "AUTOMATIC-EXTRA-FIELD", autosave=["SCAN"]
        )
    autosave.configure(tmp_path, DEVICE_NAME)
    # patch get_field because the IOC is never actually initialised here
    with patch(
        "softioc.device.ProcessDeviceSupportCore.get_field", return_value="0"
    ):
        autosaver = autosave.Autosave()
        autosaver._save()
    with open(tmp_path / f"{DEVICE_NAME}.softsav", "r") as f:
        saved = yaml.full_load(f)
    assert "MANUAL" in saved
    assert "MANUAL.EGU" in saved
    assert "AUTOMATIC" in saved
    assert "AUTOMATIC.PINI" in saved
    assert "AUTOMATIC-EXTRA-FIELD" in saved
    assert "AUTOMATIC-EXTRA-FIELD.SCAN" in saved
    assert "AUTOMATIC-EXTRA-FIELD.PINI" in saved
def check_all_record_types_load_properly(device_name, autosave_dir, conn):
    """Child-process IOC: create one PV of every record type, then check
    each holds its default before load and its saved value after load.

    Sends "D" on the pipe when all asserts pass.
    """
    autosave.configure(autosave_dir, device_name)
    pv_aOut = builder.aOut("SAVED-AO", autosave=True)
    pv_aIn = builder.aIn("SAVED-AI", autosave=True)
    pv_boolOut = builder.boolOut("SAVED-BO", autosave=True)
    pv_boolIn = builder.boolIn("SAVED-BI", autosave=True)
    pv_longIn = builder.longIn("SAVED-LONGIN", autosave=True)
    pv_longOut = builder.longOut("SAVED-LONGOUT", autosave=True)
    pv_int64In = builder.int64In("SAVED-INT64IN", autosave=True)
    pv_int64Out = builder.int64Out("SAVED-INT64OUT", autosave=True)
    pv_mbbIn = builder.mbbIn("SAVED-MBBI", autosave=True)
    pv_mbbOut = builder.mbbOut("SAVED-MBBO", autosave=True)
    pv_stringIn = builder.stringIn("SAVED-STRINGIN", autosave=True)
    pv_stringOut = builder.stringOut("SAVED-STRINGOUT", autosave=True)
    pv_longStringIn = builder.longStringIn("SAVED-LONGSTRINGIN", autosave=True)
    pv_longStringOut = builder.longStringOut(
        "SAVED-LONGSTRINGOUT", autosave=True
    )
    pv_Action = builder.Action("SAVED-ACTION", autosave=True)
    pv_WaveformIn = builder.WaveformIn(
        "SAVED-WAVEFORMIN", numpy.zeros((4)), autosave=True
    )
    pv_WaveformOut = builder.WaveformOut(
        "SAVED-WAVEFORMOUT", numpy.zeros((4)), autosave=True
    )
    pv_WaveformIn_strings = builder.WaveformIn(
        "SAVED-WAVEFORMIN-STRINGS",
        ["initial", "waveform", "strings"],
        autosave=True,
    )
    pv_WaveformOut_strings = builder.WaveformOut(
        "SAVED-WAVEFORMOUT-STRINGS",
        ["initial", "waveform", "strings"],
        autosave=True,
    )
    # all records start at their type defaults before the load happens
    assert pv_aOut.get() == 0.0
    assert pv_aIn.get() == 0.0
    assert pv_boolOut.get() == 0
    assert pv_boolIn.get() == 0
    assert pv_longIn.get() == 0
    assert pv_longOut.get() == 0
    assert pv_int64In.get() == 0
    assert pv_int64Out.get() == 0
    assert pv_mbbIn.get() == 0
    assert pv_mbbOut.get() == 0
    assert pv_stringIn.get() == ""
    assert pv_stringOut.get() == ""
    assert pv_longStringIn.get() == ""
    assert pv_longStringOut.get() == ""
    assert pv_Action.get() == 0
    assert (pv_WaveformIn.get() == numpy.array([0, 0, 0, 0])).all()
    assert (pv_WaveformOut.get() == numpy.array([0, 0, 0, 0])).all()
    assert pv_WaveformIn_strings.get() == ["initial", "waveform", "strings"]
    assert pv_WaveformOut_strings.get() == ["initial", "waveform", "strings"]
    # load called automatically when LoadDatabase() called
    builder.LoadDatabase()
    assert pv_aOut.get() == 20.0
    assert pv_aIn.get() == 20.0
    assert pv_boolOut.get() == 1
    assert pv_boolIn.get() == 1
    assert pv_longIn.get() == 20
    assert pv_longOut.get() == 20
    assert pv_int64In.get() == 100
    assert pv_int64Out.get() == 100
    assert pv_mbbIn.get() == 15
    assert pv_mbbOut.get() == 15
    assert pv_stringIn.get() == "test string in"
    assert pv_stringOut.get() == "test string out"
    assert pv_longStringIn.get() == "test long string in"
    assert pv_longStringOut.get() == "test long string out"
    assert pv_Action.get() == 1
    assert (pv_WaveformIn.get() == numpy.array([1, 2, 3, 4])).all()
    assert (pv_WaveformOut.get() == numpy.array([1, 2, 3, 4])).all()
    assert pv_WaveformIn_strings.get() == ["test", "waveform", "strings"]
    assert pv_WaveformOut_strings.get() == ["test", "waveform", "strings"]
    conn.send("D")  # "Done"
def test_actual_ioc_load(existing_autosave_dir):
    """Run an IOC in a subprocess and check saved values load into records.

    Fix: the subprocess was never joined or terminated, leaking an IOC
    process whenever an assert in the child (or the recv) failed.
    """
    ctx = get_multiprocessing_context()
    parent_conn, child_conn = ctx.Pipe()
    ioc_process = ctx.Process(
        target=check_all_record_types_load_properly,
        args=(DEVICE_NAME, existing_autosave_dir, child_conn),
    )
    ioc_process.start()
    try:
        # If we never receive D it probably means an assert failed
        select_and_recv(parent_conn, "D")
    finally:
        # always reap the IOC subprocess, even on failure
        ioc_process.terminate()
        ioc_process.join(timeout=10)
def check_all_record_types_save_properly(device_name, autosave_dir, conn):
    """Child-process IOC: start a real IOC with one PV of every record type,
    wait for a periodic save, then check the backup file contents.

    Sends "D" on the pipe when all asserts pass.
    """
    autosave.configure(autosave_dir, device_name, save_period=1)
    builder.aOut("aOut", autosave=True, initial_value=20.0)
    builder.aIn("aIn", autosave=True, initial_value=20.0)
    builder.boolOut("boolOut", autosave=True, initial_value=1)
    builder.boolIn("boolIn", autosave=True, initial_value=1)
    builder.longIn("longIn", autosave=True, initial_value=20)
    builder.longOut("longOut", autosave=True, initial_value=20)
    builder.int64In("int64In", autosave=True, initial_value=100)
    builder.int64Out("int64Out", autosave=True, initial_value=100)
    builder.mbbIn("mbbIn", autosave=True, initial_value=15)
    builder.mbbOut("mbbOut", autosave=True, initial_value=15)
    builder.stringIn("stringIn", autosave=True, initial_value="test string in")
    builder.stringOut(
        "stringOut", autosave=True, initial_value="test string out"
    )
    builder.longStringIn(
        "longStringIn", autosave=True, initial_value="test long string in"
    )
    builder.longStringOut(
        "longStringOut", autosave=True, initial_value="test long string out"
    )
    builder.Action("Action", autosave=True, initial_value=1)
    builder.WaveformIn("WaveformIn", [1, 2, 3, 4], autosave=True)
    builder.WaveformOut("WaveformOut", [1, 2, 3, 4], autosave=True)
    builder.LoadDatabase()
    dispatcher = asyncio_dispatcher.AsyncioDispatcher()
    softioc.iocInit(dispatcher)
    # wait long enough to ensure one save has occurred
    time.sleep(2)
    with open(autosave_dir / f"{device_name}.softsav", "r") as f:
        saved = yaml.full_load(f)
    assert saved["aOut"] == 20.0
    assert saved["aIn"] == 20.0
    assert saved["boolOut"] == 1
    assert saved["boolIn"] == 1
    assert saved["longIn"] == 20
    assert saved["longOut"] == 20
    assert saved["int64In"] == 100
    assert saved["int64Out"] == 100
    assert saved["mbbIn"] == 15
    assert saved["mbbOut"] == 15
    assert saved["stringIn"] == "test string in"
    assert saved["stringOut"] == "test string out"
    assert saved["longStringIn"] == "test long string in"
    assert saved["longStringOut"] == "test long string out"
    assert saved["Action"] == 1
    assert (saved["WaveformIn"] == numpy.array([1, 2, 3, 4])).all()
    assert (saved["WaveformOut"] == numpy.array([1, 2, 3, 4])).all()
    autosave.Autosave._stop()
    # force autosave thread to stop to ensure pytest exits
    conn.send("D")
def test_actual_ioc_save(tmp_path):
    """Run an IOC subprocess and verify a periodic save writes all values.

    Fix: the subprocess was never joined or terminated, leaking an IOC
    process whenever an assert in the child (or the recv) failed.
    """
    ctx = get_multiprocessing_context()
    parent_conn, child_conn = ctx.Pipe()
    ioc_process = ctx.Process(
        target=check_all_record_types_save_properly,
        args=(DEVICE_NAME, tmp_path, child_conn),
    )
    ioc_process.start()
    try:
        # If we never receive D it probably means an assert failed
        select_and_recv(parent_conn, "D")
    finally:
        # always reap the IOC subprocess, even on failure
        ioc_process.terminate()
        ioc_process.join(timeout=10)
def check_autosave_field_names_contain_device_prefix(
    device_name, tmp_path, conn
):
    """Child-process IOC: backup keys carry the device prefix only for
    records created after SetDeviceName().

    Sends "D" on the pipe when the asserts pass.
    """
    autosave.configure(tmp_path, device_name, save_period=1)
    builder.aOut("BEFORE", autosave=["VAL", "EGU"])
    builder.SetDeviceName(device_name)
    builder.aOut("AFTER", autosave=["VAL", "EGU"])
    builder.LoadDatabase()
    dispatcher = asyncio_dispatcher.AsyncioDispatcher()
    softioc.iocInit(dispatcher)
    # wait long enough to ensure one save has occurred
    time.sleep(2)
    with open(tmp_path / f"{device_name}.softsav", "r") as f:
        saved = yaml.full_load(f)
    assert "BEFORE" in saved.keys()
    assert f"{device_name}:AFTER" in saved.keys()
    autosave.Autosave._stop()
    conn.send("D")
def test_autosave_field_names_contain_device_prefix(tmp_path):
    """Run an IOC subprocess and check key naming around SetDeviceName().

    Fix: the subprocess was never joined or terminated, leaking an IOC
    process whenever an assert in the child (or the recv) failed.
    """
    ctx = get_multiprocessing_context()
    parent_conn, child_conn = ctx.Pipe()
    ioc_process = ctx.Process(
        target=check_autosave_field_names_contain_device_prefix,
        args=(DEVICE_NAME, tmp_path, child_conn),
    )
    ioc_process.start()
    try:
        # If we never receive D it probably means an assert failed
        select_and_recv(parent_conn, "D")
    finally:
        # always reap the IOC subprocess, even on failure
        ioc_process.terminate()
        ioc_process.join(timeout=10)
def test_context_manager_thread_safety(tmp_path):
    """PVs created on other threads are unaffected by this thread's context."""
    autosave.configure(tmp_path, DEVICE_NAME)
    in_cm_event = threading.Event()

    def create_pv_in_thread(name):
        # wait until the main thread is inside the context manager
        in_cm_event.wait()
        builder.aOut(name, autosave=False)

    pv_thread_before_cm = threading.Thread(
        target=create_pv_in_thread, args=["PV-FROM-THREAD-BEFORE"])
    pv_thread_in_cm = threading.Thread(
        target=create_pv_in_thread, args=["PV-FROM-THREAD-DURING"])
    pv_thread_before_cm.start()
    with autosave.Autosave(["VAL", "EGU"]):
        in_cm_event.set()
        builder.aOut("PV-FROM-CM")
        pv_thread_in_cm.start()
        pv_thread_in_cm.join()
        pv_thread_before_cm.join()
    # only the PV created on this thread inside the cm is tracked
    assert "PV-FROM-THREAD-BEFORE" not in autosave.Autosave._pvs
    assert "PV-FROM-THREAD-DURING" not in autosave.Autosave._pvs
    assert device_core.LookupRecord("PV-FROM-THREAD-BEFORE")
    assert device_core.LookupRecord("PV-FROM-THREAD-DURING")
def test_nested_context_managers_raises(tmp_path):
    """Creating an Autosave inside an open context manager raises."""
    autosave.configure(tmp_path, DEVICE_NAME)
    with autosave.Autosave(["SCAN"]):
        with pytest.raises(RuntimeError):
            with autosave.Autosave(False):
                builder.aOut("MY-PV")
        with pytest.raises(RuntimeError):
            autosave.Autosave()
def test_autosave_arguments(tmp_path):
    """All accepted forms of the autosave argument produce the right keys."""
    autosave.configure(tmp_path, DEVICE_NAME)
    builder.aOut("TRUE", autosave=True)
    builder.aOut("FIELDS", autosave=["LOPR", "HOPR"])
    builder.aOut("FALSE", autosave=False)
    builder.aOut("SINGLE-FIELD", autosave="EGU")
    builder.aOut("AUTO-VAL", autosave="VAL")
    assert set(autosave.Autosave._pvs) == {
        "TRUE", "FIELDS.LOPR", "FIELDS.HOPR", "SINGLE-FIELD.EGU", "AUTO-VAL"}
    autosave.Autosave._pvs = {}
    builder.ClearRecords()
    with autosave.Autosave():  # True by default
        builder.aOut("AUTO-TRUE", autosave=False)
        builder.aOut("FIELDS", autosave=["LOPR", "HOPR"])
    assert set(autosave.Autosave._pvs) == {
        "FIELDS", "FIELDS.LOPR", "FIELDS.HOPR"}
    autosave.Autosave._pvs = {}
    builder.ClearRecords()
    with autosave.Autosave(["EGU"]):
        builder.aOut("AUTO-FALSE")
        builder.aOut("FIELDS", autosave=["PINI"])
    assert set(autosave.Autosave._pvs) == {
        "AUTO-FALSE.EGU", "FIELDS.EGU", "FIELDS.PINI"}
    autosave.Autosave._pvs = {}
    builder.ClearRecords()
    with autosave.Autosave(False):
        builder.aOut("AUTO-DEFAULT")
        builder.aOut("AUTO-TRUE", autosave=True)
    # autosave=False context adds no fields; only the explicit True PV saves
    assert set(autosave.Autosave._pvs) == {"AUTO-TRUE"}
    autosave.Autosave._pvs = {}
    builder.ClearRecords()
    with autosave.Autosave("LOPR"):  # single field
        builder.aOut("FIELDS", autosave="HOPR")
    assert set(autosave.Autosave._pvs) == {"FIELDS.HOPR", "FIELDS.LOPR"}
| import asyncio | ||
| from contextlib import nullcontext | ||
| import pytest | ||
| from conftest import ( | ||
| aioca_cleanup, | ||
| log, | ||
| create_random_prefix, | ||
| TIMEOUT, | ||
| select_and_recv, | ||
| get_multiprocessing_context, | ||
| ) | ||
| from softioc import asyncio_dispatcher, builder, softioc | ||
class TestPVAccess:
    """Tests related to PVAccess"""

    record_name = "PVA_AOut"
    record_value = 10

    def pva_test_func(self, device_name, conn, use_pva):
        # Runs in a child process: build a one-record IOC, optionally with
        # PVA enabled, signal "R"eady, then wait for "D"one from the parent.
        builder.SetDeviceName(device_name)
        builder.aOut(self.record_name, initial_value=self.record_value)
        dispatcher = asyncio_dispatcher.AsyncioDispatcher()
        builder.LoadDatabase()
        softioc.iocInit(dispatcher=dispatcher, enable_pva=use_pva)
        conn.send("R")  # "Ready"
        log("CHILD: Sent R over Connection to Parent")
        # Keep process alive while main thread works.
        while (True):
            if conn.poll(TIMEOUT):
                val = conn.recv()
                if val == "D":  # "Done"
                    break

    @pytest.mark.asyncio
    @pytest.mark.parametrize(
        "use_pva,expectation",
        [
            (True, nullcontext()),
            (False, pytest.raises(asyncio.TimeoutError))
        ]
    )
    async def test_pva_enable_disable(self, use_pva, expectation):
        """Test that we can enable and disable PVA, perform PVAccess requests
        when enabled, and that we can always do Channel Access requests"""
        ctx = get_multiprocessing_context()
        parent_conn, child_conn = ctx.Pipe()
        device_name = create_random_prefix()
        process = ctx.Process(
            target=self.pva_test_func,
            args=(device_name, child_conn, use_pva),
        )
        process.start()
        # imported lazily so the module can load without these optional deps
        from aioca import caget
        from p4p.client.asyncio import Context
        try:
            # Wait for message that IOC has started
            select_and_recv(parent_conn, "R")
            record_full_name = device_name + ":" + self.record_name
            ret_val = await caget(record_full_name, timeout=TIMEOUT)
            assert ret_val == self.record_value
            with expectation as _:
                with Context("pva") as ctx:
                    # Short timeout as, if the above CA connection has happened
                    # there's no need to wait a very long time for the PVA
                    # connection
                    pva_val = await asyncio.wait_for(
                        ctx.get(record_full_name),
                        timeout=2
                    )
                    assert pva_val == self.record_value
        finally:
            # Clear the cache before stopping the IOC stops
            # "channel disconnected" error messages
            aioca_cleanup()
            parent_conn.send("D")  # "Done"
            process.join(timeout=TIMEOUT)
+22
-19
@@ -1,6 +0,6 @@ | ||
| Metadata-Version: 2.1 | ||
| Metadata-Version: 2.4 | ||
| Name: softioc | ||
| Version: 4.5.0 | ||
| Version: 4.6.0 | ||
| Summary: Embed an EPICS IOC in a Python process | ||
| Home-page: https://github.com/dls-controls/pythonSoftIOC | ||
| Home-page: https://github.com/DiamondLightSource/pythonSoftIOC | ||
| Author: Michael Abbott | ||
@@ -11,3 +11,2 @@ Author-email: Michael.Abbott@diamond.ac.uk | ||
| Classifier: License :: OSI Approved :: Apache Software License | ||
| Classifier: Programming Language :: Python :: 3.6 | ||
| Classifier: Programming Language :: Python :: 3.7 | ||
@@ -17,9 +16,10 @@ Classifier: Programming Language :: Python :: 3.8 | ||
| Classifier: Programming Language :: Python :: 3.10 | ||
| Requires-Python: >=3.6 | ||
| Requires-Python: >=3.7 | ||
| Description-Content-Type: text/x-rst | ||
| License-File: LICENSE | ||
| Requires-Dist: epicscorelibs<7.0.7.99.1,>=7.0.7.99.0.2 | ||
| Requires-Dist: pvxslibs>=1.2.2 | ||
| Requires-Dist: epicscorelibs<7.0.7.99.2,>=7.0.7.99.1.2a1 | ||
| Requires-Dist: pvxslibs>=1.3.2a2 | ||
| Requires-Dist: numpy | ||
| Requires-Dist: epicsdbbuilder>=1.4 | ||
| Requires-Dist: pyyaml>=6.0 | ||
| Provides-Extra: useful | ||
@@ -34,7 +34,10 @@ Requires-Dist: cothread; extra == "useful" | ||
| Requires-Dist: sphinx==4.3.2; extra == "dev" | ||
| Requires-Dist: sphinx-rtd-theme-github-versions; extra == "dev" | ||
| Requires-Dist: sphinx-rtd-theme==1.0.0; extra == "dev" | ||
| Requires-Dist: sphinx-rtd-theme-github-versions==1.1; extra == "dev" | ||
| Requires-Dist: pytest-asyncio; extra == "dev" | ||
| Requires-Dist: aioca>=1.6; extra == "dev" | ||
| Requires-Dist: cothread; sys_platform != "win32" and extra == "dev" | ||
| Requires-Dist: p4p; extra == "dev" | ||
| Requires-Dist: p4p>=4.2.0a2; extra == "dev" | ||
| Dynamic: license-file | ||
| Dynamic: requires-dist | ||
@@ -54,5 +57,5 @@ pythonSoftIOC | ||
| PyPI ``pip install softioc`` | ||
| Source code https://github.com/dls-controls/pythonSoftIOC | ||
| Documentation https://dls-controls.github.io/pythonSoftIOC | ||
| Changelog https://github.com/dls-controls/pythonSoftIOC/blob/master/CHANGELOG.rst | ||
| Source code https://github.com/DiamondLightSource/pythonSoftIOC | ||
| Documentation https://DiamondLightSource.github.io/pythonSoftIOC | ||
| Changelog https://github.com/DiamondLightSource/pythonSoftIOC/blob/master/CHANGELOG.rst | ||
| ============== ============================================================== | ||
@@ -91,12 +94,12 @@ | ||
| .. |code_ci| image:: https://github.com/dls-controls/pythonSoftIOC/workflows/Code%20CI/badge.svg?branch=master | ||
| :target: https://github.com/dls-controls/pythonSoftIOC/actions?query=workflow%3A%22Code+CI%22 | ||
| .. |code_ci| image:: https://github.com/DiamondLightSource/pythonSoftIOC/workflows/Code%20CI/badge.svg?branch=master | ||
| :target: https://github.com/DiamondLightSource/pythonSoftIOC/actions?query=workflow%3A%22Code+CI%22 | ||
| :alt: Code CI | ||
| .. |docs_ci| image:: https://github.com/dls-controls/pythonSoftIOC/workflows/Docs%20CI/badge.svg?branch=master | ||
| :target: https://github.com/dls-controls/pythonSoftIOC/actions?query=workflow%3A%22Docs+CI%22 | ||
| .. |docs_ci| image:: https://github.com/DiamondLightSource/pythonSoftIOC/workflows/Docs%20CI/badge.svg?branch=master | ||
| :target: https://github.com/DiamondLightSource/pythonSoftIOC/actions?query=workflow%3A%22Docs+CI%22 | ||
| :alt: Docs CI | ||
| .. |coverage| image:: https://codecov.io/gh/dls-controls/pythonSoftIOC/branch/master/graph/badge.svg | ||
| :target: https://codecov.io/gh/dls-controls/pythonSoftIOC | ||
| .. |coverage| image:: https://codecov.io/gh/DiamondLightSource/pythonSoftIOC/branch/master/graph/badge.svg | ||
| :target: https://codecov.io/gh/DiamondLightSource/pythonSoftIOC | ||
| :alt: Test Coverage | ||
@@ -116,2 +119,2 @@ | ||
| See https://dls-controls.github.io/pythonSoftIOC for more detailed documentation. | ||
| See https://DiamondLightSource.github.io/pythonSoftIOC for more detailed documentation. |
+1
-1
| [build-system] | ||
| requires = ["setuptools", "wheel", "setuptools_dso>=2.1", "epicscorelibs>=7.0.7.99.0.2"] | ||
| requires = ["setuptools", "wheel", "setuptools_dso>=2.1", "epicscorelibs>=7.0.7.99.1.1a3"] | ||
| build-backend = "setuptools.build_meta:__legacy__" |
+10
-10
@@ -14,5 +14,5 @@ pythonSoftIOC | ||
| PyPI ``pip install softioc`` | ||
| Source code https://github.com/dls-controls/pythonSoftIOC | ||
| Documentation https://dls-controls.github.io/pythonSoftIOC | ||
| Changelog https://github.com/dls-controls/pythonSoftIOC/blob/master/CHANGELOG.rst | ||
| Source code https://github.com/DiamondLightSource/pythonSoftIOC | ||
| Documentation https://DiamondLightSource.github.io/pythonSoftIOC | ||
| Changelog https://github.com/DiamondLightSource/pythonSoftIOC/blob/master/CHANGELOG.rst | ||
| ============== ============================================================== | ||
@@ -51,12 +51,12 @@ | ||
| .. |code_ci| image:: https://github.com/dls-controls/pythonSoftIOC/workflows/Code%20CI/badge.svg?branch=master | ||
| :target: https://github.com/dls-controls/pythonSoftIOC/actions?query=workflow%3A%22Code+CI%22 | ||
| .. |code_ci| image:: https://github.com/DiamondLightSource/pythonSoftIOC/workflows/Code%20CI/badge.svg?branch=master | ||
| :target: https://github.com/DiamondLightSource/pythonSoftIOC/actions?query=workflow%3A%22Code+CI%22 | ||
| :alt: Code CI | ||
| .. |docs_ci| image:: https://github.com/dls-controls/pythonSoftIOC/workflows/Docs%20CI/badge.svg?branch=master | ||
| :target: https://github.com/dls-controls/pythonSoftIOC/actions?query=workflow%3A%22Docs+CI%22 | ||
| .. |docs_ci| image:: https://github.com/DiamondLightSource/pythonSoftIOC/workflows/Docs%20CI/badge.svg?branch=master | ||
| :target: https://github.com/DiamondLightSource/pythonSoftIOC/actions?query=workflow%3A%22Docs+CI%22 | ||
| :alt: Docs CI | ||
| .. |coverage| image:: https://codecov.io/gh/dls-controls/pythonSoftIOC/branch/master/graph/badge.svg | ||
| :target: https://codecov.io/gh/dls-controls/pythonSoftIOC | ||
| .. |coverage| image:: https://codecov.io/gh/DiamondLightSource/pythonSoftIOC/branch/master/graph/badge.svg | ||
| :target: https://codecov.io/gh/DiamondLightSource/pythonSoftIOC | ||
| :alt: Test Coverage | ||
@@ -76,2 +76,2 @@ | ||
| See https://dls-controls.github.io/pythonSoftIOC for more detailed documentation. | ||
| See https://DiamondLightSource.github.io/pythonSoftIOC for more detailed documentation. |
+8
-8
| [metadata] | ||
| name = softioc | ||
| description = Embed an EPICS IOC in a Python process | ||
| url = https://github.com/dls-controls/pythonSoftIOC | ||
| url = https://github.com/DiamondLightSource/pythonSoftIOC | ||
| author = Michael Abbott | ||
@@ -13,3 +13,2 @@ author_email = Michael.Abbott@diamond.ac.uk | ||
| License :: OSI Approved :: Apache Software License | ||
| Programming Language :: Python :: 3.6 | ||
| Programming Language :: Python :: 3.7 | ||
@@ -22,3 +21,3 @@ Programming Language :: Python :: 3.8 | ||
| packages = softioc | ||
| python_requires = >=3.6 | ||
| python_requires = >=3.7 | ||
@@ -50,7 +49,8 @@ [options.entry_points] | ||
| sphinx ==4.3.2 | ||
| sphinx-rtd-theme-github-versions | ||
| sphinx-rtd-theme ==1.0.0 | ||
| sphinx-rtd-theme-github-versions ==1.1 | ||
| pytest-asyncio | ||
| aioca >=1.6 | ||
| cothread; sys_platform != "win32" | ||
| p4p | ||
| p4p>=4.2.0a2 | ||
@@ -60,5 +60,5 @@ [flake8] | ||
| extend-ignore = | ||
| F401 F403 F405 # Allow from module import * | ||
| E251 # Allow call(param = value) | ||
| E301 E302 E303 E305 # Allow any number of blank lines | ||
| F401 F403 F405 | ||
| E251 | ||
| E301 E302 E303 E305 | ||
@@ -65,0 +65,0 @@ [tool:pytest] |
+3
-2
@@ -107,7 +107,8 @@ import os | ||
| epicscorelibs.version.abi_requires(), | ||
| "pvxslibs>=1.2.2", | ||
| "pvxslibs>=1.3.2a2", | ||
| "numpy", | ||
| "epicsdbbuilder>=1.4" | ||
| "epicsdbbuilder>=1.4", | ||
| "pyyaml>=6.0" | ||
| ], | ||
| zip_safe = False, # setuptools_dso is not compatible with eggs! | ||
| ) |
@@ -1,6 +0,6 @@ | ||
| Metadata-Version: 2.1 | ||
| Metadata-Version: 2.4 | ||
| Name: softioc | ||
| Version: 4.5.0 | ||
| Version: 4.6.0 | ||
| Summary: Embed an EPICS IOC in a Python process | ||
| Home-page: https://github.com/dls-controls/pythonSoftIOC | ||
| Home-page: https://github.com/DiamondLightSource/pythonSoftIOC | ||
| Author: Michael Abbott | ||
@@ -11,3 +11,2 @@ Author-email: Michael.Abbott@diamond.ac.uk | ||
| Classifier: License :: OSI Approved :: Apache Software License | ||
| Classifier: Programming Language :: Python :: 3.6 | ||
| Classifier: Programming Language :: Python :: 3.7 | ||
@@ -17,9 +16,10 @@ Classifier: Programming Language :: Python :: 3.8 | ||
| Classifier: Programming Language :: Python :: 3.10 | ||
| Requires-Python: >=3.6 | ||
| Requires-Python: >=3.7 | ||
| Description-Content-Type: text/x-rst | ||
| License-File: LICENSE | ||
| Requires-Dist: epicscorelibs<7.0.7.99.1,>=7.0.7.99.0.2 | ||
| Requires-Dist: pvxslibs>=1.2.2 | ||
| Requires-Dist: epicscorelibs<7.0.7.99.2,>=7.0.7.99.1.2a1 | ||
| Requires-Dist: pvxslibs>=1.3.2a2 | ||
| Requires-Dist: numpy | ||
| Requires-Dist: epicsdbbuilder>=1.4 | ||
| Requires-Dist: pyyaml>=6.0 | ||
| Provides-Extra: useful | ||
@@ -34,7 +34,10 @@ Requires-Dist: cothread; extra == "useful" | ||
| Requires-Dist: sphinx==4.3.2; extra == "dev" | ||
| Requires-Dist: sphinx-rtd-theme-github-versions; extra == "dev" | ||
| Requires-Dist: sphinx-rtd-theme==1.0.0; extra == "dev" | ||
| Requires-Dist: sphinx-rtd-theme-github-versions==1.1; extra == "dev" | ||
| Requires-Dist: pytest-asyncio; extra == "dev" | ||
| Requires-Dist: aioca>=1.6; extra == "dev" | ||
| Requires-Dist: cothread; sys_platform != "win32" and extra == "dev" | ||
| Requires-Dist: p4p; extra == "dev" | ||
| Requires-Dist: p4p>=4.2.0a2; extra == "dev" | ||
| Dynamic: license-file | ||
| Dynamic: requires-dist | ||
@@ -54,5 +57,5 @@ pythonSoftIOC | ||
| PyPI ``pip install softioc`` | ||
| Source code https://github.com/dls-controls/pythonSoftIOC | ||
| Documentation https://dls-controls.github.io/pythonSoftIOC | ||
| Changelog https://github.com/dls-controls/pythonSoftIOC/blob/master/CHANGELOG.rst | ||
| Source code https://github.com/DiamondLightSource/pythonSoftIOC | ||
| Documentation https://DiamondLightSource.github.io/pythonSoftIOC | ||
| Changelog https://github.com/DiamondLightSource/pythonSoftIOC/blob/master/CHANGELOG.rst | ||
| ============== ============================================================== | ||
@@ -91,12 +94,12 @@ | ||
| .. |code_ci| image:: https://github.com/dls-controls/pythonSoftIOC/workflows/Code%20CI/badge.svg?branch=master | ||
| :target: https://github.com/dls-controls/pythonSoftIOC/actions?query=workflow%3A%22Code+CI%22 | ||
| .. |code_ci| image:: https://github.com/DiamondLightSource/pythonSoftIOC/workflows/Code%20CI/badge.svg?branch=master | ||
| :target: https://github.com/DiamondLightSource/pythonSoftIOC/actions?query=workflow%3A%22Code+CI%22 | ||
| :alt: Code CI | ||
| .. |docs_ci| image:: https://github.com/dls-controls/pythonSoftIOC/workflows/Docs%20CI/badge.svg?branch=master | ||
| :target: https://github.com/dls-controls/pythonSoftIOC/actions?query=workflow%3A%22Docs+CI%22 | ||
| .. |docs_ci| image:: https://github.com/DiamondLightSource/pythonSoftIOC/workflows/Docs%20CI/badge.svg?branch=master | ||
| :target: https://github.com/DiamondLightSource/pythonSoftIOC/actions?query=workflow%3A%22Docs+CI%22 | ||
| :alt: Docs CI | ||
| .. |coverage| image:: https://codecov.io/gh/dls-controls/pythonSoftIOC/branch/master/graph/badge.svg | ||
| :target: https://codecov.io/gh/dls-controls/pythonSoftIOC | ||
| .. |coverage| image:: https://codecov.io/gh/DiamondLightSource/pythonSoftIOC/branch/master/graph/badge.svg | ||
| :target: https://codecov.io/gh/DiamondLightSource/pythonSoftIOC | ||
| :alt: Test Coverage | ||
@@ -116,2 +119,2 @@ | ||
| See https://dls-controls.github.io/pythonSoftIOC for more detailed documentation. | ||
| See https://DiamondLightSource.github.io/pythonSoftIOC for more detailed documentation. |
@@ -1,5 +0,6 @@ | ||
| epicscorelibs<7.0.7.99.1,>=7.0.7.99.0.2 | ||
| pvxslibs>=1.2.2 | ||
| epicscorelibs<7.0.7.99.2,>=7.0.7.99.1.2a1 | ||
| pvxslibs>=1.3.2a2 | ||
| numpy | ||
| epicsdbbuilder>=1.4 | ||
| pyyaml>=6.0 | ||
@@ -11,6 +12,7 @@ [dev] | ||
| sphinx==4.3.2 | ||
| sphinx-rtd-theme-github-versions | ||
| sphinx-rtd-theme==1.0.0 | ||
| sphinx-rtd-theme-github-versions==1.1 | ||
| pytest-asyncio | ||
| aioca>=1.6 | ||
| p4p | ||
| p4p>=4.2.0a2 | ||
@@ -17,0 +19,0 @@ [dev:sys_platform != "win32"] |
@@ -12,2 +12,3 @@ LICENSE | ||
| softioc/asyncio_dispatcher.py | ||
| softioc/autosave.py | ||
| softioc/builder.py | ||
@@ -84,5 +85,7 @@ softioc/cothread_dispatcher.py | ||
| tests/test_asyncio.py | ||
| tests/test_autosave.py | ||
| tests/test_cothread.py | ||
| tests/test_deviocstats.py | ||
| tests/test_pvaccess.py | ||
| tests/test_record_values.py | ||
| tests/test_records.py |
+3
-11
| '''Python soft IOC module.''' | ||
| import os | ||
| import ctypes | ||
| from setuptools_dso.runtime import find_dso | ||
| import epicscorelibs.path | ||
| import pvxslibs.path | ||
| from epicscorelibs.ioc import \ | ||
| iocshRegisterCommon, registerRecordDeviceDriver, pdbbase | ||
| from epicscorelibs.ioc import iocshRegisterCommon, pdbbase | ||
@@ -18,3 +14,3 @@ # Do this as early as possible, in case we happen to use cothread | ||
| # before we call iocshRegisterCommon | ||
| from .imports import dbLoadDatabase | ||
| from .imports import dbLoadDatabase, registerRecordDeviceDriver | ||
| from ._version_git import __version__ | ||
@@ -26,11 +22,7 @@ | ||
| dbLoadDatabase('base.dbd', base_dbd_path, None) | ||
| dbLoadDatabase('pvxsIoc.dbd', pvxslibs.path.dbd_path, None) | ||
| iocStats = os.path.join(os.path.dirname(__file__), "iocStats", "devIocStats") | ||
| dbLoadDatabase('devIocStats.dbd', iocStats, None) | ||
| ctypes.CDLL(find_dso('pvxslibs.lib.pvxsIoc'), ctypes.RTLD_GLOBAL) | ||
| os.environ.setdefault('PVXS_QSRV_ENABLE', 'YES') | ||
| if registerRecordDeviceDriver(pdbbase): | ||
| raise RuntimeError('Error registering') | ||
| registerRecordDeviceDriver(pdbbase) | ||
| __all__ = ['__version__'] |
| # Compute a version number from a git repo or archive | ||
| # This file is released into the public domain. Generated by: | ||
| # versiongit-1.0 (https://github.com/dls-controls/versiongit) | ||
| # versiongit-1.0 (https://github.com/DiamondLightSource/versiongit) | ||
| import os | ||
@@ -11,4 +11,4 @@ import re | ||
| # These will be filled in if git archive is run or by setup.py cmdclasses | ||
| GIT_REFS = 'tag: 4.5.0' | ||
| GIT_SHA1 = 'eea8794' | ||
| GIT_REFS = 'tag: 4.6.0' | ||
| GIT_SHA1 = 'acbb1a6' | ||
@@ -15,0 +15,0 @@ # Git describe gives us sha1, last version-like tag, and commits since then |
+1
-0
@@ -8,2 +8,3 @@ # Severity code definitions taken from EPICS alarm.h | ||
| # Some alarm code definitions taken from EPICS alarm.h | ||
| # NO_ALARM = 0 | ||
| READ_ALARM = 1 | ||
@@ -10,0 +11,0 @@ WRITE_ALARM = 2 |
@@ -6,5 +6,6 @@ import asyncio | ||
| import atexit | ||
| import signal | ||
| class AsyncioDispatcher: | ||
| def __init__(self, loop=None): | ||
| def __init__(self, loop=None, debug=False): | ||
| """A dispatcher for `asyncio` based IOCs, suitable to be passed to | ||
@@ -16,17 +17,26 @@ `softioc.iocInit`. Means that `on_update` callback functions can be | ||
| Event Loop will be created and run in a dedicated thread. | ||
| ``debug`` is passed through to ``asyncio.run()``. | ||
| For a clean exit, call ``softioc.interactive_ioc(..., call_exit=False)`` | ||
| """ | ||
| if loop is None: | ||
| # will wait until worker is executing the new loop | ||
| started = threading.Event() | ||
| # Make one and run it in a background thread | ||
| self.loop = asyncio.new_event_loop() | ||
| worker = threading.Thread(target=self.loop.run_forever) | ||
| self.__worker = threading.Thread( | ||
| target=asyncio.run, | ||
| args=(self.__inloop(started),), | ||
| kwargs={'debug': debug}) | ||
| # Explicitly manage worker thread as part of interpreter shutdown. | ||
| # Otherwise threading module will deadlock trying to join() | ||
| # before our atexit hook runs, while the loop is still running. | ||
| worker.daemon = True | ||
| self.__worker.daemon = True | ||
| @atexit.register | ||
| def aioJoin(worker=worker, loop=self.loop): | ||
| loop.call_soon_threadsafe(loop.stop) | ||
| worker.join() | ||
| worker.start() | ||
| self.__worker.start() | ||
| started.wait() | ||
| self.__atexit = atexit.register(self.__shutdown) | ||
| assert self.loop is not None and self.loop.is_running() | ||
| elif not loop.is_running(): | ||
@@ -37,2 +47,33 @@ raise ValueError("Provided asyncio event loop is not running") | ||
| def close(self): | ||
| if self.__atexit is not None: | ||
| atexit.unregister(self.__atexit) | ||
| self.__atexit = None | ||
| self.__shutdown() | ||
| def wait_for_quit(self): | ||
| stop_event = threading.Event() | ||
| def signal_handler(signum, frame): | ||
| stop_event.set() | ||
| signal.signal(signal.SIGINT, signal_handler) | ||
| signal.signal(signal.SIGTERM, signal_handler) | ||
| stop_event.wait() | ||
| async def __inloop(self, started): | ||
| self.loop = asyncio.get_running_loop() | ||
| self.__interrupt = asyncio.Event() | ||
| started.set() | ||
| del started | ||
| await self.__interrupt.wait() | ||
| def __shutdown(self): | ||
| if self.__worker is not None: | ||
| self.loop.call_soon_threadsafe(self.__interrupt.set) | ||
| self.__worker.join() | ||
| self.__worker = None | ||
| def __call__( | ||
@@ -49,6 +90,13 @@ self, | ||
| await ret | ||
| except Exception: | ||
| logging.exception("Exception when running dispatched callback") | ||
| finally: | ||
| if completion: | ||
| completion(*completion_args) | ||
| except Exception: | ||
| logging.exception("Exception when running dispatched callback") | ||
| asyncio.run_coroutine_threadsafe(async_wrapper(), self.loop) | ||
| def __enter__(self): | ||
| return self | ||
| def __exit__(self, A, B, C): | ||
| self.close() |
+13
-1
@@ -6,2 +6,3 @@ import os | ||
| from .softioc import dbLoadDatabase | ||
| from .autosave import load_autosave | ||
@@ -82,3 +83,14 @@ from epicsdbbuilder import * | ||
| def int64In(name, LOPR=None, HOPR=None, EGU=None, **fields): | ||
| _set_in_defaults(fields) | ||
| fields.setdefault('MDEL', -1) | ||
| return PythonDevice.int64in( | ||
| name, LOPR = LOPR, HOPR = HOPR, EGU = EGU, **fields) | ||
| def int64Out(name, DRVL=None, DRVH=None, EGU=None, **fields): | ||
| _set_out_defaults(fields) | ||
| _set_scalar_out_defaults(fields, DRVL, DRVH) | ||
| return PythonDevice.int64out(name, EGU = EGU, **fields) | ||
| # Field name prefixes for mbbi/mbbo records. | ||
@@ -293,2 +305,3 @@ _mbbPrefixes = [ | ||
| ready for operation.''' | ||
| load_autosave() | ||
| from tempfile import mkstemp | ||
@@ -317,3 +330,2 @@ fd, database = mkstemp('.db') | ||
| SetDeviceName = SetPrefix | ||
| def SetDeviceName(name): | ||
@@ -320,0 +332,0 @@ SetPrefix(name) |
@@ -0,1 +1,3 @@ | ||
| import inspect | ||
| import logging | ||
@@ -22,2 +24,4 @@ class CothreadDispatcher: | ||
| self.wait_for_quit = cothread.WaitForQuit | ||
| def __call__( | ||
@@ -30,5 +34,13 @@ self, | ||
| def wrapper(): | ||
| func(*func_args) | ||
| if completion: | ||
| completion(*completion_args) | ||
| try: | ||
| func(*func_args) | ||
| except Exception: | ||
| logging.exception("Exception when running dispatched callback") | ||
| finally: | ||
| if completion: | ||
| completion(*completion_args) | ||
| assert not inspect.iscoroutinefunction(func) | ||
| assert not inspect.iscoroutinefunction(completion) | ||
| self.__dispatcher(wrapper) |
| from __future__ import print_function | ||
| import sys | ||
| from . import imports | ||
@@ -9,37 +8,3 @@ from .fields import RecordFactory | ||
| # Black magic lifted from six.py (http://pypi.python.org/pypi/six/) to replace | ||
| # use of __metaclass__ for metaclass definition | ||
| def with_metaclass(meta, *bases): | ||
| class metaclass(meta): | ||
| def __new__(cls, name, this_bases, d): | ||
| return meta(name, bases, d) | ||
| return type.__new__(metaclass, 'temporary_class', (), {}) | ||
| class InitClass(type): | ||
| def __new__(cls, name, bases, dict): | ||
| if '__init_class__' in dict: | ||
| dict['__init_class__'] = classmethod(dict['__init_class__']) | ||
| return type.__new__(cls, name, bases, dict) | ||
| def __init__(cls, name, bases, dict): | ||
| type.__init__(cls, name, bases, dict) | ||
| # Binds self.__super.method to the appropriate superclass method | ||
| setattr(cls, '_%s__super' % name.lstrip('_'), super(cls)) | ||
| # Binds cls.__super_cls().method to the appropriate superclass | ||
| # class method. Unfortunately the .__super form doesn't work | ||
| # with class methods, only instance methods. | ||
| setattr( | ||
| cls, '_%s__super_cls' % name, | ||
| classmethod(lambda child: super(cls, child))) | ||
| # Finally call the class initialisatio nmethod. | ||
| cls.__init_class__() | ||
| class DeviceCommon(with_metaclass(InitClass)): | ||
| '''Adds support for an __init_class__ method called when the class or any | ||
| of its subclasses is constructed. Also adds auto-super functionality | ||
| (see iocbuilder.support.autosuper).''' | ||
| def __init_class__(cls): pass | ||
| class DeviceCommon: | ||
| # By requiring that DeviceCommon be a common base class for the entire | ||
@@ -87,6 +52,7 @@ # Python device hierarchy, we can use this __init__ to test for unused | ||
| def __init_class__(cls): | ||
| def __init_subclass__(cls): | ||
| '''Record support initialisation, called once during class | ||
| initialisation for each sub-class. This registers record support for | ||
| the specified device name.''' | ||
| if not hasattr(cls, '_record_type_'): | ||
@@ -97,7 +63,2 @@ # If no record type has been specified then we're a base class | ||
| # Create an empty device directory. | ||
| # (Potentially this belongs in a mix-in for resolving the linkage | ||
| # between record and instances, but for now this is hard-wired here.) | ||
| cls.__device_directory = {} | ||
| # Convert the list of fields into a dictionary suitable for record | ||
@@ -200,3 +161,3 @@ # lookup. | ||
| self.__ioscanpvt = imports.IOSCANPVT() | ||
| self.__super.__init__(name, **kargs) | ||
| super().__init__(name, **kargs) | ||
@@ -252,3 +213,3 @@ | ||
| self._RecordDirectory[name] = self | ||
| self.__super.__init__(name, **kargs) | ||
| super().__init__(name, **kargs) | ||
@@ -255,0 +216,0 @@ |
+63
-34
@@ -6,3 +6,3 @@ import os | ||
| import numpy | ||
| from . import autosave | ||
| from . import alarm | ||
@@ -57,2 +57,7 @@ from . import fields | ||
| # all record types can support autosave | ||
| def __init__(self, name, **kargs): | ||
| autosave_fields = kargs.pop("autosave", None) | ||
| autosave.add_pv_to_autosave(self, name, autosave_fields) | ||
| super().__init__(name, **kargs) | ||
@@ -127,3 +132,3 @@ # Most subclasses (all except waveforms) define a ctypes constructor for the | ||
| self._value = (value, alarm.NO_ALARM, alarm.UDF_ALARM, None) | ||
| self.__super.__init__(name, **kargs) | ||
| super().__init__(name, **kargs) | ||
@@ -143,3 +148,3 @@ def _process(self, record): | ||
| def set(self, value, | ||
| severity=alarm.NO_ALARM, alarm=alarm.UDF_ALARM, timestamp=None): | ||
| severity=alarm.NO_ALARM, alarm=alarm.NO_ALARM, timestamp=None): | ||
| '''Updates the stored value and triggers an update. The alarm | ||
@@ -183,6 +188,14 @@ severity and timestamp can also be specified if appropriate.''' | ||
| if 'initial_value' in kargs: | ||
| self._value = self._value_to_epics(kargs.pop('initial_value')) | ||
| value = self._value_to_epics(kargs.pop('initial_value')) | ||
| initial_severity = alarm.NO_ALARM | ||
| initial_status = alarm.NO_ALARM | ||
| else: | ||
| self._value = None | ||
| value = self._default_value() | ||
| # To maintain backwards compatibility, if there is no initial value | ||
| # we mark the record as invalid | ||
| initial_severity = alarm.INVALID_ALARM | ||
| initial_status = alarm.UDF_ALARM | ||
| self._value = (value, initial_severity, initial_status) | ||
| self._blocking = kargs.pop('blocking', blocking) | ||
@@ -192,3 +205,3 @@ if self._blocking: | ||
| self.__super.__init__(name, **kargs) | ||
| super().__init__(name, **kargs) | ||
@@ -199,14 +212,13 @@ def init_record(self, record): | ||
| allowing out records to have a sensible initial value.''' | ||
| if self._value is None: | ||
| # Cannot set in __init__ (like we do for In records), as we want | ||
| # the record alarm status to be set if no value was provided | ||
| # Probably related to PythonSoftIOC issue #53 | ||
| self._value = self._default_value() | ||
| else: | ||
| self._write_value(record, self._value) | ||
| if 'MLST' in self._fields_: | ||
| record.MLST = self._value | ||
| record.TIME = time.time() | ||
| record.UDF = 0 | ||
| recGblResetAlarms(record) | ||
| self._write_value(record, self._value[0]) | ||
| if 'MLST' in self._fields_: | ||
| record.MLST = self._value[0] | ||
| record.TIME = time.time() | ||
| record.UDF = 0 | ||
| record.NSEV = self._value[1] | ||
| record.NSTA = self._value[2] | ||
| recGblResetAlarms(record) | ||
| return self._epics_rc_ | ||
@@ -226,5 +238,9 @@ | ||
| # Ignore memoized value, retrieve it from the VAL field instead | ||
| value = self._read_value(record) | ||
| _, severity, alarm = self._value | ||
| self.process_severity(record, severity, alarm) | ||
| if not self.__always_update and \ | ||
| self._compare_values(value, self._value): | ||
| self._compare_values(value, self._value[0]): | ||
| # If the value isn't making a change then don't do anything. | ||
@@ -238,7 +254,7 @@ return EPICS_OK | ||
| # value. | ||
| self._write_value(record, self._value) | ||
| self._write_value(record, self._value[0]) | ||
| return EPICS_ERROR | ||
| else: | ||
| # Value is good. Hang onto it, let users know the value has changed | ||
| self._value = value | ||
| self._value = (value, severity, alarm) | ||
| record.UDF = 0 | ||
@@ -260,3 +276,14 @@ if self.__on_update and self.__enable_write: | ||
| def set(self, value, process=True): | ||
| def set_alarm(self, severity, alarm): | ||
| '''Updates the alarm status without changing the stored value. An | ||
| update is triggered, and a timestamp can optionally be specified.''' | ||
| self._value = (self._value[0], severity, alarm) | ||
| self.set( | ||
| self.get(), | ||
| severity=severity, | ||
| alarm=alarm) | ||
| def set(self, value, process=True, | ||
| severity=alarm.NO_ALARM, alarm=alarm.NO_ALARM): | ||
| '''Special routine to set the value directly.''' | ||
@@ -267,5 +294,5 @@ value = self._value_to_epics(value) | ||
| except AttributeError: | ||
| # Record not initialised yet. Record the value for when | ||
| # Record not initialised yet. Record data for when | ||
| # initialisation occurs | ||
| self._value = value | ||
| self._value = (value, severity, alarm) | ||
| else: | ||
@@ -279,8 +306,3 @@ # The array parameter is used to keep the raw pointer alive | ||
| def get(self): | ||
| if self._value is None: | ||
| # Before startup complete if no value set return default value | ||
| value = self._default_value() | ||
| else: | ||
| value = self._value | ||
| return self._epics_to_value(value) | ||
| return self._epics_to_value(self._value[0]) | ||
@@ -314,2 +336,4 @@ | ||
| longout = _Device_Out('longout', c_int32, fields.DBF_LONG, EPICS_OK) | ||
| int64in = _Device_In('int64in', c_int64, fields.DBF_INT64, EPICS_OK) | ||
| int64out = _Device_Out('int64out', c_int64, fields.DBF_INT64, EPICS_OK) | ||
| bi = _Device_In('bi', c_uint16, fields.DBF_CHAR, NO_CONVERT) | ||
@@ -377,3 +401,3 @@ bo = _Device_Out('bo', c_uint16, fields.DBF_CHAR, NO_CONVERT) | ||
| record.UDF = int(numpy.isnan(self._value[0])) | ||
| return self.__super._process(record) | ||
| return super()._process(record) | ||
@@ -432,7 +456,7 @@ class ao(ProcessDeviceSupportOut): | ||
| self._nelm = _wf_nelm | ||
| self.__super.__init__(name, **kargs) | ||
| super().__init__(name, **kargs) | ||
| def init_record(self, record): | ||
| self._dbf_type_ = record.FTVL | ||
| return self.__super.init_record(record) | ||
| return super().init_record(record) | ||
@@ -466,4 +490,9 @@ def _read_value(self, record): | ||
| assert len(value) <= self._nelm, 'Value too long for waveform' | ||
| return numpy.copy(value) | ||
| value = numpy.copy(value) | ||
| # As we return a reference to the numpy array, ensure it cannot be | ||
| # modified under our noses | ||
| value.flags.writeable = False | ||
| return value | ||
| def _epics_to_value(self, value): | ||
@@ -470,0 +499,0 @@ if self._dtype.char == 'S': |
+19
-11
@@ -35,3 +35,3 @@ /* Provide EPICS functions in Python format */ | ||
| * versions, so to avoid unpleasant surprises, we compute thes values here in C | ||
| * and pass them back to the Python layer. */ | ||
| * and pass them back to the Python layer. Order matches dbFldTypes.h. */ | ||
| static PyObject *get_DBF_values(PyObject *self, PyObject *args) | ||
@@ -47,2 +47,4 @@ { | ||
| ADD_ENUM(dict, DBF_ULONG); | ||
| ADD_ENUM(dict, DBF_INT64); | ||
| ADD_ENUM(dict, DBF_UINT64); | ||
| ADD_ENUM(dict, DBF_FLOAT); | ||
@@ -103,6 +105,9 @@ ADD_ENUM(dict, DBF_DOUBLE); | ||
| short dbrType; | ||
| void *pbuffer; | ||
| PyObject *buffer_ptr; | ||
| long length; | ||
| if (!PyArg_ParseTuple(args, "shnl", &name, &dbrType, &pbuffer, &length)) | ||
| if (!PyArg_ParseTuple(args, "shOl", &name, &dbrType, &buffer_ptr, &length)) | ||
| return NULL; | ||
| void *pbuffer = PyLong_AsVoidPtr(buffer_ptr); | ||
| if (!pbuffer) | ||
| return NULL; | ||
@@ -116,11 +121,11 @@ struct dbAddr dbAddr; | ||
| /* There are two important locks to consider at this point: The Global | ||
| * Interpreter Lock (GIL) and the EPICS record lock. A deadlock is possible if | ||
| * this thread holds the GIL and wants the record lock (which happens inside | ||
| * dbPutField), and there exists another EPICS thread that has the record lock | ||
| * and wants to call Python (which requires the GIL). | ||
| * This can occur if this code is called as part of an asynchronous on_update | ||
| * Interpreter Lock (GIL) and the EPICS record lock. A deadlock is possible | ||
| * if this thread holds the GIL and wants the record lock (which happens | ||
| * inside dbPutField), and there exists another EPICS thread that has the | ||
| * record lock and wants to call Python (which requires the GIL). This can | ||
| * occur if this code is called as part of an asynchronous on_update | ||
| * callback. | ||
| * Therefore, we must ensure we relinquish the GIL while we perform this | ||
| * EPICS call, to avoid potential deadlocks. | ||
| * See https://github.com/dls-controls/pythonSoftIOC/issues/119. */ | ||
| * See https://github.com/DiamondLightSource/pythonSoftIOC/issues/119. */ | ||
| Py_BEGIN_ALLOW_THREADS | ||
@@ -140,6 +145,9 @@ put_result = dbPutField(&dbAddr, dbrType, pbuffer, length); | ||
| short dbrType; | ||
| void *pbuffer; | ||
| PyObject *buffer_ptr; | ||
| long length; | ||
| if (!PyArg_ParseTuple(args, "shnl", &name, &dbrType, &pbuffer, &length)) | ||
| if (!PyArg_ParseTuple(args, "shOl", &name, &dbrType, &buffer_ptr, &length)) | ||
| return NULL; | ||
| void *pbuffer = PyLong_AsVoidPtr(buffer_ptr); | ||
| if (!pbuffer) | ||
| return NULL; | ||
@@ -146,0 +154,0 @@ struct dbAddr dbAddr; |
+3
-13
@@ -5,7 +5,6 @@ '''Access to fields within a record structure.''' | ||
| import sys | ||
| from ctypes import * | ||
| from .imports import get_field_offsets, get_DBF_values | ||
| import numpy | ||
| from epicscorelibs.ca.dbr import * | ||
@@ -36,2 +35,4 @@ from epicscorelibs.ca.dbr import ca_timestamp, EPICS_epoch | ||
| DBF_LONG: c_int32, | ||
| DBF_INT64: c_int64, | ||
| DBF_UINT64: c_uint64, | ||
| DBF_ULONG: c_int32, # Should be uint32, but causes trouble later. | ||
@@ -47,14 +48,3 @@ DBF_FLOAT: c_float, | ||
| # Mapping from basic DBR_ codes to DBF_ values | ||
| DbrToDbfCode = { | ||
| DBR_STRING: DBF_STRING, | ||
| DBR_SHORT: DBF_SHORT, | ||
| DBR_FLOAT: DBF_FLOAT, | ||
| DBR_ENUM: DBF_ENUM, | ||
| DBR_CHAR: DBF_CHAR, | ||
| DBR_LONG: DBF_LONG, | ||
| DBR_DOUBLE: DBF_DOUBLE | ||
| } | ||
| class RecordFactory(object): | ||
@@ -61,0 +51,0 @@ def __init__(self, record_type, fields): |
@@ -108,2 +108,5 @@ '''External DLL imports used for implementing Python EPICS device support. | ||
| registerRecordDeviceDriver = dbCore.registerAllRecordDeviceDrivers | ||
| registerRecordDeviceDriver.argtypes = (c_void_p,) | ||
| registerRecordDeviceDriver.errcheck = expect_success | ||
@@ -110,0 +113,0 @@ __all__ = [ |
@@ -27,3 +27,5 @@ '''Device support files for pythonSoftIoc Python EPICS device support. | ||
| 'on_update', 'on_update_name', 'validate', 'always_update', | ||
| 'initial_value', '_wf_nelm', '_wf_dtype', 'blocking'] | ||
| 'initial_value', '_wf_nelm', '_wf_dtype', 'blocking', | ||
| 'autosave' | ||
| ] | ||
| device_kargs = {} | ||
@@ -93,4 +95,5 @@ for keyword in DeviceKeywords: | ||
| for name in [ | ||
| 'ai', 'bi', 'longin', 'mbbi', 'stringin', | ||
| 'ao', 'bo', 'longout', 'mbbo', 'stringout', 'waveform']: | ||
| 'ai', 'bi', 'longin', 'mbbi', 'stringin', 'int64in', | ||
| 'ao', 'bo', 'longout', 'mbbo', 'stringout', 'int64out', | ||
| 'waveform']: | ||
| builder = getattr(epicsdbbuilder.records, name) | ||
@@ -97,0 +100,0 @@ record = getattr(device, name) |
+27
-4
@@ -0,1 +1,2 @@ | ||
| import ctypes | ||
| import os | ||
@@ -7,3 +8,7 @@ import sys | ||
| from . import imports, device | ||
| import pvxslibs.path | ||
| from epicscorelibs.ioc import pdbbase | ||
| from setuptools_dso.runtime import find_dso | ||
| from . import autosave, imports, device | ||
| from . import cothread_dispatcher | ||
@@ -20,3 +25,3 @@ | ||
| def iocInit(dispatcher=None): | ||
| def iocInit(dispatcher=None, enable_pva=True): | ||
| '''This must be called exactly once after loading all EPICS database files. | ||
@@ -27,4 +32,5 @@ After this point the EPICS IOC is running and serving PVs. | ||
| dispatcher: A callable with signature ``dispatcher(func, *args)``. Will | ||
| be called in response to caput on a record. If not supplied use | ||
| `cothread` as a dispatcher. | ||
| be called in response to caput on a record. If not supplied uses | ||
| ``cothread`` as the dispatcher. | ||
| enable_pva: Specify whether to enable the PV Access Server in this IOC. | ||
@@ -39,3 +45,11 @@ See Also: | ||
| device.dispatcher = dispatcher | ||
| if enable_pva: | ||
| dbLoadDatabase('pvxsIoc.dbd', pvxslibs.path.dbd_path, None) | ||
| ctypes.CDLL(find_dso('pvxslibs.lib.pvxsIoc'), ctypes.RTLD_GLOBAL) | ||
| imports.registerRecordDeviceDriver(pdbbase) | ||
| imports.iocInit() | ||
| autosave.start_autosave_thread() | ||
@@ -359,1 +373,10 @@ | ||
| safeEpicsExit(0) | ||
| def non_interactive_ioc(): | ||
| '''Function to run the IOC in non-interactive mode. This mode is useful for | ||
| running the IOC as a background process without user interaction. | ||
| This function expects a stop signal. When it receives one, the IOC stops. | ||
| ''' | ||
| device.dispatcher.wait_for_quit() | ||
| safeEpicsExit(0) |
@@ -7,5 +7,10 @@ import asyncio | ||
| from conftest import requires_cothread, ADDRESS, select_and_recv | ||
| from conftest import ( | ||
| ADDRESS, select_and_recv, | ||
| log, get_multiprocessing_context, TIMEOUT, | ||
| create_random_prefix | ||
| ) | ||
| from softioc.asyncio_dispatcher import AsyncioDispatcher | ||
| from softioc import builder, softioc | ||
@@ -135,1 +140,71 @@ @pytest.mark.asyncio | ||
| AsyncioDispatcher(loop=event_loop) | ||
| def asyncio_dispatcher_test_func(device_name, child_conn): | ||
| log("CHILD: Child started") | ||
| builder.SetDeviceName(device_name) | ||
| with AsyncioDispatcher() as dispatcher: | ||
| # Create some records | ||
| ai = builder.aIn('AI', initial_value=5) | ||
| builder.aOut('AO', initial_value=12.45, always_update=True, | ||
| on_update=lambda v: ai.set(v)) | ||
| # Boilerplate get the IOC started | ||
| builder.LoadDatabase() | ||
| softioc.iocInit(dispatcher) | ||
| # Start processes required to be run after iocInit | ||
| async def update(): | ||
| while True: | ||
| ai.set(ai.get() + 1) | ||
| await asyncio.sleep(0.01) | ||
| dispatcher(update) | ||
| log("CHILD: Sending Ready") | ||
| child_conn.send("R") | ||
| # Keep process alive while main thread runs CAGET | ||
| if child_conn.poll(TIMEOUT): | ||
| val = child_conn.recv() | ||
| assert val == "D", "Did not receive expected Done character" | ||
| async def test_asyncio_dispatcher_as_context_manager(): | ||
| """Test that the asyncio dispatcher can be used as a context manager""" | ||
| ctx = get_multiprocessing_context() | ||
| parent_conn, child_conn = ctx.Pipe() | ||
| device_name = create_random_prefix() | ||
| process = ctx.Process( | ||
| target=asyncio_dispatcher_test_func, | ||
| args=(device_name, child_conn), | ||
| ) | ||
| process.start() | ||
| log("PARENT: Child started, waiting for R command") | ||
| from aioca import caget | ||
| try: | ||
| # Wait for message that IOC has started | ||
| select_and_recv(parent_conn, "R") | ||
| # ao_val = await caget(device_name + ":AO") | ||
| ao_val = await caget(device_name + ":AO") | ||
| assert ao_val == 12.45 | ||
| # Confirm the value of the AI record is increasing | ||
| ai_val_1 = await caget(device_name + ":AI") | ||
| await asyncio.sleep(1) | ||
| ai_val_2 = await caget(device_name + ":AI") | ||
| assert ai_val_2 > ai_val_1 | ||
| finally: | ||
| parent_conn.send("D") # "Done" | ||
| process.join(timeout=TIMEOUT) | ||
| assert process.exitcode == 0 # clean exit |
@@ -5,3 +5,3 @@ import signal | ||
| from conftest import requires_cothread, ADDRESS, select_and_recv | ||
| from conftest import requires_cothread, ADDRESS, log, select_and_recv | ||
@@ -16,4 +16,5 @@ @requires_cothread | ||
| with Listener(ADDRESS) as listener, listener.accept() as conn: | ||
| log("test_cothread_ioc wait for Ready") | ||
| select_and_recv(conn, "R") # "Ready" | ||
| log("test_cothread_ioc Ready received") | ||
@@ -36,5 +37,8 @@ # Start | ||
| log("test_cothread_ioc sending Done") | ||
| conn.send("D") # "Done" | ||
| log("test_cothread_ioc waiting for Done") | ||
| select_and_recv(conn, "D") # "Done" | ||
| log("test_cothread_ioc received Done") | ||
@@ -41,0 +45,0 @@ # Stop |
@@ -1,4 +0,4 @@ | ||
| import multiprocessing | ||
| from typing import List | ||
| import numpy | ||
| import p4p.nt | ||
| import pytest | ||
@@ -15,3 +15,4 @@ | ||
| TIMEOUT, | ||
| get_multiprocessing_context | ||
| get_multiprocessing_context, | ||
| in_records, | ||
| ) | ||
@@ -58,2 +59,4 @@ | ||
| builder.longStringOut, | ||
| builder.int64In, | ||
| builder.int64Out, | ||
| ], | ||
@@ -66,12 +69,2 @@ ids=record_func_names, | ||
| # A list of all In records, used to filter out various tests | ||
| in_records = [ | ||
| builder.aIn, | ||
| builder.boolIn, | ||
| builder.longIn, | ||
| builder.mbbIn, | ||
| builder.stringIn, | ||
| builder.WaveformIn, | ||
| builder.longStringIn, | ||
| ] | ||
@@ -102,2 +95,4 @@ def record_values_names(fixture_value): | ||
| ("longOut_int", builder.longOut, 5, 5, int), | ||
| ("int64In_int", builder.int64In, 65, 65, int), | ||
| ("int64Out_int", builder.int64Out, 65, 65, int), | ||
| ("boolIn_int", builder.boolIn, 1, 1, int), | ||
@@ -151,33 +146,5 @@ ("boolOut_int", builder.boolOut, 1, 1, int), | ||
| ( | ||
| "wIn_int", | ||
| builder.WaveformIn, | ||
| 567, | ||
| numpy.array([567.], dtype=numpy.float64), | ||
| numpy.ndarray, | ||
| ), | ||
| ( | ||
| "wOut_int", | ||
| builder.WaveformOut, | ||
| 567, | ||
| numpy.array([567.], dtype=numpy.float64), | ||
| numpy.ndarray, | ||
| ), | ||
| ( | ||
| "wIn_float", | ||
| builder.WaveformIn, | ||
| 12.345, | ||
| numpy.array([12.345], dtype=numpy.float64), | ||
| numpy.ndarray, | ||
| ), | ||
| ( | ||
| "wOut_float", | ||
| builder.WaveformOut, | ||
| 12.345, | ||
| numpy.array([12.345], dtype=numpy.float64), | ||
| numpy.ndarray, | ||
| ), | ||
| ( | ||
| "wIn_bytes", | ||
| builder.WaveformIn, | ||
| b"HELLO\0WORLD", | ||
| [72, 69, 76, 76, 79, 0, 87, 79, 82, 76, 68], | ||
| numpy.array( | ||
@@ -191,3 +158,3 @@ [72, 69, 76, 76, 79, 0, 87, 79, 82, 76, 68], dtype=numpy.uint8 | ||
| builder.WaveformOut, | ||
| b"HELLO\0WORLD", | ||
| [72, 69, 76, 76, 79, 0, 87, 79, 82, 76, 68], | ||
| numpy.array( | ||
@@ -332,11 +299,22 @@ [72, 69, 76, 76, 79, 0, 87, 79, 82, 76, 68], dtype=numpy.uint8 | ||
| the actual value. Handles both scalar and waveform data""" | ||
| # This function is shared between functions that may pass in either a | ||
| # native Python type, or the value returned from p4p, which must be | ||
| # unwrapped | ||
| if type(actual_value) is p4p.nt.enum.ntenum: | ||
| actual_val_type = type(actual_value.raw["value"].get("index")) | ||
| elif isinstance(actual_value, p4p.nt.scalar.ntwrappercommon): | ||
| actual_val_type = type(actual_value.raw["value"]) | ||
| else: | ||
| actual_val_type = type(actual_value) | ||
| try: | ||
| if type(expected_value) == float and isnan(expected_value): | ||
| if type(expected_value) is float and isnan(expected_value): | ||
| assert isnan(actual_value) # NaN != Nan, so needs special case | ||
| elif creation_func in [builder.WaveformOut, builder.WaveformIn]: | ||
| assert numpy.array_equal(actual_value, expected_value) | ||
| assert type(actual_value) == expected_type | ||
| assert actual_val_type == expected_type | ||
| else: | ||
| assert actual_value == expected_value | ||
| assert type(actual_value) == expected_type | ||
| assert actual_val_type == expected_type | ||
| except AssertionError as e: | ||
@@ -403,3 +381,3 @@ msg = ( | ||
| # Related to this issue: | ||
| # https://github.com/dls-controls/pythonSoftIOC/issues/37 | ||
| # https://github.com/DiamondLightSource/pythonSoftIOC/issues/37 | ||
@@ -496,14 +474,6 @@ out_rec = creation_func(record_name, **kwarg) | ||
| # Cannot do these imports before the subprocess starts, as the subprocess | ||
| # would inherit cothread's internal state which would break things! | ||
| from cothread import Yield | ||
| from cothread.catools import caget, caput, _channel_cache | ||
| from cothread.dbr import DBR_CHAR_STR | ||
| from p4p.client.thread import Context | ||
| ctx = Context('pva') | ||
| try: | ||
| # cothread remembers connected IOCs. As we potentially restart the same | ||
| # named IOC multiple times, we have to purge the cache else the | ||
| # result from caget/caput cache would be a DisconnectError during the | ||
| # second test | ||
| _channel_cache.purge() | ||
@@ -519,27 +489,11 @@ for configuration in record_configurations: | ||
| # Infer some required keywords from parameters | ||
| kwargs = {} | ||
| put_kwarg = {} | ||
| if creation_func in [builder.longStringIn, builder.longStringOut]: | ||
| kwargs["datatype"] = DBR_CHAR_STR | ||
| if (creation_func in [builder.WaveformIn, builder.WaveformOut] | ||
| and type(initial_value) is bytes): | ||
| # There's a bug in caput that means DBR_CHAR_STR doesn't | ||
| # truncate the array of the target record, meaning .get() | ||
| # returns all NELM rather than just NORD elements. Instead we | ||
| # encode the data ourselves | ||
| initial_value = numpy.frombuffer( | ||
| initial_value, dtype = numpy.uint8) | ||
| if set_enum == SetValueEnum.CAPUT: | ||
| if get_enum == GetValueEnum.GET: | ||
| select_and_recv(parent_conn) | ||
| caput( | ||
| DEVICE_NAME + ":" + record_name, | ||
| initial_value, | ||
| wait=True, | ||
| **kwargs, | ||
| **put_kwarg, | ||
| ) | ||
| ctx.put(DEVICE_NAME + ":" + record_name, | ||
| initial_value, | ||
| None, | ||
| timeout=TIMEOUT, | ||
| wait=True, | ||
| ) | ||
@@ -549,19 +503,7 @@ if get_enum == GetValueEnum.GET: | ||
| # Ensure IOC process has time to execute. | ||
| # I saw failures on MacOS where it appeared the IOC had not | ||
| # processed the put'ted value as the caget returned the same | ||
| # value as was originally passed in. | ||
| Yield(timeout=TIMEOUT) | ||
| if get_enum == GetValueEnum.GET: | ||
| rec_val = select_and_recv(parent_conn) | ||
| else: | ||
| rec_val = caget( | ||
| DEVICE_NAME + ":" + record_name, | ||
| timeout=TIMEOUT, | ||
| **kwargs, | ||
| ) | ||
| # '+' operator used to convert cothread's types into Python | ||
| # native types e.g. "+ca_int" -> int | ||
| rec_val = +rec_val | ||
| rec_val = ctx.get(DEVICE_NAME + ":" + record_name, | ||
| timeout=TIMEOUT,) | ||
@@ -589,5 +531,2 @@ if ( | ||
| finally: | ||
| # Purge cache to suppress spurious "IOC disconnected" exceptions | ||
| _channel_cache.purge() | ||
| parent_conn.send("D") # "Done" | ||
@@ -752,2 +691,4 @@ | ||
| ("default_longIn", builder.longIn, None, 0, int), | ||
| ("default_int64Out", builder.int64Out, None, 0, int), | ||
| ("default_int64In", builder.int64In, None, 0, int), | ||
| ("default_boolOut", builder.boolOut, None, 0, int), | ||
@@ -777,2 +718,4 @@ ("default_boolIn", builder.boolIn, None, 0, int), | ||
| (builder.longIn, 0, int), | ||
| (builder.int64Out, 0, int), | ||
| (builder.int64In, 0, int), | ||
| (builder.boolOut, 0, int), | ||
@@ -779,0 +722,0 @@ (builder.boolIn, 0, int), |
+263
-15
| import asyncio | ||
| import subprocess | ||
| import sys | ||
| import numpy | ||
| import os | ||
| import pytest | ||
| from enum import Enum | ||
@@ -16,7 +15,8 @@ from conftest import ( | ||
| select_and_recv, | ||
| get_multiprocessing_context | ||
| get_multiprocessing_context, | ||
| in_records | ||
| ) | ||
| from softioc import asyncio_dispatcher, builder, softioc | ||
| from softioc import alarm | ||
| from softioc import alarm, asyncio_dispatcher, builder, softioc | ||
| from softioc.builder import ClearRecords | ||
| from softioc.device import SetBlocking | ||
@@ -30,11 +30,2 @@ from softioc.device_core import LookupRecord, LookupRecordList | ||
| in_records = [ | ||
| builder.aIn, | ||
| builder.boolIn, | ||
| builder.longIn, | ||
| builder.mbbIn, | ||
| builder.stringIn, | ||
| builder.WaveformIn, | ||
| ] | ||
| def test_records(tmp_path): | ||
@@ -44,2 +35,3 @@ # Ensure we definitely unload all records that may be hanging over from | ||
| from sim_records import create_records | ||
| ClearRecords() | ||
| create_records() | ||
@@ -354,2 +346,15 @@ | ||
| def test_waveform_values_not_modifiable(): | ||
| """Test that arrays returned from waveform records are not modifiable""" | ||
| wi = builder.WaveformIn("WI", [1, 2, 3]) | ||
| wo = builder.WaveformOut("WO", [1, 2, 3]) | ||
| with pytest.raises(ValueError): | ||
| wi.get()[0] = 5 | ||
| with pytest.raises(ValueError): | ||
| wo.get()[0] = 5 | ||
| def validate_fixture_names(params): | ||
@@ -368,2 +373,3 @@ """Provide nice names for the out_records fixture in TestValidate class""" | ||
| (builder.longOut, 7, 0), | ||
| (builder.int64Out, 54, 0), | ||
| (builder.stringOut, "HI", ""), | ||
@@ -511,2 +517,3 @@ (builder.mbbOut, 2, 0), | ||
| builder.longOut, | ||
| builder.int64Out, | ||
| builder.stringOut, | ||
@@ -914,2 +921,132 @@ builder.mbbOut, | ||
| def blocking_test_func_broken_on_update( | ||
| self, device_name, conn, use_asyncio | ||
| ): | ||
| builder.SetDeviceName(device_name) | ||
| count_rec = builder.longIn("BLOCKING-COUNTER", initial_value=0) | ||
| async def async_blocking_broken_on_update(new_val): | ||
| """on_update function that always throws an exception""" | ||
| log("CHILD: blocking_broken_on_update starting") | ||
| completed_count = count_rec.get() + 1 | ||
| count_rec.set(completed_count) | ||
| log( | ||
| f"CHILD: blocking_update_func updated count: {completed_count}", | ||
| ) | ||
| raise Exception("on_update is broken!") | ||
| def sync_blocking_broken_on_update(new_val): | ||
| """on_update function that always throws an exception""" | ||
| log("CHILD: blocking_broken_on_update starting") | ||
| completed_count = count_rec.get() + 1 | ||
| count_rec.set(completed_count) | ||
| log( | ||
| f"CHILD: blocking_update_func updated count: {completed_count}", | ||
| ) | ||
| raise Exception("on_update is broken!") | ||
| if use_asyncio: | ||
| on_update_callback = async_blocking_broken_on_update | ||
| else: | ||
| on_update_callback = sync_blocking_broken_on_update | ||
| builder.longOut( | ||
| "BLOCKING-BROKEN-ON-UPDATE", | ||
| on_update=on_update_callback, | ||
| always_update=True, | ||
| blocking=True | ||
| ) | ||
| if use_asyncio: | ||
| dispatcher = asyncio_dispatcher.AsyncioDispatcher() | ||
| else: | ||
| dispatcher = None | ||
| builder.LoadDatabase() | ||
| softioc.iocInit(dispatcher) | ||
| conn.send("R") # "Ready" | ||
| log("CHILD: Sent R over Connection to Parent") | ||
| # Keep process alive while main thread runs CAGET | ||
| if not use_asyncio: | ||
| log("CHILD: Beginning cothread poll_list") | ||
| import cothread | ||
| cothread.poll_list([(conn.fileno(), cothread.POLLIN)], TIMEOUT) | ||
| if conn.poll(TIMEOUT): | ||
| val = conn.recv() | ||
| assert val == "D", "Did not receive expected Done character" | ||
| log("CHILD: Received exit command, child exiting") | ||
| @requires_cothread | ||
| @pytest.mark.asyncio | ||
| @pytest.mark.parametrize("use_asyncio", [True, False]) | ||
| async def test_blocking_broken_on_update(self, use_asyncio): | ||
| """Test that a blocking record with an on_update record that will | ||
| always throw an exception will not permanently block record processing. | ||
| Runs using both cothread and asyncio dispatchers in the IOC.""" | ||
| ctx = get_multiprocessing_context() | ||
| parent_conn, child_conn = ctx.Pipe() | ||
| device_name = create_random_prefix() | ||
| process = ctx.Process( | ||
| target=self.blocking_test_func_broken_on_update, | ||
| args=(device_name, child_conn, use_asyncio), | ||
| ) | ||
| process.start() | ||
| log("PARENT: Child started, waiting for R command") | ||
| from aioca import caget, caput | ||
| try: | ||
| # Wait for message that IOC has started | ||
| select_and_recv(parent_conn, "R") | ||
| log("PARENT: received R command") | ||
| assert await caget(device_name + ":BLOCKING-COUNTER") == 0 | ||
| log("PARENT: BLOCKING-COUNTER was 0") | ||
| await caput( | ||
| device_name + ":BLOCKING-BROKEN-ON-UPDATE", | ||
| 1, | ||
| wait=True, | ||
| timeout=TIMEOUT | ||
| ) | ||
| assert await caget(device_name + ":BLOCKING-COUNTER") == 1 | ||
| await caput( | ||
| device_name + ":BLOCKING-BROKEN-ON-UPDATE", | ||
| 2, | ||
| wait=True, | ||
| timeout=TIMEOUT | ||
| ) | ||
| assert await caget(device_name + ":BLOCKING-COUNTER") == 2 | ||
| finally: | ||
| # Clear the cache before stopping the IOC stops | ||
| # "channel disconnected" error messages | ||
| aioca_cleanup() | ||
| log("PARENT: Sending Done command to child") | ||
| parent_conn.send("D") # "Done" | ||
| process.join(timeout=TIMEOUT) | ||
| log(f"PARENT: Join completed with exitcode {process.exitcode}") | ||
| if process.exitcode is None: | ||
| pytest.fail("Process did not terminate") | ||
| class TestGetSetField: | ||
@@ -1118,3 +1255,3 @@ """Tests related to get_field and set_field on records""" | ||
| """Tests related to recursive set() calls. See original issue here: | ||
| https://github.com/dls-controls/pythonSoftIOC/issues/119""" | ||
| https://github.com/DiamondLightSource/pythonSoftIOC/issues/119""" | ||
@@ -1226,1 +1363,112 @@ recursive_record_name = "RecursiveLongOut" | ||
| pytest.fail("Process did not finish cleanly, terminating") | ||
| class TestAlarms: | ||
| """Tests related to record alarm status""" | ||
| # Record creation function and associated PV name | ||
| records = [ | ||
| (builder.aIn, "AI_AlarmPV"), | ||
| (builder.boolIn, "BI_AlarmPV"), | ||
| (builder.int64Out, "I64I_AlarmPV"), | ||
| (builder.longIn, "LI_AlarmPV"), | ||
| (builder.mbbIn, "MBBI_AlarmPV"), | ||
| (builder.stringIn, "SI_AlarmPV"), | ||
| (builder.WaveformIn, "WI_AlarmPV"), | ||
| (builder.longStringIn, "LSI_AlarmPV"), | ||
| (builder.aOut, "AO_AlarmPV"), | ||
| (builder.boolOut, "BO_AlarmPV"), | ||
| (builder.int64Out, "I64O_AlarmPV"), | ||
| (builder.longOut, "LO_AlarmPV"), | ||
| (builder.stringOut, "SO_AlarmPV"), | ||
| (builder.mbbOut, "MBBO_AlarmPV"), | ||
| (builder.WaveformOut, "WO_AlarmPV"), | ||
| (builder.longStringOut, "LSO_AlarmPV"), | ||
| ] | ||
| severity = alarm.INVALID_ALARM | ||
| status = alarm.DISABLE_ALARM | ||
| class SetEnum(Enum): | ||
| """Enum to specify when set_alarm should be called""" | ||
| PRE_INIT = 0 | ||
| POST_INIT = 1 | ||
| def alarm_test_func(self, device_name, conn, set_enum: SetEnum): | ||
| builder.SetDeviceName(device_name) | ||
| pvs = [] | ||
| for record_func, name in self.records: | ||
| kwargs = {} | ||
| if record_func in [builder.WaveformOut, builder.WaveformIn]: | ||
| kwargs["length"] = WAVEFORM_LENGTH | ||
| pvs.append(record_func(name, **kwargs)) | ||
| if set_enum == self.SetEnum.PRE_INIT: | ||
| log("CHILD: Setting alarm before init") | ||
| for pv in pvs: | ||
| pv.set_alarm(self.severity, self.status) | ||
| builder.LoadDatabase() | ||
| softioc.iocInit() | ||
| if set_enum == self.SetEnum.POST_INIT: | ||
| log("CHILD: Setting alarm after init") | ||
| for pv in pvs: | ||
| pv.set_alarm(self.severity, self.status) | ||
| conn.send("R") # "Ready" | ||
| log("CHILD: Sent R over Connection to Parent") | ||
| # Keep process alive while main thread works. | ||
| while (True): | ||
| if conn.poll(TIMEOUT): | ||
| val = conn.recv() | ||
| if val == "D": # "Done" | ||
| break | ||
| @requires_cothread | ||
| @pytest.mark.parametrize("set_enum", [SetEnum.PRE_INIT, SetEnum.POST_INIT]) | ||
| def test_set_alarm_severity_status(self, set_enum): | ||
| """Test that set_alarm function allows setting severity and status""" | ||
| ctx = get_multiprocessing_context() | ||
| parent_conn, child_conn = ctx.Pipe() | ||
| device_name = create_random_prefix() | ||
| process = ctx.Process( | ||
| target=self.alarm_test_func, | ||
| args=(device_name, child_conn, set_enum), | ||
| ) | ||
| process.start() | ||
| from cothread.catools import caget, _channel_cache, FORMAT_CTRL | ||
| try: | ||
| # Wait for message that IOC has started | ||
| select_and_recv(parent_conn, "R") | ||
| # Suppress potential spurious warnings | ||
| _channel_cache.purge() | ||
| for _, name in self.records: | ||
| ret_val = caget( | ||
| device_name + ":" + name, | ||
| timeout=TIMEOUT, | ||
| format=FORMAT_CTRL | ||
| ) | ||
| assert ret_val.severity == self.severity, \ | ||
| f"Severity mismatch for record {name}" | ||
| assert ret_val.status == self.status, \ | ||
| f"Status mismatch for record {name}" | ||
| finally: | ||
| # Suppress potential spurious warnings | ||
| _channel_cache.purge() | ||
| parent_conn.send("D") # "Done" | ||
| process.join(timeout=TIMEOUT) |
Sorry, the diff of this file is not supported yet
Alert delta unavailable
Currently unable to show alert delta for PyPI packages.
398668
12.59%90
3.45%4805
27.89%