173 stream resource separate classes (#429)
Co-authored-by: Seher Karakuzu <[email protected]>
Co-authored-by: Dan Allan <[email protected]>
Co-authored-by: skarakuzu <[email protected]>
Co-authored-by: Abigail Emery <[email protected]>
5 people authored Jul 4, 2024
1 parent b153c50 commit dbbcf28
Showing 18 changed files with 368 additions and 332 deletions.
1 change: 0 additions & 1 deletion .github/workflows/_test.yml
@@ -49,7 +49,6 @@ jobs:
         with:
           python-version: ${{ inputs.python-version }}
           pip-install: ".[dev]"
-
       - name: Run tests
         run: tox -e tests
 
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -17,7 +17,7 @@ dependencies = [
     "packaging",
     "pint",
     "bluesky>=1.13.0a3",
-    "event-model<1.21.0",
+    "event_model",
     "p4p",
     "pyyaml",
     "colorlog",
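
Relaxing the pin from "event-model<1.21.0" to "event_model" works because the new writer (general_hdffile.py, below) branches on the installed event-model version at runtime. A minimal sketch of that gate, following the pattern the commit uses; only event_model.__version__ is relied on:

# Sketch of the runtime version gate this commit relies on (see
# general_hdffile.py below). Only event_model.__version__ is assumed.
import event_model


def versiontuple(v: str) -> tuple:
    # "1.21.0" -> (1, 21, 0), so tuples compare numerically
    return tuple(map(int, v.split(".")))


if versiontuple(event_model.__version__) < versiontuple("1.21.0"):
    print("legacy StreamResource schema: spec/root/resource_path")
else:
    print("current StreamResource schema: mimetype/uri/parameters")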
10 changes: 0 additions & 10 deletions src/ophyd_async/epics/areadetector/writers/_hdfdataset.py

This file was deleted.

54 changes: 0 additions & 54 deletions src/ophyd_async/epics/areadetector/writers/_hdffile.py

This file was deleted.

121 changes: 121 additions & 0 deletions src/ophyd_async/epics/areadetector/writers/general_hdffile.py
@@ -0,0 +1,121 @@
from dataclasses import dataclass, field
from pathlib import Path
from typing import Iterator, List, Sequence
from urllib.parse import urlunparse

import event_model
from event_model import (
    ComposeStreamResource,
    ComposeStreamResourceBundle,
    StreamDatum,
    StreamResource,
)

from ophyd_async.core import DirectoryInfo


@dataclass
class _HDFDataset:
    data_key: str
    dataset: str
    shape: Sequence[int] = field(default_factory=tuple)
    multiplier: int = 1
    swmr: bool = False


SLICE_NAME = "AD_HDF5_SWMR_SLICE"


def versiontuple(v):
    return tuple(map(int, (v.split("."))))


class _HDFFile:
    """
    :param directory_info: Contains information about how to construct a StreamResource
    :param full_file_name: Absolute path to the file to be written
    :param datasets: Datasets to write into the file
    """

    def __init__(
        self,
        directory_info: DirectoryInfo,
        full_file_name: Path,
        datasets: List[_HDFDataset],
        hostname: str = "localhost",
    ) -> None:
        self._last_emitted = 0
        self._hostname = hostname

        if len(datasets) == 0:
            self._bundles = []
            return None

        # event-model < 1.21 composes StreamResource documents from
        # spec/root/resource_path; 1.21+ uses mimetype/uri/parameters.
        if versiontuple(event_model.__version__) < versiontuple("1.21.0"):
            path = f"{full_file_name}"
            root = str(directory_info.root)
            bundler_composer = ComposeStreamResource()

            self._bundles: List[ComposeStreamResourceBundle] = [
                bundler_composer(
                    spec=SLICE_NAME,
                    root=root,
                    resource_path=path,
                    data_key=ds.data_key,
                    resource_kwargs={
                        "path": ds.dataset,
                        "multiplier": ds.multiplier,
                        "swmr": ds.swmr,
                    },
                )
                for ds in datasets
            ]
        else:
            bundler_composer = ComposeStreamResource()

            uri = urlunparse(
                (
                    "file",
                    self._hostname,
                    str((directory_info.root / full_file_name).absolute()),
                    "",
                    "",
                    None,
                )
            )

            self._bundles: List[ComposeStreamResourceBundle] = [
                bundler_composer(
                    mimetype="application/x-hdf5",
                    uri=uri,
                    data_key=ds.data_key,
                    parameters={
                        "dataset": ds.dataset,
                        "swmr": ds.swmr,
                        "multiplier": ds.multiplier,
                    },
                    uid=None,
                    validate=True,
                )
                for ds in datasets
            ]

    def stream_resources(self) -> Iterator[StreamResource]:
        for bundle in self._bundles:
            yield bundle.stream_resource_doc

    def stream_data(self, indices_written: int) -> Iterator[StreamDatum]:
        # Indices are relative to the resource: emit only the not-yet-emitted
        # range [self._last_emitted, indices_written).
        if indices_written > self._last_emitted:
            indices = {
                "start": self._last_emitted,
                "stop": indices_written,
            }
            self._last_emitted = indices_written
            for bundle in self._bundles:
                yield bundle.compose_stream_datum(indices)
        return None

    def close(self) -> None:
        for bundle in self._bundles:
            bundle.close()
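
For orientation, here is a minimal usage sketch (not part of the commit) showing how a writer might drive _HDFFile; the DirectoryInfo keyword names root and resource_dir are assumptions about ophyd_async.core at this point in history:

# Hypothetical driver for _HDFFile -- a sketch, not code from this commit.
# DirectoryInfo(root=..., resource_dir=...) keyword names are assumed.
from pathlib import Path

from ophyd_async.core import DirectoryInfo
from ophyd_async.epics.areadetector.writers.general_hdffile import (
    _HDFDataset,
    _HDFFile,
)

hdf_file = _HDFFile(
    directory_info=DirectoryInfo(root=Path("/data"), resource_dir=Path(".")),
    full_file_name=Path("scan_0001.h5"),
    datasets=[
        _HDFDataset(data_key="det", dataset="/entry/data/data", shape=(1024, 1024))
    ],
)

# One StreamResource document per dataset...
for resource in hdf_file.stream_resources():
    print(resource)

# ...then StreamDatum ranges as frames land (here, frames 0 through 4).
for datum in hdf_file.stream_data(indices_written=5):
    print(datum)

hdf_file.close()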
13 changes: 9 additions & 4 deletions src/ophyd_async/epics/areadetector/writers/hdf_writer.py
@@ -16,8 +16,7 @@
 )
 from ophyd_async.core.signal import observe_value
 
-from ._hdfdataset import _HDFDataset
-from ._hdffile import _HDFFile
+from .general_hdffile import _HDFDataset, _HDFFile
 from .nd_file_hdf import FileWriteMode, NDFileHDF
 
@@ -72,7 +71,12 @@ async def open(self, multiplier: int = 1) -> Dict[str, DataKey]:
         outer_shape = (multiplier,) if multiplier > 1 else ()
         # Add the main data
         self._datasets = [
-            _HDFDataset(name, "/entry/data/data", detector_shape, multiplier)
+            _HDFDataset(
+                data_key=name,
+                dataset="/entry/data/data",
+                shape=detector_shape,
+                multiplier=multiplier,
+            )
         ]
         # And all the scalar datasets
         for ds_name, ds_path in self._scalar_datasets_paths.items():
@@ -84,8 +88,9 @@
                     multiplier,
                 )
             )
+
         describe = {
-            ds.name: DataKey(
+            ds.data_key: DataKey(
                 source=self.hdf.full_file_name.source,
                 shape=outer_shape + tuple(ds.shape),
                 dtype="array" if ds.shape else "number",
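
Two changes land here: _HDFDataset is now constructed with keyword arguments, and describe is keyed by ds.data_key rather than the removed ds.name field. A small sketch (detector name hypothetical) of how outer_shape composes with a dataset's shape in the resulting DataKey:

# Sketch (not from the commit) of the shape composition in describe above,
# for a hypothetical detector batching two 512x512 frames per trigger.
ds = _HDFDataset(
    data_key="det",
    dataset="/entry/data/data",
    shape=(512, 512),
    multiplier=2,
)
outer_shape = (ds.multiplier,) if ds.multiplier > 1 else ()
assert outer_shape + tuple(ds.shape) == (2, 512, 512)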
2 changes: 1 addition & 1 deletion src/ophyd_async/panda/writers/_hdf_writer.py
@@ -12,9 +12,9 @@
     wait_for_value,
 )
 from ophyd_async.core.signal import observe_value
+from ophyd_async.epics.areadetector.writers.general_hdffile import _HDFDataset, _HDFFile
 
 from .._common_blocks import CommonPandaBlocks
-from ._panda_hdf_file import _HDFDataset, _HDFFile
 
 
 class PandaHDFWriter(DetectorWriter):
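
With _panda_hdf_file.py deleted below, both the areaDetector and PandA writers import the shared classes from one module. A quick check (a sketch, not from the diff) that they resolve to the same place:

# Both HDF writers now share one source of truth for these classes.
from ophyd_async.epics.areadetector.writers.general_hdffile import (
    _HDFDataset,
    _HDFFile,
)

assert _HDFDataset.__module__ == _HDFFile.__module__  # same shared module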
54 changes: 0 additions & 54 deletions src/ophyd_async/panda/writers/_panda_hdf_file.py

This file was deleted.

Diffs for the remaining 10 changed files were not loaded.
