Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

173 stream resource separate classes #429

Merged
merged 39 commits into from
Jul 4, 2024
Merged
Show file tree
Hide file tree
Changes from 38 commits
Commits
Show all changes
39 commits
Select commit Hold shift + click to select a range
2da8dbe
initial attempt to refactoring _HDFFile class
May 3, 2024
87f765c
few more changes to merge _HDFFile classes
May 7, 2024
9978024
function to distinguish event model version
Jun 4, 2024
02ef379
event model version check: some more changes
Jun 4, 2024
872a14c
few more changes
Jun 5, 2024
5a59c21
completed initial refactoring
Jun 5, 2024
2aaab09
try downloading event-model via https
Jun 5, 2024
81623e5
addressed comments
Jun 11, 2024
69d229e
Add event-model-version to CI test matrix.
danielballan Jun 6, 2024
c553804
Declare event-model-version.
danielballan Jun 6, 2024
d5d21cc
Default to latest event-model
danielballan Jun 6, 2024
b8402d2
Fix syntax
danielballan Jun 6, 2024
0eb1eb0
some fix for tests
skarakuzu Jun 11, 2024
9c1cb91
some more test fixes
skarakuzu Jun 12, 2024
7516df5
some more test fixes
skarakuzu Jun 12, 2024
8c6750e
some more test fixes
skarakuzu Jun 12, 2024
eb733f9
(#397) added dataclass to extend maxshape and fillvalue
subinsaji Jun 24, 2024
e04d95a
testing to see what happens in CI when maxshape, fillvalue, dtype are…
subinsaji Jun 25, 2024
5ac26f2
lint error fixes
subinsaji Jun 25, 2024
284ab94
Optional needed if setting type to None
subinsaji Jun 25, 2024
18b3bb8
merging dev into this branch
subinsaji Jun 25, 2024
0db1b03
fixing tests
subinsaji Jun 26, 2024
ca78e1d
small fix
subinsaji Jun 26, 2024
6eef859
remove Optional
subinsaji Jun 27, 2024
af525c3
modify dataclasses and attempt at fixing tests
subinsaji Jun 27, 2024
83771ab
Merge branch 'main' into 173-StreamResource-seperate-classes
subinsaji Jul 2, 2024
077d651
(#374) drop old event model
subinsaji Jul 2, 2024
061b141
saving changes
subinsaji Jul 3, 2024
e91d1f3
dropping changes referring to event model in workflows
subinsaji Jul 3, 2024
b1cd6d5
reorder and add back swmr
subinsaji Jul 3, 2024
b6c2d4a
saving work in progress
subinsaji Jul 3, 2024
3116ca2
fix dtype test
subinsaji Jul 3, 2024
77ee2d4
revert back to old event model
subinsaji Jul 3, 2024
bb05afd
Add branching to panda writer test
abbiemery Jul 4, 2024
5c7e8cb
Add branching for different event model versions to hdf_panda test
abbiemery Jul 4, 2024
cad091f
Update resource kwargs for older style stream resource
abbiemery Jul 4, 2024
4765382
Update tests for older style stream resource
abbiemery Jul 4, 2024
28e7378
Update incorrect resource path for detector tests
abbiemery Jul 4, 2024
f7652a5
Put correct values in pattern generator init
abbiemery Jul 4, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion .github/workflows/_test.yml
coretl marked this conversation as resolved.
Show resolved Hide resolved
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,6 @@ jobs:
with:
python-version: ${{ inputs.python-version }}
pip-install: ".[dev]"

- name: Run tests
run: tox -e tests

Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ dependencies = [
"packaging",
"pint",
"bluesky>=1.13.0a3",
"event-model<1.21.0",
"event_model",
"p4p",
"pyyaml",
"colorlog",
Expand Down
10 changes: 0 additions & 10 deletions src/ophyd_async/epics/areadetector/writers/_hdfdataset.py

This file was deleted.

54 changes: 0 additions & 54 deletions src/ophyd_async/epics/areadetector/writers/_hdffile.py

This file was deleted.

121 changes: 121 additions & 0 deletions src/ophyd_async/epics/areadetector/writers/general_hdffile.py
abbiemery marked this conversation as resolved.
Show resolved Hide resolved
Original file line number Diff line number Diff line change
@@ -0,0 +1,121 @@
from dataclasses import dataclass, field
from pathlib import Path
from typing import Iterator, List, Sequence
from urllib.parse import urlunparse

import event_model
from event_model import (
ComposeStreamResource,
ComposeStreamResourceBundle,
StreamDatum,
StreamResource,
)

from ophyd_async.core import DirectoryInfo


@dataclass
class _HDFDataset:
data_key: str
dataset: str
shape: Sequence[int] = field(default_factory=tuple)
multiplier: int = 1
swmr: bool = False

abbiemery marked this conversation as resolved.
Show resolved Hide resolved

# Spec name used for old-style (pre-1.21 event-model) StreamResource documents.
SLICE_NAME = "AD_HDF5_SWMR_SLICE"


def versiontuple(v: str) -> tuple:
    """Parse a dotted version string into a tuple of ints for comparison.

    The previous ``int(part)`` conversion raised ``ValueError`` on
    pre-release versions such as "1.21.0rc1" or "1.21.dev0" (which pip
    produces for pre-release installs of event-model). Non-numeric
    suffixes on a component are now ignored; a fully non-numeric
    component counts as 0. Plain dotted versions parse exactly as before.
    """
    parts = []
    for component in v.split("."):
        digits = ""
        for ch in component:
            if not ch.isdigit():
                break
            digits += ch
        parts.append(int(digits) if digits else 0)
    return tuple(parts)


class _HDFFile:
    """
    Composes StreamResource/StreamDatum documents describing datasets in one
    HDF5 file, handling both old (<1.21) and new (>=1.21) event-model schemas.

    :param directory_info: Contains information about how to construct a StreamResource
    :param full_file_name: Absolute path to the file to be written
    :param datasets: Datasets to write into the file
    """

    def __init__(
        self,
        directory_info: DirectoryInfo,
        full_file_name: Path,
        datasets: List[_HDFDataset],
        hostname: str = "localhost",
    ) -> None:
        # Index up to which StreamDatum documents have already been emitted.
        self._last_emitted = 0
        self._hostname = hostname

        # No datasets: keep an empty bundle list so stream_resources(),
        # stream_data() and close() are safe no-ops.
        if len(datasets) == 0:
            self._bundles = []
            return None

        if versiontuple(event_model.__version__) < versiontuple("1.21.0"):
            # Old event-model (<1.21): StreamResource is identified by
            # spec + root + resource_path, with per-dataset resource_kwargs.
            path = f"{full_file_name}"
            root = str(directory_info.root)
            bundler_composer = ComposeStreamResource()

            # One bundle (StreamResource + StreamDatum composer) per dataset.
            self._bundles: List[ComposeStreamResourceBundle] = [
                bundler_composer(
                    spec=SLICE_NAME,
                    root=root,
                    resource_path=path,
                    data_key=ds.data_key,
                    resource_kwargs={
                        "path": ds.dataset,
                        "multiplier": ds.multiplier,
                        "swmr": ds.swmr,
                    },
                )
                for ds in datasets
            ]
        else:
            # New event-model (>=1.21): StreamResource is identified by
            # mimetype + URI, with per-dataset parameters dict.
            bundler_composer = ComposeStreamResource()

            # Build a file:// URI pointing at the file on `hostname`.
            # NOTE(review): if full_file_name is already absolute,
            # `directory_info.root / full_file_name` discards root entirely
            # (pathlib semantics) -- presumably intended since the docstring
            # says full_file_name is absolute; confirm with callers.
            uri = urlunparse(
                (
                    "file",
                    self._hostname,
                    str((directory_info.root / full_file_name).absolute()),
                    "",
                    "",
                    None,
                )
            )

            self._bundles: List[ComposeStreamResourceBundle] = [
                bundler_composer(
                    mimetype="application/x-hdf5",
                    uri=uri,
                    data_key=ds.data_key,
                    parameters={
                        "dataset": ds.dataset,
                        "swmr": ds.swmr,
                        "multiplier": ds.multiplier,
                    },
                    uid=None,
                    validate=True,
                )
                for ds in datasets
            ]

    def stream_resources(self) -> Iterator[StreamResource]:
        """Yield one StreamResource document per dataset."""
        for bundle in self._bundles:
            yield bundle.stream_resource_doc

    def stream_data(self, indices_written: int) -> Iterator[StreamDatum]:
        """Yield one StreamDatum per dataset covering newly written indices.

        ``indices_written`` is the cumulative count written so far; only the
        [last_emitted, indices_written) slice is emitted, and the watermark is
        advanced so repeated calls never re-emit the same range.
        """
        # Indices are relative to resource
        if indices_written > self._last_emitted:
            indices = {
                "start": self._last_emitted,
                "stop": indices_written,
            }
            self._last_emitted = indices_written
            for bundle in self._bundles:
                yield bundle.compose_stream_datum(indices)
        return None

    def close(self) -> None:
        """Close every composed bundle."""
        for bundle in self._bundles:
            bundle.close()
13 changes: 9 additions & 4 deletions src/ophyd_async/epics/areadetector/writers/hdf_writer.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,7 @@
)
from ophyd_async.core.signal import observe_value

from ._hdfdataset import _HDFDataset
from ._hdffile import _HDFFile
from .general_hdffile import _HDFDataset, _HDFFile
from .nd_file_hdf import FileWriteMode, NDFileHDF


Expand Down Expand Up @@ -72,7 +71,12 @@ async def open(self, multiplier: int = 1) -> Dict[str, DataKey]:
outer_shape = (multiplier,) if multiplier > 1 else ()
# Add the main data
self._datasets = [
_HDFDataset(name, "/entry/data/data", detector_shape, multiplier)
_HDFDataset(
data_key=name,
dataset="/entry/data/data",
shape=detector_shape,
multiplier=multiplier,
)
]
# And all the scalar datasets
for ds_name, ds_path in self._scalar_datasets_paths.items():
Expand All @@ -84,8 +88,9 @@ async def open(self, multiplier: int = 1) -> Dict[str, DataKey]:
multiplier,
)
)

describe = {
ds.name: DataKey(
ds.data_key: DataKey(
source=self.hdf.full_file_name.source,
shape=outer_shape + tuple(ds.shape),
dtype="array" if ds.shape else "number",
Expand Down
2 changes: 1 addition & 1 deletion src/ophyd_async/panda/writers/_hdf_writer.py
abbiemery marked this conversation as resolved.
Show resolved Hide resolved
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,9 @@
wait_for_value,
)
from ophyd_async.core.signal import observe_value
from ophyd_async.epics.areadetector.writers.general_hdffile import _HDFDataset, _HDFFile

from .._common_blocks import CommonPandaBlocks
from ._panda_hdf_file import _HDFDataset, _HDFFile


class PandaHDFWriter(DetectorWriter):
Expand Down
54 changes: 0 additions & 54 deletions src/ophyd_async/panda/writers/_panda_hdf_file.py

This file was deleted.

Loading
Loading