Skip to content

Commit

Permalink
Using a mocked Asyncio Client for the hdf5 tests
Browse files Browse the repository at this point in the history
  • Loading branch information
evalott100 committed Aug 7, 2023
1 parent aa540e6 commit 2c5dee1
Show file tree
Hide file tree
Showing 3 changed files with 29 additions and 128 deletions.
6 changes: 5 additions & 1 deletion src/pandablocks_ioc/_hdf_ioc.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,8 +212,12 @@ async def _handle_hdf5_data(self) -> None:
pipeline[0].queue.put_nowait(
EndData(captured_frames, EndReason.OK)
)

break
elif not isinstance(data, EndData):
raise RuntimeError(
f"data was recieved that was of type {type(data)}, not"
"StartData, EndData, ReadyData or FrameData"
)
# Ignore EndData - handle terminating capture with the Capture
# record or when we capture the requested number of frames

Expand Down
110 changes: 19 additions & 91 deletions tests/fixtures/mocked_panda.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
GetLine,
Put,
)
from pandablocks.connections import DataConnection
from pandablocks.responses import (
BitMuxFieldInfo,
BlockInfo,
Expand All @@ -48,72 +49,6 @@
BOBFILE_DIR = Path(__file__).parent.parent / "test-bobfiles"
TIMEOUT = 10

"""
@pytest.fixture
def default_responses_decoded(table_data) -> dict:
\"""A dummy server that responds to all the requests introspect_panda makes
during its operation.
Note that the order of responses was determined by trial and error.\"""
get_changes_scalar_data = (
# Note the deliberate concatenation across lines - this must be a single
# entry in the list
"!PCAP.TRIG_EDGE=Falling\n!PCAP.GATE=CLOCK1.OUT\n!PCAP.GATE.DELAY=1\n"
"!*METADATA.LABEL_PCAP1=PcapMetadataLabel\n"
"!SEQ1.TABLE<\n."
)
# Transform the plain list of values into one that PandA would send
return dict(
[
("*BLOCKS?", "!PCAP 1\n!SEQ 1\n."),
("*DESC.PCAP?", "OK =PCAP Desc"),
("*DESC.SEQ?", "OK =SEQ Desc"),
("PCAP.*?", "!TRIG_EDGE 3 param enum\n!GATE 1 bit_mux\n."),
("SEQ.*?", "!TABLE 7 table\n."),
("*CHANGES?", get_changes_scalar_data),
("*DESC.PCAP.TRIG_EDGE?", "!Rising\n!Falling\n!Either\n."),
("*ENUMS.PCAP.TRIG_EDGE?", "OK =Gate Desc"),
("*DESC.PCAP.GATE?", "OK =Trig Edge Desc"),
("PCAP1.GATE.MAX_DELAY?", "OK =100"),
("*ENUMS.PCAP.GATE?", "!TTLIN1.VAL\n!INENC1.A\n!CLOCK1.OUT\n."),
("*DESC.SEQ.TABLE?", "OK =Sequencer table of lines"),
("SEQ1.TABLE.MAX_LENGTH?", "OK =16384"),
("SEQ1.TABLE.FIELDS?", table_fields_data),
("SEQ1.TABLE?", get_changes_multiline_data),
("*ENUMS.SEQ1.TABLE[].TRIGGER?", trigger_field_labels),
("*DESC.SEQ1.TABLE[].REPEATS?", "OK =Number of times the line will repeat"),
(
"*DESC.SEQ1.TABLE[].TRIGGER?",
"OK =The trigger condition to start the phases",
),
(
"*DESC.SEQ1.TABLE[].POSITION?",
"OK =The position that can be used in trigger condition",
),
(
"*DESC.SEQ1.TABLE[].TIME1?",
"OK =The time the optional phase 1 should take",
),
("*DESC.SEQ1.TABLE[].OUTA1?", "OK =Output A value during phase 1"),
("*DESC.SEQ1.TABLE[].OUTB1?", "OK =Output B value during phase 1"),
("*DESC.SEQ1.TABLE[].OUTC1?", "OK =Output C value during phase 1"),
("*DESC.SEQ1.TABLE[].OUTD1?", "OK =Output D value during phase 1"),
("*DESC.SEQ1.TABLE[].OUTE1?", "OK =Output E value during phase 1"),
("*DESC.SEQ1.TABLE[].OUTF1?", "OK =Output F value during phase 1"),
(
"*DESC.SEQ1.TABLE[].TIME2?",
"OK =The time the optional phase 2 should take",
),
("*DESC.SEQ1.TABLE[].OUTA2?", "OK =Output A value during phase 2"),
("*DESC.SEQ1.TABLE[].OUTB2?", "OK =Output B value during phase 2"),
("*DESC.SEQ1.TABLE[].OUTC2?", "OK =Output C value during phase 2"),
("*DESC.SEQ1.TABLE[].OUTD2?", "OK =Output D value during phase 2"),
("*DESC.SEQ1.TABLE[].OUTE2?", "OK =Output E value during phase 2"),
("*DESC.SEQ1.TABLE[].OUTF2?", "OK =Output F value during phase 2"),
]
)
"""


@pytest_asyncio.fixture
def mocked_time_record_updater():
Expand Down Expand Up @@ -220,27 +155,6 @@ def __eq__(self, o):
return same


class AsyncIteratorWrapper:
    """Wrap a binary file as an async iterator of fixed-size byte chunks.

    Opens *path* and yields successive reads of up to *size* bytes until the
    file is exhausted.
    """

    def __init__(self, path: Path, size: int):
        # Handle stays open for the iterator's lifetime; __del__ is the
        # fallback close since this wrapper exposes no explicit close API.
        self.f = open(path, "rb")
        self.size = size
        # Pre-read the first chunk so __anext__ can detect EOF immediately.
        self.data = self.f.read(size)

    def __aiter__(self):
        # BUGFIX: __aiter__ must synchronously return the async iterator.
        # The original `async def __aiter__` returned a coroutine, which
        # breaks `async for`.
        return self

    async def __anext__(self):
        # BUGFIX: the original body used `yield`, turning __anext__ into an
        # async generator. `async for` awaits the value returned by
        # __anext__(), so it must be a plain coroutine returning one chunk.
        if not self.data:
            raise StopAsyncIteration
        chunk = self.data
        self.data = self.f.read(self.size)
        return chunk

    def __del__(self):
        self.f.close()


class MockedAsyncioClient:
def __init__(self, response_handler: ResponseHandler) -> None:
self.response_handler = response_handler
Expand All @@ -260,10 +174,24 @@ def is_connected(self):
async def close(self):
pass

async def data(*_, **__):
yield AsyncIteratorWrapper(
Path(__file__).parent.parent / "raw_dump.txt", 200000
)
async def data(
self,
scaled: bool = True,
flush_period: Optional[float] = None,
frame_timeout: Optional[float] = None,
):
flush_every_frame = flush_period is None
conn = DataConnection()
conn.connect(scaled)
try:
f = open(Path(__file__).parent.parent / "raw_dump.txt", "rb")
for raw in chunked_read(f, 200000):
for data in conn.receive_bytes(
raw, flush_every_frame=flush_every_frame
):
yield data
finally:
f.close()


def get_multiprocessing_context():
Expand Down
41 changes: 5 additions & 36 deletions tests/test_hdf_ioc.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,20 +4,18 @@
import logging
import time
from asyncio import CancelledError
from io import BufferedReader
from pathlib import Path
from typing import AsyncGenerator, Generator, Iterator
from typing import AsyncGenerator, Generator
from uuid import uuid4
import h5py

import h5py
import numpy
import pytest
import pytest_asyncio
from aioca import caget, caput, camonitor
from aioca import caget, camonitor, caput
from fixtures.mocked_panda import (
MockedAsyncioClient,
TEST_PREFIX,
TIMEOUT,
MockedAsyncioClient,
Rows,
custom_logger,
get_multiprocessing_context,
Expand All @@ -40,34 +38,6 @@
HDF5_PREFIX = NAMESPACE_PREFIX + ":HDF5"


def chunked_read(f: BufferedReader, size: int) -> Iterator[bytes]:
    """Yield successive chunks of up to *size* bytes read from *f*."""
    while chunk := f.read(size):
        yield chunk


@pytest_asyncio.fixture
def slow_dump():
    """Yield the slow dump in small chunks, sized so a read lands in the
    middle of a "BIN " marker."""
    dump_path = Path(__file__).parent / "slow_dump.txt"
    with dump_path.open("rb") as dump_file:
        yield chunked_read(dump_file, 44)


@pytest_asyncio.fixture
def fast_dump():
    """Yield the fast dump as a larger chunked read."""
    dump_path = Path(__file__).parent / "fast_dump.txt"
    with dump_path.open("rb") as dump_file:
        yield chunked_read(dump_file, 500)


@pytest_asyncio.fixture
def raw_dump():
    """Yield the raw dump using the largest chunked read."""
    dump_path = Path(__file__).parent / "raw_dump.txt"
    with dump_path.open("rb") as dump_file:
        yield chunked_read(dump_file, 200000)


DUMP_FIELDS = [
FieldCapture(
name="PCAP.BITS2",
Expand Down Expand Up @@ -307,10 +277,9 @@ def hdf5_subprocess_ioc(


@pytest.mark.asyncio
async def test_hdf5_ioc(mocked_panda_standard_responses):
async def test_hdf5_ioc(hdf5_subprocess_ioc):
"""Run the HDF5 module as its own IOC and check the expected records are created,
with some default values checked"""
# HDF5_PREFIX = TEST_PREFIX + ":HDF5"
val = await caget(HDF5_PREFIX + ":FilePath")

# Default value of longStringOut is an array of a single NULL byte
Expand Down

0 comments on commit 2c5dee1

Please sign in to comment.