Skip to content

Commit

Permalink
Merge pull request #81 from dmgav/abs-ts
Browse files Browse the repository at this point in the history
Save 'arm_time', 'start_time' and 'hw_time_offset_ns' to file
  • Loading branch information
evalott100 authored Mar 15, 2024
2 parents e6f8b67 + 8d95812 commit 9c01c18
Show file tree
Hide file tree
Showing 8 changed files with 157 additions and 7 deletions.
35 changes: 35 additions & 0 deletions docs/tutorials/commandline-hdf.md
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,41 @@ $ h5diff /tmp/panda-capture-1.h5 /tmp/panda-capture-2.h5
$ h5diff /tmp/panda-capture-1.h5 /tmp/panda-capture-3.h5
```

## Absolute timestamps

Starting with v3.0, PandABox firmware supports absolute timestamping of
the collected data. PandA still collects relative timestamps for
individual data points that are saved as arrays to HDF5 file. In addition,
the absolute timestamp for the start of the measurement is saved to HDF5
file and can be used to convert relative timestamps to absolute timestamps.

The absolute timestamp is saved as a set of attributes of the root group of
the HDF5 file. The attributes are optional and set only if the respective
parameters were captured by PandABox and received by the IOC. The following
attributes are used:

- ``arm_time`` - the time when the Panda (PCAP block) was armed, saved as
a string in the ISO 8601 UTC format. This parameter is mostly used for
debugging.

- ``start_time`` - the start time of the measurement (the moment the PCAP
block is both armed and enabled), saved as a string in the ISO 8601 UTC
format. Uses the hardware-provided timestamp (e.g. PTP or MRF) if available,
falling back to the system timestamp.

- ``hw_time_offset_ns`` - the offset in nanoseconds (*int64*) that should be
added to ``start_time`` to get back to the system timestamp. The attribute is
present only if Panda is configured to use hardware-based absolute timestamps
(PTP or MRF).

The following code may be used to read the absolute timestamps from the HDF5 file.
Use a ``pandas.Timestamp`` object if nanosecond accuracy is required (the standard
``datetime`` objects are limited to microsecond precision).

```{literalinclude} ../../examples/load_abs_timestamps.py
```


## Collecting more data faster

The test data is produced by a SEQ Block, configured to produce a high level
Expand Down
29 changes: 29 additions & 0 deletions examples/load_abs_timestamps.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
"""Read the absolute-timestamp attributes from a PandABox HDF5 capture file.

Usage: python load_abs_timestamps.py <file.h5>

The attributes (``arm_time``, ``start_time``, ``hw_time_offset_ns``) are
optional: they are present only if the respective parameters were captured.
"""
import sys

import h5py
import pandas as pd

if __name__ == "__main__":
    # Fail with a usage message instead of a raw IndexError
    if len(sys.argv) != 2:
        sys.exit(f"Usage: {sys.argv[0]} <hdf5-file>")

    with h5py.File(sys.argv[1], "r") as f:
        # All three attributes are optional; default to None when absent
        arm_time = f.attrs.get("arm_time", None)
        start_time = f.attrs.get("start_time", None)
        hw_time_offset_ns = f.attrs.get("hw_time_offset_ns", None)

    print(f"Arm time: {arm_time!r}")
    print(f"Start time: {start_time!r}")
    print(f"Hardware time offset: {hw_time_offset_ns!r} ns")

    if start_time is not None:
        # Use pandas.Timestamp to keep nanosecond precision
        # (datetime is limited to microseconds)
        ts_start = pd.Timestamp(start_time)
        # Compare against None, not truthiness: an offset of 0 is valid
        if hw_time_offset_ns is not None:
            # Adding the offset converts the hardware (e.g. PTP/MRF)
            # timestamp back to the system-clock timestamp
            ts_start += pd.Timedelta(nanoseconds=int(hw_time_offset_ns))
        print(f"Start time (system clock instead of hardware clock): {ts_start}")


# Expected output:
#
# Arm time: '2024-03-05T20:27:12.607841574Z'
# Start time: '2024-03-05T20:27:12.605729480Z'
# Hardware time offset: 2155797 ns
# Start time (system clock instead of hardware clock): 2024-03-05 20:27:12.607885277+00:00
9 changes: 9 additions & 0 deletions src/pandablocks/connections.py
Original file line number Diff line number Diff line change
Expand Up @@ -328,12 +328,21 @@ def _handle_header_body(self):
self._frame_dtype = np.dtype(
[(f"{f.name}.{f.capture}", f.type) for f in fields]
)

try:
hw_time_offset_ns = np.int64(data.get("hw_time_offset_ns", ""))
except ValueError:
hw_time_offset_ns = None

yield StartData(
fields=fields,
missed=int(data.get("missed")),
process=str(data.get("process")),
format=str(data.get("format")),
sample_bytes=sample_bytes,
arm_time=data.get("arm_time", None),
start_time=data.get("start_time", None),
hw_time_offset_ns=hw_time_offset_ns,
)
self._next_handler = self._handle_header_end

Expand Down
9 changes: 9 additions & 0 deletions src/pandablocks/hdf.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,15 @@ def open_file(self, data: StartData):
raw = data.process == "Raw"
self.datasets = [self.create_dataset(field, raw) for field in data.fields]
self.hdf_file.swmr_mode = True

# Save parameters
if data.arm_time is not None:
self.hdf_file.attrs["arm_time"] = data.arm_time
if data.start_time is not None:
self.hdf_file.attrs["start_time"] = data.start_time
if data.hw_time_offset_ns is not None:
self.hdf_file.attrs["hw_time_offset_ns"] = data.hw_time_offset_ns

logging.info(
f"Opened '{self.file_path}' with {data.sample_bytes} byte samples "
f"stored in {len(self.datasets)} datasets"
Expand Down
3 changes: 3 additions & 0 deletions src/pandablocks/responses.py
Original file line number Diff line number Diff line change
Expand Up @@ -266,6 +266,9 @@ class StartData(Data):
process: str
format: str
sample_bytes: int
arm_time: Optional[str]
start_time: Optional[str]
hw_time_offset_ns: Optional[int]


@dataclass
Expand Down
24 changes: 22 additions & 2 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,26 @@ def fast_dump():
yield chunked_read(f, 500)


@pytest_asyncio.fixture
def fast_dump_with_extra_header_params():
    """Factory fixture producing the `fast_dump.bin` stream with extra
    header parameters spliced in.

    Used for testing the absolute timing parameters (e.g. ``arm_time``)
    passed in the data header.
    """

    def _make_stream(extra_header_params):
        anchor = 'sample_bytes="52"'
        extras = " ".join(f'{k}="{v}"' for k, v in extra_header_params.items())
        if extras:
            replacement = " ".join([anchor, extras])
        else:
            replacement = anchor
        with open(Path(__file__).parent / "data_dumps/fast_dump.bin", "rb") as f:
            # Simulate larger chunked read
            for chunk in chunked_read(f, 500):
                yield chunk.replace(anchor.encode(), replacement.encode())

    return _make_stream


@pytest_asyncio.fixture
def raw_dump():
with open(Path(__file__).parent / "data_dumps/raw_dump.bin", "rb") as f:
Expand Down Expand Up @@ -150,7 +170,7 @@ def __eq__(self, o):
def slow_dump_expected():
yield [
ReadyData(),
StartData(DUMP_FIELDS, 0, "Scaled", "Framed", 52),
StartData(DUMP_FIELDS, 0, "Scaled", "Framed", 52, None, None, None),
FrameData(Rows([0, 1, 1, 3, 5.6e-08, 1, 2])),
FrameData(Rows([8, 2, 2, 6, 1.000000056, 2, 4])),
FrameData(Rows([0, 3, 3, 9, 2.000000056, 3, 6])),
Expand All @@ -164,7 +184,7 @@ def slow_dump_expected():
def fast_dump_expected():
yield [
ReadyData(),
StartData(DUMP_FIELDS, 0, "Scaled", "Framed", 52),
StartData(DUMP_FIELDS, 0, "Scaled", "Framed", 52, None, None, None),
FrameData(
Rows(
[0, 1, 1, 3, 5.6e-08, 1, 2],
Expand Down
46 changes: 46 additions & 0 deletions tests/test_asyncio.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import asyncio
import copy

import pytest

Expand Down Expand Up @@ -50,6 +51,51 @@ async def test_asyncio_data(
assert fast_dump_expected == events


@pytest.mark.parametrize(
    "timing_params",
    [
        {},
        {
            "arm_time": "2024-03-05T20:27:12.607841574Z",
            "start_time": "2024-03-05T20:27:12.608875498Z",
        },
        {
            "arm_time": "2024-03-05T20:27:12.607841574Z",
            "start_time": "2024-03-05T20:27:12.608875498Z",
            "hw_time_offset_ns": 100555,
        },
    ],
)
async def test_asyncio_data_with_abs_timing(
    dummy_server_async,
    fast_dump_with_extra_header_params,
    fast_dump_expected,
    timing_params,
):
    """
    Check that `arm_time`, `start_time` and `hw_time_offset_ns` header
    parameters survive the round trip through the client. Reuses the
    existing `fast_dump` stream by splicing the timing parameters into its
    header, and the existing `fast_dump_expected` reference by patching the
    corresponding `StartData` attributes with the injected values.
    """
    dummy_server_async.data = fast_dump_with_extra_header_params(timing_params)

    expected = list(fast_dump_expected)
    received = []
    async with AsyncioClient("localhost") as client:
        async for item in client.data(frame_timeout=1):
            received.append(item)
            if len(received) == len(expected):
                break

    # Patch a copy of the reference `StartData` with the injected values
    start_data = copy.deepcopy(expected[1])
    for name, value in timing_params.items():
        setattr(start_data, name, value)
    expected[1] = start_data

    assert expected == received


async def test_asyncio_data_timeout(dummy_server_async, fast_dump):
dummy_server_async.data = fast_dump
async with AsyncioClient("localhost") as client:
Expand Down
9 changes: 4 additions & 5 deletions tests/test_hdf.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,7 @@

import numpy as np

from pandablocks.hdf import (
Pipeline,
create_default_pipeline,
stop_pipeline,
)
from pandablocks.hdf import Pipeline, create_default_pipeline, stop_pipeline
from pandablocks.responses import EndData, EndReason, FieldCapture, FrameData, StartData


Expand Down Expand Up @@ -44,6 +40,9 @@ def __init__(self):
"Scaled",
"Framed",
52,
"2024-03-05T20:27:12.607841574Z",
"2024-03-05T20:27:12.608875498Z",
100555,
),
)
pipeline[0].queue.put_nowait(
Expand Down

0 comments on commit 9c01c18

Please sign in to comment.