Merge pull request #1621 from glotzerlab/fix/hdf5-scalar-detection
Fix/hdf5 scalar detection
joaander authored Sep 26, 2023
2 parents dd42125 + 881ad4c commit 0092d45
Showing 2 changed files with 34 additions and 1 deletion.
28 changes: 28 additions & 0 deletions hoomd/md/pytest/test_hdf5.py
@@ -153,3 +153,31 @@ def test_mode(tmp_path, create_md_sim):
     if sim.device.communicator.rank == 0:
         with h5py.File(fn, "r") as fh:
             assert len(fh["hoomd-data/foo/bar"]) == 2
+
+
+def test_type_handling(tmp_path, create_md_sim):
+    logger = hoomd.logging.Logger(categories=['scalar'])
+    sim = create_md_sim
+    fn = tmp_path / "types.h5"
+    loggables = {
+        int: lambda: 42,
+        float: lambda: 0.0,
+        bool: lambda: True,
+        np.uint32: lambda: np.uint32(42),
+        np.float32: lambda: np.float32(3.1415),
+        np.bool_: lambda: np.bool_(True)
+    }
+    for key, value in loggables.items():
+        logger[str(key)] = (value, "scalar")
+    hdf5_writer = hoomd.write.HDF5Log(1, fn, logger, mode="w")
+    sim.operations.writers.append(hdf5_writer)
+    sim.run(1)
+
+    rank = sim.device.communicator.rank
+    del sim
+
+    if rank == 0:
+        with h5py.File(fn, "r") as fh:
+            for key in loggables:
+                type_ = key if key not in (float, int, bool) else np.dtype(key)
+                assert fh[f"hoomd-data/{str(key)}"].dtype == type_
7 changes: 6 additions & 1 deletion hoomd/write/hdf5.py
@@ -209,7 +209,12 @@ def _initialize_datasets(self, log_dict):
             chunk_size = None
             if category == "scalar":
                 data_shape = (1,)
-                dtype = "f8" if isinstance(value, float) else "i8"
+                if isinstance(value, (np.number, np.bool_)):
+                    dtype = value.dtype
+                elif isinstance(value, int):
+                    dtype = np.dtype(bool) if isinstance(value, bool) else "i8"
+                else:
+                    dtype = "f8"
                 chunk_size = (self._SCALAR_CHUNK,)
             else:
                 if not isinstance(value, np.ndarray):
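
For reference, a minimal standalone sketch of the new selection logic; the helper name scalar_dtype is illustrative, not part of HOOMD's API:

import numpy as np

def scalar_dtype(value):
    # numpy scalars carry their own dtype; np.bool_ must be listed
    # explicitly because it is not a subclass of np.number.
    if isinstance(value, (np.number, np.bool_)):
        return value.dtype
    # Python bool subclasses int, so test it before the "i8" fallback,
    # otherwise True/False would be stored as 64-bit integers.
    if isinstance(value, int):
        return np.dtype(bool) if isinstance(value, bool) else np.dtype("i8")
    # Any other scalar (a Python float) is stored as a 64-bit float.
    return np.dtype("f8")

assert scalar_dtype(np.uint32(42)) == np.dtype("u4")
assert scalar_dtype(np.float32(3.1415)) == np.dtype("f4")
assert scalar_dtype(True) == np.dtype(bool)
assert scalar_dtype(42) == np.dtype("i8")
assert scalar_dtype(0.0) == np.dtype("f8")

This ordering is the substance of the fix: the old one-liner sent every scalar that was not an instance of Python float (including np.uint32, np.float32, and both boolean types) to "i8".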